App Engine Python SDK version 1.9.9

git-svn-id: http://googleappengine.googlecode.com/svn/trunk/python@457 80f5ef21-4148-0410-bacc-cfb02402ada8
diff --git a/RELEASE_NOTES b/RELEASE_NOTES
index 1944d0b..f189833 100644
--- a/RELEASE_NOTES
+++ b/RELEASE_NOTES
@@ -3,6 +3,22 @@
 
 App Engine SDK - Release Notes
 
+Version 1.9.9
+
+All
+==============================
+- TLS is now supported and enabled by default in the dev_appserver.
+    https://code.google.com/p/googleappengine/issues/detail?id=497
+- Fixed an issue with the Datastore Admin failing to load due to a
+  DeadlineExceededError when retrieving kinds.
+    https://code.google.com/p/googleappengine/issues/detail?id=11145
+
+PHP
+==============================
+- Fixed an issue where form fields submitted with a Google Cloud Storage
+  upload might not be encoded correctly.
+    https://code.google.com/p/googleappengine/issues/detail?id=9534
+
 Version 1.9.8
 
 All
diff --git a/VERSION b/VERSION
index 4f84863..dc3d71b 100644
--- a/VERSION
+++ b/VERSION
@@ -1,5 +1,5 @@
-release: "1.9.8"
-timestamp: 1405557678
+release: "1.9.9"
+timestamp: 1406573704
 api_versions: ['1']
 supported_api_versions:
   python:
diff --git a/google/appengine/api/appinfo.py b/google/appengine/api/appinfo.py
index d9d0070..523ea74 100644
--- a/google/appengine/api/appinfo.py
+++ b/google/appengine/api/appinfo.py
@@ -1384,13 +1384,9 @@
       appyaml.vm_settings = VmSettings()
 
 
-    appyaml.vm_settings['has_docker_image'] = True
-
-
-
     if runtime == 'dart' or runtime == 'contrib-dart':
       runtime = 'dart'
-
+      appyaml.vm_settings['has_docker_image'] = True
 
 
     appyaml.vm_settings['vm_runtime'] = runtime
diff --git a/google/appengine/api/datastore_file_stub.py b/google/appengine/api/datastore_file_stub.py
index bc0424d..586a05e 100644
--- a/google/appengine/api/datastore_file_stub.py
+++ b/google/appengine/api/datastore_file_stub.py
@@ -647,7 +647,8 @@
     eg_k = datastore_types.ReferenceToKeyValue(entity_group)
     return self.__entities_by_group[eg_k].copy()
 
-  def _GetQueryCursor(self, query, filters, orders, index_list):
+  def _GetQueryCursor(self, query, filters, orders, index_list,
+                      filter_predicate=None):
     app_id = query.app()
     namespace = query.name_space()
 
@@ -677,8 +678,8 @@
     finally:
       self.__entities_lock.release()
 
-    return datastore_stub_util._ExecuteQuery(results, query,
-                                             filters, orders, index_list)
+    return datastore_stub_util._ExecuteQuery(results, query, filters, orders,
+                                             index_list, filter_predicate)
 
   def _SetIdCounter(self, id_space, value):
     """Set the ID counter for id_space to value."""
diff --git a/google/appengine/api/mail_stub.py b/google/appengine/api/mail_stub.py
index 157747a..63239d2 100644
--- a/google/appengine/api/mail_stub.py
+++ b/google/appengine/api/mail_stub.py
@@ -66,7 +66,7 @@
                enable_sendmail=False,
                show_mail_body=False,
                service_name='mail',
-               allow_tls=False):
+               allow_tls=True):
     """Constructor.
 
     Args:
diff --git a/google/appengine/api/search/geo_util.py b/google/appengine/api/search/geo_util.py
new file mode 100644
index 0000000..739ba20
--- /dev/null
+++ b/google/appengine/api/search/geo_util.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Utilities to support geo fields on the Python dev server."""
+
+import math
+
+
+class LatLng(object):
+  """A class representing a Latitude/Longitude pair."""
+
+  _EARTH_RADIUS_METERS = 6371010
+
+  def __init__(self, latitude, longitude):
+    """Initializer.
+
+    Args:
+      latitude: The latitude in degrees.
+      longitude: The longitude in degrees.
+
+    Raises:
+      TypeError: If a non-numeric latitude or longitude is passed.
+    """
+    self._lat = latitude
+    self._lng = longitude
+
+  @property
+  def latitude(self):
+    """Returns the latitude in degrees."""
+    return self._lat
+
+  @property
+  def longitude(self):
+    """Returns the longitude in degrees."""
+    return self._lng
+
+  def __sub__(self, other):
+    """Subtraction.
+
+    Args:
+      other: the LatLng to subtract from this LatLng.
+
+    Returns:
+      the great circle distance between two LatLng objects as computed
+      by the Haversine formula.
+    """
+
+    assert isinstance(other, LatLng)
+
+    lat_rad = math.radians(self._lat)
+    lng_rad = math.radians(self._lng)
+    other_lat_rad = math.radians(other.latitude)
+    other_lng_rad = math.radians(other.longitude)
+
+    dlat = lat_rad - other_lat_rad
+    dlng = lng_rad - other_lng_rad
+    a1 = math.sin(dlat / 2)**2
+    a2 = math.cos(lat_rad) * math.cos(other_lat_rad) * math.sin(dlng / 2)**2
+    return 2 * self._EARTH_RADIUS_METERS * math.asin(math.sqrt(a1 + a2))
diff --git a/google/appengine/api/search/search.py b/google/appengine/api/search/search.py
index 15a9d0c..57bff9a 100644
--- a/google/appengine/api/search/search.py
+++ b/google/appengine/api/search/search.py
@@ -2479,8 +2479,7 @@
     """Index the collection of documents.
 
     If any of the documents are already in the index, then reindex them with
-    their corresponding fresh document. If any of the documents fail to be
-    indexed, then none of the documents will be indexed.
+    their corresponding fresh document.
 
     Args:
       documents: A Document or iterable of Documents to index.
@@ -2569,8 +2568,7 @@
     """Delete the documents with the corresponding document ids from the index.
 
     If no document exists for the identifier in the list, then that document
-    identifier is ignored. If any document delete fails, then no documents
-    will be deleted.
+    identifier is ignored.
 
     Args:
       document_ids: A single identifier or list of identifiers of documents
diff --git a/google/appengine/api/search/stub/document_matcher.py b/google/appengine/api/search/stub/document_matcher.py
index 56391e4..fae19d6 100644
--- a/google/appengine/api/search/stub/document_matcher.py
+++ b/google/appengine/api/search/stub/document_matcher.py
@@ -23,10 +23,10 @@
 from google.appengine.datastore import document_pb
 
 from google.appengine._internal.antlr3 import tree
+from google.appengine.api.search import geo_util
 from google.appengine.api.search import query_parser
 from google.appengine.api.search import QueryParser
 from google.appengine.api.search import search_util
-from google.appengine.api.search.stub import geo_util
 from google.appengine.api.search.stub import simple_tokenizer
 from google.appengine.api.search.stub import tokens
 
diff --git a/google/appengine/api/search/stub/expression_evaluator.py b/google/appengine/api/search/stub/expression_evaluator.py
index fde4ee8..e0b839b 100644
--- a/google/appengine/api/search/stub/expression_evaluator.py
+++ b/google/appengine/api/search/stub/expression_evaluator.py
@@ -46,15 +46,15 @@
 
 import logging
 
+from google.appengine.datastore import document_pb
 
 from google.appengine.api.search import expression_parser
 from google.appengine.api.search import ExpressionParser
+from google.appengine.api.search import geo_util
 from google.appengine.api.search import query_parser
 from google.appengine.api.search import search_util
-from google.appengine.api.search.stub import geo_util
 from google.appengine.api.search.stub import simple_tokenizer
 from google.appengine.api.search.stub import tokens
-from google.appengine.datastore import document_pb
 
 
 
diff --git a/google/appengine/api/search/stub/geo_util.py b/google/appengine/api/search/stub/geo_util.py
index fadde47..9ad72a5 100644
--- a/google/appengine/api/search/stub/geo_util.py
+++ b/google/appengine/api/search/stub/geo_util.py
@@ -14,48 +14,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-"""Utilities to support geo fields on the Python dev server."""
 
-import math
 
-class LatLng(object):
-  """A class representing a Latitude/Longitude pair."""
+import logging
 
-  _EARTH_RADIUS_METERS = 6371010
+from google.appengine.api.search import *
 
-  def __init__(self, latitude, longitude):
-    """Initializer.
-
-    Args:
-      latitude: The latitude in degrees.
-      longitude: The longitude in degrees.
-
-    Raises:
-      TypeError: If a non-numeric latitude or longitude is passed.
-    """
-    self._lat = math.radians(latitude)
-    self._lng = math.radians(longitude)
-
-  @property
-  def latitude(self):
-    """Returns the latitude in degrees."""
-    return math.degrees(self._lat)
-
-  @property
-  def longitude(self):
-    """Returns the longitude in degrees."""
-    return math.degrees(self._lng)
-
-  def __sub__(self, other):
-    """Subtraction.
-
-    Returns the great circle distance between two LatLng objects as computed
-    by the Haversine formula.
-    """
-
-    assert isinstance(other, LatLng)
-    dlat = self._lat - other._lat
-    dlng = self._lng - other._lng
-    a1 = math.sin(dlat / 2)**2
-    a2 = math.cos(self._lat) * math.cos(other._lat) * math.sin(dlng / 2)**2
-    return 2 * self._EARTH_RADIUS_METERS * math.asin(math.sqrt(a1 + a2))
+logging.warning('google.appengine.api.search.stub.geo_util is deprecated; '
+                'please use google.appengine.api.search.geo_util')
diff --git a/google/appengine/api/urlfetch.py b/google/appengine/api/urlfetch.py
index 50b3aac..f1a6144 100644
--- a/google/appengine/api/urlfetch.py
+++ b/google/appengine/api/urlfetch.py
@@ -399,7 +399,7 @@
           + url + error_detail)
     if (err.application_error ==
         urlfetch_service_pb.URLFetchServiceError.INTERNAL_TRANSIENT_ERROR):
-      raise InteralTransientError(
+      raise InternalTransientError(
           'Temporary error in fetching URL: ' + url + ', please re-try')
     if (err.application_error ==
         urlfetch_service_pb.URLFetchServiceError.DNS_ERROR):
diff --git a/google/appengine/datastore/datastore_pbs.py b/google/appengine/datastore/datastore_pbs.py
index e44b891..87fe62a 100644
--- a/google/appengine/datastore/datastore_pbs.py
+++ b/google/appengine/datastore/datastore_pbs.py
@@ -332,6 +332,10 @@
         v3_entity_value = entity_pb.EntityProto()
         self.v4_to_v3_entity(v4_entity_value, v3_entity_value)
         v3_value.set_stringvalue(v3_entity_value.SerializePartialToString())
+    elif v4_value.has_geo_point_value():
+      point_value = v3_value.mutable_pointvalue()
+      point_value.set_x(v4_value.geo_point_value().latitude())
+      point_value.set_y(v4_value.geo_point_value().longitude())
     else:
 
       pass
@@ -425,11 +429,16 @@
 
 
     elif v3_property_value.has_pointvalue():
-      self.__v3_to_v4_point_entity(v3_property_value.pointvalue(),
-                                   v4_value.mutable_entity_value())
-      if v3_meaning != entity_pb.Property.GEORSS_POINT:
+      if v3_meaning == MEANING_GEORSS_POINT:
+        point_value = v3_property_value.pointvalue()
+        v4_value.mutable_geo_point_value().set_latitude(point_value.x())
+        v4_value.mutable_geo_point_value().set_longitude(point_value.y())
+      else:
+        self.__v3_to_v4_point_entity(v3_property_value.pointvalue(),
+                                     v4_value.mutable_entity_value())
         v4_value.set_meaning(MEANING_PREDEFINED_ENTITY_POINT)
-        v3_meaning = None
+
+      v3_meaning = None
     elif v3_property_value.has_uservalue():
       self.__v3_to_v4_user_entity(v3_property_value.uservalue(),
                                   v4_value.mutable_entity_value())
@@ -491,6 +500,8 @@
             and v4_meaning != MEANING_PREDEFINED_ENTITY_USER):
           v3_property.set_meaning(entity_pb.Property.ENTITY_PROTO)
         v4_meaning = None
+    elif v4_value.has_geo_point_value():
+      v3_property.set_meaning(MEANING_GEORSS_POINT)
     else:
 
       pass
diff --git a/google/appengine/datastore/datastore_query.py b/google/appengine/datastore/datastore_query.py
index ab9b67d..0c81568 100644
--- a/google/appengine/datastore/datastore_query.py
+++ b/google/appengine/datastore/datastore_query.py
@@ -55,12 +55,14 @@
           ]
 
 import base64
-import pickle
 import collections
+import pickle
 
 from google.appengine.datastore import entity_pb
+
 from google.appengine.api import datastore_errors
 from google.appengine.api import datastore_types
+from google.appengine.api.search import geo_util
 from google.appengine.datastore import datastore_index
 from google.appengine.datastore import datastore_pb
 from google.appengine.datastore import datastore_rpc
@@ -887,6 +889,48 @@
     return False
 
 
+class _BoundingCircleFilter(_SinglePropertyFilter):
+  """An immutable bounding circle filter for geo locations.
+
+  An immutable filter predicate that constrains a geo location property to a
+  bounding circle region. The filter is inclusive at the border. The property
+  has to be of type V3 PointValue. V4 GeoPoints convert to this type.
+  """
+
+
+
+
+
+
+  def __init__(self, property_name, latitude, longitude, radius_meters):
+    self._property_name = property_name
+    self._lat_lng = geo_util.LatLng(latitude, longitude)
+    self._radius_meters = radius_meters
+
+  @classmethod
+  def _from_v4_pb(cls, bounding_circle_v4_pb):
+    return _BoundingCircleFilter(bounding_circle_v4_pb.property().name(),
+                                 bounding_circle_v4_pb.center().latitude(),
+                                 bounding_circle_v4_pb.center().longitude(),
+                                 bounding_circle_v4_pb.radius_meters())
+
+  def _get_prop_name(self):
+    return self._property_name
+
+  def _apply_to_value(self, value):
+
+
+    if value[0] != entity_pb.PropertyValue.kPointValueGroup:
+      return False
+
+    _, latitude, longitude = value
+
+    lat_lng = geo_util.LatLng(latitude, longitude)
+
+
+    return self._lat_lng - lat_lng <= self._radius_meters
+
+
 class Order(_PropertyComponent):
   """A base class that represents a sort order on a query.
 
diff --git a/google/appengine/datastore/datastore_sqlite_stub.py b/google/appengine/datastore/datastore_sqlite_stub.py
index db8ab95..fc97993 100644
--- a/google/appengine/datastore/datastore_sqlite_stub.py
+++ b/google/appengine/datastore/datastore_sqlite_stub.py
@@ -1299,12 +1299,19 @@
 
       self._ReleaseConnection(conn)
 
-  def _GetQueryCursor(self, query, filters, orders, index_list):
+  def _GetQueryCursor(self, query, filters, orders, index_list,
+                      filter_predicate=None):
     """Returns a query cursor for the provided query.
 
     Args:
-      conn: The SQLite connection.
-      query: A datastore_pb.Query protobuf.
+      query: The datastore_pb.Query to run.
+      filters: A list of filters that override the ones found on query.
+      orders: A list of orders that override the ones found on query.
+      index_list: A list of indexes used by the query.
+      filter_predicate: an additional filter of type
+          datastore_query.FilterPredicate. This is passed along to implement V4
+          specific filters without changing the entire stub.
+
     Returns:
       A QueryCursor object.
     """
@@ -1335,10 +1342,17 @@
               conn.execute(sql_stmt, params))
         else:
           db_cursor = _DedupingEntityGenerator(conn.execute(sql_stmt, params))
-        dsquery = datastore_stub_util._MakeQuery(query, filters, orders)
+        dsquery = datastore_stub_util._MakeQuery(query, filters, orders,
+                                                 filter_predicate)
+
+        filtered_entities = [r for r in db_cursor]
+
+
+        if filter_predicate:
+          filtered_entities = filter(filter_predicate, filtered_entities)
+
         cursor = datastore_stub_util.ListCursor(
-            query, dsquery, orders, index_list,
-            [r for r in db_cursor])
+            query, dsquery, orders, index_list, filtered_entities)
       finally:
         self._ReleaseConnection(conn)
     return cursor
diff --git a/google/appengine/datastore/datastore_stub_util.py b/google/appengine/datastore/datastore_stub_util.py
index be7bc62..ba264c5 100644
--- a/google/appengine/datastore/datastore_stub_util.py
+++ b/google/appengine/datastore/datastore_stub_util.py
@@ -1445,7 +1445,8 @@
     return LoadEntity(entity)
 
   @_SynchronizeTxn
-  def GetQueryCursor(self, query, filters, orders, index_list):
+  def GetQueryCursor(self, query, filters, orders, index_list,
+                     filter_predicate=None):
     """Runs the given datastore_pb.Query and returns a QueryCursor for it.
 
     Does not see any modifications in the current txn.
@@ -1455,6 +1456,9 @@
       filters: A list of filters that override the ones found on query.
       orders: A list of orders that override the ones found on query.
       index_list: A list of indexes used by the query.
+      filter_predicate: an additional filter of type
+          datastore_query.FilterPredicate. This is passed along to implement V4
+          specific filters without changing the entire stub.
 
     Returns:
       A BaseCursor that can be used to fetch query results.
@@ -1462,7 +1466,8 @@
     Check(query.has_ancestor(),
           'Query must have an ancestor when performed in a transaction.')
     snapshot = self._GrabSnapshot(query.ancestor())
-    return _ExecuteQuery(snapshot.values(), query, filters, orders, index_list)
+    return _ExecuteQuery(snapshot.values(), query, filters, orders, index_list,
+                         filter_predicate)
 
   @_SynchronizeTxn
   def Put(self, entity, insert, indexes):
@@ -2300,7 +2305,8 @@
 
 
 
-  def GetQueryCursor(self, raw_query, trusted=False, calling_app=None):
+  def GetQueryCursor(self, raw_query, trusted=False, calling_app=None,
+                     filter_predicate=None):
     """Execute a query.
 
     Args:
@@ -2308,6 +2314,9 @@
       trusted: If the calling app is trusted.
       calling_app: The app requesting the results or None to pull the app from
         the environment.
+      filter_predicate: an additional filter of type
+          datastore_query.FilterPredicate. This is passed along to implement V4
+          specific filters without changing the entire stub.
 
     Returns:
       A BaseCursor that can be used to retrieve results.
@@ -2325,11 +2334,15 @@
     CheckQuery(raw_query, filters, orders, self._MAX_QUERY_COMPONENTS)
     FillUsersInQuery(filters)
 
-
-    self._CheckHasIndex(raw_query, trusted, calling_app)
+    index_list = []
 
 
-    index_list = self.__IndexListForQuery(raw_query)
+
+    if filter_predicate is None:
+      self._CheckHasIndex(raw_query, trusted, calling_app)
+
+
+      index_list = self.__IndexListForQuery(raw_query)
 
 
     if raw_query.has_transaction():
@@ -2342,11 +2355,13 @@
     if raw_query.has_ancestor() and raw_query.kind() not in self._pseudo_kinds:
 
       txn = self._BeginTransaction(raw_query.app(), False)
-      return txn.GetQueryCursor(raw_query, filters, orders, index_list)
+      return txn.GetQueryCursor(raw_query, filters, orders, index_list,
+                                filter_predicate)
 
 
     self.Groom()
-    return self._GetQueryCursor(raw_query, filters, orders, index_list)
+    return self._GetQueryCursor(raw_query, filters, orders, index_list,
+                                filter_predicate)
 
   def __IndexListForQuery(self, query):
     """Get the single composite index pb used by the query, if any, as a list.
@@ -2664,7 +2679,8 @@
     """Writes the datastore to disk."""
     self.Flush()
 
-  def _GetQueryCursor(self, query, filters, orders, index_list):
+  def _GetQueryCursor(self, query, filters, orders, index_list,
+                      filter_predicate):
     """Runs the given datastore_pb.Query and returns a QueryCursor for it.
 
     This must be implemented by a sub-class. The sub-class does not need to
@@ -2675,6 +2691,9 @@
       filters: A list of filters that override the ones found on query.
       orders: A list of orders that override the ones found on query.
       index_list: A list of indexes used by the query.
+      filter_predicate: an additional filter of type
+          datastore_query.FilterPredicate. This is passed along to implement V4
+          specific filters without changing the entire stub.
 
     Returns:
       A BaseCursor that can be used to fetch query results.
@@ -2949,9 +2968,10 @@
     self._datastore.Touch(req.key_list(), self._trusted, self._app_id)
 
   @_NeedsIndexes
-  def _Dynamic_RunQuery(self, query, query_result):
+  def _Dynamic_RunQuery(self, query, query_result, filter_predicate=None):
     self.__UpgradeCursors(query)
-    cursor = self._datastore.GetQueryCursor(query, self._trusted, self._app_id)
+    cursor = self._datastore.GetQueryCursor(query, self._trusted, self._app_id,
+                                            filter_predicate)
 
     if query.has_count():
       count = query.count()
@@ -3401,6 +3421,12 @@
       v4_filter: a datastore_v4_pb.Filter
       v3_query: a datastore_pb.Query to populate with filters
     """
+
+    datastore_pbs.check_conversion(not v4_filter.has_bounding_circle_filter(),
+                                   'bounding circle filter not supported')
+    datastore_pbs.check_conversion(not v4_filter.has_bounding_box_filter(),
+                                   'bounding box filter not supported')
+
     if v4_filter.has_property_filter():
       v4_property_filter = v4_filter.property_filter()
       if (v4_property_filter.operator()
@@ -4018,17 +4044,56 @@
   return orders
 
 
-def _MakeQuery(query, filters, orders):
+def _MakeQuery(query_pb, filters, orders, filter_predicate):
   """Make a datastore_query.Query for the given datastore_pb.Query.
 
-  Overrides filters and orders in query with the specified arguments."""
-  clone = datastore_pb.Query()
-  clone.CopyFrom(query)
-  clone.clear_filter()
-  clone.clear_order()
-  clone.filter_list().extend(filters)
-  clone.order_list().extend(orders)
-  return datastore_query.Query._from_pb(clone)
+  Overrides filters and orders in query with the specified arguments.
+
+  Args:
+    query_pb: a datastore_pb.Query.
+    filters: the filters from query.
+    orders: the orders from query.
+    filter_predicate: an additional filter of type
+          datastore_query.FilterPredicate. This is passed along to implement V4
+          specific filters without changing the entire stub.
+
+  Returns:
+    A datastore_query.Query for the datastore_pb.Query."""
+
+
+
+
+
+  clone_pb = datastore_pb.Query()
+  clone_pb.CopyFrom(query_pb)
+  clone_pb.clear_filter()
+  clone_pb.clear_order()
+  clone_pb.filter_list().extend(filters)
+  clone_pb.order_list().extend(orders)
+
+  query = datastore_query.Query._from_pb(clone_pb)
+
+  assert datastore_v4_pb.CompositeFilter._Operator_NAMES.values() == ['AND']
+
+
+
+
+  if filter_predicate is not None:
+    if query.filter_predicate is not None:
+
+
+      filter_predicate = datastore_query.CompositeFilter(
+          datastore_query.CompositeFilter.AND,
+          [filter_predicate, query.filter_predicate])
+
+    return datastore_query.Query(app=query.app,
+                                 namespace=query.namespace,
+                                 ancestor=query.ancestor,
+                                 filter_predicate=filter_predicate,
+                                 group_by=query.group_by,
+                                 order=query.order)
+  else:
+    return query
 
 def _CreateIndexEntities(entity, postfix_props):
   """Creates entities for index values that would appear in prodcution.
@@ -4112,7 +4177,8 @@
   return new_results
 
 
-def _ExecuteQuery(results, query, filters, orders, index_list):
+def _ExecuteQuery(results, query, filters, orders, index_list,
+                  filter_predicate=None):
   """Executes the query on a superset of its results.
 
   Args:
@@ -4121,12 +4187,15 @@
     filters: the filters from query.
     orders: the orders from query.
     index_list: the list of indexes used by the query.
+    filter_predicate: an additional filter of type
+          datastore_query.FilterPredicate. This is passed along to implement V4
+          specific filters without changing the entire stub.
 
   Returns:
     A ListCursor over the results of applying query to results.
   """
   orders = _GuessOrders(filters, orders)
-  dsquery = _MakeQuery(query, filters, orders)
+  dsquery = _MakeQuery(query, filters, orders, filter_predicate)
 
   if query.property_name_size():
     results = _CreateIndexOnlyQueryResults(
diff --git a/google/appengine/datastore/datastore_v3_pb.py b/google/appengine/datastore/datastore_v3_pb.py
index 25eb7d9..56312bd 100644
--- a/google/appengine/datastore/datastore_v3_pb.py
+++ b/google/appengine/datastore/datastore_v3_pb.py
@@ -6046,6 +6046,7 @@
 
   def __init__(self, contents=None):
     self.key_ = []
+    self.composite_index_ = []
     self.snapshot_ = []
     self.lazy_init_lock_ = thread.allocate_lock()
     if contents is not None: self.MergeFromString(contents)
@@ -6104,6 +6105,22 @@
 
   def has_transaction(self): return self.has_transaction_
 
+  def composite_index_size(self): return len(self.composite_index_)
+  def composite_index_list(self): return self.composite_index_
+
+  def composite_index(self, i):
+    return self.composite_index_[i]
+
+  def mutable_composite_index(self, i):
+    return self.composite_index_[i]
+
+  def add_composite_index(self):
+    x = CompositeIndex()
+    self.composite_index_.append(x)
+    return x
+
+  def clear_composite_index(self):
+    self.composite_index_ = []
   def trusted(self): return self.trusted_
 
   def set_trusted(self, x):
@@ -6165,6 +6182,7 @@
     if (x.has_header()): self.mutable_header().MergeFrom(x.header())
     for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
     if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
+    for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
     if (x.has_trusted()): self.set_trusted(x.trusted())
     if (x.has_force()): self.set_force(x.force())
     if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
@@ -6179,6 +6197,9 @@
       if e1 != e2: return 0
     if self.has_transaction_ != x.has_transaction_: return 0
     if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
+    if len(self.composite_index_) != len(x.composite_index_): return 0
+    for e1, e2 in zip(self.composite_index_, x.composite_index_):
+      if e1 != e2: return 0
     if self.has_trusted_ != x.has_trusted_: return 0
     if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
     if self.has_force_ != x.has_force_: return 0
@@ -6196,6 +6217,8 @@
     for p in self.key_:
       if not p.IsInitialized(debug_strs): initialized=0
     if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
+    for p in self.composite_index_:
+      if not p.IsInitialized(debug_strs): initialized=0
     for p in self.snapshot_:
       if not p.IsInitialized(debug_strs): initialized=0
     return initialized
@@ -6206,6 +6229,8 @@
     n += 1 * len(self.key_)
     for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
     if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
+    n += 1 * len(self.composite_index_)
+    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
     if (self.has_trusted_): n += 2
     if (self.has_force_): n += 2
     if (self.has_mark_changes_): n += 2
@@ -6219,6 +6244,8 @@
     n += 1 * len(self.key_)
     for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
     if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
+    n += 1 * len(self.composite_index_)
+    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
     if (self.has_trusted_): n += 2
     if (self.has_force_): n += 2
     if (self.has_mark_changes_): n += 2
@@ -6230,6 +6257,7 @@
     self.clear_header()
     self.clear_key()
     self.clear_transaction()
+    self.clear_composite_index()
     self.clear_trusted()
     self.clear_force()
     self.clear_mark_changes()
@@ -6261,6 +6289,10 @@
       out.putVarInt32(82)
       out.putVarInt32(self.header_.ByteSize())
       self.header_.OutputUnchecked(out)
+    for i in xrange(len(self.composite_index_)):
+      out.putVarInt32(90)
+      out.putVarInt32(self.composite_index_[i].ByteSize())
+      self.composite_index_[i].OutputUnchecked(out)
 
   def OutputPartial(self, out):
     if (self.has_trusted_):
@@ -6288,6 +6320,10 @@
       out.putVarInt32(82)
       out.putVarInt32(self.header_.ByteSizePartial())
       self.header_.OutputPartial(out)
+    for i in xrange(len(self.composite_index_)):
+      out.putVarInt32(90)
+      out.putVarInt32(self.composite_index_[i].ByteSizePartial())
+      self.composite_index_[i].OutputPartial(out)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -6325,6 +6361,12 @@
         d.skip(length)
         self.mutable_header().TryMerge(tmp)
         continue
+      if tt == 90:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.add_composite_index().TryMerge(tmp)
+        continue
 
 
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -6349,6 +6391,14 @@
       res+=prefix+"transaction <\n"
       res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
+    cnt=0
+    for e in self.composite_index_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("composite_index%s <\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+      cnt+=1
     if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
     if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
     if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
@@ -6369,6 +6419,7 @@
   kheader = 10
   kkey = 6
   ktransaction = 5
+  kcomposite_index = 11
   ktrusted = 4
   kforce = 7
   kmark_changes = 8
@@ -6383,7 +6434,8 @@
     8: "mark_changes",
     9: "snapshot",
     10: "header",
-  }, 10)
+    11: "composite_index",
+  }, 11)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
@@ -6394,7 +6446,8 @@
     8: ProtocolBuffer.Encoder.NUMERIC,
     9: ProtocolBuffer.Encoder.STRING,
     10: ProtocolBuffer.Encoder.STRING,
-  }, 10, ProtocolBuffer.Encoder.MAX_TYPE)
+    11: ProtocolBuffer.Encoder.STRING,
+  }, 11, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
diff --git a/google/appengine/datastore/datastore_v4_pb.py b/google/appengine/datastore/datastore_v4_pb.py
index 424303d..ba11407 100644
--- a/google/appengine/datastore/datastore_v4_pb.py
+++ b/google/appengine/datastore/datastore_v4_pb.py
@@ -177,7 +177,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Error'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9yc3oJRXJyb3JDb2RliwGSAQtCQURfUkVRVUVTVJgBAYwBiwGSARZDT05DVVJSRU5UX1RSQU5TQUNUSU9OmAECjAGLAZIBDklOVEVSTkFMX0VSUk9SmAEDjAGLAZIBCk5FRURfSU5ERViYAQSMAYsBkgEHVElNRU9VVJgBBYwBiwGSARFQRVJNSVNTSU9OX0RFTklFRJgBBowBiwGSAQ5CSUdUQUJMRV9FUlJPUpgBB4wBiwGSARxDT01NSVRURURfQlVUX1NUSUxMX0FQUExZSU5HmAEIjAGLAZIBE0NBUEFCSUxJVFlfRElTQUJMRUSYAQmMAYsBkgEVVFJZX0FMVEVSTkFURV9CQUNLRU5EmAEKjAGLAZIBEVNBRkVfVElNRV9UT09fT0xEmAELjAF0ugHoMgonYXBwaG9zdGluZy9kYXRhc3RvcmUvZGF0YXN0b3JlX3Y0LnByb3RvEhdhcHBob3N0aW5nLmRhdGFzdG9yZS52NBokYXBwaG9zdGluZy9kYXRhc3RvcmUvZW50aXR5X3Y0LnByb3RvIosCCgVFcnJvciKBAgoJRXJyb3JDb2RlEg8KC0JBRF9SRVFVRVNUEAESGgoWQ09OQ1VSUkVOVF9UUkFOU0FDVElPThACEhIKDklOVEVSTkFMX0VSUk9SEAMSDgoKTkVFRF9JTkRFWBAEEgsKB1RJTUVPVVQQBRIVChFQRVJNSVNTSU9OX0RFTklFRBAGEhIKDkJJR1RBQkxFX0VSUk9SEAcSIAocQ09NTUlUVEVEX0JVVF9TVElMTF9BUFBMWUlORxAIEhcKE0NBUEFCSUxJVFlfRElTQUJMRUQQCRIZChVUUllfQUxURVJOQVRFX0JBQ0tFTkQQChIVChFTQUZFX1RJTUVfVE9PX09MRBALIoYBCgxFbnRpdHlSZXN1bHQSLwoGZW50aXR5GAEgAigLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5Eg8KB3ZlcnNpb24YAiABKAMiNAoKUmVzdWx0VHlwZRIICgRGVUxMEAESDgoKUFJPSkVDVElPThACEgwKCEtFWV9PTkxZEAMi8QIKBVF1ZXJ5Ej8KCnByb2plY3Rpb24YAiADKAsyKy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUV4cHJlc3Npb24SNQoEa2luZBgDIAMoCzInLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktpbmRFeHByZXNzaW9uEi8KBmZpbHRlchgEIAEoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlchI1CgVvcmRlchgFIAMoCzImLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5T3JkZXISPAoIZ3JvdXBfYnkYBiADKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRIUCgxzdGFydF9jdXJzb3IYByABKAwSEgoKZW5kX2N1cnNvchgIIAEoDBIRCgZvZmZzZXQYCiABKAU6ATASDQoFbGltaXQYCyABKAUiHgoOS2luZEV4cHJlc3Npb24SDAoEbmFtZRgBIAIoCSIhChFQcm9wZXJ0eVJlZmVyZW5jZRIMCgRuYW1lGAIgAigJItMBChJQcm9wZXJ0eUV4cHJlc3Npb24SPAoIcHJvcGVydHkYASACKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRJdChRhZ2dyZWdhdGlvbl9mdW5jdGlvbhgCIAEoDjI/LmFwcGhvc3Rpbm
cuZGF0YXN0b3JlLnY0LlByb3BlcnR5RXhwcmVzc2lvbi5BZ2dyZWdhdGlvbkZ1bmN0aW9uIiAKE0FnZ3JlZ2F0aW9uRnVuY3Rpb24SCQoFRklSU1QQASLJAQoNUHJvcGVydHlPcmRlchI8Cghwcm9wZXJ0eRgBIAIoCzIqLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlEk4KCWRpcmVjdGlvbhgCIAEoDjIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5T3JkZXIuRGlyZWN0aW9uOglBU0NFTkRJTkciKgoJRGlyZWN0aW9uEg0KCUFTQ0VORElORxABEg4KCkRFU0NFTkRJTkcQAiKOAQoGRmlsdGVyEkIKEGNvbXBvc2l0ZV9maWx0ZXIYASABKAsyKC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21wb3NpdGVGaWx0ZXISQAoPcHJvcGVydHlfZmlsdGVyGAIgASgLMicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlGaWx0ZXIinAEKD0NvbXBvc2l0ZUZpbHRlchJDCghvcGVyYXRvchgBIAIoDjIxLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbXBvc2l0ZUZpbHRlci5PcGVyYXRvchIvCgZmaWx0ZXIYAiADKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5GaWx0ZXIiEwoIT3BlcmF0b3ISBwoDQU5EEAEivgIKDlByb3BlcnR5RmlsdGVyEjwKCHByb3BlcnR5GAEgAigLMiouYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlSZWZlcmVuY2USQgoIb3BlcmF0b3IYAiACKA4yMC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUZpbHRlci5PcGVyYXRvchItCgV2YWx1ZRgDIAIoCzIeLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlZhbHVlInsKCE9wZXJhdG9yEg0KCUxFU1NfVEhBThABEhYKEkxFU1NfVEhBTl9PUl9FUVVBTBACEhAKDEdSRUFURVJfVEhBThADEhkKFUdSRUFURVJfVEhBTl9PUl9FUVVBTBAEEgkKBUVRVUFMEAUSEAoMSEFTX0FOQ0VTVE9SEAsisAEKCEdxbFF1ZXJ5EhQKDHF1ZXJ5X3N0cmluZxgBIAIoCRIcCg1hbGxvd19saXRlcmFsGAIgASgIOgVmYWxzZRI2CghuYW1lX2FyZxgDIAMoCzIkLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdxbFF1ZXJ5QXJnEjgKCm51bWJlcl9hcmcYBCADKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HcWxRdWVyeUFyZyJaCgtHcWxRdWVyeUFyZxIMCgRuYW1lGAEgASgJEi0KBXZhbHVlGAIgASgLMh4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuVmFsdWUSDgoGY3Vyc29yGAMgASgMIpEDChBRdWVyeVJlc3VsdEJhdGNoEkwKEmVudGl0eV9yZXN1bHRfdHlwZRgBIAIoDjIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdC5SZXN1bHRUeXBlEjwKDWVudGl0eV9yZXN1bHQYAiADKAsyJS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHQSEgoKZW5kX2N1cnNvchgEIAEoDBJPCgxtb3JlX3Jlc3VsdHMYBSACKA4yOS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeVJlc3VsdEJhdGNoLk1vcmVSZXN1bHRzVHlwZRIaCg9za2lwcGVkX3Jlc3VsdHMYBiABKAU6ATASGAoQc25hcHNob3RfdmVyc2lvbhgHIA
EoAyJWCg9Nb3JlUmVzdWx0c1R5cGUSEAoMTk9UX0ZJTklTSEVEEAESHAoYTU9SRV9SRVNVTFRTX0FGVEVSX0xJTUlUEAISEwoPTk9fTU9SRV9SRVNVTFRTEAMi8gEKCE11dGF0aW9uEkAKAm9wGAEgASgOMisuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTXV0YXRpb24uT3BlcmF0aW9uOgdVTktOT1dOEikKA2tleRgCIAEoCzIcLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleRIvCgZlbnRpdHkYAyABKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHkiSAoJT3BlcmF0aW9uEgsKB1VOS05PV04QABIKCgZJTlNFUlQQARIKCgZVUERBVEUQAhIKCgZVUFNFUlQQAxIKCgZERUxFVEUQBCJTCg5NdXRhdGlvblJlc3VsdBIpCgNrZXkYAyABKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSFgoLbmV3X3ZlcnNpb24YBCABKAM6ATAipAIKEkRlcHJlY2F0ZWRNdXRhdGlvbhIvCgZ1cHNlcnQYASADKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHkSLwoGdXBkYXRlGAIgAygLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5Ei8KBmluc2VydBgDIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRI3Cg5pbnNlcnRfYXV0b19pZBgEIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRIsCgZkZWxldGUYBSADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSFAoFZm9yY2UYBiABKAg6BWZhbHNlIusBChhEZXByZWNhdGVkTXV0YXRpb25SZXN1bHQSFQoNaW5kZXhfdXBkYXRlcxgBIAIoBRI4ChJpbnNlcnRfYXV0b19pZF9rZXkYAiADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSFgoOdXBzZXJ0X3ZlcnNpb24YAyADKAMSFgoOdXBkYXRlX3ZlcnNpb24YBCADKAMSFgoOaW5zZXJ0X3ZlcnNpb24YBSADKAMSHgoWaW5zZXJ0X2F1dG9faWRfdmVyc2lvbhgGIAMoAxIWCg5kZWxldGVfdmVyc2lvbhgHIAMoAyK1AQoLUmVhZE9wdGlvbnMSVwoQcmVhZF9jb25zaXN0ZW5jeRgBIAEoDjI0LmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJlYWRPcHRpb25zLlJlYWRDb25zaXN0ZW5jeToHREVGQVVMVBITCgt0cmFuc2FjdGlvbhgCIAEoDCI4Cg9SZWFkQ29uc2lzdGVuY3kSCwoHREVGQVVMVBAAEgoKBlNUUk9ORxABEgwKCEVWRU5UVUFMEAIidgoNTG9va3VwUmVxdWVzdBI6CgxyZWFkX29wdGlvbnMYASABKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SZWFkT3B0aW9ucxIpCgNrZXkYAyADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkirgEKDkxvb2t1cFJlc3BvbnNlEjQKBWZvdW5kGAEgAygLMiUuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5UmVzdWx0EjYKB21pc3NpbmcYAiADKAsyJS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHQSLgoIZGVmZXJyZWQYAyADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkiqwIKD1J1blF1ZXJ5UmVxdWVzdBI6CgxyZWFkX29wdGlvbnMYASABKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS
52NC5SZWFkT3B0aW9ucxI6CgxwYXJ0aXRpb25faWQYAiABKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5QYXJ0aXRpb25JZBItCgVxdWVyeRgDIAEoCzIeLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5EjQKCWdxbF9xdWVyeRgHIAEoCzIhLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdxbFF1ZXJ5Eh0KFW1pbl9zYWZlX3RpbWVfc2Vjb25kcxgEIAEoAxIcChRzdWdnZXN0ZWRfYmF0Y2hfc2l6ZRgFIAEoBSJiChBSdW5RdWVyeVJlc3BvbnNlEjgKBWJhdGNoGAEgAigLMikuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUXVlcnlSZXN1bHRCYXRjaBIUCgxxdWVyeV9oYW5kbGUYAiABKAwiLAoUQ29udGludWVRdWVyeVJlcXVlc3QSFAoMcXVlcnlfaGFuZGxlGAEgAigMIlEKFUNvbnRpbnVlUXVlcnlSZXNwb25zZRI4CgViYXRjaBgBIAIoCzIpLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5UmVzdWx0QmF0Y2giUwoXQmVnaW5UcmFuc2FjdGlvblJlcXVlc3QSGgoLY3Jvc3NfZ3JvdXAYASABKAg6BWZhbHNlEhwKDWNyb3NzX3JlcXVlc3QYAiABKAg6BWZhbHNlIi8KGEJlZ2luVHJhbnNhY3Rpb25SZXNwb25zZRITCgt0cmFuc2FjdGlvbhgBIAIoDCImCg9Sb2xsYmFja1JlcXVlc3QSEwoLdHJhbnNhY3Rpb24YASACKAwiEgoQUm9sbGJhY2tSZXNwb25zZSLAAgoNQ29tbWl0UmVxdWVzdBITCgt0cmFuc2FjdGlvbhgBIAEoDBIzCghtdXRhdGlvbhgFIAMoCzIhLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uEkgKE2RlcHJlY2F0ZWRfbXV0YXRpb24YAiABKAsyKy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5EZXByZWNhdGVkTXV0YXRpb24SSAoEbW9kZRgEIAEoDjIrLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlcXVlc3QuTW9kZToNVFJBTlNBQ1RJT05BTBIfChBpZ25vcmVfcmVhZF9vbmx5GAYgASgIOgVmYWxzZSIwCgRNb2RlEhEKDVRSQU5TQUNUSU9OQUwQARIVChFOT05fVFJBTlNBQ1RJT05BTBACIsABCg5Db21taXRSZXNwb25zZRJACg9tdXRhdGlvbl9yZXN1bHQYAyADKAsyJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvblJlc3VsdBJVChpkZXByZWNhdGVkX211dGF0aW9uX3Jlc3VsdBgBIAEoCzIxLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkRlcHJlY2F0ZWRNdXRhdGlvblJlc3VsdBIVCg1pbmRleF91cGRhdGVzGAQgASgFInMKEkFsbG9jYXRlSWRzUmVxdWVzdBIuCghhbGxvY2F0ZRgBIAMoCzIcLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleRItCgdyZXNlcnZlGAIgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5IkYKE0FsbG9jYXRlSWRzUmVzcG9uc2USLwoJYWxsb2NhdGVkGAEgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5IlgKDFdyaXRlUmVxdWVzdBJIChNkZXByZWNhdGVkX211dGF0aW9uGAEgAigLMisuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRGVwcmVjYXRlZE11dGF0aW9uMqIHChJEYXRhc3RvcmVWNFNlcnZpY2USeQoQQmVnaW5UcmFuc2FjdGlvbh
IwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkJlZ2luVHJhbnNhY3Rpb25SZXF1ZXN0GjEuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQmVnaW5UcmFuc2FjdGlvblJlc3BvbnNlIgASYQoIUm9sbGJhY2sSKC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Sb2xsYmFja1JlcXVlc3QaKS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Sb2xsYmFja1Jlc3BvbnNlIgASWwoGQ29tbWl0EiYuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tbWl0UmVxdWVzdBonLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlc3BvbnNlIgASYQoIUnVuUXVlcnkSKC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SdW5RdWVyeVJlcXVlc3QaKS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SdW5RdWVyeVJlc3BvbnNlIgAScAoNQ29udGludWVRdWVyeRItLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbnRpbnVlUXVlcnlSZXF1ZXN0Gi4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29udGludWVRdWVyeVJlc3BvbnNlIgASWwoGTG9va3VwEiYuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTG9va3VwUmVxdWVzdBonLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lkxvb2t1cFJlc3BvbnNlIgASagoLQWxsb2NhdGVJZHMSKy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5BbGxvY2F0ZUlkc1JlcXVlc3QaLC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5BbGxvY2F0ZUlkc1Jlc3BvbnNlIgASWAoDR2V0EiYuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTG9va3VwUmVxdWVzdBonLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lkxvb2t1cFJlc3BvbnNlIgASWQoFV3JpdGUSJS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Xcml0ZVJlcXVlc3QaJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21taXRSZXNwb25zZSIAQiMKH2NvbS5nb29nbGUuYXBwaG9zdGluZy5kYXRhc3RvcmUgAQ=="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9yc3oJRXJyb3JDb2RliwGSAQtCQURfUkVRVUVTVJgBAYwBiwGSARZDT05DVVJSRU5UX1RSQU5TQUNUSU9OmAECjAGLAZIBDklOVEVSTkFMX0VSUk9SmAEDjAGLAZIBCk5FRURfSU5ERViYAQSMAYsBkgEHVElNRU9VVJgBBYwBiwGSARFQRVJNSVNTSU9OX0RFTklFRJgBBowBiwGSAQ5CSUdUQUJMRV9FUlJPUpgBB4wBiwGSARxDT01NSVRURURfQlVUX1NUSUxMX0FQUExZSU5HmAEIjAGLAZIBE0NBUEFCSUxJVFlfRElTQUJMRUSYAQmMAYsBkgEVVFJZX0FMVEVSTkFURV9CQUNLRU5EmAEKjAGLAZIBEVNBRkVfVElNRV9UT09fT0xEmAELjAF0ugHhNgonYXBwaG9zdGluZy9kYXRhc3RvcmUvZGF0YXN0b3JlX3Y0LnByb3RvEhdhcHBob3N0aW5nLmRhdGFzdG9yZS52NBokYXBwaG9zdGluZy9kYXRhc3RvcmUvZW50aXR5X3Y0LnByb3RvIosCCgVFcnJvciKBAgoJRXJyb3JDb2RlEg8KC0JBRF9SRVFVRVNUEAESGgoWQ09OQ1VSUkVOVF9UUkFOU0FDVElPThACEhIKDklOVEVSTkFMX0VSUk9SEAMSDgoKTkVFRF9JTkRFWBAEEgsKB1RJTUVPVVQQBRIVChFQRVJNSVNTSU9OX0RFTklFRBAGEhIKDkJJR1RBQkxFX0VSUk9SEAcSIAocQ09NTUlUVEVEX0JVVF9TVElMTF9BUFBMWUlORxAIEhcKE0NBUEFCSUxJVFlfRElTQUJMRUQQCRIZChVUUllfQUxURVJOQVRFX0JBQ0tFTkQQChIVChFTQUZFX1RJTUVfVE9PX09MRBALIoYBCgxFbnRpdHlSZXN1bHQSLwoGZW50aXR5GAEgAigLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5Eg8KB3ZlcnNpb24YAiABKAMiNAoKUmVzdWx0VHlwZRIICgRGVUxMEAESDgoKUFJPSkVDVElPThACEgwKCEtFWV9PTkxZEAMi8QIKBVF1ZXJ5Ej8KCnByb2plY3Rpb24YAiADKAsyKy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUV4cHJlc3Npb24SNQoEa2luZBgDIAMoCzInLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktpbmRFeHByZXNzaW9uEi8KBmZpbHRlchgEIAEoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlchI1CgVvcmRlchgFIAMoCzImLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5T3JkZXISPAoIZ3JvdXBfYnkYBiADKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRIUCgxzdGFydF9jdXJzb3IYByABKAwSEgoKZW5kX2N1cnNvchgIIAEoDBIRCgZvZmZzZXQYCiABKAU6ATASDQoFbGltaXQYCyABKAUiHgoOS2luZEV4cHJlc3Npb24SDAoEbmFtZRgBIAIoCSIhChFQcm9wZXJ0eVJlZmVyZW5jZRIMCgRuYW1lGAIgAigJItMBChJQcm9wZXJ0eUV4cHJlc3Npb24SPAoIcHJvcGVydHkYASACKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRJdChRhZ2dyZWdhdGlvbl9mdW5jdGlvbhgCIAEoDjI/LmFwcGhvc3Rpbm
cuZGF0YXN0b3JlLnY0LlByb3BlcnR5RXhwcmVzc2lvbi5BZ2dyZWdhdGlvbkZ1bmN0aW9uIiAKE0FnZ3JlZ2F0aW9uRnVuY3Rpb24SCQoFRklSU1QQASLJAQoNUHJvcGVydHlPcmRlchI8Cghwcm9wZXJ0eRgBIAIoCzIqLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlEk4KCWRpcmVjdGlvbhgCIAEoDjIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5T3JkZXIuRGlyZWN0aW9uOglBU0NFTkRJTkciKgoJRGlyZWN0aW9uEg0KCUFTQ0VORElORxABEg4KCkRFU0NFTkRJTkcQAiKmAgoGRmlsdGVyEkIKEGNvbXBvc2l0ZV9maWx0ZXIYASABKAsyKC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21wb3NpdGVGaWx0ZXISQAoPcHJvcGVydHlfZmlsdGVyGAIgASgLMicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlGaWx0ZXISTQoWYm91bmRpbmdfY2lyY2xlX2ZpbHRlchgDIAEoCzItLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkJvdW5kaW5nQ2lyY2xlRmlsdGVyEkcKE2JvdW5kaW5nX2JveF9maWx0ZXIYBCABKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Cb3VuZGluZ0JveEZpbHRlciKcAQoPQ29tcG9zaXRlRmlsdGVyEkMKCG9wZXJhdG9yGAEgAigOMjEuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tcG9zaXRlRmlsdGVyLk9wZXJhdG9yEi8KBmZpbHRlchgCIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlciITCghPcGVyYXRvchIHCgNBTkQQASK+AgoOUHJvcGVydHlGaWx0ZXISPAoIcHJvcGVydHkYASACKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRJCCghvcGVyYXRvchgCIAIoDjIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5RmlsdGVyLk9wZXJhdG9yEi0KBXZhbHVlGAMgAigLMh4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuVmFsdWUiewoIT3BlcmF0b3ISDQoJTEVTU19USEFOEAESFgoSTEVTU19USEFOX09SX0VRVUFMEAISEAoMR1JFQVRFUl9USEFOEAMSGQoVR1JFQVRFUl9USEFOX09SX0VRVUFMEAQSCQoFRVFVQUwQBRIQCgxIQVNfQU5DRVNUT1IQCyKeAQoUQm91bmRpbmdDaXJjbGVGaWx0ZXISPAoIcHJvcGVydHkYASACKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRIxCgZjZW50ZXIYAiACKAsyIS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HZW9Qb2ludBIVCg1yYWRpdXNfbWV0ZXJzGAMgAigBIr0BChFCb3VuZGluZ0JveEZpbHRlchI8Cghwcm9wZXJ0eRgBIAIoCzIqLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlEjQKCXNvdXRod2VzdBgCIAIoCzIhLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lkdlb1BvaW50EjQKCW5vcnRoZWFzdBgDIAIoCzIhLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lkdlb1BvaW50IrABCghHcWxRdWVyeRIUCgxxdWVyeV9zdHJpbmcYASACKAkSHAoNYWxsb3dfbGl0ZXJhbBgCIAEoCDoFZmFsc2USNgoIbmFtZV9hcmcYAy
ADKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HcWxRdWVyeUFyZxI4CgpudW1iZXJfYXJnGAQgAygLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuR3FsUXVlcnlBcmciWgoLR3FsUXVlcnlBcmcSDAoEbmFtZRgBIAEoCRItCgV2YWx1ZRgCIAEoCzIeLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlZhbHVlEg4KBmN1cnNvchgDIAEoDCKRAwoQUXVlcnlSZXN1bHRCYXRjaBJMChJlbnRpdHlfcmVzdWx0X3R5cGUYASACKA4yMC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHQuUmVzdWx0VHlwZRI8Cg1lbnRpdHlfcmVzdWx0GAIgAygLMiUuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5UmVzdWx0EhIKCmVuZF9jdXJzb3IYBCABKAwSTwoMbW9yZV9yZXN1bHRzGAUgAigOMjkuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUXVlcnlSZXN1bHRCYXRjaC5Nb3JlUmVzdWx0c1R5cGUSGgoPc2tpcHBlZF9yZXN1bHRzGAYgASgFOgEwEhgKEHNuYXBzaG90X3ZlcnNpb24YByABKAMiVgoPTW9yZVJlc3VsdHNUeXBlEhAKDE5PVF9GSU5JU0hFRBABEhwKGE1PUkVfUkVTVUxUU19BRlRFUl9MSU1JVBACEhMKD05PX01PUkVfUkVTVUxUUxADIvIBCghNdXRhdGlvbhJACgJvcBgBIAEoDjIrLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uLk9wZXJhdGlvbjoHVU5LTk9XThIpCgNrZXkYAiABKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSLwoGZW50aXR5GAMgASgLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5IkgKCU9wZXJhdGlvbhILCgdVTktOT1dOEAASCgoGSU5TRVJUEAESCgoGVVBEQVRFEAISCgoGVVBTRVJUEAMSCgoGREVMRVRFEAQiUwoOTXV0YXRpb25SZXN1bHQSKQoDa2V5GAMgASgLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5EhYKC25ld192ZXJzaW9uGAQgASgDOgEwIqQCChJEZXByZWNhdGVkTXV0YXRpb24SLwoGdXBzZXJ0GAEgAygLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5Ei8KBnVwZGF0ZRgCIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRIvCgZpbnNlcnQYAyADKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHkSNwoOaW5zZXJ0X2F1dG9faWQYBCADKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHkSLAoGZGVsZXRlGAUgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5EhQKBWZvcmNlGAYgASgIOgVmYWxzZSLrAQoYRGVwcmVjYXRlZE11dGF0aW9uUmVzdWx0EhUKDWluZGV4X3VwZGF0ZXMYASACKAUSOAoSaW5zZXJ0X2F1dG9faWRfa2V5GAIgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5EhYKDnVwc2VydF92ZXJzaW9uGAMgAygDEhYKDnVwZGF0ZV92ZXJzaW9uGAQgAygDEhYKDmluc2VydF92ZXJzaW9uGAUgAygDEh4KFmluc2VydF9hdXRvX2lkX3ZlcnNpb24YBiADKAMSFgoOZGVsZXRlX3ZlcnNpb24YByADKAMitQEKC1JlYWRPcHRpb25zElcKEHJlYWRfY29uc2lzdGVuY3kYASABKA
4yNC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SZWFkT3B0aW9ucy5SZWFkQ29uc2lzdGVuY3k6B0RFRkFVTFQSEwoLdHJhbnNhY3Rpb24YAiABKAwiOAoPUmVhZENvbnNpc3RlbmN5EgsKB0RFRkFVTFQQABIKCgZTVFJPTkcQARIMCghFVkVOVFVBTBACInYKDUxvb2t1cFJlcXVlc3QSOgoMcmVhZF9vcHRpb25zGAEgASgLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUmVhZE9wdGlvbnMSKQoDa2V5GAMgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5Iq4BCg5Mb29rdXBSZXNwb25zZRI0CgVmb3VuZBgBIAMoCzIlLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdBI2CgdtaXNzaW5nGAIgAygLMiUuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5UmVzdWx0Ei4KCGRlZmVycmVkGAMgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5IqsCCg9SdW5RdWVyeVJlcXVlc3QSOgoMcmVhZF9vcHRpb25zGAEgASgLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUmVhZE9wdGlvbnMSOgoMcGFydGl0aW9uX2lkGAIgASgLMiQuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUGFydGl0aW9uSWQSLQoFcXVlcnkYAyABKAsyHi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeRI0CglncWxfcXVlcnkYByABKAsyIS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HcWxRdWVyeRIdChVtaW5fc2FmZV90aW1lX3NlY29uZHMYBCABKAMSHAoUc3VnZ2VzdGVkX2JhdGNoX3NpemUYBSABKAUiYgoQUnVuUXVlcnlSZXNwb25zZRI4CgViYXRjaBgBIAIoCzIpLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5UmVzdWx0QmF0Y2gSFAoMcXVlcnlfaGFuZGxlGAIgASgMIiwKFENvbnRpbnVlUXVlcnlSZXF1ZXN0EhQKDHF1ZXJ5X2hhbmRsZRgBIAIoDCJRChVDb250aW51ZVF1ZXJ5UmVzcG9uc2USOAoFYmF0Y2gYASACKAsyKS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeVJlc3VsdEJhdGNoIlMKF0JlZ2luVHJhbnNhY3Rpb25SZXF1ZXN0EhoKC2Nyb3NzX2dyb3VwGAEgASgIOgVmYWxzZRIcCg1jcm9zc19yZXF1ZXN0GAIgASgIOgVmYWxzZSIvChhCZWdpblRyYW5zYWN0aW9uUmVzcG9uc2USEwoLdHJhbnNhY3Rpb24YASACKAwiJgoPUm9sbGJhY2tSZXF1ZXN0EhMKC3RyYW5zYWN0aW9uGAEgAigMIhIKEFJvbGxiYWNrUmVzcG9uc2UiwAIKDUNvbW1pdFJlcXVlc3QSEwoLdHJhbnNhY3Rpb24YASABKAwSMwoIbXV0YXRpb24YBSADKAsyIS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvbhJIChNkZXByZWNhdGVkX211dGF0aW9uGAIgASgLMisuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRGVwcmVjYXRlZE11dGF0aW9uEkgKBG1vZGUYBCABKA4yKy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21taXRSZXF1ZXN0Lk1vZGU6DVRSQU5TQUNUSU9OQUwSHwoQaWdub3JlX3JlYWRfb25seRgGIAEoCDoFZmFsc2UiMAoETW9kZRIRCg1UUkFOU0FDVElPTkFMEAESFQoRTk9OX1RSQU5TQUNUSU9OQUwQAiLAAQoOQ29tbWl0Um
VzcG9uc2USQAoPbXV0YXRpb25fcmVzdWx0GAMgAygLMicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTXV0YXRpb25SZXN1bHQSVQoaZGVwcmVjYXRlZF9tdXRhdGlvbl9yZXN1bHQYASABKAsyMS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5EZXByZWNhdGVkTXV0YXRpb25SZXN1bHQSFQoNaW5kZXhfdXBkYXRlcxgEIAEoBSJzChJBbGxvY2F0ZUlkc1JlcXVlc3QSLgoIYWxsb2NhdGUYASADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSLQoHcmVzZXJ2ZRgCIAMoCzIcLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleSJGChNBbGxvY2F0ZUlkc1Jlc3BvbnNlEi8KCWFsbG9jYXRlZBgBIAMoCzIcLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleSJYCgxXcml0ZVJlcXVlc3QSSAoTZGVwcmVjYXRlZF9tdXRhdGlvbhgBIAIoCzIrLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkRlcHJlY2F0ZWRNdXRhdGlvbjKiBwoSRGF0YXN0b3JlVjRTZXJ2aWNlEnkKEEJlZ2luVHJhbnNhY3Rpb24SMC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5CZWdpblRyYW5zYWN0aW9uUmVxdWVzdBoxLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkJlZ2luVHJhbnNhY3Rpb25SZXNwb25zZSIAEmEKCFJvbGxiYWNrEiguYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUm9sbGJhY2tSZXF1ZXN0GikuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUm9sbGJhY2tSZXNwb25zZSIAElsKBkNvbW1pdBImLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlcXVlc3QaJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21taXRSZXNwb25zZSIAEmEKCFJ1blF1ZXJ5EiguYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUnVuUXVlcnlSZXF1ZXN0GikuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUnVuUXVlcnlSZXNwb25zZSIAEnAKDUNvbnRpbnVlUXVlcnkSLS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db250aW51ZVF1ZXJ5UmVxdWVzdBouLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbnRpbnVlUXVlcnlSZXNwb25zZSIAElsKBkxvb2t1cBImLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lkxvb2t1cFJlcXVlc3QaJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Mb29rdXBSZXNwb25zZSIAEmoKC0FsbG9jYXRlSWRzEisuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQWxsb2NhdGVJZHNSZXF1ZXN0GiwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQWxsb2NhdGVJZHNSZXNwb25zZSIAElgKA0dldBImLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lkxvb2t1cFJlcXVlc3QaJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Mb29rdXBSZXNwb25zZSIAElkKBVdyaXRlEiUuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuV3JpdGVSZXF1ZXN0GicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tbWl0UmVzcG9uc2UiAEIjCh9jb20uZ29vZ2xlLmFwcGhvc3RpbmcuZGF0YXN0b3JlIAE="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -1497,6 +1497,10 @@
   composite_filter_ = None
   has_property_filter_ = 0
   property_filter_ = None
+  has_bounding_circle_filter_ = 0
+  bounding_circle_filter_ = None
+  has_bounding_box_filter_ = 0
+  bounding_box_filter_ = None
 
   def __init__(self, contents=None):
     self.lazy_init_lock_ = thread.allocate_lock()
@@ -1540,11 +1544,51 @@
 
   def has_property_filter(self): return self.has_property_filter_
 
+  def bounding_circle_filter(self):
+    if self.bounding_circle_filter_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.bounding_circle_filter_ is None: self.bounding_circle_filter_ = BoundingCircleFilter()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.bounding_circle_filter_
+
+  def mutable_bounding_circle_filter(self): self.has_bounding_circle_filter_ = 1; return self.bounding_circle_filter()
+
+  def clear_bounding_circle_filter(self):
+
+    if self.has_bounding_circle_filter_:
+      self.has_bounding_circle_filter_ = 0;
+      if self.bounding_circle_filter_ is not None: self.bounding_circle_filter_.Clear()
+
+  def has_bounding_circle_filter(self): return self.has_bounding_circle_filter_
+
+  def bounding_box_filter(self):
+    if self.bounding_box_filter_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.bounding_box_filter_ is None: self.bounding_box_filter_ = BoundingBoxFilter()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.bounding_box_filter_
+
+  def mutable_bounding_box_filter(self): self.has_bounding_box_filter_ = 1; return self.bounding_box_filter()
+
+  def clear_bounding_box_filter(self):
+
+    if self.has_bounding_box_filter_:
+      self.has_bounding_box_filter_ = 0;
+      if self.bounding_box_filter_ is not None: self.bounding_box_filter_.Clear()
+
+  def has_bounding_box_filter(self): return self.has_bounding_box_filter_
+
 
   def MergeFrom(self, x):
     assert x is not self
     if (x.has_composite_filter()): self.mutable_composite_filter().MergeFrom(x.composite_filter())
     if (x.has_property_filter()): self.mutable_property_filter().MergeFrom(x.property_filter())
+    if (x.has_bounding_circle_filter()): self.mutable_bounding_circle_filter().MergeFrom(x.bounding_circle_filter())
+    if (x.has_bounding_box_filter()): self.mutable_bounding_box_filter().MergeFrom(x.bounding_box_filter())
 
   if _net_proto___parse__python is not None:
     def _CMergeFromString(self, s):
@@ -1579,29 +1623,41 @@
     if self.has_composite_filter_ and self.composite_filter_ != x.composite_filter_: return 0
     if self.has_property_filter_ != x.has_property_filter_: return 0
     if self.has_property_filter_ and self.property_filter_ != x.property_filter_: return 0
+    if self.has_bounding_circle_filter_ != x.has_bounding_circle_filter_: return 0
+    if self.has_bounding_circle_filter_ and self.bounding_circle_filter_ != x.bounding_circle_filter_: return 0
+    if self.has_bounding_box_filter_ != x.has_bounding_box_filter_: return 0
+    if self.has_bounding_box_filter_ and self.bounding_box_filter_ != x.bounding_box_filter_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
     initialized = 1
     if (self.has_composite_filter_ and not self.composite_filter_.IsInitialized(debug_strs)): initialized = 0
     if (self.has_property_filter_ and not self.property_filter_.IsInitialized(debug_strs)): initialized = 0
+    if (self.has_bounding_circle_filter_ and not self.bounding_circle_filter_.IsInitialized(debug_strs)): initialized = 0
+    if (self.has_bounding_box_filter_ and not self.bounding_box_filter_.IsInitialized(debug_strs)): initialized = 0
     return initialized
 
   def ByteSize(self):
     n = 0
     if (self.has_composite_filter_): n += 1 + self.lengthString(self.composite_filter_.ByteSize())
     if (self.has_property_filter_): n += 1 + self.lengthString(self.property_filter_.ByteSize())
+    if (self.has_bounding_circle_filter_): n += 1 + self.lengthString(self.bounding_circle_filter_.ByteSize())
+    if (self.has_bounding_box_filter_): n += 1 + self.lengthString(self.bounding_box_filter_.ByteSize())
     return n
 
   def ByteSizePartial(self):
     n = 0
     if (self.has_composite_filter_): n += 1 + self.lengthString(self.composite_filter_.ByteSizePartial())
     if (self.has_property_filter_): n += 1 + self.lengthString(self.property_filter_.ByteSizePartial())
+    if (self.has_bounding_circle_filter_): n += 1 + self.lengthString(self.bounding_circle_filter_.ByteSizePartial())
+    if (self.has_bounding_box_filter_): n += 1 + self.lengthString(self.bounding_box_filter_.ByteSizePartial())
     return n
 
   def Clear(self):
     self.clear_composite_filter()
     self.clear_property_filter()
+    self.clear_bounding_circle_filter()
+    self.clear_bounding_box_filter()
 
   def OutputUnchecked(self, out):
     if (self.has_composite_filter_):
@@ -1612,6 +1668,14 @@
       out.putVarInt32(18)
       out.putVarInt32(self.property_filter_.ByteSize())
       self.property_filter_.OutputUnchecked(out)
+    if (self.has_bounding_circle_filter_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.bounding_circle_filter_.ByteSize())
+      self.bounding_circle_filter_.OutputUnchecked(out)
+    if (self.has_bounding_box_filter_):
+      out.putVarInt32(34)
+      out.putVarInt32(self.bounding_box_filter_.ByteSize())
+      self.bounding_box_filter_.OutputUnchecked(out)
 
   def OutputPartial(self, out):
     if (self.has_composite_filter_):
@@ -1622,6 +1686,14 @@
       out.putVarInt32(18)
       out.putVarInt32(self.property_filter_.ByteSizePartial())
       self.property_filter_.OutputPartial(out)
+    if (self.has_bounding_circle_filter_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.bounding_circle_filter_.ByteSizePartial())
+      self.bounding_circle_filter_.OutputPartial(out)
+    if (self.has_bounding_box_filter_):
+      out.putVarInt32(34)
+      out.putVarInt32(self.bounding_box_filter_.ByteSizePartial())
+      self.bounding_box_filter_.OutputPartial(out)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -1638,6 +1710,18 @@
         d.skip(length)
         self.mutable_property_filter().TryMerge(tmp)
         continue
+      if tt == 26:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_bounding_circle_filter().TryMerge(tmp)
+        continue
+      if tt == 34:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_bounding_box_filter().TryMerge(tmp)
+        continue
 
 
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -1654,6 +1738,14 @@
       res+=prefix+"property_filter <\n"
       res+=self.property_filter_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
+    if self.has_bounding_circle_filter_:
+      res+=prefix+"bounding_circle_filter <\n"
+      res+=self.bounding_circle_filter_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_bounding_box_filter_:
+      res+=prefix+"bounding_box_filter <\n"
+      res+=self.bounding_box_filter_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
     return res
 
 
@@ -1662,25 +1754,31 @@
 
   kcomposite_filter = 1
   kproperty_filter = 2
+  kbounding_circle_filter = 3
+  kbounding_box_filter = 4
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
     1: "composite_filter",
     2: "property_filter",
-  }, 2)
+    3: "bounding_circle_filter",
+    4: "bounding_box_filter",
+  }, 4)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
     1: ProtocolBuffer.Encoder.STRING,
     2: ProtocolBuffer.Encoder.STRING,
-  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.STRING,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Filter'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlchMaEGNvbXBvc2l0ZV9maWx0ZXIgASgCMAs4AUonYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tcG9zaXRlRmlsdGVyowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaD3Byb3BlcnR5X2ZpbHRlciACKAIwCzgBSiZhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUZpbHRlcqMBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlchMaEGNvbXBvc2l0ZV9maWx0ZXIgASgCMAs4AUonYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tcG9zaXRlRmlsdGVyowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaD3Byb3BlcnR5X2ZpbHRlciACKAIwCzgBSiZhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUZpbHRlcqMBqgEFY3R5cGWyAQZwcm90bzKkARQTGhZib3VuZGluZ19jaXJjbGVfZmlsdGVyIAMoAjALOAFKLGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkJvdW5kaW5nQ2lyY2xlRmlsdGVyowGqAQVjdHlwZbIBBnByb3RvMqQBFBMaE2JvdW5kaW5nX2JveF9maWx0ZXIgBCgCMAs4AUopYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQm91bmRpbmdCb3hGaWx0ZXKjAaoBBWN0eXBlsgEGcHJvdG8ypAEUwgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -2122,6 +2220,446 @@
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
 
+class BoundingCircleFilter(ProtocolBuffer.ProtocolMessage):
+  has_property_ = 0
+  has_center_ = 0
+  has_radius_meters_ = 0
+  radius_meters_ = 0.0
+
+  def __init__(self, contents=None):
+    self.property_ = PropertyReference()
+    self.center_ = google.appengine.datastore.entity_v4_pb.GeoPoint()
+    if contents is not None: self.MergeFromString(contents)
+
+  def property(self): return self.property_
+
+  def mutable_property(self): self.has_property_ = 1; return self.property_
+
+  def clear_property(self):self.has_property_ = 0; self.property_.Clear()
+
+  def has_property(self): return self.has_property_
+
+  def center(self): return self.center_
+
+  def mutable_center(self): self.has_center_ = 1; return self.center_
+
+  def clear_center(self):self.has_center_ = 0; self.center_.Clear()
+
+  def has_center(self): return self.has_center_
+
+  def radius_meters(self): return self.radius_meters_
+
+  def set_radius_meters(self, x):
+    self.has_radius_meters_ = 1
+    self.radius_meters_ = x
+
+  def clear_radius_meters(self):
+    if self.has_radius_meters_:
+      self.has_radius_meters_ = 0
+      self.radius_meters_ = 0.0
+
+  def has_radius_meters(self): return self.has_radius_meters_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_property()): self.mutable_property().MergeFrom(x.property())
+    if (x.has_center()): self.mutable_center().MergeFrom(x.center())
+    if (x.has_radius_meters()): self.set_radius_meters(x.radius_meters())
+
+  if _net_proto___parse__python is not None:
+    def _CMergeFromString(self, s):
+      _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.BoundingCircleFilter', s)
+
+  if _net_proto___parse__python is not None:
+    def _CEncode(self):
+      return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.BoundingCircleFilter')
+
+  if _net_proto___parse__python is not None:
+    def _CEncodePartial(self):
+      return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.BoundingCircleFilter')
+
+  if _net_proto___parse__python is not None:
+    def _CToASCII(self, output_format):
+      return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.BoundingCircleFilter', output_format)
+
+
+  if _net_proto___parse__python is not None:
+    def ParseASCII(self, s):
+      _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.BoundingCircleFilter', s)
+
+
+  if _net_proto___parse__python is not None:
+    def ParseASCIIIgnoreUnknown(self, s):
+      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.BoundingCircleFilter', s)
+
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_property_ != x.has_property_: return 0
+    if self.has_property_ and self.property_ != x.property_: return 0
+    if self.has_center_ != x.has_center_: return 0
+    if self.has_center_ and self.center_ != x.center_: return 0
+    if self.has_radius_meters_ != x.has_radius_meters_: return 0
+    if self.has_radius_meters_ and self.radius_meters_ != x.radius_meters_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_property_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: property not set.')
+    elif not self.property_.IsInitialized(debug_strs): initialized = 0
+    if (not self.has_center_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: center not set.')
+    elif not self.center_.IsInitialized(debug_strs): initialized = 0
+    if (not self.has_radius_meters_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: radius_meters not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(self.property_.ByteSize())
+    n += self.lengthString(self.center_.ByteSize())
+    return n + 11
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_property_):
+      n += 1
+      n += self.lengthString(self.property_.ByteSizePartial())
+    if (self.has_center_):
+      n += 1
+      n += self.lengthString(self.center_.ByteSizePartial())
+    if (self.has_radius_meters_):
+      n += 9
+    return n
+
+  def Clear(self):
+    self.clear_property()
+    self.clear_center()
+    self.clear_radius_meters()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putVarInt32(self.property_.ByteSize())
+    self.property_.OutputUnchecked(out)
+    out.putVarInt32(18)
+    out.putVarInt32(self.center_.ByteSize())
+    self.center_.OutputUnchecked(out)
+    out.putVarInt32(25)
+    out.putDouble(self.radius_meters_)
+
+  def OutputPartial(self, out):
+    if (self.has_property_):
+      out.putVarInt32(10)
+      out.putVarInt32(self.property_.ByteSizePartial())
+      self.property_.OutputPartial(out)
+    if (self.has_center_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.center_.ByteSizePartial())
+      self.center_.OutputPartial(out)
+    if (self.has_radius_meters_):
+      out.putVarInt32(25)
+      out.putDouble(self.radius_meters_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_property().TryMerge(tmp)
+        continue
+      if tt == 18:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_center().TryMerge(tmp)
+        continue
+      if tt == 25:
+        self.set_radius_meters(d.getDouble())
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_property_:
+      res+=prefix+"property <\n"
+      res+=self.property_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_center_:
+      res+=prefix+"center <\n"
+      res+=self.center_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_radius_meters_: res+=prefix+("radius_meters: %s\n" % self.DebugFormat(self.radius_meters_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kproperty = 1
+  kcenter = 2
+  kradius_meters = 3
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "property",
+    2: "center",
+    3: "radius_meters",
+  }, 3)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.DOUBLE,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.BoundingCircleFilter'
+  _SERIALIZED_DESCRIPTOR = array.array('B')
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KLGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkJvdW5kaW5nQ2lyY2xlRmlsdGVyExoIcHJvcGVydHkgASgCMAs4AkopYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlSZWZlcmVuY2WjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoGY2VudGVyIAIoAjALOAJKIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lkdlb1BvaW50owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaDXJhZGl1c19tZXRlcnMgAygBMAE4AhTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
+  if _net_proto___parse__python is not None:
+    _net_proto___parse__python.RegisterType(
+        _SERIALIZED_DESCRIPTOR.tostring())
+
+class BoundingBoxFilter(ProtocolBuffer.ProtocolMessage):
+  has_property_ = 0
+  has_southwest_ = 0
+  has_northeast_ = 0
+
+  def __init__(self, contents=None):
+    self.property_ = PropertyReference()
+    self.southwest_ = google.appengine.datastore.entity_v4_pb.GeoPoint()
+    self.northeast_ = google.appengine.datastore.entity_v4_pb.GeoPoint()
+    if contents is not None: self.MergeFromString(contents)
+
+  def property(self): return self.property_
+
+  def mutable_property(self): self.has_property_ = 1; return self.property_
+
+  def clear_property(self):self.has_property_ = 0; self.property_.Clear()
+
+  def has_property(self): return self.has_property_
+
+  def southwest(self): return self.southwest_
+
+  def mutable_southwest(self): self.has_southwest_ = 1; return self.southwest_
+
+  def clear_southwest(self):self.has_southwest_ = 0; self.southwest_.Clear()
+
+  def has_southwest(self): return self.has_southwest_
+
+  def northeast(self): return self.northeast_
+
+  def mutable_northeast(self): self.has_northeast_ = 1; return self.northeast_
+
+  def clear_northeast(self):self.has_northeast_ = 0; self.northeast_.Clear()
+
+  def has_northeast(self): return self.has_northeast_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_property()): self.mutable_property().MergeFrom(x.property())
+    if (x.has_southwest()): self.mutable_southwest().MergeFrom(x.southwest())
+    if (x.has_northeast()): self.mutable_northeast().MergeFrom(x.northeast())
+
+  if _net_proto___parse__python is not None:
+    def _CMergeFromString(self, s):
+      _net_proto___parse__python.MergeFromString(self, 'apphosting.datastore.v4.BoundingBoxFilter', s)
+
+  if _net_proto___parse__python is not None:
+    def _CEncode(self):
+      return _net_proto___parse__python.Encode(self, 'apphosting.datastore.v4.BoundingBoxFilter')
+
+  if _net_proto___parse__python is not None:
+    def _CEncodePartial(self):
+      return _net_proto___parse__python.EncodePartial(self, 'apphosting.datastore.v4.BoundingBoxFilter')
+
+  if _net_proto___parse__python is not None:
+    def _CToASCII(self, output_format):
+      return _net_proto___parse__python.ToASCII(self, 'apphosting.datastore.v4.BoundingBoxFilter', output_format)
+
+
+  if _net_proto___parse__python is not None:
+    def ParseASCII(self, s):
+      _net_proto___parse__python.ParseASCII(self, 'apphosting.datastore.v4.BoundingBoxFilter', s)
+
+
+  if _net_proto___parse__python is not None:
+    def ParseASCIIIgnoreUnknown(self, s):
+      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.datastore.v4.BoundingBoxFilter', s)
+
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_property_ != x.has_property_: return 0
+    if self.has_property_ and self.property_ != x.property_: return 0
+    if self.has_southwest_ != x.has_southwest_: return 0
+    if self.has_southwest_ and self.southwest_ != x.southwest_: return 0
+    if self.has_northeast_ != x.has_northeast_: return 0
+    if self.has_northeast_ and self.northeast_ != x.northeast_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_property_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: property not set.')
+    elif not self.property_.IsInitialized(debug_strs): initialized = 0
+    if (not self.has_southwest_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: southwest not set.')
+    elif not self.southwest_.IsInitialized(debug_strs): initialized = 0
+    if (not self.has_northeast_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: northeast not set.')
+    elif not self.northeast_.IsInitialized(debug_strs): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(self.property_.ByteSize())
+    n += self.lengthString(self.southwest_.ByteSize())
+    n += self.lengthString(self.northeast_.ByteSize())
+    return n + 3
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_property_):
+      n += 1
+      n += self.lengthString(self.property_.ByteSizePartial())
+    if (self.has_southwest_):
+      n += 1
+      n += self.lengthString(self.southwest_.ByteSizePartial())
+    if (self.has_northeast_):
+      n += 1
+      n += self.lengthString(self.northeast_.ByteSizePartial())
+    return n
+
+  def Clear(self):
+    self.clear_property()
+    self.clear_southwest()
+    self.clear_northeast()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putVarInt32(self.property_.ByteSize())
+    self.property_.OutputUnchecked(out)
+    out.putVarInt32(18)
+    out.putVarInt32(self.southwest_.ByteSize())
+    self.southwest_.OutputUnchecked(out)
+    out.putVarInt32(26)
+    out.putVarInt32(self.northeast_.ByteSize())
+    self.northeast_.OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    if (self.has_property_):
+      out.putVarInt32(10)
+      out.putVarInt32(self.property_.ByteSizePartial())
+      self.property_.OutputPartial(out)
+    if (self.has_southwest_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.southwest_.ByteSizePartial())
+      self.southwest_.OutputPartial(out)
+    if (self.has_northeast_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.northeast_.ByteSizePartial())
+      self.northeast_.OutputPartial(out)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_property().TryMerge(tmp)
+        continue
+      if tt == 18:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_southwest().TryMerge(tmp)
+        continue
+      if tt == 26:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_northeast().TryMerge(tmp)
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_property_:
+      res+=prefix+"property <\n"
+      res+=self.property_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_southwest_:
+      res+=prefix+"southwest <\n"
+      res+=self.southwest_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_northeast_:
+      res+=prefix+"northeast <\n"
+      res+=self.northeast_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kproperty = 1
+  ksouthwest = 2
+  knortheast = 3
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "property",
+    2: "southwest",
+    3: "northeast",
+  }, 3)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.BoundingBoxFilter'
+  _SERIALIZED_DESCRIPTOR = array.array('B')
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KKWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkJvdW5kaW5nQm94RmlsdGVyExoIcHJvcGVydHkgASgCMAs4AkopYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlSZWZlcmVuY2WjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoJc291dGh3ZXN0IAIoAjALOAJKIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lkdlb1BvaW50owGqAQVjdHlwZbIBBnByb3RvMqQBFBMaCW5vcnRoZWFzdCADKAIwCzgCSiBhcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HZW9Qb2ludKMBqgEFY3R5cGWyAQZwcm90bzKkARTCAR1hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FcnJvcg=="))
+  if _net_proto___parse__python is not None:
+    _net_proto___parse__python.RegisterType(
+        _SERIALIZED_DESCRIPTOR.tostring())
+
 class GqlQuery(ProtocolBuffer.ProtocolMessage):
   has_query_string_ = 0
   query_string_ = ""
@@ -7676,4 +8214,4 @@
 if _extension_runtime:
   pass
 
-__all__ = ['Error','EntityResult','Query','KindExpression','PropertyReference','PropertyExpression','PropertyOrder','Filter','CompositeFilter','PropertyFilter','GqlQuery','GqlQueryArg','QueryResultBatch','Mutation','MutationResult','DeprecatedMutation','DeprecatedMutationResult','ReadOptions','LookupRequest','LookupResponse','RunQueryRequest','RunQueryResponse','ContinueQueryRequest','ContinueQueryResponse','BeginTransactionRequest','BeginTransactionResponse','RollbackRequest','RollbackResponse','CommitRequest','CommitResponse','AllocateIdsRequest','AllocateIdsResponse','WriteRequest','DatastoreV4Service']
+__all__ = ['Error','EntityResult','Query','KindExpression','PropertyReference','PropertyExpression','PropertyOrder','Filter','CompositeFilter','PropertyFilter','BoundingCircleFilter','BoundingBoxFilter','GqlQuery','GqlQueryArg','QueryResultBatch','Mutation','MutationResult','DeprecatedMutation','DeprecatedMutationResult','ReadOptions','LookupRequest','LookupResponse','RunQueryRequest','RunQueryResponse','ContinueQueryRequest','ContinueQueryResponse','BeginTransactionRequest','BeginTransactionResponse','RollbackRequest','RollbackResponse','CommitRequest','CommitResponse','AllocateIdsRequest','AllocateIdsResponse','WriteRequest','DatastoreV4Service']
diff --git a/google/appengine/datastore/datastore_v4_stub.py b/google/appengine/datastore/datastore_v4_stub.py
index 2d27f02..7297d29 100644
--- a/google/appengine/datastore/datastore_v4_stub.py
+++ b/google/appengine/datastore/datastore_v4_stub.py
@@ -38,6 +38,7 @@
 from google.appengine.api import apiproxy_stub_map
 from google.appengine.datastore import datastore_pb
 from google.appengine.datastore import datastore_pbs
+from google.appengine.datastore import datastore_query
 from google.appengine.datastore import datastore_stub_util
 from google.appengine.datastore import datastore_v4_pb
 from google.appengine.datastore import datastore_v4_validator
@@ -121,13 +122,118 @@
       raise apiproxy_errors.ApplicationError(
           datastore_v4_pb.Error.BAD_REQUEST, str(e))
 
+  def _GetQueryCompositeFilter(self, filters, operator):
+    """Wraps the filters in a datastore_query.CompositeFilter if length > 1."""
+
+    if not filters:
+      return None
+    elif len(filters) == 1:
+      return filters[0]
+    else:
+      return datastore_query.CompositeFilter(operator, filters)
+
+  def _GetV4PbCompositeFilter(self, filter_pbs, operator_pb):
+    """Wraps the filters in a datastore_v4_pb.CompositeFilter if length > 1."""
+    if not filter_pbs:
+      return None
+    elif len(filter_pbs) == 1:
+      return filter_pbs[0]
+    else:
+      res_filter_pb = datastore_v4_pb.Filter()
+      composite_filter_pb = res_filter_pb.mutable_composite_filter()
+      composite_filter_pb.set_operator(operator_pb)
+      composite_filter_pb.filter_list().extend(filter_pbs)
+      return res_filter_pb
+
+  def _GetFilterPbList(self, filter_pb):
+    if filter_pb.has_composite_filter():
+      composite_filter = filter_pb.composite_filter()
+      assert composite_filter.operator() == datastore_v4_pb.CompositeFilter.AND
+
+      return composite_filter.filter_list()
+    else:
+      return [filter_pb]
+
+  def _ConvertGeospatialFilterOrNone(self, filter_pb):
+    """Converts geo-spatial filters to filter predicates."""
+
+    if filter_pb.has_bounding_circle_filter():
+      return (datastore_query._BoundingCircleFilter._from_v4_pb(
+          filter_pb.bounding_circle_filter()))
+    else:
+      return None
+
+  def _SplitGeospatialFilters(self, req):
+    """Extracts, converts and removes geo-filters from a request.
+
+    Args:
+      req: a datastore_v4_pb.RunQueryRequest
+
+    Returns:
+      a pair (new_req, filter_predicate) where new_req is req with unsupported
+      filters removed and filter_predicate is a datastore_query.FilterPredicate
+      with the unsupported filters. filter_predicate is None if no unsupported
+      filters were removed.
+    """
+
+    assert datastore_v4_pb.CompositeFilter._Operator_NAMES.values() == ['AND']
+
+
+
+    filter_predicate = None
+    new_req = datastore_v4_pb.RunQueryRequest()
+    new_req.CopyFrom(req)
+
+    query = new_req.mutable_query()
+
+
+    sub_filter_pbs = []
+    sub_filter_predicates = []
+
+    for filter_pb in self._GetFilterPbList(req.query().filter()):
+      sub_filter_predicate = self._ConvertGeospatialFilterOrNone(filter_pb)
+
+      if sub_filter_predicate is None:
+        sub_filter_pbs.append(filter_pb)
+      else:
+        sub_filter_predicates.append(sub_filter_predicate)
+
+    op_pb = datastore_v4_pb.CompositeFilter.AND
+    op = datastore_query.CompositeFilter.AND
+
+    filter_pb = self._GetV4PbCompositeFilter(sub_filter_pbs, op_pb)
+    filter_predicate = self._GetQueryCompositeFilter(sub_filter_predicates, op)
+
+
+
+    if filter_pb is None:
+      query.clear_filter()
+    else:
+      query.mutable_filter().CopyFrom(filter_pb)
+
+    return (new_req, filter_predicate)
+
   def _Dynamic_RunQuery(self, req, resp):
     try:
       self.__normalize_v4_run_query_request(req)
       self.__service_validator.validate_run_query_req(req)
-      v3_req = self.__service_converter.v4_run_query_req_to_v3_query(req)
-      v3_resp = datastore_pb.QueryResult()
-      self.__make_v3_call('RunQuery', v3_req, v3_resp)
+
+      v3_stub = apiproxy_stub_map.apiproxy.GetStub(V3_SERVICE_NAME)
+
+      new_req, filter_predicate = self._SplitGeospatialFilters(req)
+
+
+
+
+      if (issubclass(v3_stub.__class__, datastore_stub_util.BaseDatastore)
+          and filter_predicate is not None):
+        v3_req = self.__service_converter.v4_run_query_req_to_v3_query(new_req)
+        v3_resp = datastore_pb.QueryResult()
+        v3_stub._Dynamic_RunQuery(v3_req, v3_resp, filter_predicate)
+      else:
+        v3_req = self.__service_converter.v4_run_query_req_to_v3_query(req)
+        v3_resp = datastore_pb.QueryResult()
+        self.__make_v3_call('RunQuery', v3_req, v3_resp)
     except datastore_pbs.InvalidConversionError, e:
       raise apiproxy_errors.ApplicationError(
           datastore_v4_pb.Error.BAD_REQUEST, str(e))
diff --git a/google/appengine/datastore/datastore_v4_validator.py b/google/appengine/datastore/datastore_v4_validator.py
index 94069b5..403efea 100644
--- a/google/appengine/datastore/datastore_v4_validator.py
+++ b/google/appengine/datastore/datastore_v4_validator.py
@@ -882,7 +882,9 @@
       ValidationError: if the filter is invalid
     """
     _assert_condition((filt.has_composite_filter()
-                       + filt.has_property_filter() == 1),
+                       + filt.has_property_filter()
+                       + filt.has_bounding_circle_filter()
+                       + filt.has_bounding_box_filter() == 1),
                       'A filter must have exactly one of its fields set.')
     if filt.has_composite_filter():
       comp_filter = filt.composite_filter()
diff --git a/google/appengine/datastore/entity_pb.py b/google/appengine/datastore/entity_pb.py
index b42c5ee..2e3d908 100644
--- a/google/appengine/datastore/entity_pb.py
+++ b/google/appengine/datastore/entity_pb.py
@@ -2836,10 +2836,12 @@
 class Index_Property(ProtocolBuffer.ProtocolMessage):
 
 
+  DIRECTION_UNSPECIFIED =    0
   ASCENDING    =    1
   DESCENDING   =    2
 
   _Direction_NAMES = {
+    0: "DIRECTION_UNSPECIFIED",
     1: "ASCENDING",
     2: "DESCENDING",
   }
@@ -2847,10 +2849,25 @@
   def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
   Direction_Name = classmethod(Direction_Name)
 
+
+
+  MODE_UNSPECIFIED =    0
+  GEOSPATIAL   =    3
+
+  _Mode_NAMES = {
+    0: "MODE_UNSPECIFIED",
+    3: "GEOSPATIAL",
+  }
+
+  def Mode_Name(cls, x): return cls._Mode_NAMES.get(x, "")
+  Mode_Name = classmethod(Mode_Name)
+
   has_name_ = 0
   name_ = ""
   has_direction_ = 0
   direction_ = 1
+  has_mode_ = 0
+  mode_ = 0
 
   def __init__(self, contents=None):
     if contents is not None: self.MergeFromString(contents)
@@ -2881,11 +2898,25 @@
 
   def has_direction(self): return self.has_direction_
 
+  def mode(self): return self.mode_
+
+  def set_mode(self, x):
+    self.has_mode_ = 1
+    self.mode_ = x
+
+  def clear_mode(self):
+    if self.has_mode_:
+      self.has_mode_ = 0
+      self.mode_ = 0
+
+  def has_mode(self): return self.has_mode_
+
 
   def MergeFrom(self, x):
     assert x is not self
     if (x.has_name()): self.set_name(x.name())
     if (x.has_direction()): self.set_direction(x.direction())
+    if (x.has_mode()): self.set_mode(x.mode())
 
   def Equals(self, x):
     if x is self: return 1
@@ -2893,6 +2924,8 @@
     if self.has_name_ and self.name_ != x.name_: return 0
     if self.has_direction_ != x.has_direction_: return 0
     if self.has_direction_ and self.direction_ != x.direction_: return 0
+    if self.has_mode_ != x.has_mode_: return 0
+    if self.has_mode_ and self.mode_ != x.mode_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -2907,6 +2940,7 @@
     n = 0
     n += self.lengthString(len(self.name_))
     if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
+    if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
     return n + 1
 
   def ByteSizePartial(self):
@@ -2915,11 +2949,13 @@
       n += 1
       n += self.lengthString(len(self.name_))
     if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
+    if (self.has_mode_): n += 1 + self.lengthVarInt64(self.mode_)
     return n
 
   def Clear(self):
     self.clear_name()
     self.clear_direction()
+    self.clear_mode()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(26)
@@ -2927,6 +2963,9 @@
     if (self.has_direction_):
       out.putVarInt32(32)
       out.putVarInt32(self.direction_)
+    if (self.has_mode_):
+      out.putVarInt32(48)
+      out.putVarInt32(self.mode_)
 
   def OutputPartial(self, out):
     if (self.has_name_):
@@ -2935,6 +2974,9 @@
     if (self.has_direction_):
       out.putVarInt32(32)
       out.putVarInt32(self.direction_)
+    if (self.has_mode_):
+      out.putVarInt32(48)
+      out.putVarInt32(self.mode_)
 
   def TryMerge(self, d):
     while 1:
@@ -2946,6 +2988,9 @@
       if tt == 32:
         self.set_direction(d.getVarInt32())
         continue
+      if tt == 48:
+        self.set_mode(d.getVarInt32())
+        continue
 
 
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -2956,6 +3001,7 @@
     res=""
     if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
     if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
+    if self.has_mode_: res+=prefix+("mode: %s\n" % self.DebugFormatInt32(self.mode_))
     return res
 
 class Index(ProtocolBuffer.ProtocolMessage):
@@ -3128,6 +3174,7 @@
   kPropertyGroup = 2
   kPropertyname = 3
   kPropertydirection = 4
+  kPropertymode = 6
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
@@ -3136,7 +3183,8 @@
     3: "name",
     4: "direction",
     5: "ancestor",
-  }, 5)
+    6: "mode",
+  }, 6)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
@@ -3145,7 +3193,8 @@
     3: ProtocolBuffer.Encoder.STRING,
     4: ProtocolBuffer.Encoder.NUMERIC,
     5: ProtocolBuffer.Encoder.NUMERIC,
-  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+    6: ProtocolBuffer.Encoder.NUMERIC,
+  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
@@ -3178,9 +3227,12 @@
   state_ = 0
   has_only_use_if_required_ = 0
   only_use_if_required_ = 0
+  has_write_division_family_ = 0
+  write_division_family_ = ""
 
   def __init__(self, contents=None):
     self.definition_ = Index()
+    self.read_division_family_ = []
     if contents is not None: self.MergeFromString(contents)
 
   def app_id(self): return self.app_id_
@@ -3243,6 +3295,34 @@
 
   def has_only_use_if_required(self): return self.has_only_use_if_required_
 
+  def read_division_family_size(self): return len(self.read_division_family_)
+  def read_division_family_list(self): return self.read_division_family_
+
+  def read_division_family(self, i):
+    return self.read_division_family_[i]
+
+  def set_read_division_family(self, i, x):
+    self.read_division_family_[i] = x
+
+  def add_read_division_family(self, x):
+    self.read_division_family_.append(x)
+
+  def clear_read_division_family(self):
+    self.read_division_family_ = []
+
+  def write_division_family(self): return self.write_division_family_
+
+  def set_write_division_family(self, x):
+    self.has_write_division_family_ = 1
+    self.write_division_family_ = x
+
+  def clear_write_division_family(self):
+    if self.has_write_division_family_:
+      self.has_write_division_family_ = 0
+      self.write_division_family_ = ""
+
+  def has_write_division_family(self): return self.has_write_division_family_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -3251,6 +3331,8 @@
     if (x.has_definition()): self.mutable_definition().MergeFrom(x.definition())
     if (x.has_state()): self.set_state(x.state())
     if (x.has_only_use_if_required()): self.set_only_use_if_required(x.only_use_if_required())
+    for i in xrange(x.read_division_family_size()): self.add_read_division_family(x.read_division_family(i))
+    if (x.has_write_division_family()): self.set_write_division_family(x.write_division_family())
 
   def Equals(self, x):
     if x is self: return 1
@@ -3264,6 +3346,11 @@
     if self.has_state_ and self.state_ != x.state_: return 0
     if self.has_only_use_if_required_ != x.has_only_use_if_required_: return 0
     if self.has_only_use_if_required_ and self.only_use_if_required_ != x.only_use_if_required_: return 0
+    if len(self.read_division_family_) != len(x.read_division_family_): return 0
+    for e1, e2 in zip(self.read_division_family_, x.read_division_family_):
+      if e1 != e2: return 0
+    if self.has_write_division_family_ != x.has_write_division_family_: return 0
+    if self.has_write_division_family_ and self.write_division_family_ != x.write_division_family_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -3294,6 +3381,9 @@
     n += self.lengthString(self.definition_.ByteSize())
     n += self.lengthVarInt64(self.state_)
     if (self.has_only_use_if_required_): n += 2
+    n += 1 * len(self.read_division_family_)
+    for i in xrange(len(self.read_division_family_)): n += self.lengthString(len(self.read_division_family_[i]))
+    if (self.has_write_division_family_): n += 1 + self.lengthString(len(self.write_division_family_))
     return n + 4
 
   def ByteSizePartial(self):
@@ -3311,6 +3401,9 @@
       n += 1
       n += self.lengthVarInt64(self.state_)
     if (self.has_only_use_if_required_): n += 2
+    n += 1 * len(self.read_division_family_)
+    for i in xrange(len(self.read_division_family_)): n += self.lengthString(len(self.read_division_family_[i]))
+    if (self.has_write_division_family_): n += 1 + self.lengthString(len(self.write_division_family_))
     return n
 
   def Clear(self):
@@ -3319,6 +3412,8 @@
     self.clear_definition()
     self.clear_state()
     self.clear_only_use_if_required()
+    self.clear_read_division_family()
+    self.clear_write_division_family()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(10)
@@ -3333,6 +3428,12 @@
     if (self.has_only_use_if_required_):
       out.putVarInt32(48)
       out.putBoolean(self.only_use_if_required_)
+    for i in xrange(len(self.read_division_family_)):
+      out.putVarInt32(58)
+      out.putPrefixedString(self.read_division_family_[i])
+    if (self.has_write_division_family_):
+      out.putVarInt32(66)
+      out.putPrefixedString(self.write_division_family_)
 
   def OutputPartial(self, out):
     if (self.has_app_id_):
@@ -3351,6 +3452,12 @@
     if (self.has_only_use_if_required_):
       out.putVarInt32(48)
       out.putBoolean(self.only_use_if_required_)
+    for i in xrange(len(self.read_division_family_)):
+      out.putVarInt32(58)
+      out.putPrefixedString(self.read_division_family_[i])
+    if (self.has_write_division_family_):
+      out.putVarInt32(66)
+      out.putPrefixedString(self.write_division_family_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -3373,6 +3480,12 @@
       if tt == 48:
         self.set_only_use_if_required(d.getBoolean())
         continue
+      if tt == 58:
+        self.add_read_division_family(d.getPrefixedString())
+        continue
+      if tt == 66:
+        self.set_write_division_family(d.getPrefixedString())
+        continue
 
 
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -3389,6 +3502,13 @@
       res+=prefix+">\n"
     if self.has_state_: res+=prefix+("state: %s\n" % self.DebugFormatInt32(self.state_))
     if self.has_only_use_if_required_: res+=prefix+("only_use_if_required: %s\n" % self.DebugFormatBool(self.only_use_if_required_))
+    cnt=0
+    for e in self.read_division_family_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("read_division_family%s: %s\n" % (elm, self.DebugFormatString(e)))
+      cnt+=1
+    if self.has_write_division_family_: res+=prefix+("write_division_family: %s\n" % self.DebugFormatString(self.write_division_family_))
     return res
 
 
@@ -3400,6 +3520,8 @@
   kdefinition = 3
   kstate = 4
   konly_use_if_required = 6
+  kread_division_family = 7
+  kwrite_division_family = 8
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
@@ -3408,7 +3530,9 @@
     3: "definition",
     4: "state",
     6: "only_use_if_required",
-  }, 6)
+    7: "read_division_family",
+    8: "write_division_family",
+  }, 8)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
@@ -3417,12 +3541,368 @@
     3: ProtocolBuffer.Encoder.STRING,
     4: ProtocolBuffer.Encoder.NUMERIC,
     6: ProtocolBuffer.Encoder.NUMERIC,
-  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
+    7: ProtocolBuffer.Encoder.STRING,
+    8: ProtocolBuffer.Encoder.STRING,
+  }, 8, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.CompositeIndex'
+class SearchIndexEntry(ProtocolBuffer.ProtocolMessage):
+  # Generated protocol-buffer message for storage_onestore_v3.SearchIndexEntry
+  # (see _PROTO_DESCRIPTOR_NAME below).
+  # NOTE(review): machine-generated serialization code -- do not hand-edit the
+  # wire-format logic; regenerate from the .proto definition instead.
+  #
+  # Fields (proto tag numbers from the k* constants below):
+  #   index_id (1)          -- required varint; see IsInitialized.
+  #   division_family (2)   -- repeated string.
+  #   fingerprint_1999 (3)  -- optional fixed64.
+  #   fingerprint_2011 (4)  -- optional fixed64.
+
+  # Class-level presence flags and scalar defaults; an instance gets its own
+  # attribute only once a setter runs (standard pattern for this generator).
+  has_index_id_ = 0
+  index_id_ = 0
+  has_fingerprint_1999_ = 0
+  fingerprint_1999_ = 0
+  has_fingerprint_2011_ = 0
+  fingerprint_2011_ = 0
+
+  def __init__(self, contents=None):
+    # Repeated fields are per-instance lists; optionally parse a serialized
+    # message passed as `contents`.
+    self.division_family_ = []
+    if contents is not None: self.MergeFromString(contents)
+
+  # --- index_id (required varint, tag 1) accessors ---
+
+  def index_id(self): return self.index_id_
+
+  def set_index_id(self, x):
+    self.has_index_id_ = 1
+    self.index_id_ = x
+
+  def clear_index_id(self):
+    if self.has_index_id_:
+      self.has_index_id_ = 0
+      self.index_id_ = 0
+
+  def has_index_id(self): return self.has_index_id_
+
+  # --- division_family (repeated string, tag 2) accessors ---
+
+  def division_family_size(self): return len(self.division_family_)
+  def division_family_list(self): return self.division_family_
+
+  def division_family(self, i):
+    return self.division_family_[i]
+
+  def set_division_family(self, i, x):
+    self.division_family_[i] = x
+
+  def add_division_family(self, x):
+    self.division_family_.append(x)
+
+  def clear_division_family(self):
+    self.division_family_ = []
+
+  # --- fingerprint_1999 (optional fixed64, tag 3) accessors ---
+
+  def fingerprint_1999(self): return self.fingerprint_1999_
+
+  def set_fingerprint_1999(self, x):
+    self.has_fingerprint_1999_ = 1
+    self.fingerprint_1999_ = x
+
+  def clear_fingerprint_1999(self):
+    if self.has_fingerprint_1999_:
+      self.has_fingerprint_1999_ = 0
+      self.fingerprint_1999_ = 0
+
+  def has_fingerprint_1999(self): return self.has_fingerprint_1999_
+
+  # --- fingerprint_2011 (optional fixed64, tag 4) accessors ---
+
+  def fingerprint_2011(self): return self.fingerprint_2011_
+
+  def set_fingerprint_2011(self, x):
+    self.has_fingerprint_2011_ = 1
+    self.fingerprint_2011_ = x
+
+  def clear_fingerprint_2011(self):
+    if self.has_fingerprint_2011_:
+      self.has_fingerprint_2011_ = 0
+      self.fingerprint_2011_ = 0
+
+  def has_fingerprint_2011(self): return self.has_fingerprint_2011_
+
+
+  def MergeFrom(self, x):
+    # Copy every set field of `x` into self; repeated values are appended.
+    assert x is not self
+    if (x.has_index_id()): self.set_index_id(x.index_id())
+    for i in xrange(x.division_family_size()): self.add_division_family(x.division_family(i))
+    if (x.has_fingerprint_1999()): self.set_fingerprint_1999(x.fingerprint_1999())
+    if (x.has_fingerprint_2011()): self.set_fingerprint_2011(x.fingerprint_2011())
+
+  def Equals(self, x):
+    # Field-by-field equality: presence flags must match, then values.
+    if x is self: return 1
+    if self.has_index_id_ != x.has_index_id_: return 0
+    if self.has_index_id_ and self.index_id_ != x.index_id_: return 0
+    if len(self.division_family_) != len(x.division_family_): return 0
+    for e1, e2 in zip(self.division_family_, x.division_family_):
+      if e1 != e2: return 0
+    if self.has_fingerprint_1999_ != x.has_fingerprint_1999_: return 0
+    if self.has_fingerprint_1999_ and self.fingerprint_1999_ != x.fingerprint_1999_: return 0
+    if self.has_fingerprint_2011_ != x.has_fingerprint_2011_: return 0
+    if self.has_fingerprint_2011_ and self.fingerprint_2011_ != x.fingerprint_2011_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    # Only index_id is required; appends a message per missing required field
+    # when `debug_strs` is provided.
+    initialized = 1
+    if (not self.has_index_id_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: index_id not set.')
+    return initialized
+
+  def ByteSize(self):
+    # Serialized size assuming all required fields are set.
+    n = 0
+    n += self.lengthVarInt64(self.index_id_)
+    # One tag byte plus a length-prefixed payload per repeated string.
+    n += 1 * len(self.division_family_)
+    for i in xrange(len(self.division_family_)): n += self.lengthString(len(self.division_family_[i]))
+    # fixed64 fields: 1 tag byte + 8 payload bytes.
+    if (self.has_fingerprint_1999_): n += 9
+    if (self.has_fingerprint_2011_): n += 9
+    # +1 for the required index_id tag byte.
+    return n + 1
+
+  def ByteSizePartial(self):
+    # Like ByteSize, but tolerates missing required fields (counts only what
+    # is actually set).
+    n = 0
+    if (self.has_index_id_):
+      n += 1
+      n += self.lengthVarInt64(self.index_id_)
+    n += 1 * len(self.division_family_)
+    for i in xrange(len(self.division_family_)): n += self.lengthString(len(self.division_family_[i]))
+    if (self.has_fingerprint_1999_): n += 9
+    if (self.has_fingerprint_2011_): n += 9
+    return n
+
+  def Clear(self):
+    self.clear_index_id()
+    self.clear_division_family()
+    self.clear_fingerprint_1999()
+    self.clear_fingerprint_2011()
+
+  def OutputUnchecked(self, out):
+    # Serialize assuming IsInitialized() holds. Tag bytes encode
+    # (field_number << 3) | wire_type: 8 = field 1 varint, 18 = field 2
+    # length-delimited, 25/33 = fields 3/4 fixed64.
+    out.putVarInt32(8)
+    out.putVarInt64(self.index_id_)
+    for i in xrange(len(self.division_family_)):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.division_family_[i])
+    if (self.has_fingerprint_1999_):
+      out.putVarInt32(25)
+      out.put64(self.fingerprint_1999_)
+    if (self.has_fingerprint_2011_):
+      out.putVarInt32(33)
+      out.put64(self.fingerprint_2011_)
+
+  def OutputPartial(self, out):
+    # Like OutputUnchecked, but skips unset required fields.
+    if (self.has_index_id_):
+      out.putVarInt32(8)
+      out.putVarInt64(self.index_id_)
+    for i in xrange(len(self.division_family_)):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.division_family_[i])
+    if (self.has_fingerprint_1999_):
+      out.putVarInt32(25)
+      out.put64(self.fingerprint_1999_)
+    if (self.has_fingerprint_2011_):
+      out.putVarInt32(33)
+      out.put64(self.fingerprint_2011_)
+
+  def TryMerge(self, d):
+    # Decode fields from Decoder `d` until exhausted; unknown tags are
+    # skipped, tag 0 is malformed input.
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 8:
+        self.set_index_id(d.getVarInt64())
+        continue
+      if tt == 18:
+        self.add_division_family(d.getPrefixedString())
+        continue
+      if tt == 25:
+        self.set_fingerprint_1999(d.get64())
+        continue
+      if tt == 33:
+        self.set_fingerprint_2011(d.get64())
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    # Text-format debug dump; printElemNumber adds (i) indices to repeated
+    # field entries.
+    res=""
+    if self.has_index_id_: res+=prefix+("index_id: %s\n" % self.DebugFormatInt64(self.index_id_))
+    cnt=0
+    for e in self.division_family_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("division_family%s: %s\n" % (elm, self.DebugFormatString(e)))
+      cnt+=1
+    if self.has_fingerprint_1999_: res+=prefix+("fingerprint_1999: %s\n" % self.DebugFormatFixed64(self.fingerprint_1999_))
+    if self.has_fingerprint_2011_: res+=prefix+("fingerprint_2011: %s\n" % self.DebugFormatFixed64(self.fingerprint_2011_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    # Dense tuple indexed by tag number, filled with `default` for gaps.
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  # Proto field numbers.
+  kindex_id = 1
+  kdivision_family = 2
+  kfingerprint_1999 = 3
+  kfingerprint_2011 = 4
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "index_id",
+    2: "division_family",
+    3: "fingerprint_1999",
+    4: "fingerprint_2011",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.DOUBLE,
+    4: ProtocolBuffer.Encoder.DOUBLE,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.SearchIndexEntry'
+class SearchIndexExternalId(ProtocolBuffer.ProtocolMessage):
+  # Generated protocol-buffer message for
+  # storage_onestore_v3.SearchIndexExternalId (see _PROTO_DESCRIPTOR_NAME).
+  # NOTE(review): machine-generated serialization code -- do not hand-edit the
+  # wire-format logic; regenerate from the .proto definition instead.
+  #
+  # Fields (proto tag numbers from the k* constants below):
+  #   index_id (1)     -- required varint.
+  #   primary_key (2)  -- required embedded Reference message.
+
+  # Class-level presence flags and scalar default; an instance gets its own
+  # attribute only once a setter runs.
+  has_index_id_ = 0
+  index_id_ = 0
+  has_primary_key_ = 0
+
+  def __init__(self, contents=None):
+    # The embedded message is eagerly allocated per instance; optionally
+    # parse a serialized message passed as `contents`.
+    self.primary_key_ = Reference()
+    if contents is not None: self.MergeFromString(contents)
+
+  # --- index_id (required varint, tag 1) accessors ---
+
+  def index_id(self): return self.index_id_
+
+  def set_index_id(self, x):
+    self.has_index_id_ = 1
+    self.index_id_ = x
+
+  def clear_index_id(self):
+    if self.has_index_id_:
+      self.has_index_id_ = 0
+      self.index_id_ = 0
+
+  def has_index_id(self): return self.has_index_id_
+
+  # --- primary_key (required message, tag 2) accessors ---
+  # mutable_primary_key marks the field present and hands back the embedded
+  # Reference for in-place modification.
+
+  def primary_key(self): return self.primary_key_
+
+  def mutable_primary_key(self): self.has_primary_key_ = 1; return self.primary_key_
+
+  def clear_primary_key(self):self.has_primary_key_ = 0; self.primary_key_.Clear()
+
+  def has_primary_key(self): return self.has_primary_key_
+
+
+  def MergeFrom(self, x):
+    # Copy every set field of `x` into self; the embedded message is merged
+    # recursively.
+    assert x is not self
+    if (x.has_index_id()): self.set_index_id(x.index_id())
+    if (x.has_primary_key()): self.mutable_primary_key().MergeFrom(x.primary_key())
+
+  def Equals(self, x):
+    # Field-by-field equality: presence flags must match, then values.
+    if x is self: return 1
+    if self.has_index_id_ != x.has_index_id_: return 0
+    if self.has_index_id_ and self.index_id_ != x.index_id_: return 0
+    if self.has_primary_key_ != x.has_primary_key_: return 0
+    if self.has_primary_key_ and self.primary_key_ != x.primary_key_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    # Both fields are required; the embedded Reference must itself be
+    # initialized. Appends a message per problem when `debug_strs` is given.
+    initialized = 1
+    if (not self.has_index_id_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: index_id not set.')
+    if (not self.has_primary_key_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: primary_key not set.')
+    elif not self.primary_key_.IsInitialized(debug_strs): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    # Serialized size assuming all required fields are set; +2 covers the
+    # two tag bytes for fields 1 and 2.
+    n = 0
+    n += self.lengthVarInt64(self.index_id_)
+    n += self.lengthString(self.primary_key_.ByteSize())
+    return n + 2
+
+  def ByteSizePartial(self):
+    # Like ByteSize, but tolerates missing required fields (counts only what
+    # is actually set).
+    n = 0
+    if (self.has_index_id_):
+      n += 1
+      n += self.lengthVarInt64(self.index_id_)
+    if (self.has_primary_key_):
+      n += 1
+      n += self.lengthString(self.primary_key_.ByteSizePartial())
+    return n
+
+  def Clear(self):
+    self.clear_index_id()
+    self.clear_primary_key()
+
+  def OutputUnchecked(self, out):
+    # Serialize assuming IsInitialized() holds. Tag bytes encode
+    # (field_number << 3) | wire_type: 8 = field 1 varint, 18 = field 2
+    # length-delimited (length prefix, then the embedded message).
+    out.putVarInt32(8)
+    out.putVarInt64(self.index_id_)
+    out.putVarInt32(18)
+    out.putVarInt32(self.primary_key_.ByteSize())
+    self.primary_key_.OutputUnchecked(out)
+
+  def OutputPartial(self, out):
+    # Like OutputUnchecked, but skips unset required fields.
+    if (self.has_index_id_):
+      out.putVarInt32(8)
+      out.putVarInt64(self.index_id_)
+    if (self.has_primary_key_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.primary_key_.ByteSizePartial())
+      self.primary_key_.OutputPartial(out)
+
+  def TryMerge(self, d):
+    # Decode fields from Decoder `d` until exhausted; the embedded message is
+    # parsed from a bounded sub-decoder. Unknown tags are skipped, tag 0 is
+    # malformed input.
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 8:
+        self.set_index_id(d.getVarInt64())
+        continue
+      if tt == 18:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_primary_key().TryMerge(tmp)
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    # Text-format debug dump; the embedded message is indented two spaces.
+    res=""
+    if self.has_index_id_: res+=prefix+("index_id: %s\n" % self.DebugFormatInt64(self.index_id_))
+    if self.has_primary_key_:
+      res+=prefix+"primary_key <\n"
+      res+=self.primary_key_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    # Dense tuple indexed by tag number, filled with `default` for gaps.
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  # Proto field numbers.
+  kindex_id = 1
+  kprimary_key = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "index_id",
+    2: "primary_key",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.SearchIndexExternalId'
 class IndexPostfix_IndexValue(ProtocolBuffer.ProtocolMessage):
   has_property_name_ = 0
   property_name_ = ""
@@ -3930,4 +4410,4 @@
 if _extension_runtime:
   pass
 
-__all__ = ['PropertyValue','PropertyValue_ReferenceValuePathElement','PropertyValue_PointValue','PropertyValue_UserValue','PropertyValue_ReferenceValue','Property','Path','Path_Element','Reference','User','EntityProto','CompositeProperty','Index','Index_Property','CompositeIndex','IndexPostfix_IndexValue','IndexPostfix','IndexPosition']
+__all__ = ['PropertyValue','PropertyValue_ReferenceValuePathElement','PropertyValue_PointValue','PropertyValue_UserValue','PropertyValue_ReferenceValue','Property','Path','Path_Element','Reference','User','EntityProto','CompositeProperty','Index','Index_Property','CompositeIndex','SearchIndexEntry','SearchIndexExternalId','IndexPostfix_IndexValue','IndexPostfix','IndexPosition']
diff --git a/google/appengine/ext/appstats/static/appstats_js.js b/google/appengine/ext/appstats/static/appstats_js.js
index 3562b63..6eba6a9 100644
--- a/google/appengine/ext/appstats/static/appstats_js.js
+++ b/google/appengine/ext/appstats/static/appstats_js.js
@@ -1,87 +1,88 @@
-/* Copyright 2008-10 Google Inc. All Rights Reserved. */ (function(){var f,l=this,aa=function(){},ba=function(a){a.ha=function(){return a.Fb?a.Fb:a.Fb=new a}},ca=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"==c||"undefined"!=
-typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a.call)return"object";return b},da=function(a){return"array"==ca(a)},ea=function(a){var b=ca(a);return"array"==b||"object"==b&&"number"==typeof a.length},m=function(a){return"string"==typeof a},n=function(a){return"function"==ca(a)},fa=function(a){var b=typeof a;return"object"==b&&null!=a||"function"==b},ka=function(a){return a[ga]||
-(a[ga]=++ja)},ga="closure_uid_"+(1E9*Math.random()>>>0),ja=0,la=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=c.slice();b.push.apply(b,arguments);return a.apply(this,b)}},ma=function(a,b){var c=a.split("."),d=l;c[0]in d||!d.execScript||d.execScript("var "+c[0]);for(var e;c.length&&(e=c.shift());)c.length||void 0===b?d=d[e]?d[e]:d[e]={}:d[e]=b},p=function(a,b){function c(){}c.prototype=b.prototype;a.e=b.prototype;a.prototype=new c;a.prototype.constructor=a;a.mc=
-function(a,c,g){return b.prototype[c].apply(a,Array.prototype.slice.call(arguments,2))}};var na=function(a){if(Error.captureStackTrace)Error.captureStackTrace(this,na);else{var b=Error().stack;b&&(this.stack=b)}a&&(this.message=String(a))};p(na,Error);na.prototype.name="CustomError";var oa;var pa=function(a,b){for(var c=a.split("%s"),d="",e=Array.prototype.slice.call(arguments,1);e.length&&1<c.length;)d+=c.shift()+e.shift();return d+c.join("%s")},qa=function(a){return a.replace(/^[\s\xa0]+|[\s\xa0]+$/g,"")},ya=function(a){if(!ra.test(a))return a;-1!=a.indexOf("&")&&(a=a.replace(sa,"&amp;"));-1!=a.indexOf("<")&&(a=a.replace(ta,"&lt;"));-1!=a.indexOf(">")&&(a=a.replace(ua,"&gt;"));-1!=a.indexOf('"')&&(a=a.replace(va,"&quot;"));-1!=a.indexOf("'")&&(a=a.replace(wa,"&#39;"));-1!=a.indexOf("\x00")&&
-(a=a.replace(xa,"&#0;"));return a},sa=/&/g,ta=/</g,ua=/>/g,va=/"/g,wa=/'/g,xa=/\x00/g,ra=/[\x00&<>"']/,za=function(a,b){return a<b?-1:a>b?1:0};var Aa=function(a,b){b.unshift(a);na.call(this,pa.apply(null,b));b.shift()};p(Aa,na);Aa.prototype.name="AssertionError";var Ba=function(a,b,c){var d="Assertion failed";if(b)var d=d+(": "+b),e=c;else a&&(d+=": "+a,e=null);throw new Aa(""+d,e||[]);},r=function(a,b,c){a||Ba("",b,Array.prototype.slice.call(arguments,2))},Ca=function(a,b,c,d){a instanceof b||Ba("instanceof check failed.",c,Array.prototype.slice.call(arguments,3))};var s=Array.prototype,Da=s.indexOf?function(a,b,c){r(null!=a.length);return s.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(m(a))return m(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},t=s.forEach?function(a,b,c){r(null!=a.length);s.forEach.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=m(a)?a.split(""):a,g=0;g<d;g++)g in e&&b.call(c,e[g],g,a)},Ea=s.filter?function(a,b,c){r(null!=a.length);return s.filter.call(a,b,
-c)}:function(a,b,c){for(var d=a.length,e=[],g=0,h=m(a)?a.split(""):a,k=0;k<d;k++)if(k in h){var q=h[k];b.call(c,q,k,a)&&(e[g++]=q)}return e},Fa=s.every?function(a,b,c){r(null!=a.length);return s.every.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=m(a)?a.split(""):a,g=0;g<d;g++)if(g in e&&!b.call(c,e[g],g,a))return!1;return!0},u=function(a,b){return 0<=Da(a,b)},Ga=function(a,b){var c=Da(a,b),d;if(d=0<=c)r(null!=a.length),s.splice.call(a,c,1);return d},Ha=function(a){var b=a.length;if(0<b){for(var c=
-Array(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},Ja=function(a,b,c,d){r(null!=a.length);s.splice.apply(a,Ia(arguments,1))},Ia=function(a,b,c){r(null!=a.length);return 2>=arguments.length?s.slice.call(a,b):s.slice.call(a,b,c)};var Ka=function(a){a=a.className;return m(a)&&a.match(/\S+/g)||[]},La=function(a,b){for(var c=Ka(a),d=Ia(arguments,1),e=c,g=0;g<d.length;g++)u(e,d[g])||e.push(d[g]);a.className=c.join(" ")},Na=function(a,b){var c=Ka(a),d=Ia(arguments,1),c=Ma(c,d);a.className=c.join(" ")},Ma=function(a,b){return Ea(a,function(a){return!u(b,a)})};var Oa=function(a,b){for(var c in a)b.call(void 0,a[c],c,a)},Pa=function(a,b){for(var c in a)if(a[c]==b)return!0;return!1},Qa=function(a,b,c){if(b in a)throw Error('The object already contains the key "'+b+'"');a[b]=c},Ra=function(a){var b={},c;for(c in a)b[a[c]]=c;return b},Sa="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Ta=function(a,b){for(var c,d,e=1;e<arguments.length;e++){d=arguments[e];for(c in d)a[c]=d[c];for(var g=0;g<Sa.length;g++)c=
-Sa[g],Object.prototype.hasOwnProperty.call(d,c)&&(a[c]=d[c])}};var v;t:{var Ua=l.navigator;if(Ua){var Va=Ua.userAgent;if(Va){v=Va;break t}}v=""}var w=function(a){return-1!=v.indexOf(a)};var Wa=w("Opera")||w("OPR"),x=w("Trident")||w("MSIE"),y=w("Gecko")&&-1==v.toLowerCase().indexOf("webkit")&&!(w("Trident")||w("MSIE")),z=-1!=v.toLowerCase().indexOf("webkit"),Xa=l.navigator||null,A=-1!=(Xa&&Xa.platform||"").indexOf("Mac"),Ya=function(){var a=l.document;return a?a.documentMode:void 0},Za=function(){var a="",b;if(Wa&&l.opera)return a=l.opera.version,n(a)?a():a;y?b=/rv\:([^\);]+)(\)|;)/:x?b=/\b(?:MSIE|rv)[: ]([^\);]+)(\)|;)/:z&&(b=/WebKit\/(\S+)/);b&&(a=(a=b.exec(v))?a[1]:"");return x&&
-(b=Ya(),b>parseFloat(a))?String(b):a}(),$a={},B=function(a){var b;if(!(b=$a[a])){b=0;for(var c=qa(String(Za)).split("."),d=qa(String(a)).split("."),e=Math.max(c.length,d.length),g=0;0==b&&g<e;g++){var h=c[g]||"",k=d[g]||"",q=RegExp("(\\d*)(\\D*)","g"),ha=RegExp("(\\d*)(\\D*)","g");do{var N=q.exec(h)||["","",""],ia=ha.exec(k)||["","",""];if(0==N[0].length&&0==ia[0].length)break;b=za(0==N[1].length?0:parseInt(N[1],10),0==ia[1].length?0:parseInt(ia[1],10))||za(0==N[2].length,0==ia[2].length)||za(N[2],
-ia[2])}while(0==b)}b=$a[a]=0<=b}return b},ab=l.document,bb=ab&&x?Ya()||("CSS1Compat"==ab.compatMode?parseInt(Za,10):5):void 0;var cb=!x||x&&9<=bb;!y&&!x||x&&x&&9<=bb||y&&B("1.9.1");var db=x&&!B("9");var gb=function(a){return a?new eb(fb(a)):oa||(oa=new eb)},hb=function(a,b){return m(b)?a.getElementById(b):b},ib=function(a,b,c){var d=document;c=c||d;a=a&&"*"!=a?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(a||b))return c.querySelectorAll(a+(b?"."+b:""));if(b&&c.getElementsByClassName){c=c.getElementsByClassName(b);if(a){for(var d={},e=0,g=0,h;h=c[g];g++)a==h.nodeName&&(d[e++]=h);d.length=e;return d}return c}c=c.getElementsByTagName(a||"*");if(b){d={};for(g=e=0;h=c[g];g++)a=h.className,
-"function"==typeof a.split&&u(a.split(/\s+/),b)&&(d[e++]=h);d.length=e;return d}return c},kb=function(a,b){Oa(b,function(b,d){"style"==d?a.style.cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in jb?a.setAttribute(jb[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},jb={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",type:"type",usemap:"useMap",
-valign:"vAlign",width:"width"},mb=function(a,b,c){return lb(document,arguments)},lb=function(a,b){var c=b[0],d=b[1];if(!cb&&d&&(d.name||d.type)){c=["<",c];d.name&&c.push(' name="',ya(d.name),'"');if(d.type){c.push(' type="',ya(d.type),'"');var e={};Ta(e,d);delete e.type;d=e}c.push(">");c=c.join("")}c=a.createElement(c);d&&(m(d)?c.className=d:da(d)?c.className=d.join(" "):kb(c,d));2<b.length&&nb(a,c,b);return c},nb=function(a,b,c){function d(c){c&&b.appendChild(m(c)?a.createTextNode(c):c)}for(var e=
-2;e<c.length;e++){var g=c[e];if(!ea(g)||fa(g)&&0<g.nodeType)d(g);else{var h;t:{if(g&&"number"==typeof g.length){if(fa(g)){h="function"==typeof g.item||"string"==typeof g.item;break t}if(n(g)){h="function"==typeof g.item;break t}}h=!1}t(h?Ha(g):g,d)}}},ob=function(a,b){if(a.contains&&1==b.nodeType)return a==b||a.contains(b);if("undefined"!=typeof a.compareDocumentPosition)return a==b||Boolean(a.compareDocumentPosition(b)&16);for(;b&&a!=b;)b=b.parentNode;return b==a},fb=function(a){r(a,"Node cannot be null or undefined.");
-return 9==a.nodeType?a:a.ownerDocument||a.document},pb=function(a,b){r(null!=a,"goog.dom.setTextContent expects a non-null value for node");if("textContent"in a)a.textContent=b;else if(3==a.nodeType)a.data=b;else if(a.firstChild&&3==a.firstChild.nodeType){for(;a.lastChild!=a.firstChild;)a.removeChild(a.lastChild);a.firstChild.data=b}else{for(var c;c=a.firstChild;)a.removeChild(c);c=fb(a);a.appendChild(c.createTextNode(String(b)))}},qb={SCRIPT:1,STYLE:1,HEAD:1,IFRAME:1,OBJECT:1},rb={IMG:" ",BR:"\n"},
-sb=function(a){a=a.getAttributeNode("tabindex");return null!=a&&a.specified},tb=function(a){a=a.tabIndex;return"number"==typeof a&&0<=a&&32768>a},ub=function(a,b,c){if(!(a.nodeName in qb))if(3==a.nodeType)c?b.push(String(a.nodeValue).replace(/(\r\n|\r|\n)/g,"")):b.push(a.nodeValue);else if(a.nodeName in rb)b.push(rb[a.nodeName]);else for(a=a.firstChild;a;)ub(a,b,c),a=a.nextSibling},eb=function(a){this.Q=a||l.document||document};f=eb.prototype;f.lb=gb;f.a=function(a){return hb(this.Q,a)};
-f.o=function(a,b,c){return lb(this.Q,arguments)};f.createElement=function(a){return this.Q.createElement(a)};f.createTextNode=function(a){return this.Q.createTextNode(String(a))};f.appendChild=function(a,b){a.appendChild(b)};f.contains=ob;
-f.I=function(a){var b;(b="A"==a.tagName||"INPUT"==a.tagName||"TEXTAREA"==a.tagName||"SELECT"==a.tagName||"BUTTON"==a.tagName?!a.disabled&&(!sb(a)||tb(a)):sb(a)&&tb(a))&&x?(a=n(a.getBoundingClientRect)?a.getBoundingClientRect():{height:a.offsetHeight,width:a.offsetWidth},a=null!=a&&0<a.height&&0<a.width):a=b;return a};var vb=function(a){vb[" "](a);return a};vb[" "]=aa;var wb=!x||x&&9<=bb,xb=!x||x&&9<=bb,yb=x&&!B("9");!z||B("528");y&&B("1.9b")||x&&B("8")||Wa&&B("9.5")||z&&B("528");y&&!B("8")||x&&B("9");var zb=function(){};zb.prototype.Ub=!1;var C=function(a,b){this.type=a;this.currentTarget=this.target=b;this.defaultPrevented=this.$=!1;this.wb=!0};C.prototype.stopPropagation=function(){this.$=!0};C.prototype.preventDefault=function(){this.defaultPrevented=!0;this.wb=!1};var D=function(a,b){C.call(this,a?a.type:"");this.relatedTarget=this.currentTarget=this.target=null;this.charCode=this.keyCode=this.button=this.screenY=this.screenX=this.clientY=this.clientX=this.offsetY=this.offsetX=0;this.metaKey=this.shiftKey=this.altKey=this.ctrlKey=!1;this.state=null;this.jb=!1;this.O=null;if(a){var c=this.type=a.type;this.target=a.target||a.srcElement;this.currentTarget=b;var d=a.relatedTarget;if(d){if(y){var e;t:{try{vb(d.nodeName);e=!0;break t}catch(g){}e=!1}e||(d=null)}}else"mouseover"==
-c?d=a.fromElement:"mouseout"==c&&(d=a.toElement);this.relatedTarget=d;this.offsetX=z||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=z||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:a.pageY;this.screenX=a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;this.keyCode=a.keyCode||0;this.charCode=a.charCode||("keypress"==c?a.keyCode:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=
-a.metaKey;this.jb=A?a.metaKey:a.ctrlKey;this.state=a.state;this.O=a;a.defaultPrevented&&this.preventDefault()}};p(D,C);var Ab=[1,4,2],Bb=function(a){return wb?0==a.O.button:"click"==a.type?!0:!!(a.O.button&Ab[0])};D.prototype.stopPropagation=function(){D.e.stopPropagation.call(this);this.O.stopPropagation?this.O.stopPropagation():this.O.cancelBubble=!0};
-D.prototype.preventDefault=function(){D.e.preventDefault.call(this);var a=this.O;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,yb)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var Cb="closure_listenable_"+(1E6*Math.random()|0),Db=0;var Eb=function(a,b,c,d,e){this.W=a;this.Ea=null;this.src=b;this.type=c;this.Fa=!!d;this.Ha=e;this.key=++Db;this.ga=this.Ga=!1},Fb=function(a){a.ga=!0;a.W=null;a.Ea=null;a.src=null;a.Ha=null};var E=function(a){this.src=a;this.m={};this.ua=0};E.prototype.add=function(a,b,c,d,e){var g=a.toString();a=this.m[g];a||(a=this.m[g]=[],this.ua++);var h=Gb(a,b,d,e);-1<h?(b=a[h],c||(b.Ga=!1)):(b=new Eb(b,this.src,g,!!d,e),b.Ga=c,a.push(b));return b};E.prototype.remove=function(a,b,c,d){a=a.toString();if(!(a in this.m))return!1;var e=this.m[a];b=Gb(e,b,c,d);return-1<b?(Fb(e[b]),r(null!=e.length),s.splice.call(e,b,1),0==e.length&&(delete this.m[a],this.ua--),!0):!1};
-var Hb=function(a,b){var c=b.type;if(!(c in a.m))return!1;var d=Ga(a.m[c],b);d&&(Fb(b),0==a.m[c].length&&(delete a.m[c],a.ua--));return d};E.prototype.ab=function(a){a=a&&a.toString();var b=0,c;for(c in this.m)if(!a||c==a){for(var d=this.m[c],e=0;e<d.length;e++)++b,Fb(d[e]);delete this.m[c];this.ua--}return b};E.prototype.wa=function(a,b,c,d){a=this.m[a.toString()];var e=-1;a&&(e=Gb(a,b,c,d));return-1<e?a[e]:null};
-var Gb=function(a,b,c,d){for(var e=0;e<a.length;++e){var g=a[e];if(!g.ga&&g.W==b&&g.Fa==!!c&&g.Ha==d)return e}return-1};var Ib="closure_lm_"+(1E6*Math.random()|0),Jb={},Kb=0,F=function(a,b,c,d,e){if(da(b)){for(var g=0;g<b.length;g++)F(a,b[g],c,d,e);return null}c=Lb(c);if(a&&a[Cb])a=a.c(b,c,d,e);else{if(!b)throw Error("Invalid event type");var g=!!d,h=Mb(a);h||(a[Ib]=h=new E(a));c=h.add(b,c,!1,d,e);c.Ea||(d=Nb(),c.Ea=d,d.src=a,d.W=c,a.addEventListener?a.addEventListener(b.toString(),d,g):a.attachEvent(Ob(b.toString()),d),Kb++);a=c}return a},Nb=function(){var a=Pb,b=xb?function(c){return a.call(b.src,b.W,c)}:function(c){c=
-a.call(b.src,b.W,c);if(!c)return c};return b},Qb=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)Qb(a,b[g],c,d,e);else c=Lb(c),a&&a[Cb]?a.u(b,c,d,e):a&&(a=Mb(a))&&(b=a.wa(b,c,!!d,e))&&G(b)},G=function(a){if("number"==typeof a||!a||a.ga)return!1;var b=a.src;if(b&&b[Cb])return Hb(b.Y,a);var c=a.type,d=a.Ea;b.removeEventListener?b.removeEventListener(c,d,a.Fa):b.detachEvent&&b.detachEvent(Ob(c),d);Kb--;(c=Mb(b))?(Hb(c,a),0==c.ua&&(c.src=null,b[Ib]=null)):Fb(a);return!0},Ob=function(a){return a in
-Jb?Jb[a]:Jb[a]="on"+a},Sb=function(a,b,c,d){var e=1;if(a=Mb(a))if(b=a.m[b.toString()])for(b=b.concat(),a=0;a<b.length;a++){var g=b[a];g&&g.Fa==c&&!g.ga&&(e&=!1!==Rb(g,d))}return Boolean(e)},Rb=function(a,b){var c=a.W,d=a.Ha||a.src;a.Ga&&G(a);return c.call(d,b)},Pb=function(a,b){if(a.ga)return!0;if(!xb){var c;if(!(c=b))t:{c=["window","event"];for(var d=l,e;e=c.shift();)if(null!=d[e])d=d[e];else{c=null;break t}c=d}e=c;c=new D(e,this);d=!0;if(!(0>e.keyCode||void 0!=e.returnValue)){t:{var g=!1;if(0==
-e.keyCode)try{e.keyCode=-1;break t}catch(h){g=!0}if(g||void 0==e.returnValue)e.returnValue=!0}e=[];for(g=c.currentTarget;g;g=g.parentNode)e.push(g);for(var g=a.type,k=e.length-1;!c.$&&0<=k;k--)c.currentTarget=e[k],d&=Sb(e[k],g,!0,c);for(k=0;!c.$&&k<e.length;k++)c.currentTarget=e[k],d&=Sb(e[k],g,!1,c)}return d}return Rb(a,new D(b,this))},Mb=function(a){a=a[Ib];return a instanceof E?a:null},Tb="__closure_events_fn_"+(1E9*Math.random()>>>0),Lb=function(a){r(a,"Listener can not be null.");if(n(a))return a;
-r(a.handleEvent,"An object listener must have handleEvent method.");a[Tb]||(a[Tb]=function(b){return a.handleEvent(b)});return a[Tb]};var H=function(a){this.Eb=a;this.Na={}};p(H,zb);var Ub=[];H.prototype.c=function(a,b,c,d){da(b)||(b&&(Ub[0]=b.toString()),b=Ub);for(var e=0;e<b.length;e++){var g=F(a,b[e],c||this.handleEvent,d||!1,this.Eb||this);if(!g)break;this.Na[g.key]=g}return this};H.prototype.u=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)this.u(a,b[g],c,d,e);else c=c||this.handleEvent,e=e||this.Eb||this,c=Lb(c),d=!!d,b=a&&a[Cb]?a.wa(b,c,d,e):a?(a=Mb(a))?a.wa(b,c,d,e):null:null,b&&(G(b),delete this.Na[b.key]);return this};
-H.prototype.ab=function(){Oa(this.Na,G);this.Na={}};H.prototype.handleEvent=function(){throw Error("EventHandler.handleEvent not implemented");};var I=function(){this.Y=new E(this);this.dc=this;this.mb=null};p(I,zb);I.prototype[Cb]=!0;f=I.prototype;f.gb=function(a){this.mb=a};f.addEventListener=function(a,b,c,d){F(this,a,b,c,d)};f.removeEventListener=function(a,b,c,d){Qb(this,a,b,c,d)};
-f.dispatchEvent=function(a){Vb(this);var b,c=this.mb;if(c){b=[];for(var d=1;c;c=c.mb)b.push(c),r(1E3>++d,"infinite loop")}c=this.dc;d=a.type||a;if(m(a))a=new C(a,c);else if(a instanceof C)a.target=a.target||c;else{var e=a;a=new C(d,c);Ta(a,e)}var e=!0,g;if(b)for(var h=b.length-1;!a.$&&0<=h;h--)g=a.currentTarget=b[h],e=Wb(g,d,!0,a)&&e;a.$||(g=a.currentTarget=c,e=Wb(g,d,!0,a)&&e,a.$||(e=Wb(g,d,!1,a)&&e));if(b)for(h=0;!a.$&&h<b.length;h++)g=a.currentTarget=b[h],e=Wb(g,d,!1,a)&&e;return e};
-f.c=function(a,b,c,d){Vb(this);return this.Y.add(String(a),b,!1,c,d)};f.u=function(a,b,c,d){return this.Y.remove(String(a),b,c,d)};var Wb=function(a,b,c,d){b=a.Y.m[String(b)];if(!b)return!0;b=b.concat();for(var e=!0,g=0;g<b.length;++g){var h=b[g];if(h&&!h.ga&&h.Fa==c){var k=h.W,q=h.Ha||h.src;h.Ga&&Hb(a.Y,h);e=!1!==k.call(q,d)&&e}}return e&&!1!=d.wb};I.prototype.wa=function(a,b,c,d){return this.Y.wa(String(a),b,c,d)};var Vb=function(a){r(a.Y,"Event target is not initialized. Did you call the superclass (goog.events.EventTarget) constructor?")};var J=function(a,b){a.style.display=b?"":"none"},Xb=y?"MozUserSelect":z?"WebkitUserSelect":null,Yb=function(a,b,c){c=c?null:a.getElementsByTagName("*");if(Xb){if(b=b?"none":"",a.style[Xb]=b,c){a=0;for(var d;d=c[a];a++)d.style[Xb]=b}}else if(x||Wa)if(b=b?"on":"",a.setAttribute("unselectable",b),c)for(a=0;d=c[a];a++)d.setAttribute("unselectable",b)};var Zb=function(){};ba(Zb);Zb.prototype.gc=0;var K=function(a){I.call(this);this.A=a||gb();this.Ca=$b;this.ca=null;this.f=!1;this.d=null;this.oa=void 0;this.F=this.q=this.p=null};p(K,I);K.prototype.fc=Zb.ha();
-var $b=null,ac=function(a,b){switch(a){case 1:return b?"disable":"enable";case 2:return b?"highlight":"unhighlight";case 4:return b?"activate":"deactivate";case 8:return b?"select":"unselect";case 16:return b?"check":"uncheck";case 32:return b?"focus":"blur";case 64:return b?"open":"close"}throw Error("Invalid component state");},bc=function(a){return a.ca||(a.ca=":"+(a.fc.gc++).toString(36))},cc=function(a,b){if(a.p&&a.p.F){var c=a.p.F,d=a.ca;d in c&&delete c[d];Qa(a.p.F,b,a)}a.ca=b};
-K.prototype.a=function(){return this.d};var dc=function(a){a.oa||(a.oa=new H(a));return a.oa},fc=function(a,b){if(a==b)throw Error("Unable to set parent component");if(b&&a.p&&a.ca&&ec(a.p,a.ca)&&a.p!=b)throw Error("Unable to set parent component");a.p=b;K.e.gb.call(a,b)};f=K.prototype;f.getParent=function(){return this.p};f.gb=function(a){if(this.p&&this.p!=a)throw Error("Method not supported");K.e.gb.call(this,a)};f.lb=function(){return this.A};f.o=function(){this.d=this.A.createElement("div")};
-f.K=function(a){if(this.f)throw Error("Component already rendered");if(a&&this.X(a)){var b=fb(a);this.A&&this.A.Q==b||(this.A=gb(a));this.Za(a);this.D()}else throw Error("Invalid element to decorate");};f.X=function(){return!0};f.Za=function(a){this.d=a};f.D=function(){this.f=!0;gc(this,function(a){!a.f&&a.a()&&a.D()})};f.ba=function(){gc(this,function(a){a.f&&a.ba()});this.oa&&this.oa.ab();this.f=!1};f.Da=function(a,b){this.Va(a,hc(this),b)};
-f.Va=function(a,b,c){r(!!a,"Provided element must not be null.");if(a.f&&(c||!this.f))throw Error("Component already rendered");if(0>b||b>hc(this))throw Error("Child component index out of bounds");this.F&&this.q||(this.F={},this.q=[]);if(a.getParent()==this){var d=bc(a);this.F[d]=a;Ga(this.q,a)}else Qa(this.F,bc(a),a);fc(a,this);Ja(this.q,b,0,a);if(a.f&&this.f&&a.getParent()==this)c=this.B(),c.insertBefore(a.a(),c.childNodes[b]||null);else if(c){this.d||this.o();c=L(this,b+1);b=this.B();c=c?c.d:
-null;if(a.f)throw Error("Component already rendered");a.d||a.o();b?b.insertBefore(a.d,c||null):a.A.Q.body.appendChild(a.d);a.p&&!a.p.f||a.D()}else this.f&&!a.f&&a.d&&a.d.parentNode&&1==a.d.parentNode.nodeType&&a.D()};f.B=function(){return this.d};
-var ic=function(a){if(null==a.Ca){var b=a.f?a.d:a.A.Q.body,c;t:{c=fb(b);if(c.defaultView&&c.defaultView.getComputedStyle&&(c=c.defaultView.getComputedStyle(b,null))){c=c.direction||c.getPropertyValue("direction")||"";break t}c=""}a.Ca="rtl"==(c||(b.currentStyle?b.currentStyle.direction:null)||b.style&&b.style.direction)}return a.Ca};K.prototype.qa=function(a){if(this.f)throw Error("Component already rendered");this.Ca=a};
-var hc=function(a){return a.q?a.q.length:0},ec=function(a,b){var c;a.F&&b?(c=a.F,c=(b in c?c[b]:void 0)||null):c=null;return c},L=function(a,b){return a.q?a.q[b]||null:null},gc=function(a,b,c){a.q&&t(a.q,b,c)},jc=function(a,b){return a.q&&b?Da(a.q,b):-1};
-K.prototype.removeChild=function(a,b){if(a){var c=m(a)?a:bc(a);a=ec(this,c);if(c&&a){var d=this.F;c in d&&delete d[c];Ga(this.q,a);b&&(a.ba(),a.d&&(c=a.d)&&c.parentNode&&c.parentNode.removeChild(c));fc(a,null)}}if(!a)throw Error("Child is not in parent component");return a};var kc,lc={nc:"activedescendant",sc:"atomic",tc:"autocomplete",vc:"busy",yc:"checked",Dc:"controls",Fc:"describedby",Ic:"disabled",Kc:"dropeffect",Lc:"expanded",Mc:"flowto",Oc:"grabbed",Sc:"haspopup",Uc:"hidden",Wc:"invalid",Xc:"label",Yc:"labelledby",Zc:"level",dd:"live",od:"multiline",pd:"multiselectable",td:"orientation",ud:"owns",vd:"posinset",xd:"pressed",Bd:"readonly",Dd:"relevant",Ed:"required",Kd:"selected",Md:"setsize",Od:"sort",ae:"valuemax",be:"valuemin",ce:"valuenow",de:"valuetext"};var mc={oc:"alert",pc:"alertdialog",qc:"application",rc:"article",uc:"banner",wc:"button",xc:"checkbox",zc:"columnheader",Ac:"combobox",Bc:"complementary",Cc:"contentinfo",Ec:"definition",Gc:"dialog",Hc:"directory",Jc:"document",Nc:"form",Pc:"grid",Qc:"gridcell",Rc:"group",Tc:"heading",Vc:"img",$c:"link",ad:"list",bd:"listbox",cd:"listitem",ed:"log",fd:"main",gd:"marquee",hd:"math",jd:"menu",kd:"menubar",ld:"menuitem",md:"menuitemcheckbox",nd:"menuitemradio",qd:"navigation",rd:"note",sd:"option",
-wd:"presentation",yd:"progressbar",zd:"radio",Ad:"radiogroup",Cd:"region",Fd:"row",Gd:"rowgroup",Hd:"rowheader",Id:"scrollbar",Jd:"search",Ld:"separator",Nd:"slider",Pd:"spinbutton",Qd:"status",Rd:"tab",Sd:"tablist",Td:"tabpanel",Ud:"textbox",Vd:"timer",Wd:"toolbar",Xd:"tooltip",Yd:"tree",Zd:"treegrid",$d:"treeitem"};var nc=function(a,b){b?(r(Pa(mc,b),"No such ARIA role "+b),a.setAttribute("role",b)):a.removeAttribute("role")},pc=function(a,b,c){ea(c)&&(c=c.join(" "));var d=oc(b);""===c||void 0==c?(kc||(kc={atomic:!1,autocomplete:"none",dropeffect:"none",haspopup:!1,live:"off",multiline:!1,multiselectable:!1,orientation:"vertical",readonly:!1,relevant:"additions text",required:!1,sort:"none",busy:!1,disabled:!1,hidden:!1,invalid:"false"}),c=kc,b in c?a.setAttribute(d,c[b]):a.removeAttribute(d)):a.setAttribute(d,
-c)},oc=function(a){r(a,"ARIA attribute cannot be empty.");r(Pa(lc,a),"No such ARIA attribute "+a);return"aria-"+a};var sc=function(a,b,c,d,e){if(!(x||z&&B("525")))return!0;if(A&&e)return qc(a);if(e&&!d)return!1;"number"==typeof b&&(b=rc(b));if(!c&&(17==b||18==b||A&&91==b))return!1;if(z&&d&&c)switch(a){case 220:case 219:case 221:case 192:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:return!1}if(x&&d&&b==a)return!1;switch(a){case 13:return!0;case 27:return!z}return qc(a)},qc=function(a){if(48<=a&&57>=a||96<=a&&106>=a||65<=a&&90>=a||z&&0==a)return!0;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 59:case 189:case 187:case 61:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return!0;
-default:return!1}},rc=function(a){if(y)a=tc(a);else if(A&&z)t:switch(a){case 93:a=91;break t}return a},tc=function(a){switch(a){case 61:return 187;case 59:return 186;case 173:return 189;case 224:return 91;case 0:return 224;default:return a}};var M=function(a,b){I.call(this);a&&uc(this,a,b)};p(M,I);f=M.prototype;f.d=null;f.Ia=null;f.Ya=null;f.Ja=null;f.r=-1;f.N=-1;f.kb=!1;
-var vc={3:13,12:144,63232:38,63233:40,63234:37,63235:39,63236:112,63237:113,63238:114,63239:115,63240:116,63241:117,63242:118,63243:119,63244:120,63245:121,63246:122,63247:123,63248:44,63272:46,63273:36,63275:35,63276:33,63277:34,63289:144,63302:45},wc={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},xc=x||z&&B("525"),yc=A&&y;
-M.prototype.Sb=function(a){z&&(17==this.r&&!a.ctrlKey||18==this.r&&!a.altKey||A&&91==this.r&&!a.metaKey)&&(this.N=this.r=-1);-1==this.r&&(a.ctrlKey&&17!=a.keyCode?this.r=17:a.altKey&&18!=a.keyCode?this.r=18:a.metaKey&&91!=a.keyCode&&(this.r=91));xc&&!sc(a.keyCode,this.r,a.shiftKey,a.ctrlKey,a.altKey)?this.handleEvent(a):(this.N=rc(a.keyCode),yc&&(this.kb=a.altKey))};M.prototype.Tb=function(a){this.N=this.r=-1;this.kb=a.altKey};
-M.prototype.handleEvent=function(a){var b=a.O,c,d,e=b.altKey;x&&"keypress"==a.type?(c=this.N,d=13!=c&&27!=c?b.keyCode:0):z&&"keypress"==a.type?(c=this.N,d=0<=b.charCode&&63232>b.charCode&&qc(c)?b.charCode:0):Wa?(c=this.N,d=qc(c)?b.keyCode:0):(c=b.keyCode||this.N,d=b.charCode||0,yc&&(e=this.kb),A&&63==d&&224==c&&(c=191));var g=c=rc(c),h=b.keyIdentifier;c?63232<=c&&c in vc?g=vc[c]:25==c&&a.shiftKey&&(g=9):h&&h in wc&&(g=wc[h]);a=g==this.r;this.r=g;b=new zc(g,d,a,b);b.altKey=e;this.dispatchEvent(b)};
-M.prototype.a=function(){return this.d};var uc=function(a,b,c){a.Ja&&a.detach();a.d=b;a.Ia=F(a.d,"keypress",a,c);a.Ya=F(a.d,"keydown",a.Sb,c,a);a.Ja=F(a.d,"keyup",a.Tb,c,a)};M.prototype.detach=function(){this.Ia&&(G(this.Ia),G(this.Ya),G(this.Ja),this.Ja=this.Ya=this.Ia=null);this.d=null;this.N=this.r=-1};var zc=function(a,b,c,d){D.call(this,d);this.type="key";this.keyCode=a;this.charCode=b;this.repeat=c};p(zc,D);var O=function(a){if(a.classList)return a.classList;a=a.className;return m(a)&&a.match(/\S+/g)||[]},Ac=function(a,b){return a.classList?a.classList.contains(b):u(O(a),b)},P=function(a,b){a.classList?a.classList.add(b):Ac(a,b)||(a.className+=0<a.className.length?" "+b:b)},Bc=function(a,b){if(a.classList)t(b,function(b){P(a,b)});else{var c={};t(O(a),function(a){c[a]=!0});t(b,function(a){c[a]=!0});a.className="";for(var d in c)a.className+=0<a.className.length?" "+d:d}},Cc=function(a,b){a.classList?
-a.classList.remove(b):Ac(a,b)&&(a.className=Ea(O(a),function(a){return a!=b}).join(" "))},Dc=function(a,b){a.classList?t(b,function(b){Cc(a,b)}):a.className=Ea(O(a),function(a){return!u(b,a)}).join(" ")};var Fc=function(a,b){if(!a)throw Error("Invalid class name "+a);if(!n(b))throw Error("Invalid decorator function "+b);Ec[a]=b},Gc={},Ec={};var Q=function(a){this.Gb=a};ba(Q);Q.prototype.ea=function(){return this.Gb};var Hc=function(a,b){a&&(a.tabIndex=b?0:-1)};f=Q.prototype;f.o=function(a){return a.lb().o("div",this.ta(a).join(" "))};f.B=function(a){return a};f.X=function(a){return"DIV"==a.tagName};f.K=function(a,b){b.id&&cc(a,b.id);var c=this.v(),d=!1,e=O(b);e&&t(e,function(b){b==c?d=!0:b&&this.bb(a,b,c)},this);d||P(b,c);Ic(a,this.B(b));return b};
-f.bb=function(a,b,c){b==c+"-disabled"?a.ra(!1):b==c+"-horizontal"?Jc(a,"horizontal"):b==c+"-vertical"&&Jc(a,"vertical")};var Ic=function(a,b){if(b)for(var c=b.firstChild,d;c&&c.parentNode==b;){d=c.nextSibling;if(1==c.nodeType){var e;t:{var g=c;e=void 0;r(g);for(var g=O(g),h=0,k=g.length;h<k;h++)if(e=g[h],e=e in Ec?Ec[e]():null)break t;e=null}e&&(e.d=c,a.isEnabled()||e.ra(!1),a.Da(e),e.K(c))}else c.nodeValue&&""!=qa(c.nodeValue)||b.removeChild(c);c=d}};
-Q.prototype.Oa=function(a){a=a.a();r(a,"The container DOM element cannot be null.");Yb(a,!0,y);x&&(a.hideFocus=!0);var b=this.ea();b&&nc(a,b)};Q.prototype.j=function(a){return a.a()};Q.prototype.v=function(){return"goog-container"};Q.prototype.ta=function(a){var b=this.v(),c=[b,"horizontal"==a.L?b+"-horizontal":b+"-vertical"];a.isEnabled()||c.push(b+"-disabled");return c};var R=function(){},Kc;ba(R);var Lc={button:"pressed",checkbox:"checked",menuitem:"selected",menuitemcheckbox:"checked",menuitemradio:"checked",radio:"checked",tab:"selected",treeitem:"selected"};f=R.prototype;f.ea=function(){};f.o=function(a){var b=a.lb().o("div",this.ta(a).join(" "),a.Ba);Mc(a,b);return b};f.B=function(a){return a};f.sa=function(a,b,c){if(a=a.a?a.a():a){var d=[b];x&&!B("7")&&(d=Nc(O(a),b),d.push(b));(c?Bc:Dc)(a,d)}};f.X=function(){return!0};
-f.K=function(a,b){b.id&&cc(a,b.id);var c=this.B(b);c&&c.firstChild?Oc(a,c.firstChild.nextSibling?Ha(c.childNodes):c.firstChild):a.Ba=null;var d=0,e=this.v(),g=this.v(),h=!1,k=!1,c=!1,q=Ha(O(b));t(q,function(a){if(h||a!=e)if(k||a!=g){var b=d;this.ub||(this.Ka||Pc(this),this.ub=Ra(this.Ka));a=parseInt(this.ub[a],10);d=b|(isNaN(a)?0:a)}else k=!0;else h=!0,g==e&&(k=!0)},this);a.g=d;h||(q.push(e),g==e&&(k=!0));k||q.push(g);var ha=a.G;ha&&q.push.apply(q,ha);if(x&&!B("7")){var N=Nc(q);0<N.length&&(q.push.apply(q,
-N),c=!0)}if(!h||!k||ha||c)b.className=q.join(" ");Mc(a,b);return b};f.Oa=function(a){ic(a)&&this.qa(a.a(),!0);a.isEnabled()&&this.na(a,a.s())};var Qc=function(a,b,c){if(a=c||a.ea())r(b,"The element passed as a first parameter cannot be null."),c=b.getAttribute("role")||null,a!=c&&nc(b,a)},Mc=function(a,b){r(a);r(b);a.s()||pc(b,"hidden",!a.s());a.isEnabled()||Rc(b,1,!a.isEnabled());a.l&8&&Rc(b,8,!!(a.g&8));a.l&16&&Rc(b,16,!!(a.g&16));a.l&64&&Rc(b,64,!!(a.g&64))};f=R.prototype;
-f.za=function(a,b){Yb(a,!b,!x&&!Wa)};f.qa=function(a,b){this.sa(a,this.v()+"-rtl",b)};f.I=function(a){var b;return a.l&32&&(b=a.j())?sb(b)&&tb(b):!1};f.na=function(a,b){var c;if(a.l&32&&(c=a.j())){if(!b&&a.g&32){try{c.blur()}catch(d){}a.g&32&&a.la(null)}(sb(c)&&tb(c))!=b&&(b?c.tabIndex=0:(c.tabIndex=-1,c.removeAttribute("tabIndex")))}};f.ja=function(a,b){J(a,b);a&&pc(a,"hidden",!b)};f.t=function(a,b,c){var d=a.a();if(d){var e=Sc(this,b);e&&this.sa(a,e,c);Rc(d,b,c)}};
-var Rc=function(a,b,c){Kc||(Kc={1:"disabled",8:"selected",16:"checked",64:"expanded"});r(a,"The element passed as a first parameter cannot be null.");b=Kc[b];var d=a.getAttribute("role")||null;d&&(d=Lc[d]||b,b="checked"==b||"selected"==b?d:b);b&&pc(a,b,c)};R.prototype.j=function(a){return a.a()};R.prototype.v=function(){return"goog-control"};
-R.prototype.ta=function(a){var b=this.v(),c=[b],d=this.v();d!=b&&c.push(d);b=a.g;for(d=[];b;){var e=b&-b;d.push(Sc(this,e));b&=~e}c.push.apply(c,d);(a=a.G)&&c.push.apply(c,a);x&&!B("7")&&c.push.apply(c,Nc(c));return c};
-var Nc=function(a,b){var c=[];b&&(a=a.concat([b]));t([],function(d){!Fa(d,la(u,a))||b&&!u(d,b)||c.push(d.join("_"))});return c},Sc=function(a,b){a.Ka||Pc(a);return a.Ka[b]},Pc=function(a){var b=a.v(),c=b.replace(/\xa0|\s/g," ");r(-1==c.indexOf(" "),"ControlRenderer has an invalid css class: '"+b+"'");a.Ka={1:b+"-disabled",2:b+"-hover",4:b+"-active",8:b+"-selected",16:b+"-checked",32:b+"-focused",64:b+"-open"}};var S=function(a,b,c){K.call(this,c);if(!b){b=this.constructor;for(var d;b;){d=ka(b);if(d=Gc[d])break;b=b.e?b.e.constructor:null}b=d?n(d.ha)?d.ha():new d:null}this.b=b;this.Ba=void 0!==a?a:null};p(S,K);f=S.prototype;f.Ba=null;f.g=0;f.l=39;f.ec=255;f.T=0;f.n=!0;f.G=null;f.Z=!0;f.xa=!1;f.rb=null;f.pb=function(){return this.Z};f.Pa=function(a){this.f&&a!=this.Z&&Tc(this,a);this.Z=a};f.j=function(){return this.b.j(this)};f.ya=function(){return this.fa||(this.fa=new M)};f.Ab=function(){return this.b};
-f.sa=function(a,b){b?a&&(this.G?u(this.G,a)||this.G.push(a):this.G=[a],this.b.sa(this,a,!0)):a&&this.G&&Ga(this.G,a)&&(0==this.G.length&&(this.G=null),this.b.sa(this,a,!1))};f.o=function(){var a=this.b.o(this);this.d=a;Qc(this.b,a,this.rb);this.xa||this.b.za(a,!1);this.s()||this.b.ja(a,!1)};f.B=function(){return this.b.B(this.a())};f.X=function(a){return this.b.X(a)};f.Za=function(a){this.d=a=this.b.K(this,a);Qc(this.b,a,this.rb);this.xa||this.b.za(a,!1);this.n="none"!=a.style.display};
-f.D=function(){S.e.D.call(this);this.b.Oa(this);if(this.l&-2&&(this.pb()&&Tc(this,!0),this.l&32)){var a=this.j();if(a){var b=this.ya();uc(b,a);dc(this).c(b,"key",this.J).c(a,"focus",this.ma).c(a,"blur",this.la)}}};
-var Tc=function(a,b){var c=dc(a),d=a.a();b?(c.c(d,"mouseover",a.Sa).c(d,"mousedown",a.ka).c(d,"mouseup",a.Ta).c(d,"mouseout",a.Ra),a.pa!=aa&&c.c(d,"contextmenu",a.pa),x&&c.c(d,"dblclick",a.tb)):(c.u(d,"mouseover",a.Sa).u(d,"mousedown",a.ka).u(d,"mouseup",a.Ta).u(d,"mouseout",a.Ra),a.pa!=aa&&c.u(d,"contextmenu",a.pa),x&&c.u(d,"dblclick",a.tb))};S.prototype.ba=function(){S.e.ba.call(this);this.fa&&this.fa.detach();this.s()&&this.isEnabled()&&this.b.na(this,!1)};var Oc=function(a,b){a.Ba=b};f=S.prototype;
-f.qa=function(a){S.e.qa.call(this,a);var b=this.a();b&&this.b.qa(b,a)};f.za=function(a){this.xa=a;var b=this.a();b&&this.b.za(b,a)};f.s=function(){return this.n};f.ja=function(a,b){if(b||this.n!=a&&this.dispatchEvent(a?"show":"hide")){var c=this.a();c&&this.b.ja(c,a);this.isEnabled()&&this.b.na(this,a);this.n=a;return!0}return!1};f.isEnabled=function(){return!(this.g&1)};
-f.ra=function(a){var b=this.getParent();b&&"function"==typeof b.isEnabled&&!b.isEnabled()||!T(this,1,!a)||(a||(this.setActive(!1),this.C(!1)),this.s()&&this.b.na(this,a),this.t(1,!a))};f.C=function(a){T(this,2,a)&&this.t(2,a)};f.setActive=function(a){T(this,4,a)&&this.t(4,a)};var Uc=function(a,b){T(a,8,b)&&a.t(8,b)},Vc=function(a,b){T(a,64,b)&&a.t(64,b)};S.prototype.t=function(a,b){this.l&a&&b!=!!(this.g&a)&&(this.b.t(this,a,b),this.g=b?this.g|a:this.g&~a)};
-var Wc=function(a,b,c){if(a.f&&a.g&b&&!c)throw Error("Component already rendered");!c&&a.g&b&&a.t(b,!1);a.l=c?a.l|b:a.l&~b},U=function(a,b){return!!(a.ec&b)&&!!(a.l&b)},T=function(a,b,c){return!!(a.l&b)&&!!(a.g&b)!=c&&(!(a.T&b)||a.dispatchEvent(ac(b,c)))&&!a.Ub};f=S.prototype;f.Sa=function(a){(!a.relatedTarget||!ob(this.a(),a.relatedTarget))&&this.dispatchEvent("enter")&&this.isEnabled()&&U(this,2)&&this.C(!0)};
-f.Ra=function(a){a.relatedTarget&&ob(this.a(),a.relatedTarget)||!this.dispatchEvent("leave")||(U(this,4)&&this.setActive(!1),U(this,2)&&this.C(!1))};f.pa=aa;f.ka=function(a){this.isEnabled()&&(U(this,2)&&this.C(!0),!Bb(a)||z&&A&&a.ctrlKey||(U(this,4)&&this.setActive(!0),this.b.I(this)&&this.j().focus()));this.xa||!Bb(a)||z&&A&&a.ctrlKey||a.preventDefault()};f.Ta=function(a){this.isEnabled()&&(U(this,2)&&this.C(!0),this.g&4&&Xc(this,a)&&U(this,4)&&this.setActive(!1))};
-f.tb=function(a){this.isEnabled()&&Xc(this,a)};var Xc=function(a,b){if(U(a,16)){var c=!(a.g&16);T(a,16,c)&&a.t(16,c)}U(a,8)&&Uc(a,!0);U(a,64)&&Vc(a,!(a.g&64));c=new C("action",a);b&&(c.altKey=b.altKey,c.ctrlKey=b.ctrlKey,c.metaKey=b.metaKey,c.shiftKey=b.shiftKey,c.jb=b.jb);return a.dispatchEvent(c)};S.prototype.ma=function(){U(this,32)&&T(this,32,!0)&&this.t(32,!0)};S.prototype.la=function(){U(this,4)&&this.setActive(!1);U(this,32)&&T(this,32,!1)&&this.t(32,!1)};
-S.prototype.J=function(a){return this.s()&&this.isEnabled()&&this.nb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};S.prototype.nb=function(a){return 13==a.keyCode&&Xc(this,a)};if(!n(S))throw Error("Invalid component class "+S);if(!n(R))throw Error("Invalid renderer class "+R);var Yc=ka(S);Gc[Yc]=R;Fc("goog-control",function(){return new S(null)});var V=function(a,b,c){K.call(this,c);this.b=b||Q.ha();this.L=a||"vertical"};p(V,K);f=V.prototype;f.vb=null;f.fa=null;f.b=null;f.L=null;f.n=!0;f.U=!0;f.$a=!0;f.h=-1;f.i=null;f.aa=!1;f.Rb=!1;f.Qb=!0;f.M=null;f.j=function(){return this.vb||this.b.j(this)};f.ya=function(){return this.fa||(this.fa=new M(this.j()))};f.Ab=function(){return this.b};f.o=function(){this.d=this.b.o(this)};f.B=function(){return this.b.B(this.a())};f.X=function(a){return this.b.X(a)};
-f.Za=function(a){this.d=this.b.K(this,a);"none"==a.style.display&&(this.n=!1)};f.D=function(){V.e.D.call(this);gc(this,function(a){a.f&&Zc(this,a)},this);var a=this.a();this.b.Oa(this);this.ja(this.n,!0);dc(this).c(this,"enter",this.Kb).c(this,"highlight",this.Lb).c(this,"unhighlight",this.Nb).c(this,"open",this.Mb).c(this,"close",this.Ib).c(a,"mousedown",this.ka).c(fb(a),"mouseup",this.Jb).c(a,["mousedown","mouseup","mouseover","mouseout","contextmenu"],this.Hb);this.I()&&$c(this,!0)};
-var $c=function(a,b){var c=dc(a),d=a.j();b?c.c(d,"focus",a.ma).c(d,"blur",a.la).c(a.ya(),"key",a.J):c.u(d,"focus",a.ma).u(d,"blur",a.la).u(a.ya(),"key",a.J)};f=V.prototype;f.ba=function(){ad(this,-1);this.i&&Vc(this.i,!1);this.aa=!1;V.e.ba.call(this)};f.Kb=function(){return!0};
-f.Lb=function(a){var b=jc(this,a.target);if(-1<b&&b!=this.h){var c=L(this,this.h);c&&c.C(!1);this.h=b;c=L(this,this.h);this.aa&&c.setActive(!0);this.Qb&&this.i&&c!=this.i&&(c.l&64?Vc(c,!0):Vc(this.i,!1))}b=this.a();r(b,"The DOM element for the container cannot be null.");null!=a.target.a()&&pc(b,"activedescendant",a.target.a().id)};f.Nb=function(a){a.target==L(this,this.h)&&(this.h=-1);a=this.a();r(a,"The DOM element for the container cannot be null.");a.removeAttribute(oc("activedescendant"))};
-f.Mb=function(a){(a=a.target)&&a!=this.i&&a.getParent()==this&&(this.i&&Vc(this.i,!1),this.i=a)};f.Ib=function(a){a.target==this.i&&(this.i=null)};f.ka=function(a){this.U&&(this.aa=!0);var b=this.j();b&&sb(b)&&tb(b)?b.focus():a.preventDefault()};f.Jb=function(){this.aa=!1};
-f.Hb=function(a){var b;t:{b=a.target;if(this.M)for(var c=this.a();b&&b!==c;){var d=b.id;if(d in this.M){b=this.M[d];break t}b=b.parentNode}b=null}if(b)switch(a.type){case "mousedown":b.ka(a);break;case "mouseup":b.Ta(a);break;case "mouseover":b.Sa(a);break;case "mouseout":b.Ra(a);break;case "contextmenu":b.pa(a)}};f.ma=function(){};f.la=function(){ad(this,-1);this.aa=!1;this.i&&Vc(this.i,!1)};
-f.J=function(a){return this.isEnabled()&&this.s()&&(0!=hc(this)||this.vb)&&this.nb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};
-f.nb=function(a){var b=L(this,this.h);if(b&&"function"==typeof b.J&&b.J(a)||this.i&&this.i!=b&&"function"==typeof this.i.J&&this.i.J(a))return!0;if(a.shiftKey||a.ctrlKey||a.metaKey||a.altKey)return!1;switch(a.keyCode){case 27:if(this.I())this.j().blur();else return!1;break;case 36:bd(this);break;case 35:cd(this);break;case 38:if("vertical"==this.L)dd(this);else return!1;break;case 37:if("horizontal"==this.L)ic(this)?ed(this):dd(this);else return!1;break;case 40:if("vertical"==this.L)ed(this);else return!1;
-break;case 39:if("horizontal"==this.L)ic(this)?dd(this):ed(this);else return!1;break;default:return!1}return!0};var Zc=function(a,b){var c=b.a(),c=c.id||(c.id=bc(b));a.M||(a.M={});a.M[c]=b};V.prototype.Da=function(a,b){Ca(a,S,"The child of a container must be a control");V.e.Da.call(this,a,b)};V.prototype.Va=function(a,b,c){a.T|=2;a.T|=64;!this.I()&&this.Rb||Wc(a,32,!1);a.Pa(!1);V.e.Va.call(this,a,b,c);a.f&&this.f&&Zc(this,a);b<=this.h&&this.h++};
-V.prototype.removeChild=function(a,b){if(a=m(a)?ec(this,a):a){var c=jc(this,a);-1!=c&&(c==this.h?(a.C(!1),this.h=-1):c<this.h&&this.h--);var d=a.a();d&&d.id&&this.M&&(c=this.M,d=d.id,d in c&&delete c[d])}a=V.e.removeChild.call(this,a,b);a.Pa(!0);return a};var Jc=function(a,b){if(a.a())throw Error("Component already rendered");a.L=b};f=V.prototype;f.s=function(){return this.n};
-f.ja=function(a,b){if(b||this.n!=a&&this.dispatchEvent(a?"show":"hide")){this.n=a;var c=this.a();c&&(J(c,a),this.I()&&Hc(this.j(),this.U&&this.n),b||this.dispatchEvent(this.n?"aftershow":"afterhide"));return!0}return!1};f.isEnabled=function(){return this.U};f.ra=function(a){this.U!=a&&this.dispatchEvent(a?"enable":"disable")&&(a?(this.U=!0,gc(this,function(a){a.xb?delete a.xb:a.ra(!0)})):(gc(this,function(a){a.isEnabled()?a.ra(!1):a.xb=!0}),this.aa=this.U=!1),this.I()&&Hc(this.j(),a&&this.n))};
-f.I=function(){return this.$a};f.na=function(a){a!=this.$a&&this.f&&$c(this,a);this.$a=a;this.U&&this.n&&Hc(this.j(),a)};var ad=function(a,b){var c=L(a,b);c?c.C(!0):-1<a.h&&L(a,a.h).C(!1)};V.prototype.C=function(a){ad(this,jc(this,a))};
-var bd=function(a){fd(a,function(a,c){return(a+1)%c},hc(a)-1)},cd=function(a){fd(a,function(a,c){a--;return 0>a?c-1:a},0)},ed=function(a){fd(a,function(a,c){return(a+1)%c},a.h)},dd=function(a){fd(a,function(a,c){a--;return 0>a?c-1:a},a.h)},fd=function(a,b,c){c=0>c?jc(a,a.i):c;var d=hc(a);c=b.call(a,c,d);for(var e=0;e<=d;){var g=L(a,c);if(g&&g.s()&&g.isEnabled()&&g.l&2){a.Wa(c);break}e++;c=b.call(a,c,d)}};V.prototype.Wa=function(a){ad(this,a)};var gd=function(){};p(gd,R);ba(gd);f=gd.prototype;f.v=function(){return"goog-tab"};f.ea=function(){return"tab"};f.o=function(a){var b=gd.e.o.call(this,a);(a=a.Ua())&&this.Xa(b,a);return b};f.K=function(a,b){b=gd.e.K.call(this,a,b);var c=this.Ua(b);c&&(a.sb=c);a.g&8&&(c=a.getParent())&&n(c.V)&&(a.t(8,!1),c.V(a));return b};f.Ua=function(a){return a.title||""};f.Xa=function(a,b){a&&(a.title=b||"")};var hd=function(a,b,c){S.call(this,a,b||gd.ha(),c);Wc(this,8,!0);this.T|=9};p(hd,S);hd.prototype.Ua=function(){return this.sb};hd.prototype.Xa=function(a){this.Ab().Xa(this.a(),a);this.sb=a};Fc("goog-tab",function(){return new hd(null)});var W=function(){this.Gb="tablist"};p(W,Q);ba(W);W.prototype.v=function(){return"goog-tab-bar"};W.prototype.bb=function(a,b,c){this.Bb||(this.La||id(this),this.Bb=Ra(this.La));var d=this.Bb[b];d?(Jc(a,jd(d)),a.yb=d):W.e.bb.call(this,a,b,c)};W.prototype.ta=function(a){var b=W.e.ta.call(this,a);this.La||id(this);b.push(this.La[a.yb]);return b};var id=function(a){var b=a.v();a.La={top:b+"-top",bottom:b+"-bottom",start:b+"-start",end:b+"-end"}};var X=function(a,b,c){a=a||"top";Jc(this,jd(a));this.yb=a;V.call(this,this.L,b||W.ha(),c);kd(this)};p(X,V);f=X.prototype;f.ac=!0;f.H=null;f.D=function(){X.e.D.call(this);kd(this)};f.removeChild=function(a,b){ld(this,a);return X.e.removeChild.call(this,a,b)};f.Wa=function(a){X.e.Wa.call(this,a);this.ac&&this.V(L(this,a))};f.V=function(a){a?Uc(a,!0):this.H&&Uc(this.H,!1)};
-var ld=function(a,b){if(b&&b==a.H){for(var c=jc(a,b),d=c-1;b=L(a,d);d--)if(b.s()&&b.isEnabled()){a.V(b);return}for(c+=1;b=L(a,c);c++)if(b.s()&&b.isEnabled()){a.V(b);return}a.V(null)}};f=X.prototype;f.Zb=function(a){this.H&&this.H!=a.target&&Uc(this.H,!1);this.H=a.target};f.$b=function(a){a.target==this.H&&(this.H=null)};f.Xb=function(a){ld(this,a.target)};f.Yb=function(a){ld(this,a.target)};f.ma=function(){L(this,this.h)||this.C(this.H||L(this,0))};
-var kd=function(a){dc(a).c(a,"select",a.Zb).c(a,"unselect",a.$b).c(a,"disable",a.Xb).c(a,"hide",a.Yb)},jd=function(a){return"start"==a||"end"==a?"vertical":"horizontal"};Fc("goog-tab-bar",function(){return new X});var Y=function(a,b,c,d,e){function g(a){a&&(a.tabIndex=0,nc(a,h.ea()),P(a,"goog-zippy-header"),md(h,a),a&&h.Ob.c(a,"keydown",h.Pb))}I.call(this);this.A=e||gb();this.R=this.A.a(a)||null;this.Aa=this.A.a(d||null);this.da=(this.Qa=n(b)?b:null)||!b?null:this.A.a(b);this.k=!0==c;this.Ob=new H(this);this.qb=new H(this);var h=this;g(this.R);g(this.Aa);this.S(this.k)};p(Y,I);f=Y.prototype;f.Z=!0;f.ea=function(){return"tab"};f.B=function(){return this.da};f.toggle=function(){this.S(!this.k)};
-f.S=function(a){this.da?J(this.da,a):a&&this.Qa&&(this.da=this.Qa());this.da&&P(this.da,"goog-zippy-content");if(this.Aa)J(this.R,!a),J(this.Aa,a);else if(this.R){var b=this.R;a?P(b,"goog-zippy-expanded"):Cc(b,"goog-zippy-expanded");b=this.R;a?Cc(b,"goog-zippy-collapsed"):P(b,"goog-zippy-collapsed");pc(this.R,"expanded",a)}this.k=a;this.dispatchEvent(new nd("toggle",this))};f.pb=function(){return this.Z};f.Pa=function(a){this.Z!=a&&((this.Z=a)?(md(this,this.R),md(this,this.Aa)):this.qb.ab())};
-var md=function(a,b){b&&a.qb.c(b,"click",a.bc)};Y.prototype.Pb=function(a){if(13==a.keyCode||32==a.keyCode)this.toggle(),this.dispatchEvent(new C("action",this)),a.preventDefault(),a.stopPropagation()};Y.prototype.bc=function(){this.toggle();this.dispatchEvent(new C("action",this))};var nd=function(a,b){C.call(this,a,b)};p(nd,C);var Z=function(a,b){this.ob=[];for(var c=ib("span","ae-zippy",hb(document,a)),d=0,e;e=c[d];d++){var g=e.parentNode.parentNode.parentNode;if(void 0!=g.nextElementSibling)g=g.nextElementSibling;else for(g=g.nextSibling;g&&1!=g.nodeType;)g=g.nextSibling;e=new Y(e,g,!1);this.ob.push(e)}this.hc=new od(this.ob,hb(document,b))};Z.prototype.kc=function(){return this.hc};Z.prototype.lc=function(){return this.ob};
-var od=function(a,b){this.va=a;if(this.va.length)for(var c=0,d;d=this.va[c];c++)F(d,"toggle",this.Wb,!1,this);this.Ma=0;this.k=!1;c="ae-toggle ae-plus ae-action";this.va.length||(c+=" ae-disabled");this.P=mb("span",{className:c},"Expand All");F(this.P,"click",this.Vb,!1,this);b&&b.appendChild(this.P)};od.prototype.Vb=function(){this.va.length&&this.S(!this.k)};
-od.prototype.Wb=function(a){a=a.currentTarget;this.Ma=a.k?this.Ma+1:this.Ma-1;a.k!=this.k&&(a.k?(this.k=!0,pd(this,!0)):0==this.Ma&&(this.k=!1,pd(this,!1)))};od.prototype.S=function(a){this.k=a;a=0;for(var b;b=this.va[a];a++)b.k!=this.k&&b.S(this.k);pd(this)};
-var pd=function(a,b){(void 0!==b?b:a.k)?(Na(a.P,"ae-plus"),La(a.P,"ae-minus"),pb(a.P,"Collapse All")):(Na(a.P,"ae-minus"),La(a.P,"ae-plus"),pb(a.P,"Expand All"))},qd=function(a){this.cc=a;this.Db={};var b,c=mb("div",{},b=mb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),mb("div",{className:"goog-tab-bar-clear"}),a=mb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new X;d.K(b);F(d,"select",this.Cb,!1,this);F(d,"unselect",this.Cb,!1,this);
-b=0;for(var e;e=this.cc[b];b++)if(e=hb(document,"ae-stats-details-"+e)){var g=ib("h2",null,e)[0],h;h=g;var k=void 0;db&&"innerText"in h?k=h.innerText.replace(/(\r\n|\r|\n)/g,"\n"):(k=[],ub(h,k,!0),k=k.join(""));k=k.replace(/ \xAD /g," ").replace(/\xAD/g,"");k=k.replace(/\u200B/g,"");db||(k=k.replace(/ +/g," "));" "!=k&&(k=k.replace(/^\s*/,""));h=k;g&&g.parentNode&&g.parentNode.removeChild(g);g=new hd(h);this.Db[ka(g)]=e;d.Da(g,!0);a.appendChild(e);0==b?d.V(g):J(e,!1)}hb(document,"bd").appendChild(c)};
-qd.prototype.Cb=function(a){var b=this.Db[ka(a.target)];J(b,"select"==a.type)};ma("ae.Stats.Details.Tabs",qd);ma("goog.ui.Zippy",Y);Y.prototype.setExpanded=Y.prototype.S;ma("ae.Stats.MakeZippys",Z);Z.prototype.getExpandCollapse=Z.prototype.kc;Z.prototype.getZippys=Z.prototype.lc;od.prototype.setExpanded=od.prototype.S;var $=function(){this.cb=[];this.ib=[]},rd=[[5,.2,1],[6,.2,1.2],[5,.25,1.25],[6,.25,1.5],[4,.5,2],[5,.5,2.5],[6,.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],sd=function(a){if(0>=a)return[2,.5,1];for(var b=1;1>a;)a*=10,b/=10;for(;10<=a;)a/=10,b*=10;for(var c=0;c<rd.length;c++)if(a<=rd[c][2])return[rd[c][0],rd[c][1]*b,rd[c][2]*b];return[5,2*b,10*b]};$.prototype.hb="stats/static/pix.gif";$.prototype.w="ae-stats-gantt-";$.prototype.fb=0;$.prototype.write=function(a){this.ib.push(a)};
-var td=function(a,b,c,d){a.write('<tr class="'+a.w+'axisrow"><td width="20%"></td><td>');a.write('<div class="'+a.w+'axis">');for(var e=0;e<=b;e++)a.write('<img class="'+a.w+'tick" src="'+a.hb+'" alt="" '),a.write('style="left:'+e*c*d+'%"\n>'),a.write('<span class="'+a.w+'scale" style="left:'+e*c*d+'%">'),a.write("&nbsp;"+e*c+"</span>");a.write("</div></td></tr>\n")};
-$.prototype.jc=function(){this.ib=[];var a=sd(this.fb),b=a[0],c=a[1],a=100/a[2];this.write('<table class="'+this.w+'table">\n');td(this,b,c,a);for(var d=0;d<this.cb.length;d++){var e=this.cb[d];this.write('<tr class="'+this.w+'datarow"><td width="20%">');0<e.label.length&&(0<e.ia.length&&this.write('<a class="'+this.w+'link" href="'+e.ia+'">'),this.write(e.label),0<e.ia.length&&this.write("</a>"));this.write("</td>\n<td>");this.write('<div class="'+this.w+'container">');0<e.ia.length&&this.write('<a class="'+
-this.w+'link" href="'+e.ia+'"\n>');this.write('<img class="'+this.w+'bar" src="'+this.hb+'" alt="" ');this.write('style="left:'+e.start*a+"%;width:"+e.duration*a+'%;min-width:1px"\n>');0<e.eb&&(this.write('<img class="'+this.w+'extra" src="'+this.hb+'" alt="" '),this.write('style="left:'+e.start*a+"%;width:"+e.eb*a+'%"\n>'));0<e.zb.length&&(this.write('<span class="'+this.w+'inline" style="left:'+(e.start+Math.max(e.duration,e.eb))*a+'%">&nbsp;'),this.write(e.zb),this.write("</span>"));0<e.ia.length&&
-this.write("</a>");this.write("</div></td></tr>\n")}td(this,b,c,a);this.write("</table>\n");return this.ib.join("")};$.prototype.ic=function(a,b,c,d,e,g){this.fb=Math.max(this.fb,Math.max(b+c,b+d));this.cb.push({label:a,start:b,duration:c,eb:d,zb:e,ia:g})};ma("Gantt",$);$.prototype.add_bar=$.prototype.ic;$.prototype.draw=$.prototype.jc;})();
+/* Copyright 2008-10 Google Inc. All Rights Reserved. */ (function(){var f,l=this,aa=function(){},ba=function(a){a.ia=function(){return a.Eb?a.Eb:a.Eb=new a}},ca=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"==c||"undefined"!=
+typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a.call)return"object";return b},da=function(a){return"array"==ca(a)},ea=function(a){var b=ca(a);return"array"==b||"object"==b&&"number"==typeof a.length},m=function(a){return"string"==typeof a},n=function(a){return"function"==ca(a)},fa=function(a){var b=typeof a;return"object"==b&&null!=a||"function"==b},ia=function(a){return a[ga]||
+(a[ga]=++ha)},ga="closure_uid_"+(1E9*Math.random()>>>0),ha=0,ja=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=c.slice();b.push.apply(b,arguments);return a.apply(this,b)}},ka=function(a,b){var c=a.split("."),d=l;c[0]in d||!d.execScript||d.execScript("var "+c[0]);for(var e;c.length&&(e=c.shift());)c.length||void 0===b?d=d[e]?d[e]:d[e]={}:d[e]=b},p=function(a,b){function c(){}c.prototype=b.prototype;a.e=b.prototype;a.prototype=new c;a.prototype.constructor=a;a.mc=
+function(a,c,g){return b.prototype[c].apply(a,Array.prototype.slice.call(arguments,2))}};var la=function(a){if(Error.captureStackTrace)Error.captureStackTrace(this,la);else{var b=Error().stack;b&&(this.stack=b)}a&&(this.message=String(a))};p(la,Error);la.prototype.name="CustomError";var ma;var na=function(a,b){for(var c=a.split("%s"),d="",e=Array.prototype.slice.call(arguments,1);e.length&&1<c.length;)d+=c.shift()+e.shift();return d+c.join("%s")},oa=function(a){return a.replace(/^[\s\xa0]+|[\s\xa0]+$/g,"")},wa=function(a){if(!pa.test(a))return a;-1!=a.indexOf("&")&&(a=a.replace(qa,"&amp;"));-1!=a.indexOf("<")&&(a=a.replace(ra,"&lt;"));-1!=a.indexOf(">")&&(a=a.replace(sa,"&gt;"));-1!=a.indexOf('"')&&(a=a.replace(ta,"&quot;"));-1!=a.indexOf("'")&&(a=a.replace(ua,"&#39;"));-1!=a.indexOf("\x00")&&
+(a=a.replace(va,"&#0;"));return a},qa=/&/g,ra=/</g,sa=/>/g,ta=/"/g,ua=/'/g,va=/\x00/g,pa=/[\x00&<>"']/,xa=function(a,b){return a<b?-1:a>b?1:0};var ya=function(a,b){b.unshift(a);la.call(this,na.apply(null,b));b.shift()};p(ya,la);ya.prototype.name="AssertionError";var za=function(a,b,c){var d="Assertion failed";if(b)var d=d+(": "+b),e=c;else a&&(d+=": "+a,e=null);throw new ya(""+d,e||[]);},q=function(a,b,c){a||za("",b,Array.prototype.slice.call(arguments,2))},Aa=function(a,b,c,d){a instanceof b||za("instanceof check failed.",c,Array.prototype.slice.call(arguments,3))};var r=Array.prototype,Ba=r.indexOf?function(a,b,c){q(null!=a.length);return r.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(m(a))return m(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},s=r.forEach?function(a,b,c){q(null!=a.length);r.forEach.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=m(a)?a.split(""):a,g=0;g<d;g++)g in e&&b.call(c,e[g],g,a)},Ca=r.filter?function(a,b,c){q(null!=a.length);return r.filter.call(a,b,
+c)}:function(a,b,c){for(var d=a.length,e=[],g=0,h=m(a)?a.split(""):a,k=0;k<d;k++)if(k in h){var I=h[k];b.call(c,I,k,a)&&(e[g++]=I)}return e},Da=r.every?function(a,b,c){q(null!=a.length);return r.every.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=m(a)?a.split(""):a,g=0;g<d;g++)if(g in e&&!b.call(c,e[g],g,a))return!1;return!0},t=function(a,b){return 0<=Ba(a,b)},Ea=function(a,b){var c=Ba(a,b),d;if(d=0<=c)q(null!=a.length),r.splice.call(a,c,1);return d},Fa=function(a){var b=a.length;if(0<b){for(var c=
+Array(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},Ha=function(a,b,c,d){q(null!=a.length);r.splice.apply(a,Ga(arguments,1))},Ga=function(a,b,c){q(null!=a.length);return 2>=arguments.length?r.slice.call(a,b):r.slice.call(a,b,c)};var Ia=function(a){a=a.className;return m(a)&&a.match(/\S+/g)||[]},Ja=function(a,b){for(var c=Ia(a),d=Ga(arguments,1),e=c,g=0;g<d.length;g++)t(e,d[g])||e.push(d[g]);a.className=c.join(" ")},La=function(a,b){var c=Ia(a),d=Ga(arguments,1),c=Ka(c,d);a.className=c.join(" ")},Ka=function(a,b){return Ca(a,function(a){return!t(b,a)})};var Ma=function(a,b){for(var c in a)b.call(void 0,a[c],c,a)},Na=function(a,b){for(var c in a)if(a[c]==b)return!0;return!1},Oa=function(a,b,c){if(b in a)throw Error('The object already contains the key "'+b+'"');a[b]=c},Pa=function(a){var b={},c;for(c in a)b[a[c]]=c;return b},Qa="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Ra=function(a,b){for(var c,d,e=1;e<arguments.length;e++){d=arguments[e];for(c in d)a[c]=d[c];for(var g=0;g<Qa.length;g++)c=
+Qa[g],Object.prototype.hasOwnProperty.call(d,c)&&(a[c]=d[c])}};var Sa;t:{var Ta=l.navigator;if(Ta){var Ua=Ta.userAgent;if(Ua){Sa=Ua;break t}}Sa=""}var u=function(a){return-1!=Sa.indexOf(a)};var Va=u("Opera")||u("OPR"),v=u("Trident")||u("MSIE"),w=u("Gecko")&&-1==Sa.toLowerCase().indexOf("webkit")&&!(u("Trident")||u("MSIE")),x=-1!=Sa.toLowerCase().indexOf("webkit"),Wa=l.navigator||null,y=-1!=(Wa&&Wa.platform||"").indexOf("Mac"),Xa=function(){var a=l.document;return a?a.documentMode:void 0},Ya=function(){var a="",b;if(Va&&l.opera)return a=l.opera.version,n(a)?a():a;w?b=/rv\:([^\);]+)(\)|;)/:v?b=/\b(?:MSIE|rv)[: ]([^\);]+)(\)|;)/:x&&(b=/WebKit\/(\S+)/);b&&(a=(a=b.exec(Sa))?a[1]:"");return v&&
+(b=Xa(),b>parseFloat(a))?String(b):a}(),Za={},A=function(a){var b;if(!(b=Za[a])){b=0;for(var c=oa(String(Ya)).split("."),d=oa(String(a)).split("."),e=Math.max(c.length,d.length),g=0;0==b&&g<e;g++){var h=c[g]||"",k=d[g]||"",I=RegExp("(\\d*)(\\D*)","g"),z=RegExp("(\\d*)(\\D*)","g");do{var K=I.exec(h)||["","",""],P=z.exec(k)||["","",""];if(0==K[0].length&&0==P[0].length)break;b=xa(0==K[1].length?0:parseInt(K[1],10),0==P[1].length?0:parseInt(P[1],10))||xa(0==K[2].length,0==P[2].length)||xa(K[2],P[2])}while(0==
+b)}b=Za[a]=0<=b}return b},$a=l.document,ab=$a&&v?Xa()||("CSS1Compat"==$a.compatMode?parseInt(Ya,10):5):void 0;var bb=!v||v&&9<=ab;!w&&!v||v&&v&&9<=ab||w&&A("1.9.1");var cb=v&&!A("9");var fb=function(a){return a?new db(eb(a)):ma||(ma=new db)},gb=function(a,b){return m(b)?a.getElementById(b):b},hb=function(a,b,c){var d=document;c=c||d;a=a&&"*"!=a?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(a||b))return c.querySelectorAll(a+(b?"."+b:""));if(b&&c.getElementsByClassName){c=c.getElementsByClassName(b);if(a){for(var d={},e=0,g=0,h;h=c[g];g++)a==h.nodeName&&(d[e++]=h);d.length=e;return d}return c}c=c.getElementsByTagName(a||"*");if(b){d={};for(g=e=0;h=c[g];g++)a=h.className,
+"function"==typeof a.split&&t(a.split(/\s+/),b)&&(d[e++]=h);d.length=e;return d}return c},jb=function(a,b){Ma(b,function(b,d){"style"==d?a.style.cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in ib?a.setAttribute(ib[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},ib={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",type:"type",usemap:"useMap",
+valign:"vAlign",width:"width"},lb=function(a,b,c){return kb(document,arguments)},kb=function(a,b){var c=b[0],d=b[1];if(!bb&&d&&(d.name||d.type)){c=["<",c];d.name&&c.push(' name="',wa(d.name),'"');if(d.type){c.push(' type="',wa(d.type),'"');var e={};Ra(e,d);delete e.type;d=e}c.push(">");c=c.join("")}c=a.createElement(c);d&&(m(d)?c.className=d:da(d)?c.className=d.join(" "):jb(c,d));2<b.length&&mb(a,c,b);return c},mb=function(a,b,c){function d(c){c&&b.appendChild(m(c)?a.createTextNode(c):c)}for(var e=
+2;e<c.length;e++){var g=c[e];if(!ea(g)||fa(g)&&0<g.nodeType)d(g);else{var h;t:{if(g&&"number"==typeof g.length){if(fa(g)){h="function"==typeof g.item||"string"==typeof g.item;break t}if(n(g)){h="function"==typeof g.item;break t}}h=!1}s(h?Fa(g):g,d)}}},nb=function(a,b){if(a.contains&&1==b.nodeType)return a==b||a.contains(b);if("undefined"!=typeof a.compareDocumentPosition)return a==b||Boolean(a.compareDocumentPosition(b)&16);for(;b&&a!=b;)b=b.parentNode;return b==a},eb=function(a){q(a,"Node cannot be null or undefined.");
+return 9==a.nodeType?a:a.ownerDocument||a.document},ob=function(a,b){q(null!=a,"goog.dom.setTextContent expects a non-null value for node");if("textContent"in a)a.textContent=b;else if(3==a.nodeType)a.data=b;else if(a.firstChild&&3==a.firstChild.nodeType){for(;a.lastChild!=a.firstChild;)a.removeChild(a.lastChild);a.firstChild.data=b}else{for(var c;c=a.firstChild;)a.removeChild(c);c=eb(a);a.appendChild(c.createTextNode(String(b)))}},pb={SCRIPT:1,STYLE:1,HEAD:1,IFRAME:1,OBJECT:1},qb={IMG:" ",BR:"\n"},
+tb=function(a){return rb(a)&&sb(a)},ub=function(a,b){b?a.tabIndex=0:(a.tabIndex=-1,a.removeAttribute("tabIndex"))},rb=function(a){a=a.getAttributeNode("tabindex");return null!=a&&a.specified},sb=function(a){a=a.tabIndex;return"number"==typeof a&&0<=a&&32768>a},vb=function(a,b,c){if(!(a.nodeName in pb))if(3==a.nodeType)c?b.push(String(a.nodeValue).replace(/(\r\n|\r|\n)/g,"")):b.push(a.nodeValue);else if(a.nodeName in qb)b.push(qb[a.nodeName]);else for(a=a.firstChild;a;)vb(a,b,c),a=a.nextSibling},db=
+function(a){this.Q=a||l.document||document};f=db.prototype;f.lb=fb;f.a=function(a){return gb(this.Q,a)};f.o=function(a,b,c){return kb(this.Q,arguments)};f.createElement=function(a){return this.Q.createElement(a)};f.createTextNode=function(a){return this.Q.createTextNode(String(a))};f.appendChild=function(a,b){a.appendChild(b)};f.contains=nb;
+f.I=function(a){var b;(b="A"==a.tagName||"INPUT"==a.tagName||"TEXTAREA"==a.tagName||"SELECT"==a.tagName||"BUTTON"==a.tagName?!a.disabled&&(!rb(a)||sb(a)):tb(a))&&v?(a=n(a.getBoundingClientRect)?a.getBoundingClientRect():{height:a.offsetHeight,width:a.offsetWidth},a=null!=a&&0<a.height&&0<a.width):a=b;return a};var wb=function(a){wb[" "](a);return a};wb[" "]=aa;var xb=!v||v&&9<=ab,yb=!v||v&&9<=ab,zb=v&&!A("9");!x||A("528");w&&A("1.9b")||v&&A("8")||Va&&A("9.5")||x&&A("528");w&&!A("8")||v&&A("9");var Ab=function(){};Ab.prototype.Ub=!1;var B=function(a,b){this.type=a;this.currentTarget=this.target=b;this.defaultPrevented=this.$=!1;this.vb=!0};B.prototype.stopPropagation=function(){this.$=!0};B.prototype.preventDefault=function(){this.defaultPrevented=!0;this.vb=!1};var C=function(a,b){B.call(this,a?a.type:"");this.relatedTarget=this.currentTarget=this.target=null;this.charCode=this.keyCode=this.button=this.screenY=this.screenX=this.clientY=this.clientX=this.offsetY=this.offsetX=0;this.metaKey=this.shiftKey=this.altKey=this.ctrlKey=!1;this.state=null;this.jb=!1;this.O=null;if(a){var c=this.type=a.type;this.target=a.target||a.srcElement;this.currentTarget=b;var d=a.relatedTarget;if(d){if(w){var e;t:{try{wb(d.nodeName);e=!0;break t}catch(g){}e=!1}e||(d=null)}}else"mouseover"==
+c?d=a.fromElement:"mouseout"==c&&(d=a.toElement);this.relatedTarget=d;this.offsetX=x||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=x||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:a.pageY;this.screenX=a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;this.keyCode=a.keyCode||0;this.charCode=a.charCode||("keypress"==c?a.keyCode:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=
+a.metaKey;this.jb=y?a.metaKey:a.ctrlKey;this.state=a.state;this.O=a;a.defaultPrevented&&this.preventDefault()}};p(C,B);var Bb=[1,4,2],Cb=function(a){return xb?0==a.O.button:"click"==a.type?!0:!!(a.O.button&Bb[0])};C.prototype.stopPropagation=function(){C.e.stopPropagation.call(this);this.O.stopPropagation?this.O.stopPropagation():this.O.cancelBubble=!0};
+C.prototype.preventDefault=function(){C.e.preventDefault.call(this);var a=this.O;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,zb)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var Db="closure_listenable_"+(1E6*Math.random()|0),Eb=0;var Fb=function(a,b,c,d,e){this.W=a;this.Ea=null;this.src=b;this.type=c;this.Fa=!!d;this.Ha=e;this.key=++Eb;this.ha=this.Ga=!1},Gb=function(a){a.ha=!0;a.W=null;a.Ea=null;a.src=null;a.Ha=null};var D=function(a){this.src=a;this.m={};this.ua=0};D.prototype.add=function(a,b,c,d,e){var g=a.toString();a=this.m[g];a||(a=this.m[g]=[],this.ua++);var h=Hb(a,b,d,e);-1<h?(b=a[h],c||(b.Ga=!1)):(b=new Fb(b,this.src,g,!!d,e),b.Ga=c,a.push(b));return b};D.prototype.remove=function(a,b,c,d){a=a.toString();if(!(a in this.m))return!1;var e=this.m[a];b=Hb(e,b,c,d);return-1<b?(Gb(e[b]),q(null!=e.length),r.splice.call(e,b,1),0==e.length&&(delete this.m[a],this.ua--),!0):!1};
+var Ib=function(a,b){var c=b.type;if(!(c in a.m))return!1;var d=Ea(a.m[c],b);d&&(Gb(b),0==a.m[c].length&&(delete a.m[c],a.ua--));return d};D.prototype.ab=function(a){a=a&&a.toString();var b=0,c;for(c in this.m)if(!a||c==a){for(var d=this.m[c],e=0;e<d.length;e++)++b,Gb(d[e]);delete this.m[c];this.ua--}return b};D.prototype.wa=function(a,b,c,d){a=this.m[a.toString()];var e=-1;a&&(e=Hb(a,b,c,d));return-1<e?a[e]:null};
+var Hb=function(a,b,c,d){for(var e=0;e<a.length;++e){var g=a[e];if(!g.ha&&g.W==b&&g.Fa==!!c&&g.Ha==d)return e}return-1};var Jb="closure_lm_"+(1E6*Math.random()|0),Kb={},Lb=0,E=function(a,b,c,d,e){if(da(b)){for(var g=0;g<b.length;g++)E(a,b[g],c,d,e);return null}c=Mb(c);if(a&&a[Db])a=a.c(b,c,d,e);else{if(!b)throw Error("Invalid event type");var g=!!d,h=Nb(a);h||(a[Jb]=h=new D(a));c=h.add(b,c,!1,d,e);c.Ea||(d=Ob(),c.Ea=d,d.src=a,d.W=c,a.addEventListener?a.addEventListener(b.toString(),d,g):a.attachEvent(Pb(b.toString()),d),Lb++);a=c}return a},Ob=function(){var a=Qb,b=yb?function(c){return a.call(b.src,b.W,c)}:function(c){c=
+a.call(b.src,b.W,c);if(!c)return c};return b},Rb=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)Rb(a,b[g],c,d,e);else c=Mb(c),a&&a[Db]?a.u(b,c,d,e):a&&(a=Nb(a))&&(b=a.wa(b,c,!!d,e))&&F(b)},F=function(a){if("number"==typeof a||!a||a.ha)return!1;var b=a.src;if(b&&b[Db])return Ib(b.Y,a);var c=a.type,d=a.Ea;b.removeEventListener?b.removeEventListener(c,d,a.Fa):b.detachEvent&&b.detachEvent(Pb(c),d);Lb--;(c=Nb(b))?(Ib(c,a),0==c.ua&&(c.src=null,b[Jb]=null)):Gb(a);return!0},Pb=function(a){return a in
+Kb?Kb[a]:Kb[a]="on"+a},Tb=function(a,b,c,d){var e=1;if(a=Nb(a))if(b=a.m[b.toString()])for(b=b.concat(),a=0;a<b.length;a++){var g=b[a];g&&g.Fa==c&&!g.ha&&(e&=!1!==Sb(g,d))}return Boolean(e)},Sb=function(a,b){var c=a.W,d=a.Ha||a.src;a.Ga&&F(a);return c.call(d,b)},Qb=function(a,b){if(a.ha)return!0;if(!yb){var c;if(!(c=b))t:{c=["window","event"];for(var d=l,e;e=c.shift();)if(null!=d[e])d=d[e];else{c=null;break t}c=d}e=c;c=new C(e,this);d=!0;if(!(0>e.keyCode||void 0!=e.returnValue)){t:{var g=!1;if(0==
+e.keyCode)try{e.keyCode=-1;break t}catch(h){g=!0}if(g||void 0==e.returnValue)e.returnValue=!0}e=[];for(g=c.currentTarget;g;g=g.parentNode)e.push(g);for(var g=a.type,k=e.length-1;!c.$&&0<=k;k--)c.currentTarget=e[k],d&=Tb(e[k],g,!0,c);for(k=0;!c.$&&k<e.length;k++)c.currentTarget=e[k],d&=Tb(e[k],g,!1,c)}return d}return Sb(a,new C(b,this))},Nb=function(a){a=a[Jb];return a instanceof D?a:null},Ub="__closure_events_fn_"+(1E9*Math.random()>>>0),Mb=function(a){q(a,"Listener can not be null.");if(n(a))return a;
+q(a.handleEvent,"An object listener must have handleEvent method.");a[Ub]||(a[Ub]=function(b){return a.handleEvent(b)});return a[Ub]};var G=function(a){this.Db=a;this.Ma={}};p(G,Ab);var Vb=[];G.prototype.c=function(a,b,c,d){da(b)||(b&&(Vb[0]=b.toString()),b=Vb);for(var e=0;e<b.length;e++){var g=E(a,b[e],c||this.handleEvent,d||!1,this.Db||this);if(!g)break;this.Ma[g.key]=g}return this};G.prototype.u=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)this.u(a,b[g],c,d,e);else c=c||this.handleEvent,e=e||this.Db||this,c=Mb(c),d=!!d,b=a&&a[Db]?a.wa(b,c,d,e):a?(a=Nb(a))?a.wa(b,c,d,e):null:null,b&&(F(b),delete this.Ma[b.key]);return this};
+G.prototype.ab=function(){Ma(this.Ma,F);this.Ma={}};G.prototype.handleEvent=function(){throw Error("EventHandler.handleEvent not implemented");};var H=function(){this.Y=new D(this);this.dc=this;this.mb=null};p(H,Ab);H.prototype[Db]=!0;f=H.prototype;f.gb=function(a){this.mb=a};f.addEventListener=function(a,b,c,d){E(this,a,b,c,d)};f.removeEventListener=function(a,b,c,d){Rb(this,a,b,c,d)};
+f.dispatchEvent=function(a){Wb(this);var b,c=this.mb;if(c){b=[];for(var d=1;c;c=c.mb)b.push(c),q(1E3>++d,"infinite loop")}c=this.dc;d=a.type||a;if(m(a))a=new B(a,c);else if(a instanceof B)a.target=a.target||c;else{var e=a;a=new B(d,c);Ra(a,e)}var e=!0,g;if(b)for(var h=b.length-1;!a.$&&0<=h;h--)g=a.currentTarget=b[h],e=Xb(g,d,!0,a)&&e;a.$||(g=a.currentTarget=c,e=Xb(g,d,!0,a)&&e,a.$||(e=Xb(g,d,!1,a)&&e));if(b)for(h=0;!a.$&&h<b.length;h++)g=a.currentTarget=b[h],e=Xb(g,d,!1,a)&&e;return e};
+f.c=function(a,b,c,d){Wb(this);return this.Y.add(String(a),b,!1,c,d)};f.u=function(a,b,c,d){return this.Y.remove(String(a),b,c,d)};var Xb=function(a,b,c,d){b=a.Y.m[String(b)];if(!b)return!0;b=b.concat();for(var e=!0,g=0;g<b.length;++g){var h=b[g];if(h&&!h.ha&&h.Fa==c){var k=h.W,I=h.Ha||h.src;h.Ga&&Ib(a.Y,h);e=!1!==k.call(I,d)&&e}}return e&&0!=d.vb};H.prototype.wa=function(a,b,c,d){return this.Y.wa(String(a),b,c,d)};var Wb=function(a){q(a.Y,"Event target is not initialized. Did you call the superclass (goog.events.EventTarget) constructor?")};var J=function(a,b){a.style.display=b?"":"none"},Yb=w?"MozUserSelect":x?"WebkitUserSelect":null,Zb=function(a,b,c){c=c?null:a.getElementsByTagName("*");if(Yb){if(b=b?"none":"",a.style[Yb]=b,c){a=0;for(var d;d=c[a];a++)d.style[Yb]=b}}else if(v||Va)if(b=b?"on":"",a.setAttribute("unselectable",b),c)for(a=0;d=c[a];a++)d.setAttribute("unselectable",b)};var $b=function(){};ba($b);$b.prototype.gc=0;var L=function(a){H.call(this);this.A=a||fb();this.Ca=ac;this.da=null;this.f=!1;this.d=null;this.pa=void 0;this.F=this.q=this.p=null};p(L,H);L.prototype.fc=$b.ia();
+var ac=null,bc=function(a,b){switch(a){case 1:return b?"disable":"enable";case 2:return b?"highlight":"unhighlight";case 4:return b?"activate":"deactivate";case 8:return b?"select":"unselect";case 16:return b?"check":"uncheck";case 32:return b?"focus":"blur";case 64:return b?"open":"close"}throw Error("Invalid component state");},cc=function(a){return a.da||(a.da=":"+(a.fc.gc++).toString(36))},dc=function(a,b){if(a.p&&a.p.F){var c=a.p.F,d=a.da;d in c&&delete c[d];Oa(a.p.F,b,a)}a.da=b};
+L.prototype.a=function(){return this.d};var ec=function(a){a.pa||(a.pa=new G(a));return a.pa},gc=function(a,b){if(a==b)throw Error("Unable to set parent component");if(b&&a.p&&a.da&&fc(a.p,a.da)&&a.p!=b)throw Error("Unable to set parent component");a.p=b;L.e.gb.call(a,b)};f=L.prototype;f.getParent=function(){return this.p};f.gb=function(a){if(this.p&&this.p!=a)throw Error("Method not supported");L.e.gb.call(this,a)};f.lb=function(){return this.A};f.o=function(){this.d=this.A.createElement("div")};
+f.K=function(a){if(this.f)throw Error("Component already rendered");if(a&&this.X(a)){var b=eb(a);this.A&&this.A.Q==b||(this.A=fb(a));this.Za(a);this.D()}else throw Error("Invalid element to decorate");};f.X=function(){return!0};f.Za=function(a){this.d=a};f.D=function(){this.f=!0;hc(this,function(a){!a.f&&a.a()&&a.D()})};f.ba=function(){hc(this,function(a){a.f&&a.ba()});this.pa&&this.pa.ab();this.f=!1};f.Da=function(a,b){this.Va(a,ic(this),b)};
+f.Va=function(a,b,c){q(!!a,"Provided element must not be null.");if(a.f&&(c||!this.f))throw Error("Component already rendered");if(0>b||b>ic(this))throw Error("Child component index out of bounds");this.F&&this.q||(this.F={},this.q=[]);if(a.getParent()==this){var d=cc(a);this.F[d]=a;Ea(this.q,a)}else Oa(this.F,cc(a),a);gc(a,this);Ha(this.q,b,0,a);if(a.f&&this.f&&a.getParent()==this)c=this.C(),c.insertBefore(a.a(),c.childNodes[b]||null);else if(c){this.d||this.o();c=M(this,b+1);b=this.C();c=c?c.d:
+null;if(a.f)throw Error("Component already rendered");a.d||a.o();b?b.insertBefore(a.d,c||null):a.A.Q.body.appendChild(a.d);a.p&&!a.p.f||a.D()}else this.f&&!a.f&&a.d&&a.d.parentNode&&1==a.d.parentNode.nodeType&&a.D()};f.C=function(){return this.d};
+var jc=function(a){if(null==a.Ca){var b=a.f?a.d:a.A.Q.body,c;t:{c=eb(b);if(c.defaultView&&c.defaultView.getComputedStyle&&(c=c.defaultView.getComputedStyle(b,null))){c=c.direction||c.getPropertyValue("direction")||"";break t}c=""}a.Ca="rtl"==(c||(b.currentStyle?b.currentStyle.direction:null)||b.style&&b.style.direction)}return a.Ca};L.prototype.ra=function(a){if(this.f)throw Error("Component already rendered");this.Ca=a};
+var ic=function(a){return a.q?a.q.length:0},fc=function(a,b){var c;a.F&&b?(c=a.F,c=(b in c?c[b]:void 0)||null):c=null;return c},M=function(a,b){return a.q?a.q[b]||null:null},hc=function(a,b,c){a.q&&s(a.q,b,c)},kc=function(a,b){return a.q&&b?Ba(a.q,b):-1};
+L.prototype.removeChild=function(a,b){if(a){var c=m(a)?a:cc(a);a=fc(this,c);if(c&&a){var d=this.F;c in d&&delete d[c];Ea(this.q,a);b&&(a.ba(),a.d&&(c=a.d)&&c.parentNode&&c.parentNode.removeChild(c));gc(a,null)}}if(!a)throw Error("Child is not in parent component");return a};var lc,mc={nc:"activedescendant",sc:"atomic",tc:"autocomplete",vc:"busy",yc:"checked",Dc:"controls",Fc:"describedby",Ic:"disabled",Kc:"dropeffect",Lc:"expanded",Mc:"flowto",Oc:"grabbed",Sc:"haspopup",Uc:"hidden",Wc:"invalid",Xc:"label",Yc:"labelledby",Zc:"level",dd:"live",od:"multiline",pd:"multiselectable",td:"orientation",ud:"owns",vd:"posinset",xd:"pressed",Bd:"readonly",Dd:"relevant",Ed:"required",Kd:"selected",Md:"setsize",Od:"sort",ae:"valuemax",be:"valuemin",ce:"valuenow",de:"valuetext"};var nc={oc:"alert",pc:"alertdialog",qc:"application",rc:"article",uc:"banner",wc:"button",xc:"checkbox",zc:"columnheader",Ac:"combobox",Bc:"complementary",Cc:"contentinfo",Ec:"definition",Gc:"dialog",Hc:"directory",Jc:"document",Nc:"form",Pc:"grid",Qc:"gridcell",Rc:"group",Tc:"heading",Vc:"img",$c:"link",ad:"list",bd:"listbox",cd:"listitem",ed:"log",fd:"main",gd:"marquee",hd:"math",jd:"menu",kd:"menubar",ld:"menuitem",md:"menuitemcheckbox",nd:"menuitemradio",qd:"navigation",rd:"note",sd:"option",
+wd:"presentation",yd:"progressbar",zd:"radio",Ad:"radiogroup",Cd:"region",Fd:"row",Gd:"rowgroup",Hd:"rowheader",Id:"scrollbar",Jd:"search",Ld:"separator",Nd:"slider",Pd:"spinbutton",Qd:"status",Rd:"tab",Sd:"tablist",Td:"tabpanel",Ud:"textbox",Vd:"timer",Wd:"toolbar",Xd:"tooltip",Yd:"tree",Zd:"treegrid",$d:"treeitem"};var oc=function(a,b){b?(q(Na(nc,b),"No such ARIA role "+b),a.setAttribute("role",b)):a.removeAttribute("role")},qc=function(a,b,c){ea(c)&&(c=c.join(" "));var d=pc(b);""===c||void 0==c?(lc||(lc={atomic:!1,autocomplete:"none",dropeffect:"none",haspopup:!1,live:"off",multiline:!1,multiselectable:!1,orientation:"vertical",readonly:!1,relevant:"additions text",required:!1,sort:"none",busy:!1,disabled:!1,hidden:!1,invalid:"false"}),c=lc,b in c?a.setAttribute(d,c[b]):a.removeAttribute(d)):a.setAttribute(d,
+c)},pc=function(a){q(a,"ARIA attribute cannot be empty.");q(Na(mc,a),"No such ARIA attribute "+a);return"aria-"+a};var tc=function(a,b,c,d,e){if(!(v||x&&A("525")))return!0;if(y&&e)return rc(a);if(e&&!d)return!1;"number"==typeof b&&(b=sc(b));if(!c&&(17==b||18==b||y&&91==b))return!1;if(x&&d&&c)switch(a){case 220:case 219:case 221:case 192:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:return!1}if(v&&d&&b==a)return!1;switch(a){case 13:return!0;case 27:return!x}return rc(a)},rc=function(a){if(48<=a&&57>=a||96<=a&&106>=a||65<=a&&90>=a||x&&0==a)return!0;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 59:case 189:case 187:case 61:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return!0;
+default:return!1}},sc=function(a){if(w)a=uc(a);else if(y&&x)t:switch(a){case 93:a=91;break t}return a},uc=function(a){switch(a){case 61:return 187;case 59:return 186;case 173:return 189;case 224:return 91;case 0:return 224;default:return a}};var N=function(a,b){H.call(this);a&&vc(this,a,b)};p(N,H);f=N.prototype;f.d=null;f.Ia=null;f.Ya=null;f.Ja=null;f.r=-1;f.N=-1;f.kb=!1;
+var wc={3:13,12:144,63232:38,63233:40,63234:37,63235:39,63236:112,63237:113,63238:114,63239:115,63240:116,63241:117,63242:118,63243:119,63244:120,63245:121,63246:122,63247:123,63248:44,63272:46,63273:36,63275:35,63276:33,63277:34,63289:144,63302:45},xc={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},yc=v||x&&A("525"),zc=y&&w;
+N.prototype.Sb=function(a){x&&(17==this.r&&!a.ctrlKey||18==this.r&&!a.altKey||y&&91==this.r&&!a.metaKey)&&(this.N=this.r=-1);-1==this.r&&(a.ctrlKey&&17!=a.keyCode?this.r=17:a.altKey&&18!=a.keyCode?this.r=18:a.metaKey&&91!=a.keyCode&&(this.r=91));yc&&!tc(a.keyCode,this.r,a.shiftKey,a.ctrlKey,a.altKey)?this.handleEvent(a):(this.N=sc(a.keyCode),zc&&(this.kb=a.altKey))};N.prototype.Tb=function(a){this.N=this.r=-1;this.kb=a.altKey};
+N.prototype.handleEvent=function(a){var b=a.O,c,d,e=b.altKey;v&&"keypress"==a.type?(c=this.N,d=13!=c&&27!=c?b.keyCode:0):x&&"keypress"==a.type?(c=this.N,d=0<=b.charCode&&63232>b.charCode&&rc(c)?b.charCode:0):Va?(c=this.N,d=rc(c)?b.keyCode:0):(c=b.keyCode||this.N,d=b.charCode||0,zc&&(e=this.kb),y&&63==d&&224==c&&(c=191));var g=c=sc(c),h=b.keyIdentifier;c?63232<=c&&c in wc?g=wc[c]:25==c&&a.shiftKey&&(g=9):h&&h in xc&&(g=xc[h]);a=g==this.r;this.r=g;b=new Ac(g,d,a,b);b.altKey=e;this.dispatchEvent(b)};
+N.prototype.a=function(){return this.d};var vc=function(a,b,c){a.Ja&&a.detach();a.d=b;a.Ia=E(a.d,"keypress",a,c);a.Ya=E(a.d,"keydown",a.Sb,c,a);a.Ja=E(a.d,"keyup",a.Tb,c,a)};N.prototype.detach=function(){this.Ia&&(F(this.Ia),F(this.Ya),F(this.Ja),this.Ja=this.Ya=this.Ia=null);this.d=null;this.N=this.r=-1};var Ac=function(a,b,c,d){C.call(this,d);this.type="key";this.keyCode=a;this.charCode=b;this.repeat=c};p(Ac,C);var O=function(a){if(a.classList)return a.classList;a=a.className;return m(a)&&a.match(/\S+/g)||[]},Bc=function(a,b){return a.classList?a.classList.contains(b):t(O(a),b)},Cc=function(a,b){a.classList?a.classList.add(b):Bc(a,b)||(a.className+=0<a.className.length?" "+b:b)},Dc=function(a,b){if(a.classList)s(b,function(b){Cc(a,b)});else{var c={};s(O(a),function(a){c[a]=!0});s(b,function(a){c[a]=!0});a.className="";for(var d in c)a.className+=0<a.className.length?" "+d:d}},Ec=function(a,b){a.classList?
+a.classList.remove(b):Bc(a,b)&&(a.className=Ca(O(a),function(a){return a!=b}).join(" "))},Fc=function(a,b){a.classList?s(b,function(b){Ec(a,b)}):a.className=Ca(O(a),function(a){return!t(b,a)}).join(" ")};var Hc=function(a,b){if(!a)throw Error("Invalid class name "+a);if(!n(b))throw Error("Invalid decorator function "+b);Gc[a]=b},Ic={},Gc={};var Q=function(a){this.Gb=a};ba(Q);Q.prototype.fa=function(){return this.Gb};var Jc=function(a,b){a&&(a.tabIndex=b?0:-1)};f=Q.prototype;f.o=function(a){return a.lb().o("div",this.ta(a).join(" "))};f.C=function(a){return a};f.X=function(a){return"DIV"==a.tagName};f.K=function(a,b){b.id&&dc(a,b.id);var c=this.v(),d=!1,e=O(b);e&&s(e,function(b){b==c?d=!0:b&&this.bb(a,b,c)},this);d||Cc(b,c);Kc(a,this.C(b));return b};
+f.bb=function(a,b,c){b==c+"-disabled"?a.ca(!1):b==c+"-horizontal"?Lc(a,"horizontal"):b==c+"-vertical"&&Lc(a,"vertical")};var Kc=function(a,b){if(b)for(var c=b.firstChild,d;c&&c.parentNode==b;){d=c.nextSibling;if(1==c.nodeType){var e;t:{var g=c;e=void 0;q(g);for(var g=O(g),h=0,k=g.length;h<k;h++)if(e=g[h],e=e in Gc?Gc[e]():null)break t;e=null}e&&(e.d=c,a.isEnabled()||e.ca(!1),a.Da(e),e.K(c))}else c.nodeValue&&""!=oa(c.nodeValue)||b.removeChild(c);c=d}};
+Q.prototype.Oa=function(a){a=a.a();q(a,"The container DOM element cannot be null.");Zb(a,!0,w);v&&(a.hideFocus=!0);var b=this.fa();b&&oc(a,b)};Q.prototype.j=function(a){return a.a()};Q.prototype.v=function(){return"goog-container"};Q.prototype.ta=function(a){var b=this.v(),c=[b,"horizontal"==a.L?b+"-horizontal":b+"-vertical"];a.isEnabled()||c.push(b+"-disabled");return c};var R=function(){},Mc;ba(R);var Nc={button:"pressed",checkbox:"checked",menuitem:"selected",menuitemcheckbox:"checked",menuitemradio:"checked",radio:"checked",tab:"selected",treeitem:"selected"};f=R.prototype;f.fa=function(){};f.o=function(a){var b=a.lb().o("div",this.ta(a).join(" "),a.Ba);Oc(a,b);return b};f.C=function(a){return a};f.sa=function(a,b,c){if(a=a.a?a.a():a){var d=[b];v&&!A("7")&&(d=Pc(O(a),b),d.push(b));(c?Dc:Fc)(a,d)}};f.X=function(){return!0};
+f.K=function(a,b){b.id&&dc(a,b.id);var c=this.C(b);c&&c.firstChild?Qc(a,c.firstChild.nextSibling?Fa(c.childNodes):c.firstChild):a.Ba=null;var d=0,e=this.v(),g=this.v(),h=!1,k=!1,I=!1,z=Fa(O(b));s(z,function(a){h||a!=e?k||a!=g?d|=Rc(this,a):k=!0:(h=!0,g==e&&(k=!0));1==Rc(this,a)&&tb(c)&&ub(c,!1)},this);a.g=d;h||(z.push(e),g==e&&(k=!0));k||z.push(g);var K=a.G;K&&z.push.apply(z,K);if(v&&!A("7")){var P=Pc(z);0<P.length&&(z.push.apply(z,P),I=!0)}if(!h||!k||K||I)b.className=z.join(" ");Oc(a,b);return b};
+f.Oa=function(a){jc(a)&&this.ra(a.a(),!0);a.isEnabled()&&this.oa(a,a.s())};var Sc=function(a,b,c){if(a=c||a.fa())q(b,"The element passed as a first parameter cannot be null."),c=b.getAttribute("role")||null,a!=c&&oc(b,a)},Oc=function(a,b){q(a);q(b);a.s()||qc(b,"hidden",!a.s());a.isEnabled()||Tc(b,1,!a.isEnabled());a.l&8&&Tc(b,8,!!(a.g&8));a.l&16&&Tc(b,16,!!(a.g&16));a.l&64&&Tc(b,64,!!(a.g&64))};f=R.prototype;f.za=function(a,b){Zb(a,!b,!v&&!Va)};f.ra=function(a,b){this.sa(a,this.v()+"-rtl",b)};
+f.I=function(a){var b;return a.l&32&&(b=a.j())?tb(b):!1};f.oa=function(a,b){var c;if(a.l&32&&(c=a.j())){if(!b&&a.g&32){try{c.blur()}catch(d){}a.g&32&&a.ma(null)}tb(c)!=b&&ub(c,b)}};f.ka=function(a,b){J(a,b);a&&qc(a,"hidden",!b)};f.t=function(a,b,c){var d=a.a();if(d){var e=Uc(this,b);e&&this.sa(a,e,c);Tc(d,b,c)}};
+var Tc=function(a,b,c){Mc||(Mc={1:"disabled",8:"selected",16:"checked",64:"expanded"});q(a,"The element passed as a first parameter cannot be null.");b=Mc[b];var d=a.getAttribute("role")||null;d&&(d=Nc[d]||b,b="checked"==b||"selected"==b?d:b);b&&qc(a,b,c)};R.prototype.j=function(a){return a.a()};R.prototype.v=function(){return"goog-control"};
+R.prototype.ta=function(a){var b=this.v(),c=[b],d=this.v();d!=b&&c.push(d);b=a.g;for(d=[];b;){var e=b&-b;d.push(Uc(this,e));b&=~e}c.push.apply(c,d);(a=a.G)&&c.push.apply(c,a);v&&!A("7")&&c.push.apply(c,Pc(c));return c};
+var Pc=function(a,b){var c=[];b&&(a=a.concat([b]));s([],function(d){!Da(d,ja(t,a))||b&&!t(d,b)||c.push(d.join("_"))});return c},Uc=function(a,b){a.Na||Vc(a);return a.Na[b]},Rc=function(a,b){a.Fb||(a.Na||Vc(a),a.Fb=Pa(a.Na));var c=parseInt(a.Fb[b],10);return isNaN(c)?0:c},Vc=function(a){var b=a.v(),c=b.replace(/\xa0|\s/g," ");q(-1==c.indexOf(" "),"ControlRenderer has an invalid css class: '"+b+"'");a.Na={1:b+"-disabled",2:b+"-hover",4:b+"-active",8:b+"-selected",16:b+"-checked",32:b+"-focused",64:b+
+"-open"}};var S=function(a,b,c){L.call(this,c);if(!b){b=this.constructor;for(var d;b;){d=ia(b);if(d=Ic[d])break;b=b.e?b.e.constructor:null}b=d?n(d.ia)?d.ia():new d:null}this.b=b;this.Ba=void 0!==a?a:null};p(S,L);f=S.prototype;f.Ba=null;f.g=0;f.l=39;f.ec=255;f.T=0;f.n=!0;f.G=null;f.Z=!0;f.xa=!1;f.rb=null;f.pb=function(){return this.Z};f.Pa=function(a){this.f&&a!=this.Z&&Wc(this,a);this.Z=a};f.j=function(){return this.b.j(this)};f.ya=function(){return this.ga||(this.ga=new N)};f.zb=function(){return this.b};
+f.sa=function(a,b){b?a&&(this.G?t(this.G,a)||this.G.push(a):this.G=[a],this.b.sa(this,a,!0)):a&&this.G&&Ea(this.G,a)&&(0==this.G.length&&(this.G=null),this.b.sa(this,a,!1))};f.o=function(){var a=this.b.o(this);this.d=a;Sc(this.b,a,this.rb);this.xa||this.b.za(a,!1);this.s()||this.b.ka(a,!1)};f.C=function(){return this.b.C(this.a())};f.X=function(a){return this.b.X(a)};f.Za=function(a){this.d=a=this.b.K(this,a);Sc(this.b,a,this.rb);this.xa||this.b.za(a,!1);this.n="none"!=a.style.display};
+f.D=function(){S.e.D.call(this);this.b.Oa(this);if(this.l&-2&&(this.pb()&&Wc(this,!0),this.l&32)){var a=this.j();if(a){var b=this.ya();vc(b,a);ec(this).c(b,"key",this.J).c(a,"focus",this.na).c(a,"blur",this.ma)}}};
+var Wc=function(a,b){var c=ec(a),d=a.a();b?(c.c(d,"mouseover",a.Sa).c(d,"mousedown",a.la).c(d,"mouseup",a.Ta).c(d,"mouseout",a.Ra),a.qa!=aa&&c.c(d,"contextmenu",a.qa),v&&c.c(d,"dblclick",a.tb)):(c.u(d,"mouseover",a.Sa).u(d,"mousedown",a.la).u(d,"mouseup",a.Ta).u(d,"mouseout",a.Ra),a.qa!=aa&&c.u(d,"contextmenu",a.qa),v&&c.u(d,"dblclick",a.tb))};S.prototype.ba=function(){S.e.ba.call(this);this.ga&&this.ga.detach();this.s()&&this.isEnabled()&&this.b.oa(this,!1)};var Qc=function(a,b){a.Ba=b};f=S.prototype;
+f.ra=function(a){S.e.ra.call(this,a);var b=this.a();b&&this.b.ra(b,a)};f.za=function(a){this.xa=a;var b=this.a();b&&this.b.za(b,a)};f.s=function(){return this.n};f.ka=function(a,b){if(b||this.n!=a&&this.dispatchEvent(a?"show":"hide")){var c=this.a();c&&this.b.ka(c,a);this.isEnabled()&&this.b.oa(this,a);this.n=a;return!0}return!1};f.isEnabled=function(){return!(this.g&1)};
+f.ca=function(a){var b=this.getParent();b&&"function"==typeof b.isEnabled&&!b.isEnabled()||!T(this,1,!a)||(a||(this.setActive(!1),this.B(!1)),this.s()&&this.b.oa(this,a),this.t(1,!a,!0))};f.B=function(a){T(this,2,a)&&this.t(2,a)};f.setActive=function(a){T(this,4,a)&&this.t(4,a)};var Xc=function(a,b){T(a,8,b)&&a.t(8,b)},Yc=function(a,b){T(a,64,b)&&a.t(64,b)};S.prototype.t=function(a,b,c){c||1!=a?this.l&a&&b!=!!(this.g&a)&&(this.b.t(this,a,b),this.g=b?this.g|a:this.g&~a):this.ca(!b)};
+var Zc=function(a,b,c){if(a.f&&a.g&b&&!c)throw Error("Component already rendered");!c&&a.g&b&&a.t(b,!1);a.l=c?a.l|b:a.l&~b},U=function(a,b){return!!(a.ec&b)&&!!(a.l&b)},T=function(a,b,c){return!!(a.l&b)&&!!(a.g&b)!=c&&(!(a.T&b)||a.dispatchEvent(bc(b,c)))&&!a.Ub};f=S.prototype;f.Sa=function(a){(!a.relatedTarget||!nb(this.a(),a.relatedTarget))&&this.dispatchEvent("enter")&&this.isEnabled()&&U(this,2)&&this.B(!0)};
+f.Ra=function(a){a.relatedTarget&&nb(this.a(),a.relatedTarget)||!this.dispatchEvent("leave")||(U(this,4)&&this.setActive(!1),U(this,2)&&this.B(!1))};f.qa=aa;f.la=function(a){this.isEnabled()&&(U(this,2)&&this.B(!0),!Cb(a)||x&&y&&a.ctrlKey||(U(this,4)&&this.setActive(!0),this.b.I(this)&&this.j().focus()));this.xa||!Cb(a)||x&&y&&a.ctrlKey||a.preventDefault()};f.Ta=function(a){this.isEnabled()&&(U(this,2)&&this.B(!0),this.g&4&&$c(this,a)&&U(this,4)&&this.setActive(!1))};
+f.tb=function(a){this.isEnabled()&&$c(this,a)};var $c=function(a,b){if(U(a,16)){var c=!(a.g&16);T(a,16,c)&&a.t(16,c)}U(a,8)&&Xc(a,!0);U(a,64)&&Yc(a,!(a.g&64));c=new B("action",a);b&&(c.altKey=b.altKey,c.ctrlKey=b.ctrlKey,c.metaKey=b.metaKey,c.shiftKey=b.shiftKey,c.jb=b.jb);return a.dispatchEvent(c)};S.prototype.na=function(){U(this,32)&&T(this,32,!0)&&this.t(32,!0)};S.prototype.ma=function(){U(this,4)&&this.setActive(!1);U(this,32)&&T(this,32,!1)&&this.t(32,!1)};
+S.prototype.J=function(a){return this.s()&&this.isEnabled()&&this.nb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};S.prototype.nb=function(a){return 13==a.keyCode&&$c(this,a)};if(!n(S))throw Error("Invalid component class "+S);if(!n(R))throw Error("Invalid renderer class "+R);var ad=ia(S);Ic[ad]=R;Hc("goog-control",function(){return new S(null)});var V=function(a,b,c){L.call(this,c);this.b=b||Q.ia();this.L=a||"vertical"};p(V,L);f=V.prototype;f.ub=null;f.ga=null;f.b=null;f.L=null;f.n=!0;f.U=!0;f.$a=!0;f.h=-1;f.i=null;f.aa=!1;f.Rb=!1;f.Qb=!0;f.M=null;f.j=function(){return this.ub||this.b.j(this)};f.ya=function(){return this.ga||(this.ga=new N(this.j()))};f.zb=function(){return this.b};f.o=function(){this.d=this.b.o(this)};f.C=function(){return this.b.C(this.a())};f.X=function(a){return this.b.X(a)};
+f.Za=function(a){this.d=this.b.K(this,a);"none"==a.style.display&&(this.n=!1)};f.D=function(){V.e.D.call(this);hc(this,function(a){a.f&&bd(this,a)},this);var a=this.a();this.b.Oa(this);this.ka(this.n,!0);ec(this).c(this,"enter",this.Kb).c(this,"highlight",this.Lb).c(this,"unhighlight",this.Nb).c(this,"open",this.Mb).c(this,"close",this.Ib).c(a,"mousedown",this.la).c(eb(a),"mouseup",this.Jb).c(a,["mousedown","mouseup","mouseover","mouseout","contextmenu"],this.Hb);this.I()&&cd(this,!0)};
+var cd=function(a,b){var c=ec(a),d=a.j();b?c.c(d,"focus",a.na).c(d,"blur",a.ma).c(a.ya(),"key",a.J):c.u(d,"focus",a.na).u(d,"blur",a.ma).u(a.ya(),"key",a.J)};f=V.prototype;f.ba=function(){dd(this,-1);this.i&&Yc(this.i,!1);this.aa=!1;V.e.ba.call(this)};f.Kb=function(){return!0};
+f.Lb=function(a){var b=kc(this,a.target);if(-1<b&&b!=this.h){var c=M(this,this.h);c&&c.B(!1);this.h=b;c=M(this,this.h);this.aa&&c.setActive(!0);this.Qb&&this.i&&c!=this.i&&(c.l&64?Yc(c,!0):Yc(this.i,!1))}b=this.a();q(b,"The DOM element for the container cannot be null.");null!=a.target.a()&&qc(b,"activedescendant",a.target.a().id)};f.Nb=function(a){a.target==M(this,this.h)&&(this.h=-1);a=this.a();q(a,"The DOM element for the container cannot be null.");a.removeAttribute(pc("activedescendant"))};
+f.Mb=function(a){(a=a.target)&&a!=this.i&&a.getParent()==this&&(this.i&&Yc(this.i,!1),this.i=a)};f.Ib=function(a){a.target==this.i&&(this.i=null)};f.la=function(a){this.U&&(this.aa=!0);var b=this.j();b&&tb(b)?b.focus():a.preventDefault()};f.Jb=function(){this.aa=!1};
+f.Hb=function(a){var b;t:{b=a.target;if(this.M)for(var c=this.a();b&&b!==c;){var d=b.id;if(d in this.M){b=this.M[d];break t}b=b.parentNode}b=null}if(b)switch(a.type){case "mousedown":b.la(a);break;case "mouseup":b.Ta(a);break;case "mouseover":b.Sa(a);break;case "mouseout":b.Ra(a);break;case "contextmenu":b.qa(a)}};f.na=function(){};f.ma=function(){dd(this,-1);this.aa=!1;this.i&&Yc(this.i,!1)};
+f.J=function(a){return this.isEnabled()&&this.s()&&(0!=ic(this)||this.ub)&&this.nb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};
+f.nb=function(a){var b=M(this,this.h);if(b&&"function"==typeof b.J&&b.J(a)||this.i&&this.i!=b&&"function"==typeof this.i.J&&this.i.J(a))return!0;if(a.shiftKey||a.ctrlKey||a.metaKey||a.altKey)return!1;switch(a.keyCode){case 27:if(this.I())this.j().blur();else return!1;break;case 36:ed(this);break;case 35:fd(this);break;case 38:if("vertical"==this.L)gd(this);else return!1;break;case 37:if("horizontal"==this.L)jc(this)?hd(this):gd(this);else return!1;break;case 40:if("vertical"==this.L)hd(this);else return!1;
+break;case 39:if("horizontal"==this.L)jc(this)?gd(this):hd(this);else return!1;break;default:return!1}return!0};var bd=function(a,b){var c=b.a(),c=c.id||(c.id=cc(b));a.M||(a.M={});a.M[c]=b};V.prototype.Da=function(a,b){Aa(a,S,"The child of a container must be a control");V.e.Da.call(this,a,b)};V.prototype.Va=function(a,b,c){a.T|=2;a.T|=64;!this.I()&&this.Rb||Zc(a,32,!1);a.Pa(!1);V.e.Va.call(this,a,b,c);a.f&&this.f&&bd(this,a);b<=this.h&&this.h++};
+V.prototype.removeChild=function(a,b){if(a=m(a)?fc(this,a):a){var c=kc(this,a);-1!=c&&(c==this.h?(a.B(!1),this.h=-1):c<this.h&&this.h--);var d=a.a();d&&d.id&&this.M&&(c=this.M,d=d.id,d in c&&delete c[d])}a=V.e.removeChild.call(this,a,b);a.Pa(!0);return a};var Lc=function(a,b){if(a.a())throw Error("Component already rendered");a.L=b};f=V.prototype;f.s=function(){return this.n};
+f.ka=function(a,b){if(b||this.n!=a&&this.dispatchEvent(a?"show":"hide")){this.n=a;var c=this.a();c&&(J(c,a),this.I()&&Jc(this.j(),this.U&&this.n),b||this.dispatchEvent(this.n?"aftershow":"afterhide"));return!0}return!1};f.isEnabled=function(){return this.U};f.ca=function(a){this.U!=a&&this.dispatchEvent(a?"enable":"disable")&&(a?(this.U=!0,hc(this,function(a){a.wb?delete a.wb:a.ca(!0)})):(hc(this,function(a){a.isEnabled()?a.ca(!1):a.wb=!0}),this.aa=this.U=!1),this.I()&&Jc(this.j(),a&&this.n))};
+f.I=function(){return this.$a};f.oa=function(a){a!=this.$a&&this.f&&cd(this,a);this.$a=a;this.U&&this.n&&Jc(this.j(),a)};var dd=function(a,b){var c=M(a,b);c?c.B(!0):-1<a.h&&M(a,a.h).B(!1)};V.prototype.B=function(a){dd(this,kc(this,a))};
+var ed=function(a){id(a,function(a,c){return(a+1)%c},ic(a)-1)},fd=function(a){id(a,function(a,c){a--;return 0>a?c-1:a},0)},hd=function(a){id(a,function(a,c){return(a+1)%c},a.h)},gd=function(a){id(a,function(a,c){a--;return 0>a?c-1:a},a.h)},id=function(a,b,c){c=0>c?kc(a,a.i):c;var d=ic(a);c=b.call(a,c,d);for(var e=0;e<=d;){var g=M(a,c);if(g&&g.s()&&g.isEnabled()&&g.l&2){a.Wa(c);break}e++;c=b.call(a,c,d)}};V.prototype.Wa=function(a){dd(this,a)};var jd=function(){};p(jd,R);ba(jd);f=jd.prototype;f.v=function(){return"goog-tab"};f.fa=function(){return"tab"};f.o=function(a){var b=jd.e.o.call(this,a);(a=a.Ua())&&this.Xa(b,a);return b};f.K=function(a,b){b=jd.e.K.call(this,a,b);var c=this.Ua(b);c&&(a.sb=c);a.g&8&&(c=a.getParent())&&n(c.V)&&(a.t(8,!1),c.V(a));return b};f.Ua=function(a){return a.title||""};f.Xa=function(a,b){a&&(a.title=b||"")};var kd=function(a,b,c){S.call(this,a,b||jd.ia(),c);Zc(this,8,!0);this.T|=9};p(kd,S);kd.prototype.Ua=function(){return this.sb};kd.prototype.Xa=function(a){this.zb().Xa(this.a(),a);this.sb=a};Hc("goog-tab",function(){return new kd(null)});var W=function(){this.Gb="tablist"};p(W,Q);ba(W);W.prototype.v=function(){return"goog-tab-bar"};W.prototype.bb=function(a,b,c){this.Ab||(this.Ka||ld(this),this.Ab=Pa(this.Ka));var d=this.Ab[b];d?(Lc(a,md(d)),a.xb=d):W.e.bb.call(this,a,b,c)};W.prototype.ta=function(a){var b=W.e.ta.call(this,a);this.Ka||ld(this);b.push(this.Ka[a.xb]);return b};var ld=function(a){var b=a.v();a.Ka={top:b+"-top",bottom:b+"-bottom",start:b+"-start",end:b+"-end"}};var X=function(a,b,c){a=a||"top";Lc(this,md(a));this.xb=a;V.call(this,this.L,b||W.ia(),c);nd(this)};p(X,V);f=X.prototype;f.ac=!0;f.H=null;f.D=function(){X.e.D.call(this);nd(this)};f.removeChild=function(a,b){od(this,a);return X.e.removeChild.call(this,a,b)};f.Wa=function(a){X.e.Wa.call(this,a);this.ac&&this.V(M(this,a))};f.V=function(a){a?Xc(a,!0):this.H&&Xc(this.H,!1)};
+var od=function(a,b){if(b&&b==a.H){for(var c=kc(a,b),d=c-1;b=M(a,d);d--)if(b.s()&&b.isEnabled()){a.V(b);return}for(c+=1;b=M(a,c);c++)if(b.s()&&b.isEnabled()){a.V(b);return}a.V(null)}};f=X.prototype;f.Zb=function(a){this.H&&this.H!=a.target&&Xc(this.H,!1);this.H=a.target};f.$b=function(a){a.target==this.H&&(this.H=null)};f.Xb=function(a){od(this,a.target)};f.Yb=function(a){od(this,a.target)};f.na=function(){M(this,this.h)||this.B(this.H||M(this,0))};
+var nd=function(a){ec(a).c(a,"select",a.Zb).c(a,"unselect",a.$b).c(a,"disable",a.Xb).c(a,"hide",a.Yb)},md=function(a){return"start"==a||"end"==a?"vertical":"horizontal"};Hc("goog-tab-bar",function(){return new X});var Y=function(a,b,c,d,e){function g(a){a&&(a.tabIndex=0,oc(a,h.fa()),Cc(a,"goog-zippy-header"),pd(h,a),a&&h.Ob.c(a,"keydown",h.Pb))}H.call(this);this.A=e||fb();this.R=this.A.a(a)||null;this.Aa=this.A.a(d||null);this.ea=(this.Qa=n(b)?b:null)||!b?null:this.A.a(b);this.k=1==c;this.Ob=new G(this);this.qb=new G(this);var h=this;g(this.R);g(this.Aa);this.S(this.k)};p(Y,H);f=Y.prototype;f.Z=!0;f.fa=function(){return"tab"};f.C=function(){return this.ea};f.toggle=function(){this.S(!this.k)};
+f.S=function(a){this.ea?J(this.ea,a):a&&this.Qa&&(this.ea=this.Qa());this.ea&&Cc(this.ea,"goog-zippy-content");if(this.Aa)J(this.R,!a),J(this.Aa,a);else if(this.R){var b=this.R;a?Cc(b,"goog-zippy-expanded"):Ec(b,"goog-zippy-expanded");b=this.R;a?Ec(b,"goog-zippy-collapsed"):Cc(b,"goog-zippy-collapsed");qc(this.R,"expanded",a)}this.k=a;this.dispatchEvent(new qd("toggle",this))};f.pb=function(){return this.Z};f.Pa=function(a){this.Z!=a&&((this.Z=a)?(pd(this,this.R),pd(this,this.Aa)):this.qb.ab())};
+var pd=function(a,b){b&&a.qb.c(b,"click",a.bc)};Y.prototype.Pb=function(a){if(13==a.keyCode||32==a.keyCode)this.toggle(),this.dispatchEvent(new B("action",this)),a.preventDefault(),a.stopPropagation()};Y.prototype.bc=function(){this.toggle();this.dispatchEvent(new B("action",this))};var qd=function(a,b){B.call(this,a,b)};p(qd,B);var Z=function(a,b){this.ob=[];for(var c=hb("span","ae-zippy",gb(document,a)),d=0,e;e=c[d];d++){var g=e.parentNode.parentNode.parentNode;if(void 0!=g.nextElementSibling)g=g.nextElementSibling;else for(g=g.nextSibling;g&&1!=g.nodeType;)g=g.nextSibling;e=new Y(e,g,!1);this.ob.push(e)}this.hc=new rd(this.ob,gb(document,b))};Z.prototype.kc=function(){return this.hc};Z.prototype.lc=function(){return this.ob};
+var rd=function(a,b){this.va=a;if(this.va.length)for(var c=0,d;d=this.va[c];c++)E(d,"toggle",this.Wb,!1,this);this.La=0;this.k=!1;c="ae-toggle ae-plus ae-action";this.va.length||(c+=" ae-disabled");this.P=lb("span",{className:c},"Expand All");E(this.P,"click",this.Vb,!1,this);b&&b.appendChild(this.P)};rd.prototype.Vb=function(){this.va.length&&this.S(!this.k)};
+rd.prototype.Wb=function(a){a=a.currentTarget;this.La=a.k?this.La+1:this.La-1;a.k!=this.k&&(a.k?(this.k=!0,sd(this,!0)):0==this.La&&(this.k=!1,sd(this,!1)))};rd.prototype.S=function(a){this.k=a;a=0;for(var b;b=this.va[a];a++)b.k!=this.k&&b.S(this.k);sd(this)};
+var sd=function(a,b){(void 0!==b?b:a.k)?(La(a.P,"ae-plus"),Ja(a.P,"ae-minus"),ob(a.P,"Collapse All")):(La(a.P,"ae-minus"),Ja(a.P,"ae-plus"),ob(a.P,"Expand All"))},td=function(a){this.cc=a;this.Cb={};var b,c=lb("div",{},b=lb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),lb("div",{className:"goog-tab-bar-clear"}),a=lb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new X;d.K(b);E(d,"select",this.Bb,!1,this);E(d,"unselect",this.Bb,!1,this);
+b=0;for(var e;e=this.cc[b];b++)if(e=gb(document,"ae-stats-details-"+e)){var g=hb("h2",null,e)[0],h;h=g;var k=void 0;cb&&"innerText"in h?k=h.innerText.replace(/(\r\n|\r|\n)/g,"\n"):(k=[],vb(h,k,!0),k=k.join(""));k=k.replace(/ \xAD /g," ").replace(/\xAD/g,"");k=k.replace(/\u200B/g,"");cb||(k=k.replace(/ +/g," "));" "!=k&&(k=k.replace(/^\s*/,""));h=k;g&&g.parentNode&&g.parentNode.removeChild(g);g=new kd(h);this.Cb[ia(g)]=e;d.Da(g,!0);a.appendChild(e);0==b?d.V(g):J(e,!1)}gb(document,"bd").appendChild(c)};
+td.prototype.Bb=function(a){var b=this.Cb[ia(a.target)];J(b,"select"==a.type)};ka("ae.Stats.Details.Tabs",td);ka("goog.ui.Zippy",Y);Y.prototype.setExpanded=Y.prototype.S;ka("ae.Stats.MakeZippys",Z);Z.prototype.getExpandCollapse=Z.prototype.kc;Z.prototype.getZippys=Z.prototype.lc;rd.prototype.setExpanded=rd.prototype.S;var $=function(){this.cb=[];this.ib=[]},ud=[[5,.2,1],[6,.2,1.2],[5,.25,1.25],[6,.25,1.5],[4,.5,2],[5,.5,2.5],[6,.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],vd=function(a){if(0>=a)return[2,.5,1];for(var b=1;1>a;)a*=10,b/=10;for(;10<=a;)a/=10,b*=10;for(var c=0;c<ud.length;c++)if(a<=ud[c][2])return[ud[c][0],ud[c][1]*b,ud[c][2]*b];return[5,2*b,10*b]};$.prototype.hb="stats/static/pix.gif";$.prototype.w="ae-stats-gantt-";$.prototype.fb=0;$.prototype.write=function(a){this.ib.push(a)};
+var wd=function(a,b,c,d){a.write('<tr class="'+a.w+'axisrow"><td width="20%"></td><td>');a.write('<div class="'+a.w+'axis">');for(var e=0;e<=b;e++)a.write('<img class="'+a.w+'tick" src="'+a.hb+'" alt="" '),a.write('style="left:'+e*c*d+'%"\n>'),a.write('<span class="'+a.w+'scale" style="left:'+e*c*d+'%">'),a.write("&nbsp;"+e*c+"</span>");a.write("</div></td></tr>\n")};
+$.prototype.jc=function(){this.ib=[];var a=vd(this.fb),b=a[0],c=a[1],a=100/a[2];this.write('<table class="'+this.w+'table">\n');wd(this,b,c,a);for(var d=0;d<this.cb.length;d++){var e=this.cb[d];this.write('<tr class="'+this.w+'datarow"><td width="20%">');0<e.label.length&&(0<e.ja.length&&this.write('<a class="'+this.w+'link" href="'+e.ja+'">'),this.write(e.label),0<e.ja.length&&this.write("</a>"));this.write("</td>\n<td>");this.write('<div class="'+this.w+'container">');0<e.ja.length&&this.write('<a class="'+
+this.w+'link" href="'+e.ja+'"\n>');this.write('<img class="'+this.w+'bar" src="'+this.hb+'" alt="" ');this.write('style="left:'+e.start*a+"%;width:"+e.duration*a+'%;min-width:1px"\n>');0<e.eb&&(this.write('<img class="'+this.w+'extra" src="'+this.hb+'" alt="" '),this.write('style="left:'+e.start*a+"%;width:"+e.eb*a+'%"\n>'));0<e.yb.length&&(this.write('<span class="'+this.w+'inline" style="left:'+(e.start+Math.max(e.duration,e.eb))*a+'%">&nbsp;'),this.write(e.yb),this.write("</span>"));0<e.ja.length&&
+this.write("</a>");this.write("</div></td></tr>\n")}wd(this,b,c,a);this.write("</table>\n");return this.ib.join("")};$.prototype.ic=function(a,b,c,d,e,g){this.fb=Math.max(this.fb,Math.max(b+c,b+d));this.cb.push({label:a,start:b,duration:c,eb:d,yb:e,ja:g})};ka("Gantt",$);$.prototype.add_bar=$.prototype.ic;$.prototype.draw=$.prototype.jc;})();
diff --git a/google/appengine/ext/datastore_admin/backup_handler.py b/google/appengine/ext/datastore_admin/backup_handler.py
index 849d1de..f74f60c 100644
--- a/google/appengine/ext/datastore_admin/backup_handler.py
+++ b/google/appengine/ext/datastore_admin/backup_handler.py
@@ -495,6 +495,9 @@
   else:
     raise BackupValidationError('Unknown filesystem "%s".' % filesystem)
 
+  backup_info = None
+  job_operation = None
+
   job_name = 'datastore_backup_%s_%%(kind)s' % re.sub(r'[^\w]', '_', backup)
   try:
     job_operation = utils.StartOperation('Backup: %s' % backup)
diff --git a/google/appengine/ext/datastore_admin/backup_pb2.py b/google/appengine/ext/datastore_admin/backup_pb2.py
index 5ee7f1d..8b2f4f9 100644
--- a/google/appengine/ext/datastore_admin/backup_pb2.py
+++ b/google/appengine/ext/datastore_admin/backup_pb2.py
@@ -161,6 +161,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=80,
   serialized_end=220,
 )
@@ -203,6 +205,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=222,
   serialized_end=303,
 )
@@ -252,6 +256,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=306,
   serialized_end=446,
 )
@@ -294,6 +300,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=547,
   serialized_end=725,
 )
@@ -335,6 +343,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=727,
   serialized_end=833,
 )
@@ -370,6 +380,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=449,
   serialized_end=1105,
 )
diff --git a/google/appengine/ext/datastore_admin/main.py b/google/appengine/ext/datastore_admin/main.py
index ace4411..d743dd1 100644
--- a/google/appengine/ext/datastore_admin/main.py
+++ b/google/appengine/ext/datastore_admin/main.py
@@ -41,9 +41,10 @@
 from google.appengine.ext.datastore_admin import copy_handler
 from google.appengine.ext.datastore_admin import delete_handler
 from google.appengine.ext.datastore_admin import utils
-from google.appengine.ext.db import stats
 from google.appengine.ext.db import metadata
+from google.appengine.ext.db import stats
 from google.appengine.ext.webapp import util
+from google.appengine.runtime import apiproxy_errors
 
 
 
@@ -278,7 +279,7 @@
           ReadFromKindIters(kind_iter_list)
       while kind_iter_list:
         ReadFromKindIters(kind_iter_list)
-    except datastore_errors.Timeout:
+    except (datastore_errors.Timeout, apiproxy_errors.DeadlineExceededError):
       more_kinds = True
       logging.warning('Failed to retrieve all kinds within deadline.')
     return sorted(kind_name_set), more_kinds
@@ -305,7 +306,7 @@
         kind_name = kind.kind_name
         if utils.IsKindNameVisible(kind_name):
           kind_names.append(kind_name)
-    except datastore_errors.Timeout:
+    except (datastore_errors.Timeout, apiproxy_errors.DeadlineExceededError):
       more_kinds = True
       logging.warning('Failed to retrieve all kinds within deadline.')
     return kind_names, more_kinds
diff --git a/google/appengine/ext/mapreduce/static/status.js b/google/appengine/ext/mapreduce/static/status.js
index cb38118..1b288c0 100644
--- a/google/appengine/ext/mapreduce/static/status.js
+++ b/google/appengine/ext/mapreduce/static/status.js
@@ -245,11 +245,18 @@
   return keys;
 }
 
-// Gets a local datestring from a UNIX timestamp in milliseconds.
-function getLocalTimestring(timestamp_ms) {
-  var when = new Date();
-  when.setTime(timestamp_ms);
-  return when.toLocaleString();
+// Convert milliseconds since the epoch to an ISO8601 datestring.
+// Consider using new Date().toISOString() instead (JavaScript 1.8+)
+function getIso8601String(timestamp_ms) {
+  var time = new Date();
+  time.setTime(timestamp_ms);
+  return '' +
+      time.getUTCFullYear() + '-' +
+      leftPadNumber(time.getUTCMonth() + 1, 2, '0') + '-' +
+      leftPadNumber(time.getUTCDate(), 2, '0') + 'T' +
+      leftPadNumber(time.getUTCHours(), 2, '0') + ':' +
+      leftPadNumber(time.getUTCMinutes(), 2, '0') + ':' +
+      leftPadNumber(time.getUTCSeconds(), 2, '0') + 'Z';
 }
 
 function leftPadNumber(number, minSize, paddingChar) {
@@ -328,7 +335,7 @@
     var activity = '' + job.active_shards + ' / ' + job.shards + ' shards';
     row.append($('<td>').text(activity))
 
-    row.append($('<td>').text(getLocalTimestring(job.start_timestamp_ms)));
+    row.append($('<td>').text(getIso8601String(job.start_timestamp_ms)));
 
     row.append($('<td>').text(getElapsedTimeString(
         job.start_timestamp_ms, job.updated_timestamp_ms)));
@@ -550,7 +557,7 @@
   $('<li>')
     .append($('<span class="param-key">').text('Start time'))
     .append($('<span>').text(': '))
-    .append($('<span class="param-value">').text(getLocalTimestring(
+    .append($('<span class="param-value">').text(getIso8601String(
           detail.start_timestamp_ms)))
     .appendTo(jobParams);
 
diff --git a/google/appengine/tools/adaptive_thread_pool.py b/google/appengine/tools/adaptive_thread_pool.py
index fac5885..3f730ec 100644
--- a/google/appengine/tools/adaptive_thread_pool.py
+++ b/google/appengine/tools/adaptive_thread_pool.py
@@ -14,10 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
-
 """Provides thread-pool-like functionality for workers accessing App Engine.
 
 The pool adapts to slow or timing out requests by reducing the number of
diff --git a/google/appengine/tools/api_server.py b/google/appengine/tools/api_server.py
index 8768d0d..3b51b04 100644
--- a/google/appengine/tools/api_server.py
+++ b/google/appengine/tools/api_server.py
@@ -14,10 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
-
 """Serves the stub App Engine APIs (e.g. memcache, datastore) over HTTP.
 
 The Remote API protocol is used for communication.
@@ -542,7 +538,7 @@
   parser.add_argument('--smtp_allow_tls',
                       action=boolean_action.BooleanAction,
                       const=True,
-                      default=False)
+                      default=True)
 
 
   parser.add_argument('--prospective_search_path', default=None)
diff --git a/google/appengine/tools/app_engine_web_xml_parser.py b/google/appengine/tools/app_engine_web_xml_parser.py
index edcee56..288ef5a 100644
--- a/google/appengine/tools/app_engine_web_xml_parser.py
+++ b/google/appengine/tools/app_engine_web_xml_parser.py
@@ -309,7 +309,7 @@
             node, 'disabled-rewriter')]
     self.app_engine_web_xml.pagespeed = pagespeed
 
-  def ProcessClassLoaderConfig(self, node):
+  def ProcessClassLoaderConfigNode(self, node):
     for node in xml_parser_utils.GetNodes(node, 'priority-specifier'):
       entry = PrioritySpecifierEntry()
       entry.filename = xml_parser_utils.GetAttribute(node, 'filename')
@@ -353,6 +353,29 @@
         return
       self.app_engine_web_xml.auto_id_policy = policy
 
+  def ProcessVmHealthCheckNode(self, node):
+    vm_health_check = VmHealthCheck()
+    for child in node:
+      tag = xml_parser_utils.GetTag(child)
+      if tag == 'enable-health-check':
+        vm_health_check.enable_health_check = (
+            xml_parser_utils.BooleanValue(child.text))
+      elif tag == 'host':
+        vm_health_check.host = child.text
+      elif tag in ('check-interval-sec', 'healthy-threshold',
+                   'restart-threshold', 'timeout-sec', 'unhealthy-threshold'):
+        text = child.text or ''
+        try:
+          value = self._PositiveInt(text)
+          setattr(vm_health_check, tag.replace('-', '_'), value)
+        except ValueError:
+          self.errors.append('value for %s must be a positive integer: "%s"' %
+                             (tag, text))
+      else:
+        self.errors.append(
+            'unrecognized element within <vm-health-check>: <%s>' % tag)
+    self.app_engine_web_xml.vm_health_check = vm_health_check
+
   def CheckScalingConstraints(self):
     """Checks that at most one type of scaling is enabled."""
     scaling_num = sum([x is not None for x in [
@@ -363,6 +386,25 @@
     if scaling_num > 1:
       self.errors.append('Cannot enable more than one type of scaling')
 
+  @staticmethod
+  def _PositiveInt(text):
+    """Parse the given text as a positive integer.
+
+    Args:
+      text: a string that should contain the decimal representation of a
+        positive integer.
+
+    Returns:
+      An int that is the parsed value.
+
+    Raises:
+      ValueError: if text cannot be parsed as a positive integer.
+    """
+    value = int(text)
+    if value > 0:
+      return value
+    raise ValueError('Not a positive integer: %s' % text)
+
 
 class AppEngineWebXml(ValueMixin):
   """Organizes and stores data from appengine-web.xml."""
@@ -380,6 +422,7 @@
     self.module = None
     self.system_properties = {}
     self.vm_settings = {}
+    self.vm_health_check = None
     self.env_variables = {}
     self.instance_class = None
     self.automatic_scaling = None
@@ -558,3 +601,8 @@
     self.pattern = pattern
     self.expiration = expiration
     self.http_headers = http_headers
+
+
+class VmHealthCheck(ValueMixin):
+  """Instances contain information about VM health check settings."""
+  pass
diff --git a/google/appengine/tools/appcfg.py b/google/appengine/tools/appcfg.py
index cb261c1..d77e83c 100644
--- a/google/appengine/tools/appcfg.py
+++ b/google/appengine/tools/appcfg.py
@@ -14,10 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
-
 """Tool for deploying apps to an app server.
 
 Currently, the application only uploads new appversions. To do this, it first
@@ -3624,7 +3620,8 @@
                       'ignoring --no_precompilation')
       self.options.precompilation = True
 
-    if appyaml.runtime.startswith('java'):
+    if (appyaml.runtime.startswith('java') or
+        appyaml.GetEffectiveRuntime() == 'dart'):
       self.options.precompilation = False
 
     if self.options.precompilation:
diff --git a/google/appengine/tools/appcfg_java.py b/google/appengine/tools/appcfg_java.py
index 6e264a3..6529a9f 100644
--- a/google/appengine/tools/appcfg_java.py
+++ b/google/appengine/tools/appcfg_java.py
@@ -139,7 +139,7 @@
 
       self.options.no_symlinks = True
 
-    java_home, exec_suffix = _JavaHomeAndSuffix()
+    java_home, exec_suffix = JavaHomeAndSuffix()
     self.java_command = os.path.join(java_home, 'bin', 'java' + exec_suffix)
     self.javac_command = os.path.join(java_home, 'bin', 'javac' + exec_suffix)
 
@@ -568,7 +568,7 @@
   return matches
 
 
-def _JavaHomeAndSuffix():
+def JavaHomeAndSuffix():
   """Find the directory that the JDK is installed in.
 
   The JDK install directory is expected to have a bin directory that contains
diff --git a/google/appengine/tools/appengine_rpc.py b/google/appengine/tools/appengine_rpc.py
index e1af55b..2f8c738 100644
--- a/google/appengine/tools/appengine_rpc.py
+++ b/google/appengine/tools/appengine_rpc.py
@@ -14,10 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
-
 """Tool for performing authenticated RPCs against App Engine."""
 
 
diff --git a/google/appengine/tools/appengine_rpc_httplib2.py b/google/appengine/tools/appengine_rpc_httplib2.py
index fee123b..2f8cece 100644
--- a/google/appengine/tools/appengine_rpc_httplib2.py
+++ b/google/appengine/tools/appengine_rpc_httplib2.py
@@ -14,10 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
-
 """Library with a variant of appengine_rpc using httplib2.
 
 The httplib2 module offers some of the features in appengine_rpc, with
diff --git a/google/appengine/tools/boolean_action.py b/google/appengine/tools/boolean_action.py
index 77a5527..809b3d1 100644
--- a/google/appengine/tools/boolean_action.py
+++ b/google/appengine/tools/boolean_action.py
@@ -14,10 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
-
 """Provides a flexible way of configuring Boolean flags using argparse.
 
 This action behaves like the "store_const" action but allows the flag to accept
diff --git a/google/appengine/tools/bulkload_client.py b/google/appengine/tools/bulkload_client.py
index 4b7e876..65273ce 100644
--- a/google/appengine/tools/bulkload_client.py
+++ b/google/appengine/tools/bulkload_client.py
@@ -14,9 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
 """Imports CSV data over HTTP.
 
 Usage:
diff --git a/google/appengine/tools/bulkloader.py b/google/appengine/tools/bulkloader.py
index 5920e41..07ae889 100644
--- a/google/appengine/tools/bulkloader.py
+++ b/google/appengine/tools/bulkloader.py
@@ -14,9 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
 """Imports data over HTTP.
 
 Usage:
diff --git a/google/appengine/tools/dev-channel-js.js b/google/appengine/tools/dev-channel-js.js
index e3f253e..727d347 100644
--- a/google/appengine/tools/dev-channel-js.js
+++ b/google/appengine/tools/dev-channel-js.js
@@ -1718,6 +1718,17 @@
 goog.object.setIfUndefined = function(obj, key, value) {
   return key in obj ? obj[key] : obj[key] = value;
 };
+goog.object.equals = function(a, b) {
+  if (!goog.array.equals(goog.object.getKeys(a), goog.object.getKeys(b))) {
+    return!1;
+  }
+  for (var k in a) {
+    if (a[k] !== b[k]) {
+      return!1;
+    }
+  }
+  return!0;
+};
 goog.object.clone = function(obj) {
   var res = {}, key;
   for (key in obj) {
@@ -2143,7 +2154,7 @@
   var doc = win.document, height = 0;
   if (doc) {
     var body = doc.body, docEl = doc.documentElement;
-    if (!body && !docEl) {
+    if (!docEl || !body) {
       return 0;
     }
     var vh = goog.dom.getViewportSize_(win).height;
@@ -3134,7 +3145,6 @@
   }
   return-1;
 };
-goog.events.listeners_ = {};
 goog.events.LISTENER_MAP_PROP_ = "closure_lm_" + (1E6 * Math.random() | 0);
 goog.events.onString_ = "on";
 goog.events.onStringMap_ = {};
@@ -3470,7 +3480,7 @@
       rv = !1 !== listenerFn.call(listenerHandler, eventObject) && rv;
     }
   }
-  return rv && !1 != eventObject.returnValue_;
+  return rv && 0 != eventObject.returnValue_;
 };
 goog.events.EventTarget.prototype.getListeners = function(type, capture) {
   return this.eventTargetListeners_.getListeners(String(type), capture);
@@ -5542,18 +5552,18 @@
 goog.Uri.prototype.ignoreCase_ = !1;
 goog.Uri.prototype.toString = function() {
   var out = [], scheme = this.getScheme();
-  scheme && out.push(goog.Uri.encodeSpecialChars_(scheme, goog.Uri.reDisallowedInSchemeOrUserInfo_), ":");
+  scheme && out.push(goog.Uri.encodeSpecialChars_(scheme, goog.Uri.reDisallowedInSchemeOrUserInfo_, !0), ":");
   var domain = this.getDomain();
   if (domain) {
     out.push("//");
     var userInfo = this.getUserInfo();
-    userInfo && out.push(goog.Uri.encodeSpecialChars_(userInfo, goog.Uri.reDisallowedInSchemeOrUserInfo_), "@");
-    out.push(goog.string.urlEncode(domain));
+    userInfo && out.push(goog.Uri.encodeSpecialChars_(userInfo, goog.Uri.reDisallowedInSchemeOrUserInfo_, !0), "@");
+    out.push(goog.Uri.removeDoubleEncoding_(goog.string.urlEncode(domain)));
     var port = this.getPort();
     null != port && out.push(":", String(port));
   }
   var path = this.getPath();
-  path && (this.hasDomain() && "/" != path.charAt(0) && out.push("/"), out.push(goog.Uri.encodeSpecialChars_(path, "/" == path.charAt(0) ? goog.Uri.reDisallowedInAbsolutePath_ : goog.Uri.reDisallowedInRelativePath_)));
+  path && (this.hasDomain() && "/" != path.charAt(0) && out.push("/"), out.push(goog.Uri.encodeSpecialChars_(path, "/" == path.charAt(0) ? goog.Uri.reDisallowedInAbsolutePath_ : goog.Uri.reDisallowedInRelativePath_, !0)));
   var query = this.getEncodedQuery();
   query && out.push("?", query);
   var fragment = this.getFragment();
@@ -5594,7 +5604,7 @@
 };
 goog.Uri.prototype.setScheme = function(newScheme, opt_decode) {
   this.enforceReadOnly();
-  if (this.scheme_ = opt_decode ? goog.Uri.decodeOrEmpty_(newScheme) : newScheme) {
+  if (this.scheme_ = opt_decode ? goog.Uri.decodeOrEmpty_(newScheme, !0) : newScheme) {
     this.scheme_ = this.scheme_.replace(/:$/, "");
   }
   return this;
@@ -5618,7 +5628,7 @@
 };
 goog.Uri.prototype.setDomain = function(newDomain, opt_decode) {
   this.enforceReadOnly();
-  this.domain_ = opt_decode ? goog.Uri.decodeOrEmpty_(newDomain) : newDomain;
+  this.domain_ = opt_decode ? goog.Uri.decodeOrEmpty_(newDomain, !0) : newDomain;
   return this;
 };
 goog.Uri.prototype.hasDomain = function() {
@@ -5648,7 +5658,7 @@
 };
 goog.Uri.prototype.setPath = function(newPath, opt_decode) {
   this.enforceReadOnly();
-  this.path_ = opt_decode ? goog.Uri.decodeOrEmpty_(newPath) : newPath;
+  this.path_ = opt_decode ? goog.Uri.decodeOrEmpty_(newPath, !0) : newPath;
   return this;
 };
 goog.Uri.prototype.hasPath = function() {
@@ -5737,16 +5747,24 @@
   }
   return path;
 };
-goog.Uri.decodeOrEmpty_ = function(val) {
-  return val ? decodeURIComponent(val) : "";
+goog.Uri.decodeOrEmpty_ = function(val, opt_preserveReserved) {
+  return val ? opt_preserveReserved ? decodeURI(val) : decodeURIComponent(val) : "";
 };
-goog.Uri.encodeSpecialChars_ = function(unescapedPart, extra) {
-  return goog.isString(unescapedPart) ? encodeURI(unescapedPart).replace(extra, goog.Uri.encodeChar_) : null;
+goog.Uri.encodeSpecialChars_ = function(unescapedPart, extra, opt_removeDoubleEncoding) {
+  if (goog.isString(unescapedPart)) {
+    var encoded = encodeURI(unescapedPart).replace(extra, goog.Uri.encodeChar_);
+    opt_removeDoubleEncoding && (encoded = goog.Uri.removeDoubleEncoding_(encoded));
+    return encoded;
+  }
+  return null;
 };
 goog.Uri.encodeChar_ = function(ch) {
   var n = ch.charCodeAt(0);
   return "%" + (n >> 4 & 15).toString(16) + (n & 15).toString(16);
 };
+goog.Uri.removeDoubleEncoding_ = function(doubleEncodedString) {
+  return doubleEncodedString.replace(/%25([0-9a-fA-F]{2})/g, "%$1");
+};
 goog.Uri.reDisallowedInSchemeOrUserInfo_ = /[#\/\?@]/g;
 goog.Uri.reDisallowedInRelativePath_ = /[\#\?:]/g;
 goog.Uri.reDisallowedInAbsolutePath_ = /[\#\?]/g;
diff --git a/google/appengine/tools/dev_appserver_apiserver.py b/google/appengine/tools/dev_appserver_apiserver.py
index 21ad1d3..34a8b0b 100644
--- a/google/appengine/tools/dev_appserver_apiserver.py
+++ b/google/appengine/tools/dev_appserver_apiserver.py
@@ -14,10 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
-
 """Helper CGI for Apiserver in the development app server.
 
 This is a fake apiserver proxy that does simple transforms on requests that
diff --git a/google/appengine/tools/dev_appserver_blobimage.py b/google/appengine/tools/dev_appserver_blobimage.py
index d155138..a8b3c79 100644
--- a/google/appengine/tools/dev_appserver_blobimage.py
+++ b/google/appengine/tools/dev_appserver_blobimage.py
@@ -14,10 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
-
 """Dispatcher for dynamic image serving requests.
 
 Classes:
diff --git a/google/appengine/tools/dev_appserver_blobstore.py b/google/appengine/tools/dev_appserver_blobstore.py
index 312925b..753565d 100644
--- a/google/appengine/tools/dev_appserver_blobstore.py
+++ b/google/appengine/tools/dev_appserver_blobstore.py
@@ -14,10 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
-
 """Blobstore support classes.
 
 Classes:
diff --git a/google/appengine/tools/dev_appserver_channel.py b/google/appengine/tools/dev_appserver_channel.py
index 2442f27..fb846bd 100644
--- a/google/appengine/tools/dev_appserver_channel.py
+++ b/google/appengine/tools/dev_appserver_channel.py
@@ -14,10 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
-
 """Channel support classes.
 
 Classes:
diff --git a/google/appengine/tools/dev_appserver_import_hook.py b/google/appengine/tools/dev_appserver_import_hook.py
index 5185f8d..61f3f05 100644
--- a/google/appengine/tools/dev_appserver_import_hook.py
+++ b/google/appengine/tools/dev_appserver_import_hook.py
@@ -14,9 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
 """Import hook for dev_appserver.py."""
 
 import dummy_thread
diff --git a/google/appengine/tools/dev_appserver_index.py b/google/appengine/tools/dev_appserver_index.py
index a64e7fd..fb80ee9 100644
--- a/google/appengine/tools/dev_appserver_index.py
+++ b/google/appengine/tools/dev_appserver_index.py
@@ -14,10 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
-
 """Utilities for generating and updating index.yaml."""
 
 
diff --git a/google/appengine/tools/dev_appserver_login.py b/google/appengine/tools/dev_appserver_login.py
index 4aee1e7..d72bb86 100644
--- a/google/appengine/tools/dev_appserver_login.py
+++ b/google/appengine/tools/dev_appserver_login.py
@@ -14,9 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
 """Helper CGI for logins/logout in the development application server.
 
 This CGI has these parameters:
diff --git a/google/appengine/tools/dev_appserver_main.py b/google/appengine/tools/dev_appserver_main.py
index cac350a..fbf6432 100644
--- a/google/appengine/tools/dev_appserver_main.py
+++ b/google/appengine/tools/dev_appserver_main.py
@@ -14,9 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
 """Runs a development application server for an application.
 
 %(script)s [options] <application root>
diff --git a/google/appengine/tools/dev_appserver_multiprocess.py b/google/appengine/tools/dev_appserver_multiprocess.py
index 330a0ed..0442da2 100644
--- a/google/appengine/tools/dev_appserver_multiprocess.py
+++ b/google/appengine/tools/dev_appserver_multiprocess.py
@@ -14,8 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
 """This module adds support for multiple processes in the dev_appserver.
 
 Each instance of the application is started as a separate process on a unique
diff --git a/google/appengine/tools/dev_appserver_oauth.py b/google/appengine/tools/dev_appserver_oauth.py
index 7f5d3e3..66aaa7e 100644
--- a/google/appengine/tools/dev_appserver_oauth.py
+++ b/google/appengine/tools/dev_appserver_oauth.py
@@ -14,10 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
-
 """Helper CGI for OAuth in the development app server."""
 
 
diff --git a/google/appengine/tools/dev_appserver_upload.py b/google/appengine/tools/dev_appserver_upload.py
index 98a7fdd..db53c5d 100644
--- a/google/appengine/tools/dev_appserver_upload.py
+++ b/google/appengine/tools/dev_appserver_upload.py
@@ -14,10 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
-
 """Helper CGI for POST uploads.
 
 Utility library contains the main logic behind simulating the blobstore
diff --git a/google/appengine/tools/devappserver2/api_server.py b/google/appengine/tools/devappserver2/api_server.py
index 494627b..325d9e1 100644
--- a/google/appengine/tools/devappserver2/api_server.py
+++ b/google/appengine/tools/devappserver2/api_server.py
@@ -530,7 +530,7 @@
     mail_smtp_password='',
     mail_enable_sendmail=False,
     mail_show_mail_body=False,
-    mail_allow_tls=False,
+    mail_allow_tls=True,
     matcher_prospective_search_path='/dev/null',
     search_index_path=None,
     taskqueue_auto_run_tasks=False,
diff --git a/google/appengine/tools/devappserver2/application_configuration.py b/google/appengine/tools/devappserver2/application_configuration.py
index 6a544f6..b02317a 100644
--- a/google/appengine/tools/devappserver2/application_configuration.py
+++ b/google/appengine/tools/devappserver2/application_configuration.py
@@ -83,6 +83,7 @@
           from the yaml or xml file should be used.
     """
     self._config_path = config_path
+    self._forced_app_id = app_id
     root = os.path.dirname(config_path)
     self._is_java = os.path.normpath(config_path).endswith(
         os.sep + 'WEB-INF' + os.sep + 'appengine-web.xml')
@@ -97,8 +98,6 @@
 
     self._app_info_external, files_to_check = self._parse_configuration(
         self._config_path)
-    if app_id:
-      self._app_info_external.application = app_id
     self._mtimes = self._get_mtimes(files_to_check)
     self._application = '%s~%s' % (self.partition,
                                    self.application_external_name)
@@ -322,6 +321,8 @@
     else:
       with open(configuration_path) as f:
         config, files = appinfo_includes.ParseAndReturnIncludePaths(f)
+    if self._forced_app_id:
+      config.application = self._forced_app_id
     return config, [configuration_path] + files
 
   def _parse_java_configuration(self, app_engine_web_xml_path):
diff --git a/google/appengine/tools/devappserver2/application_configuration_test.py b/google/appengine/tools/devappserver2/application_configuration_test.py
index 6316b6c..7c6983b 100644
--- a/google/appengine/tools/devappserver2/application_configuration_test.py
+++ b/google/appengine/tools/devappserver2/application_configuration_test.py
@@ -19,6 +19,7 @@
 
 import collections
 from contextlib import contextmanager
+import io
 import os.path
 import shutil
 import tempfile
@@ -28,6 +29,7 @@
 import mox
 
 from google.appengine.api import appinfo
+from google.appengine.api import appinfo_includes
 from google.appengine.api import backendinfo
 from google.appengine.api import dispatchinfo
 from google.appengine.tools.devappserver2 import application_configuration
@@ -53,13 +55,17 @@
 
   def setUp(self):
     self.mox = mox.Mox()
-    self.mox.StubOutWithMock(
-        application_configuration.ModuleConfiguration,
-        '_parse_configuration')
+    self.mox.StubOutWithMock(appinfo_includes, 'ParseAndReturnIncludePaths')
     self.mox.StubOutWithMock(os.path, 'getmtime')
+    application_configuration.open = self._fake_open
 
   def tearDown(self):
     self.mox.UnsetStubs()
+    del application_configuration.open
+
+  @staticmethod
+  def _fake_open(unused_filename):
+    return io.BytesIO()
 
   def test_good_app_yaml_configuration(self):
     automatic_scaling = appinfo.AutomaticScaling(min_pending_latency='1.0s',
@@ -82,13 +88,12 @@
         inbound_services=['warmup'],
         env_variables=env_variables,
         )
-    application_configuration.ModuleConfiguration._parse_configuration(
-        '/appdir/app.yaml').AndReturn((info, ['/appdir/app.yaml']))
+    appinfo_includes.ParseAndReturnIncludePaths(mox.IgnoreArg()).AndReturn(
+        (info, []))
     os.path.getmtime('/appdir/app.yaml').AndReturn(10)
 
     self.mox.ReplayAll()
-    config = application_configuration.ModuleConfiguration(
-        '/appdir/app.yaml')
+    config = application_configuration.ModuleConfiguration('/appdir/app.yaml')
     self.mox.VerifyAll()
 
     self.assertEqual(os.path.realpath('/appdir'), config.application_root)
@@ -124,8 +129,8 @@
         threadsafe=False,
         manual_scaling=manual_scaling,
     )
-    application_configuration.ModuleConfiguration._parse_configuration(
-        '/appdir/app.yaml').AndReturn((info, ['/appdir/app.yaml']))
+    appinfo_includes.ParseAndReturnIncludePaths(mox.IgnoreArg()).AndReturn(
+        (info, []))
     os.path.getmtime('/appdir/app.yaml').AndReturn(10)
 
     self.mox.ReplayAll()
@@ -153,16 +158,23 @@
         version='version',
         runtime='python27',
         threadsafe=False)
-    application_configuration.ModuleConfiguration._parse_configuration(
-        '/appdir/app.yaml').AndReturn((info, ['/appdir/app.yaml']))
+    appinfo_includes.ParseAndReturnIncludePaths(mox.IgnoreArg()).AndReturn(
+        (info, []))
     os.path.getmtime('/appdir/app.yaml').AndReturn(10)
+    os.path.getmtime('/appdir/app.yaml').AndReturn(20)
+    os.path.getmtime('/appdir/app.yaml').AndReturn(20)
+    appinfo_includes.ParseAndReturnIncludePaths(mox.IgnoreArg()).AndReturn(
+        (info, []))
 
     self.mox.ReplayAll()
     config = application_configuration.ModuleConfiguration(
         '/appdir/app.yaml', 'overriding-app')
-    self.mox.VerifyAll()
     self.assertEqual('overriding-app', config.application_external_name)
     self.assertEqual('dev~overriding-app', config.application)
+    config.check_for_updates()
+    self.assertEqual('overriding-app', config.application_external_name)
+    self.assertEqual('dev~overriding-app', config.application)
+    self.mox.VerifyAll()
 
   def test_check_for_updates_unchanged_mtime(self):
     info = appinfo.AppInfoExternal(
@@ -171,8 +183,8 @@
         version='version',
         runtime='python27',
         threadsafe=False)
-    application_configuration.ModuleConfiguration._parse_configuration(
-        '/appdir/app.yaml').AndReturn((info, ['/appdir/app.yaml']))
+    appinfo_includes.ParseAndReturnIncludePaths(mox.IgnoreArg()).AndReturn(
+        (info, []))
     os.path.getmtime('/appdir/app.yaml').AndReturn(10)
     os.path.getmtime('/appdir/app.yaml').AndReturn(10)
 
@@ -189,17 +201,15 @@
         runtime='python27',
         includes=['/appdir/include.yaml'],
         threadsafe=False)
-    application_configuration.ModuleConfiguration._parse_configuration(
-        '/appdir/app.yaml').AndReturn(
-            (info, ['/appdir/app.yaml', '/appdir/include.yaml']))
+    appinfo_includes.ParseAndReturnIncludePaths(mox.IgnoreArg()).AndReturn(
+        (info, ['/appdir/include.yaml']))
     os.path.getmtime('/appdir/app.yaml').InAnyOrder().AndReturn(10)
     os.path.getmtime('/appdir/include.yaml').InAnyOrder().AndReturn(10)
     os.path.getmtime('/appdir/app.yaml').AndReturn(10)
     os.path.getmtime('/appdir/include.yaml').AndReturn(11)
 
-    application_configuration.ModuleConfiguration._parse_configuration(
-        '/appdir/app.yaml').AndReturn(
-            (info, ['/appdir/app.yaml', '/appdir/include.yaml']))
+    appinfo_includes.ParseAndReturnIncludePaths(mox.IgnoreArg()).AndReturn(
+        (info, ['/appdir/include.yaml']))
     os.path.getmtime('/appdir/app.yaml').InAnyOrder().AndReturn(10)
     os.path.getmtime('/appdir/include.yaml').InAnyOrder().AndReturn(11)
 
@@ -221,12 +231,12 @@
         version='version',
         runtime='python27',
         threadsafe=False)
-    application_configuration.ModuleConfiguration._parse_configuration(
-        '/appdir/app.yaml').AndReturn((info, ['/appdir/app.yaml']))
+    appinfo_includes.ParseAndReturnIncludePaths(mox.IgnoreArg()).AndReturn(
+        (info, []))
     os.path.getmtime('/appdir/app.yaml').AndReturn(10)
     os.path.getmtime('/appdir/app.yaml').AndReturn(11)
-    application_configuration.ModuleConfiguration._parse_configuration(
-        '/appdir/app.yaml').AndReturn((info, ['/appdir/app.yaml']))
+    appinfo_includes.ParseAndReturnIncludePaths(mox.IgnoreArg()).AndReturn(
+        (info, []))
     os.path.getmtime('/appdir/app.yaml').AndReturn(11)
 
     self.mox.ReplayAll()
@@ -261,12 +271,12 @@
             min_idle_instances=1,
             max_idle_instances=2))
 
-    application_configuration.ModuleConfiguration._parse_configuration(
-        '/appdir/app.yaml').AndReturn((info1, ['/appdir/app.yaml']))
+    appinfo_includes.ParseAndReturnIncludePaths(mox.IgnoreArg()).AndReturn(
+        (info1, []))
     os.path.getmtime('/appdir/app.yaml').AndReturn(10)
     os.path.getmtime('/appdir/app.yaml').AndReturn(11)
-    application_configuration.ModuleConfiguration._parse_configuration(
-        '/appdir/app.yaml').AndReturn((info2, ['/appdir/app.yaml']))
+    appinfo_includes.ParseAndReturnIncludePaths(mox.IgnoreArg()).AndReturn(
+        (info2, []))
     os.path.getmtime('/appdir/app.yaml').AndReturn(11)
 
     self.mox.ReplayAll()
@@ -282,7 +292,7 @@
     self.assertFalse(config.threadsafe)
     self.assertEqual(automatic_scaling1, config.automatic_scaling)
 
-  def test_check_for_mutable_changes(self):
+  def test_check_for_updates_mutable_changes(self):
     info1 = appinfo.AppInfoExternal(
         application='app',
         module='default',
@@ -308,12 +318,12 @@
         inbound_services=[],
         )
 
-    application_configuration.ModuleConfiguration._parse_configuration(
-        '/appdir/app.yaml').AndReturn((info1, ['/appdir/app.yaml']))
+    appinfo_includes.ParseAndReturnIncludePaths(mox.IgnoreArg()).AndReturn(
+        (info1, []))
     os.path.getmtime('/appdir/app.yaml').AndReturn(10)
     os.path.getmtime('/appdir/app.yaml').AndReturn(11)
-    application_configuration.ModuleConfiguration._parse_configuration(
-        '/appdir/app.yaml').AndReturn((info2, ['/appdir/app.yaml']))
+    appinfo_includes.ParseAndReturnIncludePaths(mox.IgnoreArg()).AndReturn(
+        (info2, []))
     os.path.getmtime('/appdir/app.yaml').AndReturn(11)
 
     self.mox.ReplayAll()
diff --git a/google/appengine/tools/devappserver2/devappserver2.py b/google/appengine/tools/devappserver2/devappserver2.py
index 32693f3..299ac12 100644
--- a/google/appengine/tools/devappserver2/devappserver2.py
+++ b/google/appengine/tools/devappserver2/devappserver2.py
@@ -541,7 +541,7 @@
       '--smtp_allow_tls',
       action=boolean_action.BooleanAction,
       const=True,
-      default=False,
+      default=True,
       help='Allow TLS to be used when the SMTP server announces TLS support '
       '(ignored if --smtp_host is not set)')
 
diff --git a/google/appengine/tools/devappserver2/http_runtime.py b/google/appengine/tools/devappserver2/http_runtime.py
index 89712d1..8e2f9e8 100644
--- a/google/appengine/tools/devappserver2/http_runtime.py
+++ b/google/appengine/tools/devappserver2/http_runtime.py
@@ -113,6 +113,11 @@
 
   _VALID_START_PROCESS_FLAVORS = [START_PROCESS, START_PROCESS_FILE]
 
+  # TODO: Determine if we can always use SIGTERM.
+  # Set this to True to quit with SIGTERM rather than SIGKILL
+
+  quit_with_sigterm = False
+
   def __init__(self, args, runtime_config_getter, module_configuration,
                env=None, start_process_flavor=START_PROCESS):
     """Initializer for HttpRuntimeProxy.
@@ -275,7 +280,11 @@
     with self._process_lock:
       assert self._process, 'module was not running'
       try:
-        self._process.kill()
+        if HttpRuntimeProxy.quit_with_sigterm:
+          logging.debug('Calling process.terminate on child runtime.')
+          self._process.terminate()
+        else:
+          self._process.kill()
       except OSError:
         pass
       # Mac leaks file descriptors without call to join. Suspect a race
diff --git a/google/appengine/tools/devappserver2/http_runtime_test.py b/google/appengine/tools/devappserver2/http_runtime_test.py
index 0af3c84..f9ecc06 100644
--- a/google/appengine/tools/devappserver2/http_runtime_test.py
+++ b/google/appengine/tools/devappserver2/http_runtime_test.py
@@ -129,14 +129,19 @@
 
     self.mox.StubOutWithMock(http_proxy.HttpProxy, 'wait_for_connection')
     http_proxy.HttpProxy.wait_for_connection()
+    self._saved_quit_with_sigterm = (
+        http_runtime.HttpRuntimeProxy.quit_with_sigterm)
 
   def tearDown(self):
     shutil.rmtree(self.tmpdir)
     self.mox.UnsetStubs()
+    http_runtime.HttpRuntimeProxy.quit_with_sigterm = (
+        self._saved_quit_with_sigterm)
 
-  def test_start_and_quit(self):
+  def _test_start_and_quit(self, quit_with_sigterm):
     ## Test start()
     # start()
+    http_runtime.HttpRuntimeProxy.quit_with_sigterm = quit_with_sigterm
     safe_subprocess.start_process(
         ['/runtime'],
         base64.b64encode(self.runtime_config.SerializeToString()),
@@ -153,11 +158,20 @@
     self.mox.ResetAll()
 
     ## Test quit()
-    self.process.kill()
+    if quit_with_sigterm:
+      self.process.terminate()
+    else:
+      self.process.kill()
     self.mox.ReplayAll()
     self.proxy.quit()
     self.mox.VerifyAll()
 
+  def test_start_and_quit(self):
+    self._test_start_and_quit(quit_with_sigterm=False)
+
+  def test_start_and_quit_with_sigterm(self):
+    self._test_start_and_quit(quit_with_sigterm=True)
+
   def test_start_bad_port(self):
     safe_subprocess.start_process(
         ['/runtime'],
diff --git a/google/appengine/tools/devappserver2/php_runtime.py b/google/appengine/tools/devappserver2/php_runtime.py
index e2510a9..2891815 100644
--- a/google/appengine/tools/devappserver2/php_runtime.py
+++ b/google/appengine/tools/devappserver2/php_runtime.py
@@ -33,8 +33,7 @@
 
 
 _RUNTIME_PATH = os.path.abspath(
-    os.path.join(os.path.dirname(sys.argv[0]), '_php_runtime.py')
-    )
+    os.path.join(os.path.dirname(sys.argv[0]), '_php_runtime.py'))
 _CHECK_ENVIRONMENT_SCRIPT_PATH = os.path.join(
     os.path.dirname(__file__), 'php', 'check_environment.php')
 _RUNTIME_ARGS = [sys.executable, _RUNTIME_PATH]
diff --git a/google/appengine/tools/devappserver2/python_runtime.py b/google/appengine/tools/devappserver2/python_runtime.py
index 7062af2..86fb637 100644
--- a/google/appengine/tools/devappserver2/python_runtime.py
+++ b/google/appengine/tools/devappserver2/python_runtime.py
@@ -26,8 +26,7 @@
 from google.appengine.tools.devappserver2 import instance
 
 _RUNTIME_PATH = os.path.abspath(
-    os.path.join(os.path.dirname(sys.argv[0]), '_python_runtime.py')
-    )
+    os.path.join(os.path.dirname(sys.argv[0]), '_python_runtime.py'))
 _RUNTIME_ARGS = [sys.executable, _RUNTIME_PATH]
 
 
diff --git a/google/appengine/tools/devappserver2/runtime_config_pb2.py b/google/appengine/tools/devappserver2/runtime_config_pb2.py
index 1af1018..13ef39f 100644
--- a/google/appengine/tools/devappserver2/runtime_config_pb2.py
+++ b/google/appengine/tools/devappserver2/runtime_config_pb2.py
@@ -197,6 +197,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=88,
   serialized_end=796,
 )
@@ -232,6 +234,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=798,
   serialized_end=863,
 )
@@ -267,6 +271,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=865,
   serialized_end=925,
 )
@@ -323,6 +329,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=927,
   serialized_end=1043,
 )
@@ -358,6 +366,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1045,
   serialized_end=1085,
 )
@@ -393,6 +403,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1087,
   serialized_end=1124,
 )
@@ -428,6 +440,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1126,
   serialized_end=1228,
 )
@@ -477,6 +491,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1230,
   serialized_end=1341,
 )
diff --git a/google/appengine/tools/devappserver2/tee.py b/google/appengine/tools/devappserver2/tee.py
index 2072f32..2f6c38f 100644
--- a/google/appengine/tools/devappserver2/tee.py
+++ b/google/appengine/tools/devappserver2/tee.py
@@ -42,6 +42,7 @@
       if not line:
         break
       self.__out.write(line)
+      self.__out.flush()
       self.__deque.append(line)
 
   def get_buf(self):
diff --git a/google/appengine/tools/devappserver2/vm_runtime_proxy.py b/google/appengine/tools/devappserver2/vm_runtime_proxy.py
index 405c111..1ecff65 100644
--- a/google/appengine/tools/devappserver2/vm_runtime_proxy.py
+++ b/google/appengine/tools/devappserver2/vm_runtime_proxy.py
@@ -29,6 +29,7 @@
 
 
 _DOCKER_IMAGE_NAME_FORMAT = '{display}.{module}.{version}'
+_DOCKER_CONTAINER_NAME_FORMAT = 'google.appengine.{image_name}.{minor_version}'
 
 
 class Error(Exception):
@@ -39,6 +40,10 @@
   """Raised if an environment variable name or value cannot be supported."""
 
 
+class VersionError(Error):
+  """Raised if no version is specified in application configuration file."""
+
+
 def _GetPortToPublish(port):
   """Checks if given port is available.
 
@@ -136,6 +141,11 @@
   def start(self, dockerfile_dir=None):
     runtime_config = self._runtime_config_getter()
 
+    if not self._module_configuration.major_version:
+      logging.error('Version needs to be specified in your application '
+                    'configuration file.')
+      raise VersionError()
+
     if not dockerfile_dir:
       dockerfile_dir = self._module_configuration.application_root
 
@@ -199,6 +209,9 @@
         self._module_configuration.module_name,
         self._module_configuration.major_version,
         runtime_config.instance_id)
+    container_name = _DOCKER_CONTAINER_NAME_FORMAT.format(
+        image_name=image_name,
+        minor_version=self._module_configuration.minor_version)
     self._container = containers.Container(
         self._docker_client,
         containers.ContainerOptions(
@@ -211,7 +224,8 @@
             environment=environment,
             volumes={
                 external_logs_path: {'bind': '/var/log/app_engine'}
-            }
+            },
+            name=container_name
         ))
 
     self._container.Start()
diff --git a/google/appengine/tools/devappserver2/vm_runtime_proxy_dart.py b/google/appengine/tools/devappserver2/vm_runtime_proxy_dart.py
index d4a4e9f..d2504fd 100644
--- a/google/appengine/tools/devappserver2/vm_runtime_proxy_dart.py
+++ b/google/appengine/tools/devappserver2/vm_runtime_proxy_dart.py
@@ -32,7 +32,6 @@
 VM_SERVICE_PORT = 8181
 DEV_MODE = 'dev'
 DEPLOY_MODE = 'deploy'
-DEFAULT_DOCKER_FILE = 'FROM google/appengine-dart'
 
 
 class DartVMRuntimeProxy(instance.RuntimeProxy):
@@ -141,14 +140,8 @@
                               ' links to ' + os.readlink(src))
           raise
 
-        dst_dockerfile = os.path.join(dst_application_dir, 'Dockerfile')
         dst_build_dir = os.path.join(dst_application_dir, 'build')
 
-        # Write default Dockerfile if none found.
-        if not os.path.exists(dst_dockerfile):
-          with open(dst_dockerfile, 'w') as fd:
-            fd.write(DEFAULT_DOCKER_FILE)
-
         if self._is_deployment_mode:
           # Run 'pub build' to generate assets from web/ directory if necessary.
           web_dir = os.path.join(application_dir, 'web')
diff --git a/google/appengine/tools/devappserver2/vm_runtime_proxy_go.py b/google/appengine/tools/devappserver2/vm_runtime_proxy_go.py
index 7d7b0af..7942f6c 100644
--- a/google/appengine/tools/devappserver2/vm_runtime_proxy_go.py
+++ b/google/appengine/tools/devappserver2/vm_runtime_proxy_go.py
@@ -30,7 +30,7 @@
 
 DEBUG_PORT = 5858
 VM_SERVICE_PORT = 8181
-DEFAULT_DOCKER_FILE = """FROM google/golang
+DEFAULT_DOCKER_FILE = """FROM google/golang:1.2.2
 ADD . /app
 RUN /bin/bash /app/_ah/build.sh
 
@@ -112,94 +112,10 @@
       application_dir = os.path.abspath(
           self._module_configuration.application_root)
 
-      # - copy the application to a new temporary directory (follow symlinks)
-      # - copy used parts of $GOPATH to the temporary directory
-      # - copy or create a Dockerfile in the temporary directory
-      # - build & deploy the docker container
-      with TempDir('go_deployment_dir') as temp_dir:
-        dst_deployment_dir = temp_dir
-        dst_application_dir = temp_dir
-        try:
-          _copytree(application_dir, dst_application_dir,
-                    self._module_configuration.skip_files)
-        except shutil.Error as e:
-          logging.error('Error copying tree: %s', e)
-          for src, unused_dst, unused_error in e.args[0]:
-            if os.path.islink(src):
-              linkto = os.readlink(src)
-              if not os.path.exists(linkto):
-                logging.error('Dangling symlink in Go project. '
-                              'Path %s links to %s', src, os.readlink(src))
-          raise
-        except OSError as e:
-          logging.error('Failed to copy dir: %s', e.strerror)
-          raise
-
-        extras = go_application.get_app_extras_for_vm(
-            self._module_configuration)
-        for dest, src in extras:
-          try:
-            dest = os.path.join(dst_deployment_dir, dest)
-            dirname = os.path.dirname(dest)
-            if not os.path.exists(dirname):
-              os.makedirs(dirname)
-            shutil.copy(src, dest)
-          except OSError as e:
-            logging.error('Failed to copy %s to %s', src, dest)
-            raise
-
-        # Make the _ah subdirectory for the app engine tools.
-        ah_dir = os.path.join(dst_deployment_dir, '_ah')
-        try:
-          os.mkdir(ah_dir)
-        except OSError as e:
-          logging.error('Failed to create %s: %s', ah_dir, e.strerror)
-          raise
-
-        # Copy gab.
-        try:
-          gab_dest = os.path.join(ah_dir, 'gab')
-          shutil.copy(_GO_APP_BUILDER, gab_dest)
-        except OSError as e:
-          logging.error('Failed to copy %s to %s', _GO_APP_BUILDER, gab_dest)
-          raise
-
-        # Write build script.
-        nobuild_files = '^' + str(self._module_configuration.nobuild_files)
-        gab_args = [
-            '/app/_ah/gab',
-            '-app_base', '/app',
-            '-arch', '6',
-            '-dynamic',
-            '-goroot', '/goroot',
-            '-nobuild_files', nobuild_files,
-            '-unsafe',
-            '-binary_name', '_ah_exe',
-            '-work_dir', '/tmp/work',
-            '-vm',
-        ]
-        gab_args.extend(
-            go_application.list_go_files(self._module_configuration))
-        gab_args.extend([x[0] for x in extras])
-        dst_build = os.path.join(ah_dir, 'build.sh')
-        with open(dst_build, 'wb') as fd:
-          fd.write('#!/bin/bash\n')
-          fd.write('set -e\n')
-          fd.write('mkdir -p /tmp/work\n')
-          fd.write('chmod a+x /app/_ah/gab\n')
-          # Without this line, Windows errors "text file busy".
-          fd.write('shasum /app/_ah/gab\n')
-          fd.write(' '.join(gab_args) + '\n')
-          fd.write('mv /tmp/work/_ah_exe /app/_ah/exe\n')
-          fd.write('rm -rf /tmp/work\n')
-          fd.write('echo Done.\n')
-        os.chmod(dst_build, 0777)
-
-        # Write default Dockerfile if none found.
-        dst_dockerfile = os.path.join(dst_application_dir, 'Dockerfile')
-        if not os.path.exists(dst_dockerfile):
-          with open(dst_dockerfile, 'w') as fd:
-            fd.write(DEFAULT_DOCKER_FILE)
+      with TempDir('go_deployment_dir') as dst_deployment_dir:
+        build_go_docker_image_source(
+            application_dir, dst_deployment_dir,
+            _GO_APP_BUILDER, self._module_configuration)
 
         self._vm_runtime_proxy.start(dockerfile_dir=dst_deployment_dir)
 
@@ -215,6 +131,18 @@
     self._vm_runtime_proxy.quit()
 
 
+def _write_dockerfile(dst_dir):
+  """Writes Dockerfile to named directory if one does not exist.
+
+  Args:
+    dst_dir: string name of destination directory.
+  """
+  dst_dockerfile = os.path.join(dst_dir, 'Dockerfile')
+  if not os.path.exists(dst_dockerfile):
+    with open(dst_dockerfile, 'w') as fd:
+      fd.write(DEFAULT_DOCKER_FILE)
+
+
 class TempDir(object):
   """Creates a temporary directory."""
 
@@ -255,3 +183,104 @@
       shutil.copytree(s, d, symlinks, ignore=ignored_files)
     else:
       shutil.copy2(s, d)
+
+
+def build_go_docker_image_source(
+    application_dir, dst_deployment_dir, go_app_builder, module_configuration):
+  """Builds the Docker image source in preparation for building.
+
+  Steps:
+    copy the application to dst_deployment_dir (follow symlinks)
+    copy used parts of $GOPATH to dst_deployment_dir
+    copy or create a Dockerfile in dst_deployment_dir
+
+  Args:
+    application_dir: string pathname of application directory.
+    dst_deployment_dir: string pathname of temporary deployment directory.
+    go_app_builder: string pathname of docker-gab executable.
+    module_configuration: An application_configuration.ModuleConfiguration
+        instance respresenting the configuration of the module that owns the
+        runtime.
+  """
+  try:
+    _copytree(application_dir, dst_deployment_dir,
+              module_configuration.skip_files)
+  except shutil.Error as e:
+    logging.error('Error copying tree: %s', e)
+    for src, unused_dst, unused_error in e.args[0]:
+      if os.path.islink(src):
+        linkto = os.readlink(src)
+        if not os.path.exists(linkto):
+          logging.error('Dangling symlink in Go project. '
+                        'Path %s links to %s', src, os.readlink(src))
+    raise
+  except OSError as e:
+    logging.error('Failed to copy dir: %s', e.strerror)
+    raise
+
+  extras = go_application.get_app_extras_for_vm(module_configuration)
+  for dest, src in extras:
+    try:
+      dest = os.path.join(dst_deployment_dir, dest)
+      dirname = os.path.dirname(dest)
+      if not os.path.exists(dirname):
+        os.makedirs(dirname)
+      shutil.copy(src, dest)
+    except OSError as e:
+      logging.error('Failed to copy %s to %s', src, dest)
+      raise
+
+  # Make the _ah subdirectory for the app engine tools.
+  ah_dir = os.path.join(dst_deployment_dir, '_ah')
+  try:
+    os.mkdir(ah_dir)
+  except OSError as e:
+    logging.error('Failed to create %s: %s', ah_dir, e.strerror)
+    raise
+
+  # Copy gab.
+  try:
+    gab_dest = os.path.join(ah_dir, 'gab')
+    shutil.copy(go_app_builder, gab_dest)
+  except OSError as e:
+    logging.error('Failed to copy %s to %s', go_app_builder, gab_dest)
+    raise
+
+  # Write build script.
+  nobuild_files = '^' + str(module_configuration.nobuild_files)
+  gab_args = [
+      '/app/_ah/gab',
+      '-app_base', '/app',
+      '-arch', '6',
+      '-dynamic',
+      '-goroot', '/goroot',
+      '-nobuild_files', nobuild_files,
+      '-unsafe',
+      '-binary_name', '_ah_exe',
+      '-work_dir', '/tmp/work',
+      '-vm',
+  ]
+  gab_args.extend(go_application.list_go_files(module_configuration))
+  gab_args.extend([x[0] for x in extras])
+  dst_build = os.path.join(ah_dir, 'build.sh')
+  lines = [
+      '#!/bin/bash',
+      'set -e',
+      'mkdir -p /tmp/work',
+      'chmod a+x /app/_ah/gab',
+      # Without this line, Windows errors "text file busy".
+      'shasum /app/_ah/gab',
+      ' '.join(gab_args),
+      'mv /tmp/work/_ah_exe /app/_ah/exe',
+      'rm -rf /tmp/work',
+      'echo Done.',
+  ]
+  with open(dst_build, 'wb') as fd:
+    fd.write('\n'.join(lines) + '\n')
+  os.chmod(dst_build, 0777)
+
+  # TODO: Remove this when classic Go SDK is gone.
+  # Write default Dockerfile if none found.
+  _write_dockerfile(dst_deployment_dir)
+  # Also write the default Dockerfile if not found in the app dir.
+  _write_dockerfile(application_dir)
diff --git a/google/appengine/tools/docker/containers.py b/google/appengine/tools/docker/containers.py
index 2a1bfb5..e9514f2 100644
--- a/google/appengine/tools/docker/containers.py
+++ b/google/appengine/tools/docker/containers.py
@@ -28,7 +28,7 @@
 Container is a result of "docker run image_tag" command.
 ImageOptions and ContainerOptions allow to pass parameters to these commands.
 
-Versions 1.6 and 1.10 of docker remote API are supported.
+Versions 1.9 and 1.10 of docker remote API are supported.
 """
 
 from collections import namedtuple
@@ -51,7 +51,7 @@
                               ['dockerfile_dir', 'tag', 'nocache', 'rm'])):
   """Options for building Docker Images."""
 
-  def __new__(cls, dockerfile_dir=None, tag=None, nocache=False, rm=False):
+  def __new__(cls, dockerfile_dir=None, tag=None, nocache=False, rm=True):
     """This method is redefined to provide default values for namedtuple.
 
     Args:
@@ -64,7 +64,8 @@
       nocache: boolean, True if cache should not be used when building the
           image.
       rm: boolean, True if intermediate images should be removed after a
-          successful build.
+          successful build. Default value is set to True because this is the
+          default value used by "docker build" command.
 
     Returns:
       ImageOptions object.
@@ -216,41 +217,37 @@
         path=self._image_opts.dockerfile_dir,
         tag=self.tag,
         quiet=False, fileobj=None, nocache=self._image_opts.nocache,
-        rm=self._image_opts.rm, stream=False)
+        rm=self._image_opts.rm)
 
-    if isinstance(build_res, tuple):
-      # Older API returns pair (image_id, warnings)
-      self._id, error = build_res
-      if not self.id:
-        raise ImageError(
-            'There was a build error for the image %s. Error: %s' % (self.tag,
-                                                                     error))
-    else:
-      # Newer API returns stream_helper generator. Each message contains output
-      # from the build, and the last message contains the status.
-      for x in build_res:
-        x = x.strip()
-        logging.debug(x)
-        m = _SUCCESSFUL_BUILD_PATTERN.match(x)
-        if m:
-          self._id = m.group(1)
-          break
-      else:
-        # There was no line indicating a successful response.
-        raise ImageError(
-            'There was a build error for the image %s. Error: %s. Run with '
-            '\'--verbosity debug\' for more information.' % (self.tag, x))
-    if self.id:
+    log_lines = [x.strip() for x in build_res]
+    if not log_lines:
+      logging.error('Error building docker image %s [with no output]', self.tag)
+      raise ImageError
+
+    m = _SUCCESSFUL_BUILD_PATTERN.match(log_lines[-1])
+
+    if m:  # The build was successful.
+      self._id = m.group(1)
+      for line in log_lines:
+        logging.debug(line)
       logging.info('Image %s built, id = %s', self.tag, self.id)
+    else:
+      logging.error('Error building docker image %s', self.tag)
+      for line in log_lines:
+        logging.error(line)
+      raise ImageError
 
   def Remove(self):
     """Calls "docker rmi"."""
     if self._id:
       try:
         self._docker_client.remove_image(self.id)
-      except docker.errors.APIError:
-        logging.warning('Image %s cannot be removed because it is tagged in '
-                        'multiple repositories. Use -f to remove it.', self.id)
+      except docker.errors.APIError as e:
+        logging.warning('Image %s (id=%s) cannot be removed: %s. Try cleaning '
+                        'up old containers that can be listed with '
+                        '"docker ps -a" and removing the image again with '
+                        '"docker rmi IMAGE_ID".',
+                        self.tag, self.id, e)
       self._id = None
 
 
@@ -364,11 +361,7 @@
     if self.id:
       raise ContainerError('Trying to start already running container.')
 
-    try:
-      self._image.Build()
-    except ImageError, e:
-      logging.error('Error starting container: %s', e)
-      raise
+    self._image.Build()
 
     logging.info('Creating container...')
     port_bindings = self._container_opts.port_bindings or {}
diff --git a/google/appengine/tools/download_appstats.py b/google/appengine/tools/download_appstats.py
index 665b3af..9c0dbbb 100644
--- a/google/appengine/tools/download_appstats.py
+++ b/google/appengine/tools/download_appstats.py
@@ -14,10 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
-
 """Script for downloading Appstats data using remote_api.
 
 Usage:
diff --git a/google/appengine/tools/handler_generator.py b/google/appengine/tools/handler_generator.py
index dd23d18..bcf877a 100644
--- a/google/appengine/tools/handler_generator.py
+++ b/google/appengine/tools/handler_generator.py
@@ -29,11 +29,11 @@
     static files.
 """
 
+from google.appengine.api import appinfo
 from google.appengine.tools import handler
 from google.appengine.tools.app_engine_config_exception import AppEngineConfigException
 
 API_ENDPOINT_REGEX = '/_ah/spi/*'
-MAX_HANDLERS = 100
 
 
 def GenerateYamlHandlersList(app_engine_web_xml, web_xml, static_files):
@@ -47,7 +47,7 @@
   handler_length = len(dynamic_handler_generator.GenerateOrderedHandlerList())
   if static_files:
     handler_length += len(static_handler_generator.GenerateOrderedHandlerList())
-  if handler_length > MAX_HANDLERS:
+  if handler_length > appinfo.MAX_URL_MAPS:
 
 
 
@@ -88,6 +88,14 @@
     A list of strings that together make up the lines of the generated app.yaml
     file.
   """
+
+
+
+
+
+
+
+  appinfo.MAX_URL_MAPS = 10000
   static_handler_generator = StaticHandlerGeneratorForDevAppServer(
       app_engine_web_xml, web_xml, static_urls)
   dynamic_handler_generator = DynamicHandlerGenerator(
diff --git a/google/appengine/tools/old_dev_appserver.py b/google/appengine/tools/old_dev_appserver.py
index 98818c5..c8b1e92 100644
--- a/google/appengine/tools/old_dev_appserver.py
+++ b/google/appengine/tools/old_dev_appserver.py
@@ -14,9 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
 """Pure-Python application server for testing applications locally.
 
 Given a port and the paths to a valid application directory (with an 'app.yaml'
diff --git a/google/appengine/tools/os_compat.py b/google/appengine/tools/os_compat.py
index 8aabc77..50a8ed5 100644
--- a/google/appengine/tools/os_compat.py
+++ b/google/appengine/tools/os_compat.py
@@ -14,9 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
 """OS cross-platform compatibility tweaks.
 
 This module will, on import, change some parts of the running evironment so
diff --git a/google/appengine/tools/remote_api_shell.py b/google/appengine/tools/remote_api_shell.py
index f6d9ec2..5d621a0 100644
--- a/google/appengine/tools/remote_api_shell.py
+++ b/google/appengine/tools/remote_api_shell.py
@@ -14,10 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
-
 """An interactive python shell that uses remote_api.
 
 Usage:
diff --git a/google/appengine/tools/requeue.py b/google/appengine/tools/requeue.py
index d9a8f03..db571fc 100644
--- a/google/appengine/tools/requeue.py
+++ b/google/appengine/tools/requeue.py
@@ -14,10 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-
-
-
 """A thread-safe queue in which removed objects put back to the front."""
 
 
diff --git a/google/appengine/tools/sdk_update_checker.py b/google/appengine/tools/sdk_update_checker.py
index b361435..8f9b111 100644
--- a/google/appengine/tools/sdk_update_checker.py
+++ b/google/appengine/tools/sdk_update_checker.py
@@ -70,23 +70,19 @@
     return yaml_object.BuildSingleObject(NagFile, nag_file)
 
 
-def GetVersionObject(isfile=os.path.isfile, open_fn=open):
+def GetVersionObject():
   """Gets the version of the SDK by parsing the VERSION file.
 
-  Args:
-    isfile: used for testing.
-    open_fn: Used for testing.
-
   Returns:
     A Yaml object or None if the VERSION file does not exist.
   """
   version_filename = os.path.join(os.path.dirname(google.appengine.__file__),
                                   VERSION_FILE)
-  if not isfile(version_filename):
+  try:
+    version_fh = open(version_filename)
+  except IOError:
     logging.error('Could not find version file at %s', version_filename)
     return None
-
-  version_fh = open_fn(version_filename, 'r')
   try:
     version = yaml.safe_load(version_fh)
   finally:
@@ -133,26 +129,17 @@
 
   def __init__(self,
                rpcserver,
-               configs,
-               isdir=os.path.isdir,
-               isfile=os.path.isfile,
-               open_fn=open):
+               configs):
     """Create a new SDKUpdateChecker.
 
     Args:
       rpcserver: The AbstractRpcServer to use.
       configs: A list of yaml objects or a single yaml object that specify the
           configuration of this application.
-      isdir: Replacement for os.path.isdir (for testing).
-      isfile: Replacement for os.path.isfile (for testing).
-      open_fn: Replacement for the open builtin (for testing).
     """
     if not isinstance(configs, list):
       configs = [configs]
     self.rpcserver = rpcserver
-    self.isdir = isdir
-    self.isfile = isfile
-    self.open = open_fn
     self.runtimes = set(config.runtime for config in configs)
     self.runtime_to_api_version = {}
     for config in configs:
@@ -181,7 +168,7 @@
     Returns:
       A Yaml object or None if the file does not exist.
     """
-    return GetVersionObject(isfile=self.isfile, open_fn=self.open)
+    return GetVersionObject()
 
   def CheckSupportedVersion(self):
     """Determines if the app's api_version is supported by the SDK.
@@ -326,14 +313,15 @@
       A NagFile if the file was present else None.
     """
     nag_filename = SDKUpdateChecker.MakeNagFilename()
-    if self.isfile(nag_filename):
-      fh = self.open(nag_filename, 'r')
-      try:
-        nag = NagFile.Load(fh)
-      finally:
-        fh.close()
-      return nag
-    return None
+    try:
+      fh = open(nag_filename)
+    except IOError:
+      return None
+    try:
+      nag = NagFile.Load(fh)
+    finally:
+      fh.close()
+    return nag
 
   def _WriteNagFile(self, nag):
     """Writes the NagFile to the user's nag file.
@@ -346,7 +334,7 @@
     """
     nagfilename = SDKUpdateChecker.MakeNagFilename()
     try:
-      fh = self.open(nagfilename, 'w')
+      fh = open(nagfilename, 'w')
       try:
         fh.write(nag.ToYAML())
       finally:
diff --git a/google/appengine/tools/yaml_translator.py b/google/appengine/tools/yaml_translator.py
index cb0258c..7e7f24f 100644
--- a/google/appengine/tools/yaml_translator.py
+++ b/google/appengine/tools/yaml_translator.py
@@ -262,6 +262,21 @@
               self.SanitizeForYaml(name), self.SanitizeForYaml(settings[name])))
     return statements
 
+  def TranslateVmHealthCheck(self):
+    """Translates <vm-health-check> in appengine-web.xml to yaml."""
+    vm_health_check = self.app_engine_web_xml.vm_health_check
+    if not vm_health_check:
+      return []
+
+    statements = ['vm_health_check:']
+    for attr in ('enable_health_check', 'check_interval_sec', 'timeout_sec',
+                 'unhealthy_threshold', 'healthy_threshold',
+                 'restart_threshold', 'host'):
+      value = getattr(vm_health_check, attr, None)
+      if value is not None:
+        statements.append('  %s: %s' % (attr, value))
+    return statements
+
   def TranslateInboundServices(self):
     services = self.app_engine_web_xml.inbound_services
     if not services:
diff --git a/google/net/proto2/proto/descriptor_pb2.py b/google/net/proto2/proto/descriptor_pb2.py
index 181a348..0741b69 100644
--- a/google/net/proto2/proto/descriptor_pb2.py
+++ b/google/net/proto2/proto/descriptor_pb2.py
@@ -33,7 +33,7 @@
 DESCRIPTOR = _descriptor.FileDescriptor(
   name='net/proto2/proto/descriptor.proto',
   package='proto2',
-  serialized_pb=_b('\n!net/proto2/proto/descriptor.proto\x12\x06proto2\">\n\x11\x46ileDescriptorSet\x12)\n\x04\x66ile\x18\x01 \x03(\x0b\x32\x1b.proto2.FileDescriptorProto\"\x95\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12-\n\x0cmessage_type\x18\x04 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x05 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12/\n\x07service\x18\x06 \x03(\x0b\x32\x1e.proto2.ServiceDescriptorProto\x12/\n\textension\x18\x07 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12$\n\x07options\x18\x08 \x01(\x0b\x32\x13.proto2.FileOptions\x12\x30\n\x10source_code_info\x18\t \x01(\x0b\x32\x16.proto2.SourceCodeInfo\"\xa5\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05\x66ield\x18\x02 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12/\n\textension\x18\x06 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12,\n\x0bnested_type\x18\x03 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x04 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12?\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32&.proto2.DescriptorProto.ExtensionRange\x12\x30\n\noneof_decl\x18\x08 \x03(\x0b\x32\x1c.proto2.OneofDescriptorProto\x12\'\n\x07options\x18\x07 \x01(\x0b\x32\x16.proto2.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\x8e\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x31\n\x05label\x18\x04 \x01(\x0e\x32\".proto2.FieldDescriptorProto.Label\x12/\n\x04type\x18\x05 \x01(\x0e\x32!.proto2.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12%\n\x07options\x18\x08 
\x01(\x0b\x32\x14.proto2.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"z\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05value\x18\x02 \x03(\x0b\x32 .proto2.EnumValueDescriptorProto\x12$\n\x07options\x18\x03 \x01(\x0b\x32\x13.proto2.EnumOptions\"c\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12)\n\x07options\x18\x03 \x01(\x0b\x32\x18.proto2.EnumValueOptions\"\xad\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x06method\x18\x02 \x03(\x0b\x32\x1d.proto2.MethodDescriptorProto\x12-\n\x06stream\x18\x04 \x03(\x0b\x32\x1d.proto2.StreamDescriptorProto\x12\'\n\x07options\x18\x03 \x01(\x0b\x32\x16.proto2.ServiceOptions\"v\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.MethodOptions\"\x87\x01\n\x15StreamDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1b\n\x13\x63lient_message_type\x18\x02 \x01(\t\x12\x1b\n\x13server_message_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.StreamOptions\"\xa5\n\n\x0b\x46ileOptions\x12\x19\n\x0e\x63\x63_api_version\x18\x02 
\x01(\x05:\x01\x32\x12V\n\x14\x63\x63_api_compatibility\x18\x0f \x01(\x0e\x32&.proto2.FileOptions.CompatibilityLevel:\x10NO_COMPATIBILITY\x12\'\n\x19\x63\x63_proto_array_compatible\x18\x16 \x01(\x08:\x04true\x12\"\n\x14\x63\x63_utf8_verification\x18\x18 \x01(\x08:\x04true\x12$\n\x15\x63\x63_proto1_text_format\x18\x19 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x19\n\x0epy_api_version\x18\x04 \x01(\x05:\x01\x32\x12\x1b\n\x10java_api_version\x18\x05 \x01(\x05:\x01\x32\x12!\n\x13java_use_javaproto2\x18\x06 \x01(\x08:\x04true\x12\x1e\n\x10java_java5_enums\x18\x07 \x01(\x08:\x04true\x12)\n\x1ajava_generate_rpc_baseimpl\x18\r \x01(\x08:\x05\x66\x61lse\x12#\n\x14java_use_javastrings\x18\x15 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14java_alt_api_package\x18\x13 \x01(\t\x12\x34\n%java_enable_dual_generate_mutable_api\x18\x1a \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10java_mutable_api\x18\x1c \x01(\x08:\x05\x66\x61lse\x12+\n#java_multiple_files_mutable_package\x18\x1d \x01(\t\x12=\n\x0coptimize_for\x18\t \x01(\x0e\x32 .proto2.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\x1a\n\x12javascript_package\x18\x0c \x01(\t\x12\x1a\n\x0fszl_api_version\x18\x0e \x01(\x05:\x01\x31\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1a\n\x12\x65xperimental_style\x18\x1e \x01(\t\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"c\n\x12\x43ompatibilityLevel\x12\x14\n\x10NO_COMPATIBILITY\x10\x00\x12\x15\n\x11PROTO1_COMPATIBLE\x10\x64\x12 
\n\x1c\x44\x45PRECATED_PROTO1_COMPATIBLE\x10\x32\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd1\x02\n\x0eMessageOptions\x12+\n#experimental_java_message_interface\x18\x04 \x03(\t\x12+\n#experimental_java_builder_interface\x18\x05 \x03(\t\x12+\n#experimental_java_interface_extends\x18\x06 \x03(\t\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xa0\x05\n\x0c\x46ieldOptions\x12\x31\n\x05\x63type\x18\x01 \x01(\x0e\x32\x1a.proto2.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x31\n\x05jtype\x18\x04 \x01(\x0e\x32\x1a.proto2.FieldOptions.JType:\x06NORMAL\x12\x36\n\x06jstype\x18\x06 \x01(\x0e\x32\x1b.proto2.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14\x65xperimental_map_key\x18\t \x01(\t\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12<\n\x0fupgraded_option\x18\x0b \x03(\x0b\x32#.proto2.FieldOptions.UpgradedOption\x12%\n\x16\x64\x65precated_raw_message\x18\x0c \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\x1a-\n\x0eUpgradedOption\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"<\n\x05JType\x12\n\n\x06NORMAL\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\x1c\n\x18\x45XPERIMENTAL_BYTE_BUFFER\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x99\x01\n\x0b\x45numOptions\x12\x13\n\x0bproto1_name\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"t\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xb6\x01\n\x0eServiceOptions\x12\x1d\n\x0emulticast_stub\x18\x14 \x01(\x08:\x05\x66\x61lse\x12#\n\x17\x66\x61ilure_detection_delay\x18\x10 \x01(\x01:\x02-1\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd3\t\n\rMethodOptions\x12\x35\n\x08protocol\x18\x07 \x01(\x0e\x32\x1e.proto2.MethodOptions.Protocol:\x03TCP\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12$\n\x15\x64uplicate_suppression\x18\t \x01(\x08:\x05\x66\x61lse\x12\x18\n\tfail_fast\x18\n \x01(\x08:\x05\x66\x61lse\x12\'\n\x18\x65nd_user_creds_requested\x18\x1a \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0e\x63lient_logging\x18\x0b \x01(\x11:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x0c \x01(\x11:\x03\x32\x35\x36\x12\x41\n\x0esecurity_level\x18\r \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x43\n\x0fresponse_format\x18\x0f \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x42\n\x0erequest_format\x18\x11 \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x13\n\x0bstream_type\x18\x12 \x01(\t\x12\x16\n\x0esecurity_label\x18\x13 \x01(\t\x12\x18\n\x10\x63lient_streaming\x18\x14 \x01(\x08\x12\x18\n\x10server_streaming\x18\x15 \x01(\x08\x12\x1a\n\x12legacy_stream_type\x18\x16 \x01(\t\x12\x1a\n\x12legacy_result_type\x18\x17 \x01(\t\x12(\n\x1clegacy_client_initial_tokens\x18\x18 \x01(\x03:\x02-1\x12(\n\x1clegacy_server_initial_tokens\x18\x19 \x01(\x03:\x02-1\x12^\n\tlog_level\x18\x1b \x01(\x0e\x32\x1e.proto2.MethodOptions.LogLevel:+LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\x1c\n\x08Protocol\x12\x07\n\x03TCP\x10\x00\x12\x07\n\x03UDP\x10\x01\"e\n\rSecurityLevel\x12\x08\n\x04NONE\x10\x00\x12\r\n\tINTEGRITY\x10\x01\x12\x19\n\x15PRIVACY_AND_INTEGRITY\x10\x02\x12 \n\x1cSTRONG_PRIVACY_AND_INTEGRITY\x10\x03\"0\n\x06\x46ormat\x12\x10\n\x0cUNCOMPRESSED\x10\x00\x12\x14\n\x10ZIPPY_COMPRESSED\x10\x01\"\x9f\x01\n\x08LogLevel\x12\x0c\n\x08LOG_NONE\x10\x00\x12\x13\n\x0fLOG_HEADER_ONLY\x10\x01\x12/\n+LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL\x10\x02\x12#\n\x1fLOG_HEADER_AND_FILTERED_PAYLOAD\x10\x03\x12\x1a\n\x16LOG_HEADER_AND_PAYLOAD\x10\x04*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xe7\x04\n\rStreamOptions\x12!\n\x15\x63lient_initial_tokens\x18\x01 \x01(\x03:\x02-1\x12!\n\x15server_initial_tokens\x18\x02 \x01(\x03:\x02-1\x12<\n\ntoken_unit\x18\x03 \x01(\x0e\x32\x1f.proto2.StreamOptions.TokenUnit:\x07MESSAGE\x12\x41\n\x0esecurity_level\x18\x04 \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x16\n\x0esecurity_label\x18\x05 \x01(\t\x12\x1b\n\x0e\x63lient_logging\x18\x06 \x01(\x05:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x07 \x01(\x05:\x03\x32\x35\x36\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12\x18\n\tfail_fast\x18\t \x01(\x08:\x05\x66\x61lse\x12\'\n\x18\x65nd_user_creds_requested\x18\n \x01(\x08:\x05\x66\x61lse\x12^\n\tlog_level\x18\x0b \x01(\x0e\x32\x1e.proto2.MethodOptions.LogLevel:+LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\"\n\tTokenUnit\x12\x0b\n\x07MESSAGE\x10\x00\x12\x08\n\x04\x42YTE\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x95\x02\n\x13UninterpretedOption\x12\x32\n\x04name\x18\x02 \x03(\x0b\x32$.proto2.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xa8\x01\n\x0eSourceCodeInfo\x12\x31\n\x08location\x18\x01 \x03(\x0b\x32\x1f.proto2.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\tB,\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01\xe0\x01\x01')
+  serialized_pb=_b('\n!net/proto2/proto/descriptor.proto\x12\x06proto2\">\n\x11\x46ileDescriptorSet\x12)\n\x04\x66ile\x18\x01 \x03(\x0b\x32\x1b.proto2.FileDescriptorProto\"\x95\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12-\n\x0cmessage_type\x18\x04 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x05 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12/\n\x07service\x18\x06 \x03(\x0b\x32\x1e.proto2.ServiceDescriptorProto\x12/\n\textension\x18\x07 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12$\n\x07options\x18\x08 \x01(\x0b\x32\x13.proto2.FileOptions\x12\x30\n\x10source_code_info\x18\t \x01(\x0b\x32\x16.proto2.SourceCodeInfo\"\xa5\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05\x66ield\x18\x02 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12/\n\textension\x18\x06 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12,\n\x0bnested_type\x18\x03 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x04 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12?\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32&.proto2.DescriptorProto.ExtensionRange\x12\x30\n\noneof_decl\x18\x08 \x03(\x0b\x32\x1c.proto2.OneofDescriptorProto\x12\'\n\x07options\x18\x07 \x01(\x0b\x32\x16.proto2.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\x8e\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x31\n\x05label\x18\x04 \x01(\x0e\x32\".proto2.FieldDescriptorProto.Label\x12/\n\x04type\x18\x05 \x01(\x0e\x32!.proto2.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12%\n\x07options\x18\x08 
\x01(\x0b\x32\x14.proto2.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"z\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05value\x18\x02 \x03(\x0b\x32 .proto2.EnumValueDescriptorProto\x12$\n\x07options\x18\x03 \x01(\x0b\x32\x13.proto2.EnumOptions\"c\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12)\n\x07options\x18\x03 \x01(\x0b\x32\x18.proto2.EnumValueOptions\"\xad\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x06method\x18\x02 \x03(\x0b\x32\x1d.proto2.MethodDescriptorProto\x12-\n\x06stream\x18\x04 \x03(\x0b\x32\x1d.proto2.StreamDescriptorProto\x12\'\n\x07options\x18\x03 \x01(\x0b\x32\x16.proto2.ServiceOptions\"v\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.MethodOptions\"\x87\x01\n\x15StreamDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1b\n\x13\x63lient_message_type\x18\x02 \x01(\t\x12\x1b\n\x13server_message_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.StreamOptions\"\xa5\n\n\x0b\x46ileOptions\x12\x19\n\x0e\x63\x63_api_version\x18\x02 
\x01(\x05:\x01\x32\x12V\n\x14\x63\x63_api_compatibility\x18\x0f \x01(\x0e\x32&.proto2.FileOptions.CompatibilityLevel:\x10NO_COMPATIBILITY\x12\'\n\x19\x63\x63_proto_array_compatible\x18\x16 \x01(\x08:\x04true\x12\"\n\x14\x63\x63_utf8_verification\x18\x18 \x01(\x08:\x04true\x12$\n\x15\x63\x63_proto1_text_format\x18\x19 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x19\n\x0epy_api_version\x18\x04 \x01(\x05:\x01\x32\x12\x1b\n\x10java_api_version\x18\x05 \x01(\x05:\x01\x32\x12!\n\x13java_use_javaproto2\x18\x06 \x01(\x08:\x04true\x12\x1e\n\x10java_java5_enums\x18\x07 \x01(\x08:\x04true\x12)\n\x1ajava_generate_rpc_baseimpl\x18\r \x01(\x08:\x05\x66\x61lse\x12#\n\x14java_use_javastrings\x18\x15 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14java_alt_api_package\x18\x13 \x01(\t\x12\x34\n%java_enable_dual_generate_mutable_api\x18\x1a \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10java_mutable_api\x18\x1c \x01(\x08:\x05\x66\x61lse\x12+\n#java_multiple_files_mutable_package\x18\x1d \x01(\t\x12=\n\x0coptimize_for\x18\t \x01(\x0e\x32 .proto2.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\x1a\n\x12javascript_package\x18\x0c \x01(\t\x12\x1a\n\x0fszl_api_version\x18\x0e \x01(\x05:\x01\x31\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1a\n\x12\x65xperimental_style\x18\x1e \x01(\t\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"c\n\x12\x43ompatibilityLevel\x12\x14\n\x10NO_COMPATIBILITY\x10\x00\x12\x15\n\x11PROTO1_COMPATIBLE\x10\x64\x12 
\n\x1c\x44\x45PRECATED_PROTO1_COMPATIBLE\x10\x32\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xe4\x02\n\x0eMessageOptions\x12+\n#experimental_java_message_interface\x18\x04 \x03(\t\x12+\n#experimental_java_builder_interface\x18\x05 \x03(\t\x12+\n#experimental_java_interface_extends\x18\x06 \x03(\t\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xa0\x05\n\x0c\x46ieldOptions\x12\x31\n\x05\x63type\x18\x01 \x01(\x0e\x32\x1a.proto2.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x31\n\x05jtype\x18\x04 \x01(\x0e\x32\x1a.proto2.FieldOptions.JType:\x06NORMAL\x12\x36\n\x06jstype\x18\x06 \x01(\x0e\x32\x1b.proto2.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14\x65xperimental_map_key\x18\t \x01(\t\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12<\n\x0fupgraded_option\x18\x0b \x03(\x0b\x32#.proto2.FieldOptions.UpgradedOption\x12%\n\x16\x64\x65precated_raw_message\x18\x0c \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\x1a-\n\x0eUpgradedOption\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"<\n\x05JType\x12\n\n\x06NORMAL\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\x1c\n\x18\x45XPERIMENTAL_BYTE_BUFFER\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x99\x01\n\x0b\x45numOptions\x12\x13\n\x0bproto1_name\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"t\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xb6\x01\n\x0eServiceOptions\x12\x1d\n\x0emulticast_stub\x18\x14 \x01(\x08:\x05\x66\x61lse\x12#\n\x17\x66\x61ilure_detection_delay\x18\x10 \x01(\x01:\x02-1\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd3\t\n\rMethodOptions\x12\x35\n\x08protocol\x18\x07 \x01(\x0e\x32\x1e.proto2.MethodOptions.Protocol:\x03TCP\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12$\n\x15\x64uplicate_suppression\x18\t \x01(\x08:\x05\x66\x61lse\x12\x18\n\tfail_fast\x18\n \x01(\x08:\x05\x66\x61lse\x12\'\n\x18\x65nd_user_creds_requested\x18\x1a \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0e\x63lient_logging\x18\x0b \x01(\x11:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x0c \x01(\x11:\x03\x32\x35\x36\x12\x41\n\x0esecurity_level\x18\r \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x43\n\x0fresponse_format\x18\x0f \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x42\n\x0erequest_format\x18\x11 \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x13\n\x0bstream_type\x18\x12 \x01(\t\x12\x16\n\x0esecurity_label\x18\x13 \x01(\t\x12\x18\n\x10\x63lient_streaming\x18\x14 \x01(\x08\x12\x18\n\x10server_streaming\x18\x15 \x01(\x08\x12\x1a\n\x12legacy_stream_type\x18\x16 \x01(\t\x12\x1a\n\x12legacy_result_type\x18\x17 \x01(\t\x12(\n\x1clegacy_client_initial_tokens\x18\x18 \x01(\x03:\x02-1\x12(\n\x1clegacy_server_initial_tokens\x18\x19 \x01(\x03:\x02-1\x12^\n\tlog_level\x18\x1b \x01(\x0e\x32\x1e.proto2.MethodOptions.LogLevel:+LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\x1c\n\x08Protocol\x12\x07\n\x03TCP\x10\x00\x12\x07\n\x03UDP\x10\x01\"e\n\rSecurityLevel\x12\x08\n\x04NONE\x10\x00\x12\r\n\tINTEGRITY\x10\x01\x12\x19\n\x15PRIVACY_AND_INTEGRITY\x10\x02\x12 \n\x1cSTRONG_PRIVACY_AND_INTEGRITY\x10\x03\"0\n\x06\x46ormat\x12\x10\n\x0cUNCOMPRESSED\x10\x00\x12\x14\n\x10ZIPPY_COMPRESSED\x10\x01\"\x9f\x01\n\x08LogLevel\x12\x0c\n\x08LOG_NONE\x10\x00\x12\x13\n\x0fLOG_HEADER_ONLY\x10\x01\x12/\n+LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL\x10\x02\x12#\n\x1fLOG_HEADER_AND_FILTERED_PAYLOAD\x10\x03\x12\x1a\n\x16LOG_HEADER_AND_PAYLOAD\x10\x04*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xe7\x04\n\rStreamOptions\x12!\n\x15\x63lient_initial_tokens\x18\x01 \x01(\x03:\x02-1\x12!\n\x15server_initial_tokens\x18\x02 \x01(\x03:\x02-1\x12<\n\ntoken_unit\x18\x03 \x01(\x0e\x32\x1f.proto2.StreamOptions.TokenUnit:\x07MESSAGE\x12\x41\n\x0esecurity_level\x18\x04 \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x16\n\x0esecurity_label\x18\x05 \x01(\t\x12\x1b\n\x0e\x63lient_logging\x18\x06 \x01(\x05:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x07 \x01(\x05:\x03\x32\x35\x36\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12\x18\n\tfail_fast\x18\t \x01(\x08:\x05\x66\x61lse\x12\'\n\x18\x65nd_user_creds_requested\x18\n \x01(\x08:\x05\x66\x61lse\x12^\n\tlog_level\x18\x0b \x01(\x0e\x32\x1e.proto2.MethodOptions.LogLevel:+LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\"\n\tTokenUnit\x12\x0b\n\x07MESSAGE\x10\x00\x12\x08\n\x04\x42YTE\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x95\x02\n\x13UninterpretedOption\x12\x32\n\x04name\x18\x02 \x03(\x0b\x32$.proto2.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xa8\x01\n\x0eSourceCodeInfo\x12\x31\n\x08location\x18\x01 \x03(\x0b\x32\x1f.proto2.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\tB,\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01\xe0\x01\x01')
 )
 _sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
@@ -224,8 +224,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=4453,
-  serialized_end=4500,
+  serialized_start=4472,
+  serialized_end=4519,
 )
 _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE)
 
@@ -250,8 +250,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=4502,
-  serialized_end=4562,
+  serialized_start=4521,
+  serialized_end=4581,
 )
 _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JTYPE)
 
@@ -276,8 +276,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=4564,
-  serialized_end=4617,
+  serialized_start=4583,
+  serialized_end=4636,
 )
 _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JSTYPE)
 
@@ -298,8 +298,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=5971,
-  serialized_end=5999,
+  serialized_start=5990,
+  serialized_end=6018,
 )
 _sym_db.RegisterEnumDescriptor(_METHODOPTIONS_PROTOCOL)
 
@@ -328,8 +328,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=6001,
-  serialized_end=6102,
+  serialized_start=6020,
+  serialized_end=6121,
 )
 _sym_db.RegisterEnumDescriptor(_METHODOPTIONS_SECURITYLEVEL)
 
@@ -350,8 +350,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=6104,
-  serialized_end=6152,
+  serialized_start=6123,
+  serialized_end=6171,
 )
 _sym_db.RegisterEnumDescriptor(_METHODOPTIONS_FORMAT)
 
@@ -384,8 +384,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=6155,
-  serialized_end=6314,
+  serialized_start=6174,
+  serialized_end=6333,
 )
 _sym_db.RegisterEnumDescriptor(_METHODOPTIONS_LOGLEVEL)
 
@@ -406,8 +406,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=6898,
-  serialized_end=6932,
+  serialized_start=6917,
+  serialized_end=6951,
 )
 _sym_db.RegisterEnumDescriptor(_STREAMOPTIONS_TOKENUNIT)
 
@@ -435,6 +435,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=45,
   serialized_end=107,
 )
@@ -533,6 +535,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=110,
   serialized_end=515,
 )
@@ -568,6 +572,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=895,
   serialized_end=939,
 )
@@ -644,6 +650,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=518,
   serialized_end=939,
 )
@@ -730,6 +738,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=942,
   serialized_end=1596,
 )
@@ -758,6 +768,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1598,
   serialized_end=1634,
 )
@@ -800,6 +812,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1636,
   serialized_end=1758,
 )
@@ -842,6 +856,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1760,
   serialized_end=1859,
 )
@@ -891,6 +907,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1862,
   serialized_end=2035,
 )
@@ -940,6 +958,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=2037,
   serialized_end=2155,
 )
@@ -989,6 +1009,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=2158,
   serialized_end=2293,
 )
@@ -1222,6 +1244,8 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
+  oneofs=[
+  ],
   serialized_start=2296,
   serialized_end=3613,
 )
@@ -1277,7 +1301,14 @@
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
-      name='uninterpreted_option', full_name='proto2.MessageOptions.uninterpreted_option', index=6,
+      name='map_entry', full_name='proto2.MessageOptions.map_entry', index=6,
+      number=7, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='uninterpreted_option', full_name='proto2.MessageOptions.uninterpreted_option', index=7,
       number=999, type=11, cpp_type=10, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
@@ -1292,8 +1323,10 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
+  oneofs=[
+  ],
   serialized_start=3616,
-  serialized_end=3953,
+  serialized_end=3972,
 )
 
 
@@ -1327,8 +1360,10 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=4406,
-  serialized_end=4451,
+  oneofs=[
+  ],
+  serialized_start=4425,
+  serialized_end=4470,
 )
 
 _FIELDOPTIONS = _descriptor.Descriptor(
@@ -1427,8 +1462,10 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=3956,
-  serialized_end=4628,
+  oneofs=[
+  ],
+  serialized_start=3975,
+  serialized_end=4647,
 )
 
 
@@ -1476,8 +1513,10 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=4631,
-  serialized_end=4784,
+  oneofs=[
+  ],
+  serialized_start=4650,
+  serialized_end=4803,
 )
 
 
@@ -1511,8 +1550,10 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=4786,
-  serialized_end=4902,
+  oneofs=[
+  ],
+  serialized_start=4805,
+  serialized_end=4921,
 )
 
 
@@ -1560,8 +1601,10 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=4905,
-  serialized_end=5087,
+  oneofs=[
+  ],
+  serialized_start=4924,
+  serialized_end=5106,
 )
 
 
@@ -1732,8 +1775,10 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=5090,
-  serialized_end=6325,
+  oneofs=[
+  ],
+  serialized_start=5109,
+  serialized_end=6344,
 )
 
 
@@ -1845,8 +1890,10 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=6328,
-  serialized_end=6943,
+  oneofs=[
+  ],
+  serialized_start=6347,
+  serialized_end=6962,
 )
 
 
@@ -1880,8 +1927,10 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=7172,
-  serialized_end=7223,
+  oneofs=[
+  ],
+  serialized_start=7191,
+  serialized_end=7242,
 )
 
 _UNINTERPRETEDOPTION = _descriptor.Descriptor(
@@ -1949,8 +1998,10 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=6946,
-  serialized_end=7223,
+  oneofs=[
+  ],
+  serialized_start=6965,
+  serialized_end=7242,
 )
 
 
@@ -1998,8 +2049,10 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=7295,
-  serialized_end=7394,
+  oneofs=[
+  ],
+  serialized_start=7314,
+  serialized_end=7413,
 )
 
 _SOURCECODEINFO = _descriptor.Descriptor(
@@ -2025,8 +2078,10 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=7226,
-  serialized_end=7394,
+  oneofs=[
+  ],
+  serialized_start=7245,
+  serialized_end=7413,
 )
 
 _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
diff --git a/google/net/proto2/python/public/descriptor.py b/google/net/proto2/python/public/descriptor.py
index c8b4ece..024024b 100644
--- a/google/net/proto2/python/public/descriptor.py
+++ b/google/net/proto2/python/public/descriptor.py
@@ -18,6 +18,7 @@
 
 
 
+
 """Descriptors essentially contain exactly the information found in a .proto
 file, in types that make this information accessible in Python.
 """
@@ -212,13 +213,21 @@
     options: (descriptor_pb2.MessageOptions) Protocol message options or None
       to use default message options.
 
+    oneofs: (list of OneofDescriptor) The list of descriptors for oneof fields
+      in this message.
+    oneofs_by_name: (dict str -> OneofDescriptor) Same objects as in |oneofs|,
+      but indexed by "name" attribute.
+
     file: (FileDescriptor) Reference to file descriptor.
   """
 
+
+
+
   def __init__(self, name, full_name, filename, containing_type, fields,
                nested_types, enum_types, extensions, options=None,
-               is_extendable=True, extension_ranges=None, file=None,
-               serialized_start=None, serialized_end=None):
+               is_extendable=True, extension_ranges=None, oneofs=None,
+               file=None, serialized_start=None, serialized_end=None):
     """Arguments to __init__() are as described in the description
     of Descriptor fields above.
 
@@ -259,6 +268,10 @@
     self.extensions_by_name = dict((f.name, f) for f in extensions)
     self.is_extendable = is_extendable
     self.extension_ranges = extension_ranges
+    self.oneofs = oneofs if oneofs is not None else []
+    self.oneofs_by_name = dict((o.name, o) for o in self.oneofs)
+    for oneof in self.oneofs:
+      oneof.containing_type = self
 
   def EnumValueName(self, enum, value):
     """Returns the string name of an enum value.
@@ -344,6 +357,9 @@
 
     options: (descriptor_pb2.FieldOptions) Protocol message field options or
       None to use default field options.
+
+    containing_oneof: (OneofDescriptor) If the field is a member of a oneof
+      union, contains its descriptor. Otherwise, None.
   """
 
 
@@ -425,7 +441,7 @@
   def __init__(self, name, full_name, index, number, type, cpp_type, label,
                default_value, message_type, enum_type, containing_type,
                is_extension, extension_scope, options=None,
-               has_default_value=True):
+               has_default_value=True, containing_oneof=None):
     """The arguments are as described in the description of FieldDescriptor
     attributes above.
 
@@ -448,6 +464,7 @@
     self.enum_type = enum_type
     self.is_extension = is_extension
     self.extension_scope = extension_scope
+    self.containing_oneof = containing_oneof
     if api_implementation.Type() == 'cpp':
       if is_extension:
         if api_implementation.Version() == 2:
@@ -566,6 +583,29 @@
     self.type = type
 
 
+class OneofDescriptor(object):
+  """Descriptor for a oneof field.
+
+    name: (str) Name of the oneof field.
+    full_name: (str) Full name of the oneof field, including package name.
+    index: (int) 0-based index giving the order of the oneof field inside
+      its containing type.
+    containing_type: (Descriptor) Descriptor of the protocol message
+      type that contains this field.  Set by the Descriptor constructor
+      if we're passed into one.
+    fields: (list of FieldDescriptor) The list of field descriptors this
+      oneof can contain.
+  """
+
+  def __init__(self, name, full_name, index, containing_type, fields):
+    """Arguments are as described in the attribute description above."""
+    self.name = name
+    self.full_name = full_name
+    self.index = index
+    self.containing_type = containing_type
+    self.fields = fields
+
+
 class ServiceDescriptor(_NestedDescriptorBase):
 
   """Descriptor for a service.
diff --git a/google/net/proto2/python/public/message.py b/google/net/proto2/python/public/message.py
index e8398b2..a2d8e40 100644
--- a/google/net/proto2/python/public/message.py
+++ b/google/net/proto2/python/public/message.py
@@ -166,7 +166,11 @@
     raise NotImplementedError
 
   def ParseFromString(self, serialized):
-    """Like MergeFromString(), except we clear the object first."""
+    """Parse serialized protocol buffer data into this message.
+
+    Like MergeFromString(), except we clear the object first and
+    do not return the value that MergeFromString returns.
+    """
     self.Clear()
     self.MergeFromString(serialized)
 
diff --git a/google/storage/speckle/proto/client_error_code_pb2.py b/google/storage/speckle/proto/client_error_code_pb2.py
index 6dc532d..3e3836c 100644
--- a/google/storage/speckle/proto/client_error_code_pb2.py
+++ b/google/storage/speckle/proto/client_error_code_pb2.py
@@ -208,6 +208,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=63,
   serialized_end=1138,
 )
diff --git a/google/storage/speckle/proto/client_pb2.py b/google/storage/speckle/proto/client_pb2.py
index 68ef46d..3279311 100644
--- a/google/storage/speckle/proto/client_pb2.py
+++ b/google/storage/speckle/proto/client_pb2.py
@@ -624,6 +624,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=48,
   serialized_end=230,
 )
@@ -722,6 +724,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=233,
   serialized_end=629,
 )
@@ -807,6 +811,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=632,
   serialized_end=1385,
 )
@@ -842,6 +848,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1387,
   serialized_end=1424,
 )
@@ -891,6 +899,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1426,
   serialized_end=1525,
 )
@@ -926,6 +936,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1527,
   serialized_end=1570,
 )
@@ -1080,6 +1092,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1573,
   serialized_end=2021,
 )
@@ -1115,6 +1129,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=2023,
   serialized_end=2112,
 )
@@ -1150,6 +1166,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=8720,
   serialized_end=8773,
 )
@@ -2180,6 +2198,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=2115,
   serialized_end=9102,
 )
@@ -2215,6 +2235,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=9104,
   serialized_end=9142,
 )
@@ -2327,6 +2349,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=9145,
   serialized_end=9615,
 )
@@ -2355,6 +2379,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=9617,
   serialized_end=9692,
 )
@@ -2390,6 +2416,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=9694,
   serialized_end=9787,
 )
@@ -2418,6 +2446,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=9789,
   serialized_end=9822,
 )
@@ -2453,6 +2483,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=9824,
   serialized_end=9882,
 )
diff --git a/google/storage/speckle/proto/sql_pb2.py b/google/storage/speckle/proto/sql_pb2.py
index e7bca80..e8ae7c2 100644
--- a/google/storage/speckle/proto/sql_pb2.py
+++ b/google/storage/speckle/proto/sql_pb2.py
@@ -164,6 +164,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=85,
   serialized_end=481,
 )
@@ -199,6 +201,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=483,
   serialized_end=581,
 )
@@ -248,6 +252,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=583,
   serialized_end=689,
 )
@@ -311,6 +317,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=692,
   serialized_end=929,
 )
@@ -367,6 +375,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=932,
   serialized_end=1102,
 )
@@ -409,6 +419,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1105,
   serialized_end=1275,
 )
@@ -458,6 +470,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1278,
   serialized_end=1450,
 )
@@ -500,6 +514,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1453,
   serialized_end=1587,
 )
@@ -535,6 +551,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1589,
   serialized_end=1654,
 )
@@ -563,6 +581,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
+  oneofs=[
+  ],
   serialized_start=1656,
   serialized_end=1727,
 )
diff --git a/lib/cacerts/urlfetch_cacerts.txt b/lib/cacerts/urlfetch_cacerts.txt
index 38a912d..44eab70 100644
--- a/lib/cacerts/urlfetch_cacerts.txt
+++ b/lib/cacerts/urlfetch_cacerts.txt
@@ -7200,34 +7200,6 @@
 mIzroG3RNlOQoI8WMB7ew79/RqWVKvnI3jvbd/TyMrEzYaIwNQ==
 -----END CERTIFICATE-----
 
-subject= /C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert High Assurance EV Root CA
-serial=428740A5
------BEGIN CERTIFICATE-----
-MIIEQjCCA6ugAwIBAgIEQodApTANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
-VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
-ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
-KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
-ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEw
-MDEwNTAwMDBaFw0xNDA3MjYxODE1MTVaMGwxCzAJBgNVBAYTAlVTMRUwEwYDVQQK
-EwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20xKzApBgNV
-BAMTIkRpZ2lDZXJ0IEhpZ2ggQXNzdXJhbmNlIEVWIFJvb3QgQ0EwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGzOVz5vvUu+UtLTKm3+WBP8nNJUm2cSrD
-1ZQ0Z6IKHLBfaaZAscS3so/QmKSpQVk609yU1jzbdDikSsxNJYL3SqVTEjju80lt
-cZF+Y7arpl/DpIT4T2JRvvjF7Ns4kuMG5QiRDMQoQVX7y1qJFX5x6DW/TXIJPb46
-OFBbdzEbjbPHJEWap6xtABRaBLe6E+tRCphBQSJOZWGHgUFQpnlcid4ZSlfVLuZd
-HFMsfpjNGgYWpGhz0DQEE1yhcdNafFXbXmThN4cwVgTlEbQpgBLxeTmIogIRfCdm
-t4i3ePLKCqg4qwpkwr9mXZWEwaElHoddGlALIBLMQbtuC1E4uEvLAgMBAAGjggET
-MIIBDzASBgNVHRMBAf8ECDAGAQH/AgEBMCcGA1UdJQQgMB4GCCsGAQUFBwMBBggr
-BgEFBQcDAgYIKwYBBQUHAwQwMwYIKwYBBQUHAQEEJzAlMCMGCCsGAQUFBzABhhdo
-dHRwOi8vb2NzcC5lbnRydXN0Lm5ldDAzBgNVHR8ELDAqMCigJqAkhiJodHRwOi8v
-Y3JsLmVudHJ1c3QubmV0L3NlcnZlcjEuY3JsMB0GA1UdDgQWBBSxPsNpA/i/RwHU
-mCYaCALvY2QrwzALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8BdiE1U9s/8KAGv7
-UISX8+1i0BowGQYJKoZIhvZ9B0EABAwwChsEVjcuMQMCAIEwDQYJKoZIhvcNAQEF
-BQADgYEASA4rbyBiTCiToyQ9WKshz4D4mpeQaiLtWnxHNpnneYR1qySPkgrVYQSu
-w2pcsszZ5ESHb9uPOGL3RDadurxuB8TUjegf0Qtgo7WczmO+7Wfc+Lrebskly1u1
-nXZwC99CcvhPQRFkpdLq/NWvEfQVOGecIKhLd1qRMkIy54Wz3zY=
------END CERTIFICATE-----
-
 subject= /C=IT/L=Milan/O=Actalis S.p.A./03358520967/CN=Actalis Authentication CA G2
 serial=07276202
 -----BEGIN CERTIFICATE-----
diff --git a/lib/enum/enum/LICENSE b/lib/deprecated_enum/deprecated_enum/LICENSE
similarity index 100%
rename from lib/enum/enum/LICENSE
rename to lib/deprecated_enum/deprecated_enum/LICENSE
diff --git a/lib/enum/enum/__init__.py b/lib/deprecated_enum/deprecated_enum/__init__.py
similarity index 100%
rename from lib/enum/enum/__init__.py
rename to lib/deprecated_enum/deprecated_enum/__init__.py
diff --git a/lib/enum/enum/test/test_enum.py b/lib/deprecated_enum/deprecated_enum/test/test_enum.py
similarity index 100%
rename from lib/enum/enum/test/test_enum.py
rename to lib/deprecated_enum/deprecated_enum/test/test_enum.py
diff --git a/lib/enum/enum/test/tools.py b/lib/deprecated_enum/deprecated_enum/test/tools.py
similarity index 100%
rename from lib/enum/enum/test/tools.py
rename to lib/deprecated_enum/deprecated_enum/test/tools.py
diff --git a/lib/distutils/distutils/__init__.py b/lib/distutils/distutils/__init__.py
index a849f1a..6e50885 100644
--- a/lib/distutils/distutils/__init__.py
+++ b/lib/distutils/distutils/__init__.py
@@ -15,5 +15,5 @@
 # Updated automatically by the Python release process.
 #
 #--start constants--
-__version__ = "2.7.2"
+__version__ = "2.7.6"
 #--end constants--
diff --git a/lib/distutils/distutils/ccompiler.py b/lib/distutils/distutils/ccompiler.py
index c2b1f6f..4907a0a 100644
--- a/lib/distutils/distutils/ccompiler.py
+++ b/lib/distutils/distutils/ccompiler.py
@@ -17,58 +17,8 @@
 from distutils.dep_util import newer_group
 from distutils.util import split_quoted, execute
 from distutils import log
-
-_sysconfig = __import__('sysconfig')
-
-def customize_compiler(compiler):
-    """Do any platform-specific customization of a CCompiler instance.
-
-    Mainly needed on Unix, so we can plug in the information that
-    varies across Unices and is stored in Python's Makefile.
-    """
-    if compiler.compiler_type == "unix":
-        (cc, cxx, opt, cflags, ccshared, ldshared, so_ext, ar, ar_flags) = \
-            _sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
-                                       'CCSHARED', 'LDSHARED', 'SO', 'AR',
-                                       'ARFLAGS')
-
-        if 'CC' in os.environ:
-            cc = os.environ['CC']
-        if 'CXX' in os.environ:
-            cxx = os.environ['CXX']
-        if 'LDSHARED' in os.environ:
-            ldshared = os.environ['LDSHARED']
-        if 'CPP' in os.environ:
-            cpp = os.environ['CPP']
-        else:
-            cpp = cc + " -E"           # not always
-        if 'LDFLAGS' in os.environ:
-            ldshared = ldshared + ' ' + os.environ['LDFLAGS']
-        if 'CFLAGS' in os.environ:
-            cflags = opt + ' ' + os.environ['CFLAGS']
-            ldshared = ldshared + ' ' + os.environ['CFLAGS']
-        if 'CPPFLAGS' in os.environ:
-            cpp = cpp + ' ' + os.environ['CPPFLAGS']
-            cflags = cflags + ' ' + os.environ['CPPFLAGS']
-            ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
-        if 'AR' in os.environ:
-            ar = os.environ['AR']
-        if 'ARFLAGS' in os.environ:
-            archiver = ar + ' ' + os.environ['ARFLAGS']
-        else:
-            archiver = ar + ' ' + ar_flags
-
-        cc_cmd = cc + ' ' + cflags
-        compiler.set_executables(
-            preprocessor=cpp,
-            compiler=cc_cmd,
-            compiler_so=cc_cmd + ' ' + ccshared,
-            compiler_cxx=cxx,
-            linker_so=ldshared,
-            linker_exe=cc,
-            archiver=archiver)
-
-        compiler.shared_lib_extension = so_ext
+# following import is for backward compatibility
+from distutils.sysconfig import customize_compiler
 
 class CCompiler:
     """Abstract base class to define the interface that must be implemented
diff --git a/lib/distutils/distutils/command/__init__.py b/lib/distutils/distutils/command/__init__.py
new file mode 100644
index 0000000..20b159f
--- /dev/null
+++ b/lib/distutils/distutils/command/__init__.py
@@ -0,0 +1,33 @@
+"""distutils.command
+
+Package containing implementation of all the standard Distutils
+commands."""
+
+__revision__ = "$Id$"
+
+__all__ = ['build',
+           'build_py',
+           'build_ext',
+           'build_clib',
+           'build_scripts',
+           'clean',
+           'install',
+           'install_lib',
+           'install_headers',
+           'install_scripts',
+           'install_data',
+           'sdist',
+           'register',
+           'bdist',
+           'bdist_dumb',
+           'bdist_rpm',
+           'bdist_wininst',
+           'upload',
+           'check',
+           # These two are reserved for future use:
+           #'bdist_sdux',
+           #'bdist_pkgtool',
+           # Note:
+           # bdist_packager is not included because it only provides
+           # an abstract base class
+          ]
diff --git a/lib/distutils/distutils/command/bdist.py b/lib/distutils/distutils/command/bdist.py
new file mode 100644
index 0000000..d7910b1
--- /dev/null
+++ b/lib/distutils/distutils/command/bdist.py
@@ -0,0 +1,146 @@
+"""distutils.command.bdist
+
+Implements the Distutils 'bdist' command (create a built [binary]
+distribution)."""
+
+__revision__ = "$Id$"
+
+import os
+
+from distutils.util import get_platform
+from distutils.core import Command
+from distutils.errors import DistutilsPlatformError, DistutilsOptionError
+
+
+def show_formats():
+    """Print list of available formats (arguments to "--format" option).
+    """
+    from distutils.fancy_getopt import FancyGetopt
+    formats = []
+    for format in bdist.format_commands:
+        formats.append(("formats=" + format, None,
+                        bdist.format_command[format][1]))
+    pretty_printer = FancyGetopt(formats)
+    pretty_printer.print_help("List of available distribution formats:")
+
+
+class bdist(Command):
+
+    description = "create a built (binary) distribution"
+
+    user_options = [('bdist-base=', 'b',
+                     "temporary directory for creating built distributions"),
+                    ('plat-name=', 'p',
+                     "platform name to embed in generated filenames "
+                     "(default: %s)" % get_platform()),
+                    ('formats=', None,
+                     "formats for distribution (comma-separated list)"),
+                    ('dist-dir=', 'd',
+                     "directory to put final built distributions in "
+                     "[default: dist]"),
+                    ('skip-build', None,
+                     "skip rebuilding everything (for testing/debugging)"),
+                    ('owner=', 'u',
+                     "Owner name used when creating a tar file"
+                     " [default: current user]"),
+                    ('group=', 'g',
+                     "Group name used when creating a tar file"
+                     " [default: current group]"),
+                   ]
+
+    boolean_options = ['skip-build']
+
+    help_options = [
+        ('help-formats', None,
+         "lists available distribution formats", show_formats),
+        ]
+
+    # The following commands do not take a format option from bdist
+    no_format_option = ('bdist_rpm',)
+
+    # This won't do in reality: will need to distinguish RPM-ish Linux,
+    # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
+    default_format = {'posix': 'gztar',
+                      'nt': 'zip',
+                      'os2': 'zip'}
+
+    # Establish the preferred order (for the --help-formats option).
+    format_commands = ['rpm', 'gztar', 'bztar', 'ztar', 'tar',
+                       'wininst', 'zip', 'msi']
+
+    # And the real information.
+    format_command = {'rpm':   ('bdist_rpm',  "RPM distribution"),
+                      'gztar': ('bdist_dumb', "gzip'ed tar file"),
+                      'bztar': ('bdist_dumb', "bzip2'ed tar file"),
+                      'ztar':  ('bdist_dumb', "compressed tar file"),
+                      'tar':   ('bdist_dumb', "tar file"),
+                      'wininst': ('bdist_wininst',
+                                  "Windows executable installer"),
+                      'zip':   ('bdist_dumb', "ZIP file"),
+                      'msi':   ('bdist_msi',  "Microsoft Installer")
+                      }
+
+
+    def initialize_options(self):
+        self.bdist_base = None
+        self.plat_name = None
+        self.formats = None
+        self.dist_dir = None
+        self.skip_build = 0
+        self.group = None
+        self.owner = None
+
+    def finalize_options(self):
+        # have to finalize 'plat_name' before 'bdist_base'
+        if self.plat_name is None:
+            if self.skip_build:
+                self.plat_name = get_platform()
+            else:
+                self.plat_name = self.get_finalized_command('build').plat_name
+
+        # 'bdist_base' -- parent of per-built-distribution-format
+        # temporary directories (eg. we'll probably have
+        # "build/bdist.<plat>/dumb", "build/bdist.<plat>/rpm", etc.)
+        if self.bdist_base is None:
+            build_base = self.get_finalized_command('build').build_base
+            self.bdist_base = os.path.join(build_base,
+                                           'bdist.' + self.plat_name)
+
+        self.ensure_string_list('formats')
+        if self.formats is None:
+            try:
+                self.formats = [self.default_format[os.name]]
+            except KeyError:
+                raise DistutilsPlatformError, \
+                      "don't know how to create built distributions " + \
+                      "on platform %s" % os.name
+
+        if self.dist_dir is None:
+            self.dist_dir = "dist"
+
+    def run(self):
+        # Figure out which sub-commands we need to run.
+        commands = []
+        for format in self.formats:
+            try:
+                commands.append(self.format_command[format][0])
+            except KeyError:
+                raise DistutilsOptionError, "invalid format '%s'" % format
+
+        # Reinitialize and run each command.
+        for i in range(len(self.formats)):
+            cmd_name = commands[i]
+            sub_cmd = self.reinitialize_command(cmd_name)
+            if cmd_name not in self.no_format_option:
+                sub_cmd.format = self.formats[i]
+
+            # passing the owner and group names for tar archiving
+            if cmd_name == 'bdist_dumb':
+                sub_cmd.owner = self.owner
+                sub_cmd.group = self.group
+
+            # If we're going to need to run this command again, tell it to
+            # keep its temporary files around so subsequent runs go faster.
+            if cmd_name in commands[i+1:]:
+                sub_cmd.keep_temp = 1
+            self.run_command(cmd_name)
diff --git a/lib/distutils/distutils/command/bdist_dumb.py b/lib/distutils/distutils/command/bdist_dumb.py
new file mode 100644
index 0000000..2f3c668
--- /dev/null
+++ b/lib/distutils/distutils/command/bdist_dumb.py
@@ -0,0 +1,133 @@
+"""distutils.command.bdist_dumb
+
+Implements the Distutils 'bdist_dumb' command (create a "dumb" built
+distribution -- i.e., just an archive to be unpacked under $prefix or
+$exec_prefix)."""
+
+__revision__ = "$Id$"
+
+import os
+
+from sysconfig import get_python_version
+
+from distutils.util import get_platform
+from distutils.core import Command
+from distutils.dir_util import remove_tree, ensure_relative
+from distutils.errors import DistutilsPlatformError
+from distutils import log
+
+class bdist_dumb (Command):
+
+    description = 'create a "dumb" built distribution'
+
+    user_options = [('bdist-dir=', 'd',
+                     "temporary directory for creating the distribution"),
+                    ('plat-name=', 'p',
+                     "platform name to embed in generated filenames "
+                     "(default: %s)" % get_platform()),
+                    ('format=', 'f',
+                     "archive format to create (tar, ztar, gztar, zip)"),
+                    ('keep-temp', 'k',
+                     "keep the pseudo-installation tree around after " +
+                     "creating the distribution archive"),
+                    ('dist-dir=', 'd',
+                     "directory to put final built distributions in"),
+                    ('skip-build', None,
+                     "skip rebuilding everything (for testing/debugging)"),
+                    ('relative', None,
+                     "build the archive using relative paths"
+                     "(default: false)"),
+                    ('owner=', 'u',
+                     "Owner name used when creating a tar file"
+                     " [default: current user]"),
+                    ('group=', 'g',
+                     "Group name used when creating a tar file"
+                     " [default: current group]"),
+                   ]
+
+    boolean_options = ['keep-temp', 'skip-build', 'relative']
+
+    default_format = { 'posix': 'gztar',
+                       'nt': 'zip',
+                       'os2': 'zip' }
+
+
+    def initialize_options (self):
+        self.bdist_dir = None
+        self.plat_name = None
+        self.format = None
+        self.keep_temp = 0
+        self.dist_dir = None
+        self.skip_build = None
+        self.relative = 0
+        self.owner = None
+        self.group = None
+
+    def finalize_options(self):
+        if self.bdist_dir is None:
+            bdist_base = self.get_finalized_command('bdist').bdist_base
+            self.bdist_dir = os.path.join(bdist_base, 'dumb')
+
+        if self.format is None:
+            try:
+                self.format = self.default_format[os.name]
+            except KeyError:
+                raise DistutilsPlatformError, \
+                      ("don't know how to create dumb built distributions " +
+                       "on platform %s") % os.name
+
+        self.set_undefined_options('bdist',
+                                   ('dist_dir', 'dist_dir'),
+                                   ('plat_name', 'plat_name'),
+                                   ('skip_build', 'skip_build'))
+
+    def run(self):
+        if not self.skip_build:
+            self.run_command('build')
+
+        install = self.reinitialize_command('install', reinit_subcommands=1)
+        install.root = self.bdist_dir
+        install.skip_build = self.skip_build
+        install.warn_dir = 0
+
+        log.info("installing to %s" % self.bdist_dir)
+        self.run_command('install')
+
+        # And make an archive relative to the root of the
+        # pseudo-installation tree.
+        archive_basename = "%s.%s" % (self.distribution.get_fullname(),
+                                      self.plat_name)
+
+        # OS/2 objects to any ":" characters in a filename (such as when
+        # a timestamp is used in a version) so change them to hyphens.
+        if os.name == "os2":
+            archive_basename = archive_basename.replace(":", "-")
+
+        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
+        if not self.relative:
+            archive_root = self.bdist_dir
+        else:
+            if (self.distribution.has_ext_modules() and
+                (install.install_base != install.install_platbase)):
+                raise DistutilsPlatformError, \
+                      ("can't make a dumb built distribution where "
+                       "base and platbase are different (%s, %s)"
+                       % (repr(install.install_base),
+                          repr(install.install_platbase)))
+            else:
+                archive_root = os.path.join(self.bdist_dir,
+                                   ensure_relative(install.install_base))
+
+        # Make the archive
+        filename = self.make_archive(pseudoinstall_root,
+                                     self.format, root_dir=archive_root,
+                                     owner=self.owner, group=self.group)
+        if self.distribution.has_ext_modules():
+            pyversion = get_python_version()
+        else:
+            pyversion = 'any'
+        self.distribution.dist_files.append(('bdist_dumb', pyversion,
+                                             filename))
+
+        if not self.keep_temp:
+            remove_tree(self.bdist_dir, dry_run=self.dry_run)
diff --git a/lib/distutils/distutils/command/bdist_msi.py b/lib/distutils/distutils/command/bdist_msi.py
new file mode 100644
index 0000000..703f873
--- /dev/null
+++ b/lib/distutils/distutils/command/bdist_msi.py
@@ -0,0 +1,742 @@
+# -*- coding: iso-8859-1 -*-
+# Copyright (C) 2005, 2006 Martin von Löwis
+# Licensed to PSF under a Contributor Agreement.
+# The bdist_wininst command proper
+# based on bdist_wininst
+"""
+Implements the bdist_msi command.
+"""
+import sys, os
+from sysconfig import get_python_version
+
+from distutils.core import Command
+from distutils.dir_util import remove_tree
+from distutils.version import StrictVersion
+from distutils.errors import DistutilsOptionError
+from distutils import log
+from distutils.util import get_platform
+
+import msilib
+from msilib import schema, sequence, text
+from msilib import Directory, Feature, Dialog, add_data
+
+class PyDialog(Dialog):
+    """Dialog class with a fixed layout: controls at the top, then a ruler,
+    then a list of buttons: back, next, cancel. Optionally a bitmap at the
+    left."""
+    def __init__(self, *args, **kw):
+        """Dialog(database, name, x, y, w, h, attributes, title, first,
+        default, cancel, bitmap=true)"""
+        Dialog.__init__(self, *args)
+        # Reserve the bottom 36 dialog units for the button row and draw a
+        # separator line ("ruler") just above it.
+        ruler = self.h - 36
+        #if kw.get("bitmap", True):
+        #    self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin")
+        self.line("BottomLine", 0, ruler, self.w, 0)
+
+    def title(self, title):
+        "Set the title text of the dialog at the top."
+        # name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix,
+        # text, in VerdanaBold10
+        self.text("Title", 15, 10, 320, 60, 0x30003,
+                  r"{\VerdanaBold10}%s" % title)
+
+    def back(self, title, next, name = "Back", active = 1):
+        """Add a back button with a given title, the tab-next button,
+        its name in the Control table, possibly initially disabled.
+
+        Return the button, so that events can be associated"""
+        if active:
+            flags = 3 # Visible|Enabled
+        else:
+            flags = 1 # Visible
+        return self.pushbutton(name, 180, self.h-27 , 56, 17, flags, title, next)
+
+    def cancel(self, title, next, name = "Cancel", active = 1):
+        """Add a cancel button with a given title, the tab-next button,
+        its name in the Control table, possibly initially disabled.
+
+        Return the button, so that events can be associated"""
+        if active:
+            flags = 3 # Visible|Enabled
+        else:
+            flags = 1 # Visible
+        return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next)
+
+    def next(self, title, next, name = "Next", active = 1):
+        """Add a Next button with a given title, the tab-next button,
+        its name in the Control table, possibly initially disabled.
+
+        Return the button, so that events can be associated"""
+        if active:
+            flags = 3 # Visible|Enabled
+        else:
+            flags = 1 # Visible
+        return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next)
+
+    def xbutton(self, name, title, next, xpos):
+        """Add a button with a given title, the tab-next button,
+        its name in the Control table, giving its x position; the
+        y-position is aligned with the other buttons.
+
+        Return the button, so that events can be associated"""
+        # xpos is a fraction of the dialog width; 28 centers the 56-unit-wide
+        # button on that x position.
+        return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next)
+
+class bdist_msi (Command):
+
+    description = "create a Microsoft Installer (.msi) binary distribution"
+
+    # Command-line options: (long name, short name, help text).  A long name
+    # ending in '=' takes an argument.
+    user_options = [('bdist-dir=', None,
+                     "temporary directory for creating the distribution"),
+                    ('plat-name=', 'p',
+                     "platform name to embed in generated filenames "
+                     "(default: %s)" % get_platform()),
+                    ('keep-temp', 'k',
+                     "keep the pseudo-installation tree around after " +
+                     "creating the distribution archive"),
+                    ('target-version=', None,
+                     "require a specific python version" +
+                     " on the target system"),
+                    ('no-target-compile', 'c',
+                     "do not compile .py to .pyc on the target system"),
+                    ('no-target-optimize', 'o',
+                     "do not compile .py to .pyo (optimized)"
+                     "on the target system"),
+                    ('dist-dir=', 'd',
+                     "directory to put final built distributions in"),
+                    ('skip-build', None,
+                     "skip rebuilding everything (for testing/debugging)"),
+                    ('install-script=', None,
+                     "basename of installation script to be run after"
+                     "installation or before deinstallation"),
+                    ('pre-install-script=', None,
+                     "Fully qualified filename of a script to be run before "
+                     "any files are installed.  This script need not be in the "
+                     "distribution"),
+                   ]
+
+    # Options that are boolean flags (take no argument).
+    boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
+                       'skip-build']
+
+    # Python versions the generated installer can target; 'X' denotes a
+    # Python installation at a user-supplied ("other") location.
+    all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4',
+                    '2.5', '2.6', '2.7', '2.8', '2.9',
+                    '3.0', '3.1', '3.2', '3.3', '3.4',
+                    '3.5', '3.6', '3.7', '3.8', '3.9']
+    other_version = 'X'
+
+    def initialize_options (self):
+        """Set all options to their pre-finalization defaults."""
+        self.bdist_dir = None
+        self.plat_name = None
+        self.keep_temp = 0
+        self.no_target_compile = 0
+        self.no_target_optimize = 0
+        self.target_version = None
+        self.dist_dir = None
+        self.skip_build = None
+        self.install_script = None
+        self.pre_install_script = None
+        # List of Python versions the installer will offer; filled in by
+        # finalize_options().
+        self.versions = None
+
+    def finalize_options (self):
+        """Validate options and derive the final option values.
+
+        Raises DistutilsOptionError for incompatible option combinations.
+        """
+        # Inherit skip_build from the parent 'bdist' command if unset here.
+        self.set_undefined_options('bdist', ('skip_build', 'skip_build'))
+
+        if self.bdist_dir is None:
+            bdist_base = self.get_finalized_command('bdist').bdist_base
+            self.bdist_dir = os.path.join(bdist_base, 'msi')
+
+        short_version = get_python_version()
+        # A distribution with extension modules is tied to one Python
+        # version, so default target_version to the running interpreter's.
+        if (not self.target_version) and self.distribution.has_ext_modules():
+            self.target_version = short_version
+
+        if self.target_version:
+            self.versions = [self.target_version]
+            # Extension modules built by this interpreter can only target
+            # its own version, unless the build step is skipped entirely.
+            if not self.skip_build and self.distribution.has_ext_modules()\
+               and self.target_version != short_version:
+                raise DistutilsOptionError, \
+                      "target version can only be %s, or the '--skip-build'" \
+                      " option must be specified" % (short_version,)
+        else:
+            # Pure-Python package: offer every known Python version.
+            self.versions = list(self.all_versions)
+
+        self.set_undefined_options('bdist',
+                                   ('dist_dir', 'dist_dir'),
+                                   ('plat_name', 'plat_name'),
+                                   )
+
+        if self.pre_install_script:
+            raise DistutilsOptionError, "the pre-install-script feature is not yet implemented"
+
+        # The install script must be one of the distribution's scripts so
+        # that it is actually packaged into the installer.
+        if self.install_script:
+            for script in self.distribution.scripts:
+                if self.install_script == os.path.basename(script):
+                    break
+            else:
+                raise DistutilsOptionError, \
+                      "install_script '%s' not found in scripts" % \
+                      self.install_script
+        # MSI file key of the install script; set later by add_files().
+        self.install_script_key = None
+    # finalize_options()
+
+
+    def run (self):
+        """Build the project, install it into a staging tree, and package
+        that tree into a .msi installer in dist_dir."""
+        if not self.skip_build:
+            self.run_command('build')
+
+        # Perform a throwaway installation into bdist_dir; the resulting
+        # tree is what gets packed into the MSI.
+        install = self.reinitialize_command('install', reinit_subcommands=1)
+        install.prefix = self.bdist_dir
+        install.skip_build = self.skip_build
+        install.warn_dir = 0
+
+        install_lib = self.reinitialize_command('install_lib')
+        # we do not want to include pyc or pyo files
+        install_lib.compile = 0
+        install_lib.optimize = 0
+
+        if self.distribution.has_ext_modules():
+            # If we are building an installer for a Python version other
+            # than the one we are currently running, then we need to ensure
+            # our build_lib reflects the other Python version rather than ours.
+            # Note that for target_version!=sys.version, we must have skipped the
+            # build step, so there is no issue with enforcing the build of this
+            # version.
+            target_version = self.target_version
+            if not target_version:
+                assert self.skip_build, "Should have already checked this"
+                target_version = sys.version[0:3]
+            plat_specifier = ".%s-%s" % (self.plat_name, target_version)
+            build = self.get_finalized_command('build')
+            build.build_lib = os.path.join(build.build_base,
+                                           'lib' + plat_specifier)
+
+        log.info("installing to %s", self.bdist_dir)
+        install.ensure_finalized()
+
+        # avoid warning of 'install_lib' about installing
+        # into a directory not in sys.path
+        sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
+
+        install.run()
+
+        del sys.path[0]
+
+        self.mkpath(self.dist_dir)
+        fullname = self.distribution.get_fullname()
+        installer_name = self.get_installer_filename(fullname)
+        installer_name = os.path.abspath(installer_name)
+        # msilib cannot overwrite an existing database, so remove any
+        # stale installer first.
+        if os.path.exists(installer_name): os.unlink(installer_name)
+
+        metadata = self.distribution.metadata
+        author = metadata.author
+        if not author:
+            author = metadata.maintainer
+        if not author:
+            author = "UNKNOWN"
+        version = metadata.get_version()
+        # ProductVersion must be strictly numeric
+        # XXX need to deal with prerelease versions
+        sversion = "%d.%d.%d" % StrictVersion(version).version
+        # Prefix ProductName with Python x.y, so that
+        # it sorts together with the other Python packages
+        # in Add-Remove-Programs (APR)
+        fullname = self.distribution.get_fullname()
+        if self.target_version:
+            product_name = "Python %s %s" % (self.target_version, fullname)
+        else:
+            product_name = "Python %s" % (fullname)
+        self.db = msilib.init_database(installer_name, schema,
+                product_name, msilib.gen_uuid(),
+                sversion, author)
+        msilib.add_tables(self.db, sequence)
+        # Optional ARP (Add/Remove Programs) metadata properties.
+        props = [('DistVersion', version)]
+        email = metadata.author_email or metadata.maintainer_email
+        if email:
+            props.append(("ARPCONTACT", email))
+        if metadata.url:
+            props.append(("ARPURLINFOABOUT", metadata.url))
+        if props:
+            add_data(self.db, 'Property', props)
+
+        # Populate the MSI tables: Python-location searches, payload files,
+        # install scripts, and the user-interface dialogs.
+        self.add_find_python()
+        self.add_files()
+        self.add_scripts()
+        self.add_ui()
+        self.db.Commit()
+
+        if hasattr(self.distribution, 'dist_files'):
+            tup = 'bdist_msi', self.target_version or 'any', fullname
+            self.distribution.dist_files.append(tup)
+
+        if not self.keep_temp:
+            remove_tree(self.bdist_dir, dry_run=self.dry_run)
+
+    def add_files(self):
+        """Add the staged installation tree to the MSI, once per target
+        Python version, sharing file payloads via DuplicateFile entries."""
+        db = self.db
+        cab = msilib.CAB("distfiles")
+        rootdir = os.path.abspath(self.bdist_dir)
+
+        root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir")
+        f = Feature(db, "Python", "Python", "Everything",
+                    0, 1, directory="TARGETDIR")
+
+        # One (feature, directory) pair per selectable Python version; the
+        # 'X' ("other") entry covers a user-supplied Python location.
+        items = [(f, root, '')]
+        for version in self.versions + [self.other_version]:
+            target = "TARGETDIR" + version
+            name = default = "Python" + version
+            desc = "Everything"
+            if version is self.other_version:
+                title = "Python from another location"
+                # Level 2: hidden unless explicitly selected.
+                level = 2
+            else:
+                title = "Python %s from registry" % version
+                level = 1
+            f = Feature(db, name, title, desc, 1, level, directory=target)
+            dir = Directory(db, cab, root, rootdir, target, default)
+            items.append((f, dir, version))
+        db.Commit()
+
+        # Walk the staged tree once per feature.  The first feature to see a
+        # file stores it (in 'seen'); later features reference the stored
+        # copy through the DuplicateFile table instead of re-adding it.
+        seen = {}
+        for feature, dir, version in items:
+            todo = [dir]
+            while todo:
+                dir = todo.pop()
+                for file in os.listdir(dir.absolute):
+                    afile = os.path.join(dir.absolute, file)
+                    if os.path.isdir(afile):
+                        short = "%s|%s" % (dir.make_short(file), file)
+                        default = file + version
+                        newdir = Directory(db, cab, dir, file, default, short)
+                        todo.append(newdir)
+                    else:
+                        if not dir.component:
+                            dir.start_component(dir.logical, feature, 0)
+                        if afile not in seen:
+                            key = seen[afile] = dir.add_file(file)
+                            # Remember the install script's file key so
+                            # add_scripts() can run it via [#key].
+                            if file==self.install_script:
+                                if self.install_script_key:
+                                    raise DistutilsOptionError(
+                                          "Multiple files with name %s" % file)
+                                self.install_script_key = '[#%s]' % key
+                        else:
+                            key = seen[afile]
+                            add_data(self.db, "DuplicateFile",
+                                [(key + version, dir.component, key, None, dir.logical)])
+            db.Commit()
+        cab.commit(db)
+
+    def add_find_python(self):
+        """Adds code to the installer to compute the location of Python.
+
+        Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the
+        registry for each version of Python.
+
+        Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined,
+        else from PYTHON.MACHINE.X.Y.
+
+        Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe"""
+
+        # Sequence numbers for the custom actions; each version consumes 4
+        # slots, and all must stay below 500 (before CostInitialize et al.).
+        start = 402
+        for ver in self.versions:
+            install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver
+            machine_reg = "python.machine." + ver
+            user_reg = "python.user." + ver
+            machine_prop = "PYTHON.MACHINE." + ver
+            user_prop = "PYTHON.USER." + ver
+            machine_action = "PythonFromMachine" + ver
+            user_action = "PythonFromUser" + ver
+            exe_action = "PythonExe" + ver
+            target_dir_prop = "TARGETDIR" + ver
+            exe_prop = "PYTHON" + ver
+            if msilib.Win64:
+                # type: msidbLocatorTypeRawValue + msidbLocatorType64bit
+                Type = 2+16
+            else:
+                Type = 2
+            # Look up the InstallPath key in HKLM (root 2) and HKCU (root 1).
+            add_data(self.db, "RegLocator",
+                    [(machine_reg, 2, install_path, None, Type),
+                     (user_reg, 1, install_path, None, Type)])
+            add_data(self.db, "AppSearch",
+                    [(machine_prop, machine_reg),
+                     (user_prop, user_reg)])
+            # Custom action type 51 (+256 = in-script only once) sets a
+            # property from a formatted value.  The per-user action runs
+            # second, so it overrides the per-machine result when present.
+            add_data(self.db, "CustomAction",
+                    [(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"),
+                     (user_action, 51+256, target_dir_prop, "[" + user_prop + "]"),
+                     (exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"),
+                    ])
+            add_data(self.db, "InstallExecuteSequence",
+                    [(machine_action, machine_prop, start),
+                     (user_action, user_prop, start + 1),
+                     (exe_action, None, start + 2),
+                    ])
+            add_data(self.db, "InstallUISequence",
+                    [(machine_action, machine_prop, start),
+                     (user_action, user_prop, start + 1),
+                     (exe_action, None, start + 2),
+                    ])
+            # Disable the feature (level 0) when this Python version was not
+            # found on the target machine.
+            add_data(self.db, "Condition",
+                    [("Python" + ver, 0, "NOT TARGETDIR" + ver)])
+            start += 4
+            assert start < 500
+
+    def add_scripts(self):
+        """Register custom actions that run the install script (per target
+        Python version) and, if implemented, the pre-install script."""
+        if self.install_script:
+            start = 6800
+            for ver in self.versions + [self.other_version]:
+                install_action = "install_script." + ver
+                exe_prop = "PYTHON" + ver
+                # Custom action type 50: run the executable named by
+                # exe_prop with the install script file as its argument;
+                # condition '&Python%s=3' means the feature is being
+                # installed locally.
+                add_data(self.db, "CustomAction",
+                        [(install_action, 50, exe_prop, self.install_script_key)])
+                add_data(self.db, "InstallExecuteSequence",
+                        [(install_action, "&Python%s=3" % ver, start)])
+                start += 1
+        # XXX pre-install scripts are currently refused in finalize_options()
+        #     but if this feature is completed, it will also need to add
+        #     entries for each version as the above code does
+        if self.pre_install_script:
+            scriptfn = os.path.join(self.bdist_dir, "preinstall.bat")
+            f = open(scriptfn, "w")
+            # The batch file will be executed with [PYTHON], so that %1
+            # is the path to the Python interpreter; %0 will be the path
+            # of the batch file.
+            # rem ="""
+            # %1 %0
+            # exit
+            # """
+            # <actual script>
+            f.write('rem ="""\n%1 %0\nexit\n"""\n')
+            f.write(open(self.pre_install_script).read())
+            f.close()
+            # Store the batch file in the Binary table and run it before
+            # files are installed (sequence 450, first install only).
+            add_data(self.db, "Binary",
+                [("PreInstall", msilib.Binary(scriptfn))
+                ])
+            add_data(self.db, "CustomAction",
+                [("PreInstall", 2, "PreInstall", None)
+                ])
+            add_data(self.db, "InstallExecuteSequence",
+                    [("PreInstall", "NOT Installed", 450)])
+
+
+    def add_ui(self):
+        db = self.db
+        x = y = 50
+        w = 370
+        h = 300
+        title = "[ProductName] Setup"
+
+        # see "Dialog Style Bits"
+        modal = 3      # visible | modal
+        modeless = 1   # visible
+
+        # UI customization properties
+        add_data(db, "Property",
+                 # See "DefaultUIFont Property"
+                 [("DefaultUIFont", "DlgFont8"),
+                  # See "ErrorDialog Style Bit"
+                  ("ErrorDialog", "ErrorDlg"),
+                  ("Progress1", "Install"),   # modified in maintenance type dlg
+                  ("Progress2", "installs"),
+                  ("MaintenanceForm_Action", "Repair"),
+                  # possible values: ALL, JUSTME
+                  ("WhichUsers", "ALL")
+                 ])
+
+        # Fonts, see "TextStyle Table"
+        add_data(db, "TextStyle",
+                 [("DlgFont8", "Tahoma", 9, None, 0),
+                  ("DlgFontBold8", "Tahoma", 8, None, 1), #bold
+                  ("VerdanaBold10", "Verdana", 10, None, 1),
+                  ("VerdanaRed9", "Verdana", 9, 255, 0),
+                 ])
+
+        # UI Sequences, see "InstallUISequence Table", "Using a Sequence Table"
+        # Numbers indicate sequence; see sequence.py for how these action integrate
+        add_data(db, "InstallUISequence",
+                 [("PrepareDlg", "Not Privileged or Windows9x or Installed", 140),
+                  ("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141),
+                  # In the user interface, assume all-users installation if privileged.
+                  ("SelectFeaturesDlg", "Not Installed", 1230),
+                  # XXX no support for resume installations yet
+                  #("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240),
+                  ("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250),
+                  ("ProgressDlg", None, 1280)])
+
+        add_data(db, 'ActionText', text.ActionText)
+        add_data(db, 'UIText', text.UIText)
+        #####################################################################
+        # Standard dialogs: FatalError, UserExit, ExitDialog
+        fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title,
+                     "Finish", "Finish", "Finish")
+        fatal.title("[ProductName] Installer ended prematurely")
+        fatal.back("< Back", "Finish", active = 0)
+        fatal.cancel("Cancel", "Back", active = 0)
+        fatal.text("Description1", 15, 70, 320, 80, 0x30003,
+                   "[ProductName] setup ended prematurely because of an error.  Your system has not been modified.  To install this program at a later time, please run the installation again.")
+        fatal.text("Description2", 15, 155, 320, 20, 0x30003,
+                   "Click the Finish button to exit the Installer.")
+        c=fatal.next("Finish", "Cancel", name="Finish")
+        c.event("EndDialog", "Exit")
+
+        user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title,
+                     "Finish", "Finish", "Finish")
+        user_exit.title("[ProductName] Installer was interrupted")
+        user_exit.back("< Back", "Finish", active = 0)
+        user_exit.cancel("Cancel", "Back", active = 0)
+        user_exit.text("Description1", 15, 70, 320, 80, 0x30003,
+                   "[ProductName] setup was interrupted.  Your system has not been modified.  "
+                   "To install this program at a later time, please run the installation again.")
+        user_exit.text("Description2", 15, 155, 320, 20, 0x30003,
+                   "Click the Finish button to exit the Installer.")
+        c = user_exit.next("Finish", "Cancel", name="Finish")
+        c.event("EndDialog", "Exit")
+
+        exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title,
+                             "Finish", "Finish", "Finish")
+        exit_dialog.title("Completing the [ProductName] Installer")
+        exit_dialog.back("< Back", "Finish", active = 0)
+        exit_dialog.cancel("Cancel", "Back", active = 0)
+        exit_dialog.text("Description", 15, 235, 320, 20, 0x30003,
+                   "Click the Finish button to exit the Installer.")
+        c = exit_dialog.next("Finish", "Cancel", name="Finish")
+        c.event("EndDialog", "Return")
+
+        #####################################################################
+        # Required dialog: FilesInUse, ErrorDlg
+        inuse = PyDialog(db, "FilesInUse",
+                         x, y, w, h,
+                         19,                # KeepModeless|Modal|Visible
+                         title,
+                         "Retry", "Retry", "Retry", bitmap=False)
+        inuse.text("Title", 15, 6, 200, 15, 0x30003,
+                   r"{\DlgFontBold8}Files in Use")
+        inuse.text("Description", 20, 23, 280, 20, 0x30003,
+               "Some files that need to be updated are currently in use.")
+        inuse.text("Text", 20, 55, 330, 50, 3,
+                   "The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.")
+        inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess",
+                      None, None, None)
+        c=inuse.back("Exit", "Ignore", name="Exit")
+        c.event("EndDialog", "Exit")
+        c=inuse.next("Ignore", "Retry", name="Ignore")
+        c.event("EndDialog", "Ignore")
+        c=inuse.cancel("Retry", "Exit", name="Retry")
+        c.event("EndDialog","Retry")
+
+        # See "Error Dialog". See "ICE20" for the required names of the controls.
+        error = Dialog(db, "ErrorDlg",
+                       50, 10, 330, 101,
+                       65543,       # Error|Minimize|Modal|Visible
+                       title,
+                       "ErrorText", None, None)
+        error.text("ErrorText", 50,9,280,48,3, "")
+        #error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None)
+        error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo")
+        error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes")
+        error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort")
+        error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel")
+        error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore")
+        error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk")
+        error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry")
+
+        #####################################################################
+        # Global "Query Cancel" dialog
+        cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title,
+                        "No", "No", "No")
+        cancel.text("Text", 48, 15, 194, 30, 3,
+                    "Are you sure you want to cancel [ProductName] installation?")
+        #cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None,
+        #               "py.ico", None, None)
+        c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No")
+        c.event("EndDialog", "Exit")
+
+        c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes")
+        c.event("EndDialog", "Return")
+
+        #####################################################################
+        # Global "Wait for costing" dialog
+        costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title,
+                         "Return", "Return", "Return")
+        costing.text("Text", 48, 15, 194, 30, 3,
+                     "Please wait while the installer finishes determining your disk space requirements.")
+        c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None)
+        c.event("EndDialog", "Exit")
+
+        #####################################################################
+        # Preparation dialog: no user input except cancellation
+        prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title,
+                        "Cancel", "Cancel", "Cancel")
+        prep.text("Description", 15, 70, 320, 40, 0x30003,
+                  "Please wait while the Installer prepares to guide you through the installation.")
+        prep.title("Welcome to the [ProductName] Installer")
+        c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...")
+        c.mapping("ActionText", "Text")
+        c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None)
+        c.mapping("ActionData", "Text")
+        prep.back("Back", None, active=0)
+        prep.next("Next", None, active=0)
+        c=prep.cancel("Cancel", None)
+        c.event("SpawnDialog", "CancelDlg")
+
+        #####################################################################
+        # Feature (Python directory) selection
+        seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title,
+                        "Next", "Next", "Cancel")
+        seldlg.title("Select Python Installations")
+
+        seldlg.text("Hint", 15, 30, 300, 20, 3,
+                    "Select the Python locations where %s should be installed."
+                    % self.distribution.get_fullname())
+
+        seldlg.back("< Back", None, active=0)
+        c = seldlg.next("Next >", "Cancel")
+        order = 1
+        c.event("[TARGETDIR]", "[SourceDir]", ordering=order)
+        for version in self.versions + [self.other_version]:
+            order += 1
+            c.event("[TARGETDIR]", "[TARGETDIR%s]" % version,
+                    "FEATURE_SELECTED AND &Python%s=3" % version,
+                    ordering=order)
+        c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1)
+        c.event("EndDialog", "Return", ordering=order + 2)
+        c = seldlg.cancel("Cancel", "Features")
+        c.event("SpawnDialog", "CancelDlg")
+
+        c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3,
+                           "FEATURE", None, "PathEdit", None)
+        c.event("[FEATURE_SELECTED]", "1")
+        ver = self.other_version
+        install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver
+        dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver
+
+        c = seldlg.text("Other", 15, 200, 300, 15, 3,
+                        "Provide an alternate Python location")
+        c.condition("Enable", install_other_cond)
+        c.condition("Show", install_other_cond)
+        c.condition("Disable", dont_install_other_cond)
+        c.condition("Hide", dont_install_other_cond)
+
+        c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1,
+                           "TARGETDIR" + ver, None, "Next", None)
+        c.condition("Enable", install_other_cond)
+        c.condition("Show", install_other_cond)
+        c.condition("Disable", dont_install_other_cond)
+        c.condition("Hide", dont_install_other_cond)
+
+        #####################################################################
+        # Disk cost
+        cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title,
+                        "OK", "OK", "OK", bitmap=False)
+        cost.text("Title", 15, 6, 200, 15, 0x30003,
+                  "{\DlgFontBold8}Disk Space Requirements")
+        cost.text("Description", 20, 20, 280, 20, 0x30003,
+                  "The disk space required for the installation of the selected features.")
+        cost.text("Text", 20, 53, 330, 60, 3,
+                  "The highlighted volumes (if any) do not have enough disk space "
+              "available for the currently selected features.  You can either "
+              "remove some files from the highlighted volumes, or choose to "
+              "install less features onto local drive(s), or select different "
+              "destination drive(s).")
+        cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223,
+                     None, "{120}{70}{70}{70}{70}", None, None)
+        cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return")
+
+        #####################################################################
+        # WhichUsers Dialog. Only available on NT, and for privileged users.
+        # This must be run before FindRelatedProducts, because that will
+        # take into account whether the previous installation was per-user
+        # or per-machine. We currently don't support going back to this
+        # dialog after "Next" was selected; to support this, we would need to
+        # find how to reset the ALLUSERS property, and how to re-run
+        # FindRelatedProducts.
+        # On Windows9x, the ALLUSERS property is ignored on the command line
+        # and in the Property table, but installer fails according to the documentation
+        # if a dialog attempts to set ALLUSERS.
+        whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title,
+                            "AdminInstall", "Next", "Cancel")
+        whichusers.title("Select whether to install [ProductName] for all users of this computer.")
+        # A radio group with two options: allusers, justme
+        g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3,
+                                  "WhichUsers", "", "Next")
+        g.add("ALL", 0, 5, 150, 20, "Install for all users")
+        g.add("JUSTME", 0, 25, 150, 20, "Install just for me")
+
+        whichusers.back("Back", None, active=0)
+
+        c = whichusers.next("Next >", "Cancel")
+        c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1)
+        c.event("EndDialog", "Return", ordering = 2)
+
+        c = whichusers.cancel("Cancel", "AdminInstall")
+        c.event("SpawnDialog", "CancelDlg")
+
+        #####################################################################
+        # Installation Progress dialog (modeless)
+        progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title,
+                            "Cancel", "Cancel", "Cancel", bitmap=False)
+        progress.text("Title", 20, 15, 200, 15, 0x30003,
+                      "{\DlgFontBold8}[Progress1] [ProductName]")
+        progress.text("Text", 35, 65, 300, 30, 3,
+                      "Please wait while the Installer [Progress2] [ProductName]. "
+                      "This may take several minutes.")
+        progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:")
+
+        c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...")
+        c.mapping("ActionText", "Text")
+
+        #c=progress.text("ActionData", 35, 140, 300, 20, 3, None)
+        #c.mapping("ActionData", "Text")
+
+        c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537,
+                           None, "Progress done", None, None)
+        c.mapping("SetProgress", "Progress")
+
+        progress.back("< Back", "Next", active=False)
+        progress.next("Next >", "Cancel", active=False)
+        progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg")
+
+        ###################################################################
+        # Maintenance type: repair/uninstall
+        maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title,
+                         "Next", "Next", "Cancel")
+        maint.title("Welcome to the [ProductName] Setup Wizard")
+        maint.text("BodyText", 15, 63, 330, 42, 3,
+                   "Select whether you want to repair or remove [ProductName].")
+        g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3,
+                            "MaintenanceForm_Action", "", "Next")
+        #g.add("Change", 0, 0, 200, 17, "&Change [ProductName]")
+        g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]")
+        g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]")
+
+        maint.back("< Back", None, active=False)
+        c=maint.next("Finish", "Cancel")
+        # Change installation: Change progress dialog to "Change", then ask
+        # for feature selection
+        #c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1)
+        #c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2)
+
+        # Reinstall: Change progress dialog to "Repair", then invoke reinstall
+        # Also set list of reinstalled features to "ALL"
+        c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5)
+        c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6)
+        c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7)
+        c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8)
+
+        # Uninstall: Change progress to "Remove", then invoke uninstall
+        # Also set list of removed features to "ALL"
+        c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11)
+        c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12)
+        c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13)
+        c.event("Remove", "ALL", 'MaintenanceForm_Action="Remove"', 14)
+
+        # Close dialog when maintenance action scheduled
+        c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20)
+        #c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21)
+
+        maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg")
+
+    def get_installer_filename(self, fullname):
+        # Factored out to allow overriding in subclasses
+        if self.target_version:
+            base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name,
+                                            self.target_version)
+        else:
+            base_name = "%s.%s.msi" % (fullname, self.plat_name)
+        installer_name = os.path.join(self.dist_dir, base_name)
+        return installer_name
diff --git a/lib/distutils/distutils/command/bdist_rpm.py b/lib/distutils/distutils/command/bdist_rpm.py
new file mode 100644
index 0000000..5958243
--- /dev/null
+++ b/lib/distutils/distutils/command/bdist_rpm.py
@@ -0,0 +1,587 @@
+"""distutils.command.bdist_rpm
+
+Implements the Distutils 'bdist_rpm' command (create RPM source and binary
+distributions)."""
+
+__revision__ = "$Id$"
+
import sys
import os
import string

from distutils.core import Command
from distutils.debug import DEBUG
from distutils.errors import (DistutilsOptionError, DistutilsPlatformError,
                              DistutilsFileError, DistutilsExecError)
from distutils.file_util import write_file
from distutils.sysconfig import get_python_version
from distutils import log
+
class bdist_rpm (Command):
    """Create source and/or binary RPM distributions.

    All the RPM-specific meta-data (group, vendor, changelog, build
    scripts, ...) is exposed as command options so that packagers can
    supply it in setup.cfg or on the command line rather than in the
    setup script itself.
    """

    description = "create an RPM distribution"

    # (long-option, short-option, help-text) triples; options ending in
    # '=' take an argument.
    user_options = [
        ('bdist-base=', None,
         "base directory for creating built distributions"),
        ('rpm-base=', None,
         "base directory for creating RPMs (defaults to \"rpm\" under "
         "--bdist-base; must be specified for RPM 2)"),
        ('dist-dir=', 'd',
         "directory to put final RPM files in "
         "(and .spec files if --spec-only)"),
        ('python=', None,
         "path to Python interpreter to hard-code in the .spec file "
         "(default: \"python\")"),
        ('fix-python', None,
         "hard-code the exact path to the current Python interpreter in "
         "the .spec file"),
        ('spec-only', None,
         "only regenerate spec file"),
        ('source-only', None,
         "only generate source RPM"),
        ('binary-only', None,
         "only generate binary RPM"),
        ('use-bzip2', None,
         "use bzip2 instead of gzip to create source distribution"),

        # More meta-data: too RPM-specific to put in the setup script,
        # but needs to go in the .spec file -- so we make these options
        # to "bdist_rpm".  The idea is that packagers would put this
        # info in setup.cfg, although they are of course free to
        # supply it on the command line.
        ('distribution-name=', None,
         "name of the (Linux) distribution to which this "
         "RPM applies (*not* the name of the module distribution!)"),
        ('group=', None,
         "package classification [default: \"Development/Libraries\"]"),
        ('release=', None,
         "RPM release number"),
        ('serial=', None,
         "RPM serial number"),
        ('vendor=', None,
         "RPM \"vendor\" (eg. \"Joe Blow <[email protected]>\") "
         "[default: maintainer or author from setup script]"),
        ('packager=', None,
         "RPM packager (eg. \"Jane Doe <[email protected]>\")"
         "[default: vendor]"),
        ('doc-files=', None,
         "list of documentation files (space or comma-separated)"),
        ('changelog=', None,
         "RPM changelog"),
        ('icon=', None,
         "name of icon file"),
        ('provides=', None,
         "capabilities provided by this package"),
        ('requires=', None,
         "capabilities required by this package"),
        ('conflicts=', None,
         "capabilities which conflict with this package"),
        ('build-requires=', None,
         "capabilities required to build this package"),
        ('obsoletes=', None,
         "capabilities made obsolete by this package"),
        ('no-autoreq', None,
         "do not automatically calculate dependencies"),

        # Actions to take when building RPM
        ('keep-temp', 'k',
         "don't clean up RPM build directory"),
        ('no-keep-temp', None,
         "clean up RPM build directory [default]"),
        ('use-rpm-opt-flags', None,
         "compile with RPM_OPT_FLAGS when building from source RPM"),
        ('no-rpm-opt-flags', None,
         "do not pass any RPM CFLAGS to compiler"),
        ('rpm3-mode', None,
         "RPM 3 compatibility mode (default)"),
        ('rpm2-mode', None,
         "RPM 2 compatibility mode"),

        # Add the hooks necessary for specifying custom scripts
        ('prep-script=', None,
         "Specify a script for the PREP phase of RPM building"),
        ('build-script=', None,
         "Specify a script for the BUILD phase of RPM building"),

        ('pre-install=', None,
         "Specify a script for the pre-INSTALL phase of RPM building"),
        ('install-script=', None,
         "Specify a script for the INSTALL phase of RPM building"),
        ('post-install=', None,
         "Specify a script for the post-INSTALL phase of RPM building"),

        ('pre-uninstall=', None,
         "Specify a script for the pre-UNINSTALL phase of RPM building"),
        ('post-uninstall=', None,
         "Specify a script for the post-UNINSTALL phase of RPM building"),

        ('clean-script=', None,
         "Specify a script for the CLEAN phase of RPM building"),

        ('verify-script=', None,
         "Specify a script for the VERIFY phase of the RPM build"),

        # Allow a packager to explicitly force an architecture
        ('force-arch=', None,
         "Force an architecture onto the RPM build process"),

        ('quiet', 'q',
         "Run the INSTALL phase of RPM building in quiet mode"),
        ]

    # Options that are flags (no argument).
    boolean_options = ['keep-temp', 'use-rpm-opt-flags', 'rpm3-mode',
                       'no-autoreq', 'quiet']

    # Maps each "negative" option to the boolean option it turns off.
    negative_opt = {'no-keep-temp': 'keep-temp',
                    'no-rpm-opt-flags': 'use-rpm-opt-flags',
                    'rpm2-mode': 'rpm3-mode'}
+
+
+    def initialize_options (self):
+        self.bdist_base = None
+        self.rpm_base = None
+        self.dist_dir = None
+        self.python = None
+        self.fix_python = None
+        self.spec_only = None
+        self.binary_only = None
+        self.source_only = None
+        self.use_bzip2 = None
+
+        self.distribution_name = None
+        self.group = None
+        self.release = None
+        self.serial = None
+        self.vendor = None
+        self.packager = None
+        self.doc_files = None
+        self.changelog = None
+        self.icon = None
+
+        self.prep_script = None
+        self.build_script = None
+        self.install_script = None
+        self.clean_script = None
+        self.verify_script = None
+        self.pre_install = None
+        self.post_install = None
+        self.pre_uninstall = None
+        self.post_uninstall = None
+        self.prep = None
+        self.provides = None
+        self.requires = None
+        self.conflicts = None
+        self.build_requires = None
+        self.obsoletes = None
+
+        self.keep_temp = 0
+        self.use_rpm_opt_flags = 1
+        self.rpm3_mode = 1
+        self.no_autoreq = 0
+
+        self.force_arch = None
+        self.quiet = 0
+
+    # initialize_options()
+
+
+    def finalize_options (self):
+        self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
+        if self.rpm_base is None:
+            if not self.rpm3_mode:
+                raise DistutilsOptionError, \
+                      "you must specify --rpm-base in RPM 2 mode"
+            self.rpm_base = os.path.join(self.bdist_base, "rpm")
+
+        if self.python is None:
+            if self.fix_python:
+                self.python = sys.executable
+            else:
+                self.python = "python"
+        elif self.fix_python:
+            raise DistutilsOptionError, \
+                  "--python and --fix-python are mutually exclusive options"
+
+        if os.name != 'posix':
+            raise DistutilsPlatformError, \
+                  ("don't know how to create RPM "
+                   "distributions on platform %s" % os.name)
+        if self.binary_only and self.source_only:
+            raise DistutilsOptionError, \
+                  "cannot supply both '--source-only' and '--binary-only'"
+
+        # don't pass CFLAGS to pure python distributions
+        if not self.distribution.has_ext_modules():
+            self.use_rpm_opt_flags = 0
+
+        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
+        self.finalize_package_data()
+
+    # finalize_options()
+
+    def finalize_package_data (self):
+        self.ensure_string('group', "Development/Libraries")
+        self.ensure_string('vendor',
+                           "%s <%s>" % (self.distribution.get_contact(),
+                                        self.distribution.get_contact_email()))
+        self.ensure_string('packager')
+        self.ensure_string_list('doc_files')
+        if isinstance(self.doc_files, list):
+            for readme in ('README', 'README.txt'):
+                if os.path.exists(readme) and readme not in self.doc_files:
+                    self.doc_files.append(readme)
+
+        self.ensure_string('release', "1")
+        self.ensure_string('serial')   # should it be an int?
+
+        self.ensure_string('distribution_name')
+
+        self.ensure_string('changelog')
+          # Format changelog correctly
+        self.changelog = self._format_changelog(self.changelog)
+
+        self.ensure_filename('icon')
+
+        self.ensure_filename('prep_script')
+        self.ensure_filename('build_script')
+        self.ensure_filename('install_script')
+        self.ensure_filename('clean_script')
+        self.ensure_filename('verify_script')
+        self.ensure_filename('pre_install')
+        self.ensure_filename('post_install')
+        self.ensure_filename('pre_uninstall')
+        self.ensure_filename('post_uninstall')
+
+        # XXX don't forget we punted on summaries and descriptions -- they
+        # should be handled here eventually!
+
+        # Now *this* is some meta-data that belongs in the setup script...
+        self.ensure_string_list('provides')
+        self.ensure_string_list('requires')
+        self.ensure_string_list('conflicts')
+        self.ensure_string_list('build_requires')
+        self.ensure_string_list('obsoletes')
+
+        self.ensure_string('force_arch')
+    # finalize_package_data ()
+
+
+    def run (self):
+
+        if DEBUG:
+            print "before _get_package_data():"
+            print "vendor =", self.vendor
+            print "packager =", self.packager
+            print "doc_files =", self.doc_files
+            print "changelog =", self.changelog
+
+        # make directories
+        if self.spec_only:
+            spec_dir = self.dist_dir
+            self.mkpath(spec_dir)
+        else:
+            rpm_dir = {}
+            for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'):
+                rpm_dir[d] = os.path.join(self.rpm_base, d)
+                self.mkpath(rpm_dir[d])
+            spec_dir = rpm_dir['SPECS']
+
+        # Spec file goes into 'dist_dir' if '--spec-only specified',
+        # build/rpm.<plat> otherwise.
+        spec_path = os.path.join(spec_dir,
+                                 "%s.spec" % self.distribution.get_name())
+        self.execute(write_file,
+                     (spec_path,
+                      self._make_spec_file()),
+                     "writing '%s'" % spec_path)
+
+        if self.spec_only: # stop if requested
+            return
+
+        # Make a source distribution and copy to SOURCES directory with
+        # optional icon.
+        saved_dist_files = self.distribution.dist_files[:]
+        sdist = self.reinitialize_command('sdist')
+        if self.use_bzip2:
+            sdist.formats = ['bztar']
+        else:
+            sdist.formats = ['gztar']
+        self.run_command('sdist')
+        self.distribution.dist_files = saved_dist_files
+
+        source = sdist.get_archive_files()[0]
+        source_dir = rpm_dir['SOURCES']
+        self.copy_file(source, source_dir)
+
+        if self.icon:
+            if os.path.exists(self.icon):
+                self.copy_file(self.icon, source_dir)
+            else:
+                raise DistutilsFileError, \
+                      "icon file '%s' does not exist" % self.icon
+
+
+        # build package
+        log.info("building RPMs")
+        rpm_cmd = ['rpm']
+        if os.path.exists('/usr/bin/rpmbuild') or \
+           os.path.exists('/bin/rpmbuild'):
+            rpm_cmd = ['rpmbuild']
+
+        if self.source_only: # what kind of RPMs?
+            rpm_cmd.append('-bs')
+        elif self.binary_only:
+            rpm_cmd.append('-bb')
+        else:
+            rpm_cmd.append('-ba')
+        if self.rpm3_mode:
+            rpm_cmd.extend(['--define',
+                             '_topdir %s' % os.path.abspath(self.rpm_base)])
+        if not self.keep_temp:
+            rpm_cmd.append('--clean')
+
+        if self.quiet:
+            rpm_cmd.append('--quiet')
+
+        rpm_cmd.append(spec_path)
+        # Determine the binary rpm names that should be built out of this spec
+        # file
+        # Note that some of these may not be really built (if the file
+        # list is empty)
+        nvr_string = "%{name}-%{version}-%{release}"
+        src_rpm = nvr_string + ".src.rpm"
+        non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm"
+        q_cmd = r"rpm -q --qf '%s %s\n' --specfile '%s'" % (
+            src_rpm, non_src_rpm, spec_path)
+
+        out = os.popen(q_cmd)
+        try:
+            binary_rpms = []
+            source_rpm = None
+            while 1:
+                line = out.readline()
+                if not line:
+                    break
+                l = string.split(string.strip(line))
+                assert(len(l) == 2)
+                binary_rpms.append(l[1])
+                # The source rpm is named after the first entry in the spec file
+                if source_rpm is None:
+                    source_rpm = l[0]
+
+            status = out.close()
+            if status:
+                raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd))
+
+        finally:
+            out.close()
+
+        self.spawn(rpm_cmd)
+
+        if not self.dry_run:
+            if self.distribution.has_ext_modules():
+                pyversion = get_python_version()
+            else:
+                pyversion = 'any'
+
+            if not self.binary_only:
+                srpm = os.path.join(rpm_dir['SRPMS'], source_rpm)
+                assert(os.path.exists(srpm))
+                self.move_file(srpm, self.dist_dir)
+                filename = os.path.join(self.dist_dir, source_rpm)
+                self.distribution.dist_files.append(
+                    ('bdist_rpm', pyversion, filename))
+
+            if not self.source_only:
+                for rpm in binary_rpms:
+                    rpm = os.path.join(rpm_dir['RPMS'], rpm)
+                    if os.path.exists(rpm):
+                        self.move_file(rpm, self.dist_dir)
+                        filename = os.path.join(self.dist_dir,
+                                                os.path.basename(rpm))
+                        self.distribution.dist_files.append(
+                            ('bdist_rpm', pyversion, filename))
+    # run()
+
+    def _dist_path(self, path):
+        return os.path.join(self.dist_dir, os.path.basename(path))
+
+    def _make_spec_file(self):
+        """Generate the text of an RPM spec file and return it as a
+        list of strings (one per line).
+        """
+        # definitions and headers
+        spec_file = [
+            '%define name ' + self.distribution.get_name(),
+            '%define version ' + self.distribution.get_version().replace('-','_'),
+            '%define unmangled_version ' + self.distribution.get_version(),
+            '%define release ' + self.release.replace('-','_'),
+            '',
+            'Summary: ' + self.distribution.get_description(),
+            ]
+
+        # put locale summaries into spec file
+        # XXX not supported for now (hard to put a dictionary
+        # in a config file -- arg!)
+        #for locale in self.summaries.keys():
+        #    spec_file.append('Summary(%s): %s' % (locale,
+        #                                          self.summaries[locale]))
+
+        spec_file.extend([
+            'Name: %{name}',
+            'Version: %{version}',
+            'Release: %{release}',])
+
+        # XXX yuck! this filename is available from the "sdist" command,
+        # but only after it has run: and we create the spec file before
+        # running "sdist", in case of --spec-only.
+        if self.use_bzip2:
+            spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2')
+        else:
+            spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz')
+
+        spec_file.extend([
+            'License: ' + self.distribution.get_license(),
+            'Group: ' + self.group,
+            'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot',
+            'Prefix: %{_prefix}', ])
+
+        if not self.force_arch:
+            # noarch if no extension modules
+            if not self.distribution.has_ext_modules():
+                spec_file.append('BuildArch: noarch')
+        else:
+            spec_file.append( 'BuildArch: %s' % self.force_arch )
+
+        for field in ('Vendor',
+                      'Packager',
+                      'Provides',
+                      'Requires',
+                      'Conflicts',
+                      'Obsoletes',
+                      ):
+            val = getattr(self, string.lower(field))
+            if isinstance(val, list):
+                spec_file.append('%s: %s' % (field, string.join(val)))
+            elif val is not None:
+                spec_file.append('%s: %s' % (field, val))
+
+
+        if self.distribution.get_url() != 'UNKNOWN':
+            spec_file.append('Url: ' + self.distribution.get_url())
+
+        if self.distribution_name:
+            spec_file.append('Distribution: ' + self.distribution_name)
+
+        if self.build_requires:
+            spec_file.append('BuildRequires: ' +
+                             string.join(self.build_requires))
+
+        if self.icon:
+            spec_file.append('Icon: ' + os.path.basename(self.icon))
+
+        if self.no_autoreq:
+            spec_file.append('AutoReq: 0')
+
+        spec_file.extend([
+            '',
+            '%description',
+            self.distribution.get_long_description()
+            ])
+
+        # put locale descriptions into spec file
+        # XXX again, suppressed because config file syntax doesn't
+        # easily support this ;-(
+        #for locale in self.descriptions.keys():
+        #    spec_file.extend([
+        #        '',
+        #        '%description -l ' + locale,
+        #        self.descriptions[locale],
+        #        ])
+
+        # rpm scripts
+        # figure out default build script
+        def_setup_call = "%s %s" % (self.python,os.path.basename(sys.argv[0]))
+        def_build = "%s build" % def_setup_call
+        if self.use_rpm_opt_flags:
+            def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build
+
+        # insert contents of files
+
+        # XXX this is kind of misleading: user-supplied options are files
+        # that we open and interpolate into the spec file, but the defaults
+        # are just text that we drop in as-is.  Hmmm.
+
+        install_cmd = ('%s install -O1 --root=$RPM_BUILD_ROOT '
+                       '--record=INSTALLED_FILES') % def_setup_call
+
+        script_options = [
+            ('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"),
+            ('build', 'build_script', def_build),
+            ('install', 'install_script', install_cmd),
+            ('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"),
+            ('verifyscript', 'verify_script', None),
+            ('pre', 'pre_install', None),
+            ('post', 'post_install', None),
+            ('preun', 'pre_uninstall', None),
+            ('postun', 'post_uninstall', None),
+        ]
+
+        for (rpm_opt, attr, default) in script_options:
+            # Insert contents of file referred to, if no file is referred to
+            # use 'default' as contents of script
+            val = getattr(self, attr)
+            if val or default:
+                spec_file.extend([
+                    '',
+                    '%' + rpm_opt,])
+                if val:
+                    spec_file.extend(string.split(open(val, 'r').read(), '\n'))
+                else:
+                    spec_file.append(default)
+
+
+        # files section
+        spec_file.extend([
+            '',
+            '%files -f INSTALLED_FILES',
+            '%defattr(-,root,root)',
+            ])
+
+        if self.doc_files:
+            spec_file.append('%doc ' + string.join(self.doc_files))
+
+        if self.changelog:
+            spec_file.extend([
+                '',
+                '%changelog',])
+            spec_file.extend(self.changelog)
+
+        return spec_file
+
+    # _make_spec_file ()
+
+    def _format_changelog(self, changelog):
+        """Format the changelog correctly and convert it to a list of strings
+        """
+        if not changelog:
+            return changelog
+        new_changelog = []
+        for line in string.split(string.strip(changelog), '\n'):
+            line = string.strip(line)
+            if line[0] == '*':
+                new_changelog.extend(['', line])
+            elif line[0] == '-':
+                new_changelog.append(line)
+            else:
+                new_changelog.append('  ' + line)
+
+        # strip trailing newline inserted by first changelog entry
+        if not new_changelog[0]:
+            del new_changelog[0]
+
+        return new_changelog
+
+    # _format_changelog()
+
+# class bdist_rpm
diff --git a/lib/distutils/distutils/command/bdist_wininst.py b/lib/distutils/distutils/command/bdist_wininst.py
new file mode 100644
index 0000000..aa9383a
--- /dev/null
+++ b/lib/distutils/distutils/command/bdist_wininst.py
@@ -0,0 +1,368 @@
+"""distutils.command.bdist_wininst
+
+Implements the Distutils 'bdist_wininst' command: create a windows installer
+exe-program."""
+
+__revision__ = "$Id$"
+
+import sys
+import os
+import string
+
+from sysconfig import get_python_version
+
+from distutils.core import Command
+from distutils.dir_util import remove_tree
+from distutils.errors import DistutilsOptionError, DistutilsPlatformError
+from distutils import log
+from distutils.util import get_platform
+
class bdist_wininst (Command):
    """Create a self-extracting executable installer for MS Windows."""

    description = "create an executable installer for MS Windows"

    # (long-option, short-option, help-text) triples; options ending in
    # '=' take an argument.
    user_options = [('bdist-dir=', None,
                     "temporary directory for creating the distribution"),
                    ('plat-name=', 'p',
                     "platform name to embed in generated filenames "
                     "(default: %s)" % get_platform()),
                    ('keep-temp', 'k',
                     "keep the pseudo-installation tree around after " +
                     "creating the distribution archive"),
                    ('target-version=', None,
                     "require a specific python version" +
                     " on the target system"),
                    ('no-target-compile', 'c',
                     "do not compile .py to .pyc on the target system"),
                    ('no-target-optimize', 'o',
                     "do not compile .py to .pyo (optimized)"
                     "on the target system"),
                    ('dist-dir=', 'd',
                     "directory to put final built distributions in"),
                    ('bitmap=', 'b',
                     "bitmap to use for the installer instead of python-powered logo"),
                    ('title=', 't',
                     "title to display on the installer background instead of default"),
                    ('skip-build', None,
                     "skip rebuilding everything (for testing/debugging)"),
                    ('install-script=', None,
                     "basename of installation script to be run after"
                     "installation or before deinstallation"),
                    ('pre-install-script=', None,
                     "Fully qualified filename of a script to be run before "
                     "any files are installed.  This script need not be in the "
                     "distribution"),
                    ('user-access-control=', None,
                     "specify Vista's UAC handling - 'none'/default=no "
                     "handling, 'auto'=use UAC if target Python installed for "
                     "all users, 'force'=always use UAC"),
                   ]

    # Options that are flags (no argument).
    boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
                       'skip-build']
+
+    def initialize_options (self):
+        self.bdist_dir = None
+        self.plat_name = None
+        self.keep_temp = 0
+        self.no_target_compile = 0
+        self.no_target_optimize = 0
+        self.target_version = None
+        self.dist_dir = None
+        self.bitmap = None
+        self.title = None
+        self.skip_build = None
+        self.install_script = None
+        self.pre_install_script = None
+        self.user_access_control = None
+
+    # initialize_options()
+
+
+    def finalize_options (self):
+        self.set_undefined_options('bdist', ('skip_build', 'skip_build'))
+
+        if self.bdist_dir is None:
+            if self.skip_build and self.plat_name:
+                # If build is skipped and plat_name is overridden, bdist will
+                # not see the correct 'plat_name' - so set that up manually.
+                bdist = self.distribution.get_command_obj('bdist')
+                bdist.plat_name = self.plat_name
+                # next the command will be initialized using that name
+            bdist_base = self.get_finalized_command('bdist').bdist_base
+            self.bdist_dir = os.path.join(bdist_base, 'wininst')
+
+        if not self.target_version:
+            self.target_version = ""
+
+        if not self.skip_build and self.distribution.has_ext_modules():
+            short_version = get_python_version()
+            if self.target_version and self.target_version != short_version:
+                raise DistutilsOptionError, \
+                      "target version can only be %s, or the '--skip-build'" \
+                      " option must be specified" % (short_version,)
+            self.target_version = short_version
+
+        self.set_undefined_options('bdist',
+                                   ('dist_dir', 'dist_dir'),
+                                   ('plat_name', 'plat_name'),
+                                  )
+
+        if self.install_script:
+            for script in self.distribution.scripts:
+                if self.install_script == os.path.basename(script):
+                    break
+            else:
+                raise DistutilsOptionError, \
+                      "install_script '%s' not found in scripts" % \
+                      self.install_script
+    # finalize_options()
+
+
+    def run (self):
+        """Build the distribution (unless --skip-build), install it into a
+        temporary tree under self.bdist_dir, zip that tree, and wrap the
+        archive in a wininst self-extracting installer executable.
+        """
+        # Distributions with compiled code can only be produced on win32
+        # itself; pure-Python distributions may be cross-built elsewhere.
+        if (sys.platform != "win32" and
+            (self.distribution.has_ext_modules() or
+             self.distribution.has_c_libraries())):
+            raise DistutilsPlatformError \
+                  ("distribution contains extensions and/or C libraries; "
+                   "must be compiled on a Windows 32 platform")
+
+        if not self.skip_build:
+            self.run_command('build')
+
+        # Perform a throw-away installation rooted at bdist_dir; the zip
+        # archive is produced from this pseudo-installation tree.
+        install = self.reinitialize_command('install', reinit_subcommands=1)
+        install.root = self.bdist_dir
+        install.skip_build = self.skip_build
+        install.warn_dir = 0
+        install.plat_name = self.plat_name
+
+        install_lib = self.reinitialize_command('install_lib')
+        # we do not want to include pyc or pyo files
+        install_lib.compile = 0
+        install_lib.optimize = 0
+
+        if self.distribution.has_ext_modules():
+            # If we are building an installer for a Python version other
+            # than the one we are currently running, then we need to ensure
+            # our build_lib reflects the other Python version rather than ours.
+            # Note that for target_version!=sys.version, we must have skipped the
+            # build step, so there is no issue with enforcing the build of this
+            # version.
+            target_version = self.target_version
+            if not target_version:
+                assert self.skip_build, "Should have already checked this"
+                target_version = sys.version[0:3]
+            plat_specifier = ".%s-%s" % (self.plat_name, target_version)
+            build = self.get_finalized_command('build')
+            build.build_lib = os.path.join(build.build_base,
+                                           'lib' + plat_specifier)
+
+        # Use a custom scheme for the zip-file, because we have to decide
+        # at installation time which scheme to use.
+        for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'):
+            value = string.upper(key)
+            if key == 'headers':
+                value = value + '/Include/$dist_name'
+            setattr(install,
+                    'install_' + key,
+                    value)
+
+        log.info("installing to %s", self.bdist_dir)
+        install.ensure_finalized()
+
+        # avoid warning of 'install_lib' about installing
+        # into a directory not in sys.path
+        sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
+
+        install.run()
+
+        del sys.path[0]
+
+        # And make an archive relative to the root of the
+        # pseudo-installation tree.
+        # NOTE(review): mktemp() is race-prone; a mkstemp()-based scheme
+        # would be safer, though make_archive needs a pathname, not an fd.
+        from tempfile import mktemp
+        archive_basename = mktemp()
+        fullname = self.distribution.get_fullname()
+        arcname = self.make_archive(archive_basename, "zip",
+                                    root_dir=self.bdist_dir)
+        # create an exe containing the zip-file
+        self.create_exe(arcname, fullname, self.bitmap)
+        if self.distribution.has_ext_modules():
+            pyversion = get_python_version()
+        else:
+            # pure-Python installers work with any Python version
+            pyversion = 'any'
+        self.distribution.dist_files.append(('bdist_wininst', pyversion,
+                                             self.get_installer_filename(fullname)))
+        # remove the zip-file again
+        log.debug("removing temporary file '%s'", arcname)
+        os.remove(arcname)
+
+        if not self.keep_temp:
+            remove_tree(self.bdist_dir, dry_run=self.dry_run)
+
+    # run()
+
+    def get_inidata (self):
+        """Build and return the INI-format configuration text embedded in
+        the installer: a [metadata] section with the distribution metadata
+        and a [Setup] section controlling the installer runtime.
+        """
+        # Return data describing the installation.
+
+        lines = []
+        metadata = self.distribution.metadata
+
+        # Write the [metadata] section.
+        lines.append("[metadata]")
+
+        # 'info' will be displayed in the installer's dialog box,
+        # describing the items to be installed.
+        info = (metadata.long_description or '') + '\n'
+
+        # Escape newline characters
+        def escape(s):
+            return string.replace(s, "\n", "\\n")
+
+        for name in ["author", "author_email", "description", "maintainer",
+                     "maintainer_email", "name", "url", "version"]:
+            data = getattr(metadata, name, "")
+            if data:
+                # Non-empty fields go both into the human-readable 'info'
+                # text and into a key=value metadata entry.
+                info = info + ("\n    %s: %s" % \
+                               (string.capitalize(name), escape(data)))
+                lines.append("%s=%s" % (name, escape(data)))
+
+        # The [setup] section contains entries controlling
+        # the installer runtime.
+        lines.append("\n[Setup]")
+        if self.install_script:
+            lines.append("install_script=%s" % self.install_script)
+        lines.append("info=%s" % escape(info))
+        lines.append("target_compile=%d" % (not self.no_target_compile))
+        lines.append("target_optimize=%d" % (not self.no_target_optimize))
+        if self.target_version:
+            lines.append("target_version=%s" % self.target_version)
+        if self.user_access_control:
+            lines.append("user_access_control=%s" % self.user_access_control)
+
+        title = self.title or self.distribution.get_fullname()
+        lines.append("title=%s" % escape(title))
+        import time
+        import distutils
+        build_info = "Built %s with distutils-%s" % \
+                     (time.ctime(time.time()), distutils.__version__)
+        lines.append("build_info=%s" % build_info)
+        return string.join(lines, "\n")
+
+    # get_inidata()
+
+    def create_exe (self, arcname, fullname, bitmap=None):
+        import struct
+
+        self.mkpath(self.dist_dir)
+
+        cfgdata = self.get_inidata()
+
+        installer_name = self.get_installer_filename(fullname)
+        self.announce("creating %s" % installer_name)
+
+        if bitmap:
+            bitmapdata = open(bitmap, "rb").read()
+            bitmaplen = len(bitmapdata)
+        else:
+            bitmaplen = 0
+
+        file = open(installer_name, "wb")
+        file.write(self.get_exe_bytes())
+        if bitmap:
+            file.write(bitmapdata)
+
+        # Convert cfgdata from unicode to ascii, mbcs encoded
+        try:
+            unicode
+        except NameError:
+            pass
+        else:
+            if isinstance(cfgdata, unicode):
+                cfgdata = cfgdata.encode("mbcs")
+
+        # Append the pre-install script
+        cfgdata = cfgdata + "\0"
+        if self.pre_install_script:
+            script_data = open(self.pre_install_script, "r").read()
+            cfgdata = cfgdata + script_data + "\n\0"
+        else:
+            # empty pre-install script
+            cfgdata = cfgdata + "\0"
+        file.write(cfgdata)
+
+        # The 'magic number' 0x1234567B is used to make sure that the
+        # binary layout of 'cfgdata' is what the wininst.exe binary
+        # expects.  If the layout changes, increment that number, make
+        # the corresponding changes to the wininst.exe sources, and
+        # recompile them.
+        header = struct.pack("<iii",
+                             0x1234567B,       # tag
+                             len(cfgdata),     # length
+                             bitmaplen,        # number of bytes in bitmap
+                             )
+        file.write(header)
+        file.write(open(arcname, "rb").read())
+
+    # create_exe()
+
+    def get_installer_filename(self, fullname):
+        # Factored out to allow overriding in subclasses
+        if self.target_version:
+            # if we create an installer for a specific python version,
+            # it's better to include this in the name
+            installer_name = os.path.join(self.dist_dir,
+                                          "%s.%s-py%s.exe" %
+                                           (fullname, self.plat_name, self.target_version))
+        else:
+            installer_name = os.path.join(self.dist_dir,
+                                          "%s.%s.exe" % (fullname, self.plat_name))
+        return installer_name
+    # get_installer_filename()
+
+    def get_exe_bytes (self):
+        """Return the raw bytes of the matching wininst-x.y.exe stub.
+
+        The stub must have been linked with the same MSVC version used to
+        build the *target* Python, so the build version is derived from
+        --target-version (using hard-coded knowledge of old releases) or,
+        for the current interpreter, from msvccompiler.get_build_version().
+        """
+        from distutils.msvccompiler import get_build_version
+        # If a target-version other than the current version has been
+        # specified, then using the MSVC version from *this* build is no good.
+        # Without actually finding and executing the target version and parsing
+        # its sys.version, we just hard-code our knowledge of old versions.
+        # NOTE: Possible alternative is to allow "--target-version" to
+        # specify a Python executable rather than a simple version string.
+        # We can then execute this program to obtain any info we need, such
+        # as the real sys.version string for the build.
+        cur_version = get_python_version()
+        if self.target_version and self.target_version != cur_version:
+            # If the target version is *later* than us, then we assume they
+            # use what we use
+            # string compares seem wrong, but are what sysconfig.py itself uses
+            if self.target_version > cur_version:
+                bv = get_build_version()
+            else:
+                if self.target_version < "2.4":
+                    bv = 6.0
+                else:
+                    bv = 7.1
+        else:
+            # for current version - use authoritative check.
+            bv = get_build_version()
+
+        # wininst-x.y.exe is in the same directory as this file
+        directory = os.path.dirname(__file__)
+        # we must use a wininst-x.y.exe built with the same C compiler
+        # used for python.  XXX What about mingw, borland, and so on?
+
+        # if plat_name starts with "win" but is not "win32"
+        # we want to strip "win" and leave the rest (e.g. -amd64)
+        # for all other cases, we don't want any suffix
+        if self.plat_name != 'win32' and self.plat_name[:3] == 'win':
+            sfix = self.plat_name[3:]
+        else:
+            sfix = ''
+
+        filename = os.path.join(directory, "wininst-%.1f%s.exe" % (bv, sfix))
+        f = open(filename, "rb")
+        try:
+            return f.read()
+        finally:
+            f.close()
+# class bdist_wininst
diff --git a/lib/distutils/distutils/command/build.py b/lib/distutils/distutils/command/build.py
new file mode 100644
index 0000000..f84bf35
--- /dev/null
+++ b/lib/distutils/distutils/command/build.py
@@ -0,0 +1,147 @@
+"""distutils.command.build
+
+Implements the Distutils 'build' command."""
+
+__revision__ = "$Id$"
+
+import sys, os
+
+from distutils.util import get_platform
+from distutils.core import Command
+from distutils.errors import DistutilsOptionError
+
+def show_compilers():
+    from distutils.ccompiler import show_compilers
+    show_compilers()
+
+class build(Command):
+
+    description = "build everything needed to install"
+
+    user_options = [
+        ('build-base=', 'b',
+         "base directory for build library"),
+        ('build-purelib=', None,
+         "build directory for platform-neutral distributions"),
+        ('build-platlib=', None,
+         "build directory for platform-specific distributions"),
+        ('build-lib=', None,
+         "build directory for all distribution (defaults to either " +
+         "build-purelib or build-platlib"),
+        ('build-scripts=', None,
+         "build directory for scripts"),
+        ('build-temp=', 't',
+         "temporary build directory"),
+        ('plat-name=', 'p',
+         "platform name to build for, if supported "
+         "(default: %s)" % get_platform()),
+        ('compiler=', 'c',
+         "specify the compiler type"),
+        ('debug', 'g',
+         "compile extensions and libraries with debugging information"),
+        ('force', 'f',
+         "forcibly build everything (ignore file timestamps)"),
+        ('executable=', 'e',
+         "specify final destination interpreter path (build.py)"),
+        ]
+
+    boolean_options = ['debug', 'force']
+
+    help_options = [
+        ('help-compiler', None,
+         "list available compilers", show_compilers),
+        ]
+
+    def initialize_options(self):
+        self.build_base = 'build'
+        # these are decided only after 'build_base' has its final value
+        # (unless overridden by the user or client)
+        self.build_purelib = None
+        self.build_platlib = None
+        self.build_lib = None
+        self.build_temp = None
+        self.build_scripts = None
+        self.compiler = None
+        self.plat_name = None
+        self.debug = None
+        self.force = 0
+        self.executable = None
+
+    def finalize_options(self):
+        if self.plat_name is None:
+            self.plat_name = get_platform()
+        else:
+            # plat-name only supported for windows (other platforms are
+            # supported via ./configure flags, if at all).  Avoid misleading
+            # other platforms.
+            if os.name != 'nt':
+                raise DistutilsOptionError(
+                            "--plat-name only supported on Windows (try "
+                            "using './configure --help' on your platform)")
+
+        plat_specifier = ".%s-%s" % (self.plat_name, sys.version[0:3])
+
+        # Make it so Python 2.x and Python 2.x with --with-pydebug don't
+        # share the same build directories. Doing so confuses the build
+        # process for C modules
+        if hasattr(sys, 'gettotalrefcount'):
+            plat_specifier += '-pydebug'
+
+        # 'build_purelib' and 'build_platlib' just default to 'lib' and
+        # 'lib.<plat>' under the base build directory.  We only use one of
+        # them for a given distribution, though --
+        if self.build_purelib is None:
+            self.build_purelib = os.path.join(self.build_base, 'lib')
+        if self.build_platlib is None:
+            self.build_platlib = os.path.join(self.build_base,
+                                              'lib' + plat_specifier)
+
+        # 'build_lib' is the actual directory that we will use for this
+        # particular module distribution -- if user didn't supply it, pick
+        # one of 'build_purelib' or 'build_platlib'.
+        if self.build_lib is None:
+            if self.distribution.ext_modules:
+                self.build_lib = self.build_platlib
+            else:
+                self.build_lib = self.build_purelib
+
+        # 'build_temp' -- temporary directory for compiler turds,
+        # "build/temp.<plat>"
+        if self.build_temp is None:
+            self.build_temp = os.path.join(self.build_base,
+                                           'temp' + plat_specifier)
+        if self.build_scripts is None:
+            self.build_scripts = os.path.join(self.build_base,
+                                              'scripts-' + sys.version[0:3])
+
+        if self.executable is None:
+            self.executable = os.path.normpath(sys.executable)
+
+    def run(self):
+        # Run all relevant sub-commands.  This will be some subset of:
+        #  - build_py      - pure Python modules
+        #  - build_clib    - standalone C libraries
+        #  - build_ext     - Python extensions
+        #  - build_scripts - (Python) scripts
+        for cmd_name in self.get_sub_commands():
+            self.run_command(cmd_name)
+
+    # -- Predicates for the sub-command list ---------------------------
+
+    def has_pure_modules (self):
+        return self.distribution.has_pure_modules()
+
+    def has_c_libraries (self):
+        return self.distribution.has_c_libraries()
+
+    def has_ext_modules (self):
+        return self.distribution.has_ext_modules()
+
+    def has_scripts (self):
+        return self.distribution.has_scripts()
+
+    sub_commands = [('build_py',      has_pure_modules),
+                    ('build_clib',    has_c_libraries),
+                    ('build_ext',     has_ext_modules),
+                    ('build_scripts', has_scripts),
+                   ]
diff --git a/lib/distutils/distutils/command/build_clib.py b/lib/distutils/distutils/command/build_clib.py
new file mode 100644
index 0000000..205587e
--- /dev/null
+++ b/lib/distutils/distutils/command/build_clib.py
@@ -0,0 +1,209 @@
+"""distutils.command.build_clib
+
+Implements the Distutils 'build_clib' command, to build a C/C++ library
+that is included in the module distribution and needed by an extension
+module."""
+
+__revision__ = "$Id$"
+
+
+# XXX this module has *lots* of code ripped-off quite transparently from
+# build_ext.py -- not surprisingly really, as the work required to build
+# a static library from a collection of C source files is not really all
+# that different from what's required to build a shared object file from
+# a collection of C source files.  Nevertheless, I haven't done the
+# necessary refactoring to account for the overlap in code between the
+# two modules, mainly because a number of subtle details changed in the
+# cut 'n paste.  Sigh.
+
+import os
+from distutils.core import Command
+from distutils.errors import DistutilsSetupError
+from distutils.sysconfig import customize_compiler
+from distutils import log
+
+def show_compilers():
+    from distutils.ccompiler import show_compilers
+    show_compilers()
+
+
+class build_clib(Command):
+
+    description = "build C/C++ libraries used by Python extensions"
+
+    user_options = [
+        ('build-clib=', 'b',
+         "directory to build C/C++ libraries to"),
+        ('build-temp=', 't',
+         "directory to put temporary build by-products"),
+        ('debug', 'g',
+         "compile with debugging information"),
+        ('force', 'f',
+         "forcibly build everything (ignore file timestamps)"),
+        ('compiler=', 'c',
+         "specify the compiler type"),
+        ]
+
+    boolean_options = ['debug', 'force']
+
+    help_options = [
+        ('help-compiler', None,
+         "list available compilers", show_compilers),
+        ]
+
+    def initialize_options(self):
+        self.build_clib = None
+        self.build_temp = None
+
+        # List of libraries to build
+        self.libraries = None
+
+        # Compilation options for all libraries
+        self.include_dirs = None
+        self.define = None
+        self.undef = None
+        self.debug = None
+        self.force = 0
+        self.compiler = None
+
+
+    def finalize_options(self):
+        # This might be confusing: both build-clib and build-temp default
+        # to build-temp as defined by the "build" command.  This is because
+        # I think that C libraries are really just temporary build
+        # by-products, at least from the point of view of building Python
+        # extensions -- but I want to keep my options open.
+        self.set_undefined_options('build',
+                                   ('build_temp', 'build_clib'),
+                                   ('build_temp', 'build_temp'),
+                                   ('compiler', 'compiler'),
+                                   ('debug', 'debug'),
+                                   ('force', 'force'))
+
+        self.libraries = self.distribution.libraries
+        if self.libraries:
+            self.check_library_list(self.libraries)
+
+        if self.include_dirs is None:
+            self.include_dirs = self.distribution.include_dirs or []
+        if isinstance(self.include_dirs, str):
+            self.include_dirs = self.include_dirs.split(os.pathsep)
+
+        # XXX same as for build_ext -- what about 'self.define' and
+        # 'self.undef' ?
+
+    def run(self):
+        if not self.libraries:
+            return
+
+        # Yech -- this is cut 'n pasted from build_ext.py!
+        from distutils.ccompiler import new_compiler
+        self.compiler = new_compiler(compiler=self.compiler,
+                                     dry_run=self.dry_run,
+                                     force=self.force)
+        customize_compiler(self.compiler)
+
+        if self.include_dirs is not None:
+            self.compiler.set_include_dirs(self.include_dirs)
+        if self.define is not None:
+            # 'define' option is a list of (name,value) tuples
+            for (name,value) in self.define:
+                self.compiler.define_macro(name, value)
+        if self.undef is not None:
+            for macro in self.undef:
+                self.compiler.undefine_macro(macro)
+
+        self.build_libraries(self.libraries)
+
+
+    def check_library_list(self, libraries):
+        """Ensure that the list of libraries is valid.
+
+        `library` is presumably provided as a command option 'libraries'.
+        This method checks that it is a list of 2-tuples, where the tuples
+        are (library_name, build_info_dict).
+
+        Raise DistutilsSetupError if the structure is invalid anywhere;
+        just returns otherwise.
+        """
+        if not isinstance(libraries, list):
+            raise DistutilsSetupError, \
+                  "'libraries' option must be a list of tuples"
+
+        for lib in libraries:
+            if not isinstance(lib, tuple) and len(lib) != 2:
+                raise DistutilsSetupError, \
+                      "each element of 'libraries' must a 2-tuple"
+
+            name, build_info = lib
+
+            if not isinstance(name, str):
+                raise DistutilsSetupError, \
+                      "first element of each tuple in 'libraries' " + \
+                      "must be a string (the library name)"
+            if '/' in name or (os.sep != '/' and os.sep in name):
+                raise DistutilsSetupError, \
+                      ("bad library name '%s': " +
+                       "may not contain directory separators") % \
+                      lib[0]
+
+            if not isinstance(build_info, dict):
+                raise DistutilsSetupError, \
+                      "second element of each tuple in 'libraries' " + \
+                      "must be a dictionary (build info)"
+
+    def get_library_names(self):
+        # Assume the library list is valid -- 'check_library_list()' is
+        # called from 'finalize_options()', so it should be!
+        if not self.libraries:
+            return None
+
+        lib_names = []
+        for (lib_name, build_info) in self.libraries:
+            lib_names.append(lib_name)
+        return lib_names
+
+
+    def get_source_files(self):
+        self.check_library_list(self.libraries)
+        filenames = []
+        for (lib_name, build_info) in self.libraries:
+            sources = build_info.get('sources')
+            if sources is None or not isinstance(sources, (list, tuple)):
+                raise DistutilsSetupError, \
+                      ("in 'libraries' option (library '%s'), "
+                       "'sources' must be present and must be "
+                       "a list of source filenames") % lib_name
+
+            filenames.extend(sources)
+        return filenames
+
+    def build_libraries(self, libraries):
+        for (lib_name, build_info) in libraries:
+            sources = build_info.get('sources')
+            if sources is None or not isinstance(sources, (list, tuple)):
+                raise DistutilsSetupError, \
+                      ("in 'libraries' option (library '%s'), " +
+                       "'sources' must be present and must be " +
+                       "a list of source filenames") % lib_name
+            sources = list(sources)
+
+            log.info("building '%s' library", lib_name)
+
+            # First, compile the source code to object files in the library
+            # directory.  (This should probably change to putting object
+            # files in a temporary build directory.)
+            macros = build_info.get('macros')
+            include_dirs = build_info.get('include_dirs')
+            objects = self.compiler.compile(sources,
+                                            output_dir=self.build_temp,
+                                            macros=macros,
+                                            include_dirs=include_dirs,
+                                            debug=self.debug)
+
+            # Now "link" the object files together into a static library.
+            # (On Unix at least, this isn't really linking -- it just
+            # builds an archive.  Whatever.)
+            self.compiler.create_static_lib(objects, lib_name,
+                                            output_dir=self.build_clib,
+                                            debug=self.debug)
diff --git a/lib/distutils/distutils/command/build_ext.py b/lib/distutils/distutils/command/build_ext.py
new file mode 100644
index 0000000..f0a7d4c
--- /dev/null
+++ b/lib/distutils/distutils/command/build_ext.py
@@ -0,0 +1,766 @@
+"""distutils.command.build_ext
+
+Implements the Distutils 'build_ext' command, for building extension
+modules (currently limited to C extensions, should accommodate C++
+extensions ASAP)."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id$"
+
+import sys, os, string, re
+from types import *
+from site import USER_BASE, USER_SITE
+from distutils.core import Command
+from distutils.errors import *
+from distutils.sysconfig import customize_compiler, get_python_version
+from distutils.dep_util import newer_group
+from distutils.extension import Extension
+from distutils.util import get_platform
+from distutils import log
+
+if os.name == 'nt':
+    from distutils.msvccompiler import get_build_version
+    MSVC_VERSION = int(get_build_version())
+
+# An extension name is just a dot-separated list of Python NAMEs (ie.
+# the same as a fully-qualified module name).
+extension_name_re = re.compile \
+    (r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
+
+
+def show_compilers ():
+    from distutils.ccompiler import show_compilers
+    show_compilers()
+
+
+class build_ext (Command):
+
+    description = "build C/C++ extensions (compile/link to build directory)"
+
+    # XXX thoughts on how to deal with complex command-line options like
+    # these, i.e. how to make it so fancy_getopt can suck them off the
+    # command line and make it look like setup.py defined the appropriate
+    # lists of tuples of what-have-you.
+    #   - each command needs a callback to process its command-line options
+    #   - Command.__init__() needs access to its share of the whole
+    #     command line (must ultimately come from
+    #     Distribution.parse_command_line())
+    #   - it then calls the current command class' option-parsing
+    #     callback to deal with weird options like -D, which have to
+    #     parse the option text and churn out some custom data
+    #     structure
+    #   - that data structure (in this case, a list of 2-tuples)
+    #     will then be present in the command object by the time
+    #     we get to finalize_options() (i.e. the constructor
+    #     takes care of both command-line and client options
+    #     in between initialize_options() and finalize_options())
+
+    sep_by = " (separated by '%s')" % os.pathsep
+    user_options = [
+        ('build-lib=', 'b',
+         "directory for compiled extension modules"),
+        ('build-temp=', 't',
+         "directory for temporary files (build by-products)"),
+        ('plat-name=', 'p',
+         "platform name to cross-compile for, if supported "
+         "(default: %s)" % get_platform()),
+        ('inplace', 'i',
+         "ignore build-lib and put compiled extensions into the source " +
+         "directory alongside your pure Python modules"),
+        ('include-dirs=', 'I',
+         "list of directories to search for header files" + sep_by),
+        ('define=', 'D',
+         "C preprocessor macros to define"),
+        ('undef=', 'U',
+         "C preprocessor macros to undefine"),
+        ('libraries=', 'l',
+         "external C libraries to link with"),
+        ('library-dirs=', 'L',
+         "directories to search for external C libraries" + sep_by),
+        ('rpath=', 'R',
+         "directories to search for shared C libraries at runtime"),
+        ('link-objects=', 'O',
+         "extra explicit link objects to include in the link"),
+        ('debug', 'g',
+         "compile/link with debugging information"),
+        ('force', 'f',
+         "forcibly build everything (ignore file timestamps)"),
+        ('compiler=', 'c',
+         "specify the compiler type"),
+        ('swig-cpp', None,
+         "make SWIG create C++ files (default is C)"),
+        ('swig-opts=', None,
+         "list of SWIG command line options"),
+        ('swig=', None,
+         "path to the SWIG executable"),
+        ('user', None,
+         "add user include, library and rpath"),
+        ]
+
+    boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user']
+
+    help_options = [
+        ('help-compiler', None,
+         "list available compilers", show_compilers),
+        ]
+
+    def initialize_options (self):
+        # All options start unset; concrete defaults are supplied in
+        # finalize_options(), largely inherited from the 'build' command.
+        self.extensions = None
+        self.build_lib = None
+        self.plat_name = None
+        self.build_temp = None
+        self.inplace = 0
+        self.package = None
+
+        # compiler / preprocessor / linker configuration
+        self.include_dirs = None
+        self.define = None
+        self.undef = None
+        self.libraries = None
+        self.library_dirs = None
+        self.rpath = None
+        self.link_objects = None
+        self.debug = None
+        self.force = None
+        self.compiler = None
+        # SWIG support and per-user site options
+        self.swig = None
+        self.swig_cpp = None
+        self.swig_opts = None
+        self.user = None
+
+    def finalize_options(self):
+        from distutils import sysconfig
+
+        self.set_undefined_options('build',
+                                   ('build_lib', 'build_lib'),
+                                   ('build_temp', 'build_temp'),
+                                   ('compiler', 'compiler'),
+                                   ('debug', 'debug'),
+                                   ('force', 'force'),
+                                   ('plat_name', 'plat_name'),
+                                   )
+
+        if self.package is None:
+            self.package = self.distribution.ext_package
+
+        self.extensions = self.distribution.ext_modules
+
+        # Make sure Python's include directories (for Python.h, pyconfig.h,
+        # etc.) are in the include search path.
+        py_include = sysconfig.get_python_inc()
+        plat_py_include = sysconfig.get_python_inc(plat_specific=1)
+        if self.include_dirs is None:
+            self.include_dirs = self.distribution.include_dirs or []
+        if isinstance(self.include_dirs, str):
+            self.include_dirs = self.include_dirs.split(os.pathsep)
+
+        # Put the Python "system" include dir at the end, so that
+        # any local include dirs take precedence.
+        self.include_dirs.append(py_include)
+        if plat_py_include != py_include:
+            self.include_dirs.append(plat_py_include)
+
+        self.ensure_string_list('libraries')
+
+        # Life is easier if we're not forever checking for None, so
+        # simplify these options to empty lists if unset
+        if self.libraries is None:
+            self.libraries = []
+        if self.library_dirs is None:
+            self.library_dirs = []
+        elif type(self.library_dirs) is StringType:
+            self.library_dirs = string.split(self.library_dirs, os.pathsep)
+
+        if self.rpath is None:
+            self.rpath = []
+        elif type(self.rpath) is StringType:
+            self.rpath = string.split(self.rpath, os.pathsep)
+
+        # for extensions under windows use different directories
+        # for Release and Debug builds.
+        # also Python's library directory must be appended to library_dirs
+        if os.name == 'nt':
+            # the 'libs' directory is for binary installs - we assume that
+            # must be the *native* platform.  But we don't really support
+            # cross-compiling via a binary install anyway, so we let it go.
+            self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
+            if self.debug:
+                self.build_temp = os.path.join(self.build_temp, "Debug")
+            else:
+                self.build_temp = os.path.join(self.build_temp, "Release")
+
+            # Append the source distribution include and library directories,
+            # this allows distutils on windows to work in the source tree
+            self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC'))
+            if MSVC_VERSION == 9:
+                # Use the .lib files for the correct architecture
+                if self.plat_name == 'win32':
+                    suffix = ''
+                else:
+                    # win-amd64 or win-ia64
+                    suffix = self.plat_name[4:]
+                new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
+                if suffix:
+                    new_lib = os.path.join(new_lib, suffix)
+                self.library_dirs.append(new_lib)
+
+            elif MSVC_VERSION == 8:
+                self.library_dirs.append(os.path.join(sys.exec_prefix,
+                                         'PC', 'VS8.0'))
+            elif MSVC_VERSION == 7:
+                self.library_dirs.append(os.path.join(sys.exec_prefix,
+                                         'PC', 'VS7.1'))
+            else:
+                self.library_dirs.append(os.path.join(sys.exec_prefix,
+                                         'PC', 'VC6'))
+
+        # OS/2 (EMX) doesn't support Debug vs Release builds, but has the
+        # import libraries in its "Config" subdirectory
+        if os.name == 'os2':
+            self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config'))
+
+        # for extensions under Cygwin and AtheOS Python's library directory must be
+        # appended to library_dirs
+        if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
+            if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
+                # building third party extensions
+                self.library_dirs.append(os.path.join(sys.prefix, "lib",
+                                                      "python" + get_python_version(),
+                                                      "config"))
+            else:
+                # building python standard extensions
+                self.library_dirs.append('.')
+
+        # For building extensions with a shared Python library,
+        # Python's library directory must be appended to library_dirs
+        # See Issues: #1600860, #4366
+        if (sysconfig.get_config_var('Py_ENABLE_SHARED')):
+            if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
+                # building third party extensions
+                self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
+            else:
+                # building python standard extensions
+                self.library_dirs.append('.')
+
+        # The argument parsing will result in self.define being a string, but
+        # it has to be a list of 2-tuples.  All the preprocessor symbols
+        # specified by the 'define' option will be set to '1'.  Multiple
+        # symbols can be separated with commas.
+
+        if self.define:
+            defines = self.define.split(',')
+            self.define = map(lambda symbol: (symbol, '1'), defines)
+
+        # The option for macros to undefine is also a string from the
+        # option parsing, but has to be a list.  Multiple symbols can also
+        # be separated with commas here.
+        if self.undef:
+            self.undef = self.undef.split(',')
+
+        if self.swig_opts is None:
+            self.swig_opts = []
+        else:
+            self.swig_opts = self.swig_opts.split(' ')
+
+        # Finally add the user include and library directories if requested
+        if self.user:
+            user_include = os.path.join(USER_BASE, "include")
+            user_lib = os.path.join(USER_BASE, "lib")
+            if os.path.isdir(user_include):
+                self.include_dirs.append(user_include)
+            if os.path.isdir(user_lib):
+                self.library_dirs.append(user_lib)
+                self.rpath.append(user_lib)
+
    def run(self):
        """Build every extension module listed in 'self.extensions'.

        Creates and configures the CCompiler object from the options
        gathered during finalize_options(), then hands the real work off
        to build_extensions().  Returns immediately when there is nothing
        to build.
        """
        from distutils.ccompiler import new_compiler

        # 'self.extensions', as supplied by setup.py, is a list of
        # Extension instances.  See the documentation for Extension (in
        # distutils.extension) for details.
        #
        # For backwards compatibility with Distutils 0.8.2 and earlier, we
        # also allow the 'extensions' list to be a list of tuples:
        #    (ext_name, build_info)
        # where build_info is a dictionary containing everything that
        # Extension instances do except the name, with a few things being
        # differently named.  We convert these 2-tuples to Extension
        # instances as needed.

        if not self.extensions:
            return

        # If we were asked to build any C/C++ libraries, make sure that the
        # directory where we put them is in the library search path for
        # linking extensions.
        if self.distribution.has_c_libraries():
            build_clib = self.get_finalized_command('build_clib')
            self.libraries.extend(build_clib.get_library_names() or [])
            self.library_dirs.append(build_clib.build_clib)

        # Setup the CCompiler object that we'll use to do all the
        # compiling and linking
        self.compiler = new_compiler(compiler=self.compiler,
                                     verbose=self.verbose,
                                     dry_run=self.dry_run,
                                     force=self.force)
        customize_compiler(self.compiler)
        # If we are cross-compiling, init the compiler now (if we are not
        # cross-compiling, init would not hurt, but people may rely on
        # late initialization of compiler even if they shouldn't...)
        if os.name == 'nt' and self.plat_name != get_platform():
            self.compiler.initialize(self.plat_name)

        # And make sure that any compile/link-related options (which might
        # come from the command-line or from the setup script) are set in
        # that CCompiler object -- that way, they automatically apply to
        # all compiling and linking done here.
        if self.include_dirs is not None:
            self.compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for (name, value) in self.define:
                self.compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                self.compiler.undefine_macro(macro)
        if self.libraries is not None:
            self.compiler.set_libraries(self.libraries)
        if self.library_dirs is not None:
            self.compiler.set_library_dirs(self.library_dirs)
        if self.rpath is not None:
            self.compiler.set_runtime_library_dirs(self.rpath)
        if self.link_objects is not None:
            self.compiler.set_link_objects(self.link_objects)

        # Now actually compile and link everything.
        self.build_extensions()
+
    def check_extensions_list(self, extensions):
        """Ensure that the list of extensions (presumably provided as a
        command option 'extensions') is valid, i.e. it is a list of
        Extension objects.  We also support the old-style list of 2-tuples,
        where the tuples are (ext_name, build_info), which are converted to
        Extension instances here (in place, mutating 'extensions').

        Raise DistutilsSetupError if the structure is invalid anywhere;
        just returns otherwise.
        """
        if not isinstance(extensions, list):
            raise DistutilsSetupError, \
                  "'ext_modules' option must be a list of Extension instances"

        for i, ext in enumerate(extensions):
            if isinstance(ext, Extension):
                continue                # OK! (assume type-checking done
                                        # by Extension constructor)

            if not isinstance(ext, tuple) or len(ext) != 2:
                raise DistutilsSetupError, \
                      ("each element of 'ext_modules' option must be an "
                       "Extension instance or 2-tuple")

            ext_name, build_info = ext

            log.warn(("old-style (ext_name, build_info) tuple found in "
                      "ext_modules for extension '%s'"
                      "-- please convert to Extension instance" % ext_name))

            if not (isinstance(ext_name, str) and
                    extension_name_re.match(ext_name)):
                raise DistutilsSetupError, \
                      ("first element of each tuple in 'ext_modules' "
                       "must be the extension name (a string)")

            if not isinstance(build_info, dict):
                raise DistutilsSetupError, \
                      ("second element of each tuple in 'ext_modules' "
                       "must be a dictionary (build info)")

            # OK, the (ext_name, build_info) dict is type-safe: convert it
            # to an Extension instance.
            ext = Extension(ext_name, build_info['sources'])

            # Easy stuff: one-to-one mapping from dict elements to
            # instance attributes.
            for key in ('include_dirs', 'library_dirs', 'libraries',
                        'extra_objects', 'extra_compile_args',
                        'extra_link_args'):
                val = build_info.get(key)
                if val is not None:
                    setattr(ext, key, val)

            # Medium-easy stuff: same syntax/semantics, different names.
            ext.runtime_library_dirs = build_info.get('rpath')
            if 'def_file' in build_info:
                log.warn("'def_file' element of build info dict "
                         "no longer supported")

            # Non-trivial stuff: 'macros' split into 'define_macros'
            # and 'undef_macros'.
            macros = build_info.get('macros')
            if macros:
                ext.define_macros = []
                ext.undef_macros = []
                for macro in macros:
                    if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
                        raise DistutilsSetupError, \
                              ("'macros' element of build info dict "
                               "must be 1- or 2-tuple")
                    # A 1-tuple undefines the macro; a 2-tuple defines it.
                    if len(macro) == 1:
                        ext.undef_macros.append(macro[0])
                    elif len(macro) == 2:
                        ext.define_macros.append(macro)

            # Replace the old-style tuple with the converted Extension.
            extensions[i] = ext
+
+    def get_source_files(self):
+        self.check_extensions_list(self.extensions)
+        filenames = []
+
+        # Wouldn't it be neat if we knew the names of header files too...
+        for ext in self.extensions:
+            filenames.extend(ext.sources)
+
+        return filenames
+
+    def get_outputs(self):
+        # Sanity check the 'extensions' list -- can't assume this is being
+        # done in the same run as a 'build_extensions()' call (in fact, we
+        # can probably assume that it *isn't*!).
+        self.check_extensions_list(self.extensions)
+
+        # And build the list of output (built) filenames.  Note that this
+        # ignores the 'inplace' flag, and assumes everything goes in the
+        # "build" tree.
+        outputs = []
+        for ext in self.extensions:
+            outputs.append(self.get_ext_fullpath(ext.name))
+        return outputs
+
+    def build_extensions(self):
+        # First, sanity-check the 'extensions' list
+        self.check_extensions_list(self.extensions)
+
+        for ext in self.extensions:
+            self.build_extension(ext)
+
    def build_extension(self, ext):
        """Compile and link a single Extension into a shared object.

        Skips the build entirely when the output file is newer than every
        source and dependency (unless --force is in effect).  Raises
        DistutilsSetupError when 'ext.sources' is missing or malformed.
        """
        sources = ext.sources
        if sources is None or type(sources) not in (ListType, TupleType):
            raise DistutilsSetupError, \
                  ("in 'ext_modules' option (extension '%s'), " +
                   "'sources' must be present and must be " +
                   "a list of source filenames") % ext.name
        sources = list(sources)

        ext_path = self.get_ext_fullpath(ext.name)
        depends = sources + ext.depends
        if not (self.force or newer_group(depends, ext_path, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)

        # First, scan the sources for SWIG definition files (.i), run
        # SWIG on 'em to create .c files, and modify the sources list
        # accordingly.
        sources = self.swig_sources(sources, ext)

        # Next, compile the source code to object files.

        # XXX not honouring 'define_macros' or 'undef_macros' -- the
        # CCompiler API needs to change to accommodate this, and I
        # want to do one thing at a time!

        # Two possible sources for extra compiler arguments:
        #   - 'extra_compile_args' in Extension object
        #   - CFLAGS environment variable (not particularly
        #     elegant, but people seem to expect it and I
        #     guess it's useful)
        # The environment variable should take precedence, and
        # any sensible compiler will give precedence to later
        # command line args.  Hence we combine them in order:
        extra_args = ext.extra_compile_args or []

        # Undefined macros are passed as 1-tuples alongside the
        # (name, value) define pairs.
        macros = ext.define_macros[:]
        for undef in ext.undef_macros:
            macros.append((undef,))

        objects = self.compiler.compile(sources,
                                         output_dir=self.build_temp,
                                         macros=macros,
                                         include_dirs=ext.include_dirs,
                                         debug=self.debug,
                                         extra_postargs=extra_args,
                                         depends=ext.depends)

        # XXX -- this is a Vile HACK!
        #
        # The setup.py script for Python on Unix needs to be able to
        # get this list so it can perform all the clean up needed to
        # avoid keeping object files around when cleaning out a failed
        # build of an extension module.  Since Distutils does not
        # track dependencies, we have to get rid of intermediates to
        # ensure all the intermediates will be properly re-built.
        #
        self._built_objects = objects[:]

        # Now link the object files together into a "shared object" --
        # of course, first we have to figure out all the other things
        # that go into the mix.
        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        extra_args = ext.extra_link_args or []

        # Detect target language, if not provided
        language = ext.language or self.compiler.detect_language(sources)

        self.compiler.link_shared_object(
            objects, ext_path,
            libraries=self.get_libraries(ext),
            library_dirs=ext.library_dirs,
            runtime_library_dirs=ext.runtime_library_dirs,
            extra_postargs=extra_args,
            export_symbols=self.get_export_symbols(ext),
            debug=self.debug,
            build_temp=self.build_temp,
            target_lang=language)
+
+
    def swig_sources (self, sources, extension):

        """Walk the list of source files in 'sources', looking for SWIG
        interface (.i) files.  Run SWIG on all that are found, and
        return a modified 'sources' list with SWIG source files replaced
        by the generated C (or C++) files.

        NOTE(review): when command-line --swig-opts are given, the
        per-extension 'swig_opts' are deliberately ignored (see below).
        """

        new_sources = []
        swig_sources = []
        swig_targets = {}

        # XXX this drops generated C/C++ files into the source tree, which
        # is fine for developers who want to distribute the generated
        # source -- but there should be an option to put SWIG output in
        # the temp dir.

        if self.swig_cpp:
            log.warn("--swig-cpp is deprecated - use --swig-opts=-c++")

        # C++ output is requested by the deprecated flag or by '-c++'
        # appearing in either set of SWIG options.
        if self.swig_cpp or ('-c++' in self.swig_opts) or \
           ('-c++' in extension.swig_opts):
            target_ext = '.cpp'
        else:
            target_ext = '.c'

        for source in sources:
            (base, ext) = os.path.splitext(source)
            if ext == ".i":             # SWIG interface file
                new_sources.append(base + '_wrap' + target_ext)
                swig_sources.append(source)
                swig_targets[source] = new_sources[-1]
            else:
                new_sources.append(source)

        if not swig_sources:
            return new_sources

        swig = self.swig or self.find_swig()
        swig_cmd = [swig, "-python"]
        swig_cmd.extend(self.swig_opts)
        if self.swig_cpp:
            swig_cmd.append("-c++")

        # Do not override commandline arguments
        if not self.swig_opts:
            for o in extension.swig_opts:
                swig_cmd.append(o)

        for source in swig_sources:
            target = swig_targets[source]
            log.info("swigging %s to %s", source, target)
            self.spawn(swig_cmd + ["-o", target, source])

        return new_sources

    # swig_sources ()
+
    def find_swig (self):
        """Return the name of the SWIG executable.  On Unix, this is
        just "swig" -- it should be in the PATH.  Tries a bit harder on
        Windows.

        Raises DistutilsPlatformError on platforms other than posix,
        nt and os2.
        """

        if os.name == "posix":
            return "swig"
        elif os.name == "nt":

            # Look for SWIG in its standard installation directory on
            # Windows (or so I presume!).  If we find it there, great;
            # if not, act like Unix and assume it's in the PATH.
            for vers in ("1.3", "1.2", "1.1"):
                fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
                if os.path.isfile(fn):
                    return fn
            else:
                # for/else: no installed copy found in any version dir.
                return "swig.exe"

        elif os.name == "os2":
            # assume swig available in the PATH.
            return "swig.exe"

        else:
            raise DistutilsPlatformError, \
                  ("I don't know how to find (much less run) SWIG "
                   "on platform '%s'") % os.name

    # find_swig ()
+
+    # -- Name generators -----------------------------------------------
+    # (extension names, filenames, whatever)
    def get_ext_fullpath(self, ext_name):
        """Returns the path of the filename for a given extension.

        The file is located in `build_lib` or directly in the package
        (inplace option).
        """
        # makes sure the extension name is only using dots
        # (both '/' and os.sep are translated to '.')
        all_dots = string.maketrans('/'+os.sep, '..')
        ext_name = ext_name.translate(all_dots)

        fullname = self.get_ext_fullname(ext_name)
        modpath = fullname.split('.')
        filename = self.get_ext_filename(ext_name)
        # keep only the final path component (e.g. "bar.so")
        filename = os.path.split(filename)[-1]

        if not self.inplace:
            # no further work needed
            # returning :
            #   build_dir/package/path/filename
            filename = os.path.join(*modpath[:-1]+[filename])
            return os.path.join(self.build_lib, filename)

        # the inplace option requires to find the package directory
        # using the build_py command for that
        package = '.'.join(modpath[0:-1])
        build_py = self.get_finalized_command('build_py')
        package_dir = os.path.abspath(build_py.get_package_dir(package))

        # returning
        #   package_dir/filename
        return os.path.join(package_dir, filename)
+
+    def get_ext_fullname(self, ext_name):
+        """Returns the fullname of a given extension name.
+
+        Adds the `package.` prefix"""
+        if self.package is None:
+            return ext_name
+        else:
+            return self.package + '.' + ext_name
+
    def get_ext_filename(self, ext_name):
        r"""Convert the name of an extension (eg. "foo.bar") into the name
        of the file from which it will be loaded (eg. "foo/bar.so", or
        "foo\bar.pyd").
        """
        from distutils.sysconfig import get_config_var
        ext_path = string.split(ext_name, '.')
        # OS/2 has an 8 character module (extension) limit :-(
        if os.name == "os2":
            ext_path[len(ext_path) - 1] = ext_path[len(ext_path) - 1][:8]
        # 'SO' is the platform's shared-object suffix (e.g. '.so', '.pyd')
        so_ext = get_config_var('SO')
        # extensions in debug_mode are named 'module_d.pyd' under windows
        if os.name == 'nt' and self.debug:
            return os.path.join(*ext_path) + '_d' + so_ext
        return os.path.join(*ext_path) + so_ext
+
+    def get_export_symbols (self, ext):
+        """Return the list of symbols that a shared extension has to
+        export.  This either uses 'ext.export_symbols' or, if it's not
+        provided, "init" + module_name.  Only relevant on Windows, where
+        the .pyd file (DLL) must export the module "init" function.
+        """
+        initfunc_name = "init" + ext.name.split('.')[-1]
+        if initfunc_name not in ext.export_symbols:
+            ext.export_symbols.append(initfunc_name)
+        return ext.export_symbols
+
    def get_libraries (self, ext):
        """Return the list of libraries to link against when building a
        shared extension.  On most platforms, this is just 'ext.libraries';
        on Windows and OS/2, we add the Python library (eg. python20.dll).
        """
        # The python library is always needed on Windows.  For MSVC, this
        # is redundant, since the library is mentioned in a pragma in
        # pyconfig.h that MSVC groks.  The other Windows compilers all seem
        # to need it mentioned explicitly, though, so that's what we do.
        # Append '_d' to the python import library on debug builds.
        if sys.platform == "win32":
            from distutils.msvccompiler import MSVCCompiler
            if not isinstance(self.compiler, MSVCCompiler):
                template = "python%d%d"
                if self.debug:
                    template = template + '_d'
                # major/minor version pulled from sys.hexversion
                pythonlib = (template %
                       (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
                # don't extend ext.libraries, it may be shared with other
                # extensions, it is a reference to the original list
                return ext.libraries + [pythonlib]
            else:
                return ext.libraries
        elif sys.platform == "os2emx":
            # EMX/GCC requires the python library explicitly, and I
            # believe VACPP does as well (though not confirmed) - AIM Apr01
            template = "python%d%d"
            # debug versions of the main DLL aren't supported, at least
            # not at this time - AIM Apr01
            #if self.debug:
            #    template = template + '_d'
            pythonlib = (template %
                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
            # don't extend ext.libraries, it may be shared with other
            # extensions, it is a reference to the original list
            return ext.libraries + [pythonlib]
        elif sys.platform[:6] == "cygwin":
            template = "python%d.%d"
            pythonlib = (template %
                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
            # don't extend ext.libraries, it may be shared with other
            # extensions, it is a reference to the original list
            return ext.libraries + [pythonlib]
        elif sys.platform[:6] == "atheos":
            from distutils import sysconfig

            template = "python%d.%d"
            pythonlib = (template %
                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
            # Get SHLIBS from Makefile
            extra = []
            for lib in sysconfig.get_config_var('SHLIBS').split():
                if lib.startswith('-l'):
                    extra.append(lib[2:])
                else:
                    extra.append(lib)
            # don't extend ext.libraries, it may be shared with other
            # extensions, it is a reference to the original list
            return ext.libraries + [pythonlib, "m"] + extra

        elif sys.platform == 'darwin':
            # Don't use the default code below
            return ext.libraries
        elif sys.platform[:3] == 'aix':
            # Don't use the default code below
            return ext.libraries
        else:
            from distutils import sysconfig
            # Only link libpython explicitly when Python itself is built
            # as a shared library.
            if sysconfig.get_config_var('Py_ENABLE_SHARED'):
                template = "python%d.%d"
                pythonlib = (template %
                             (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
                return ext.libraries + [pythonlib]
            else:
                return ext.libraries
+# class build_ext
diff --git a/lib/distutils/distutils/command/build_py.py b/lib/distutils/distutils/command/build_py.py
new file mode 100644
index 0000000..04c455f
--- /dev/null
+++ b/lib/distutils/distutils/command/build_py.py
@@ -0,0 +1,393 @@
+"""distutils.command.build_py
+
+Implements the Distutils 'build_py' command."""
+
+__revision__ = "$Id$"
+
+import os
+import sys
+from glob import glob
+
+from distutils.core import Command
+from distutils.errors import DistutilsOptionError, DistutilsFileError
+from distutils.util import convert_path
+from distutils import log
+
class build_py(Command):
    """\"Build\" pure Python modules: copy them (and any package data)
    into the build directory, optionally byte-compiling them.
    """

    description = "\"build\" pure Python modules (copy to build directory)"

    # Command-line options: (long name, short name, help text).
    user_options = [
        ('build-lib=', 'd', "directory to \"build\" (copy) to"),
        ('compile', 'c', "compile .py to .pyc"),
        ('no-compile', None, "don't compile .py files [default]"),
        ('optimize=', 'O',
         "also compile with optimization: -O1 for \"python -O\", "
         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ]

    # Options that take no argument.
    boolean_options = ['compile', 'force']
    # '--no-compile' is the negation of '--compile'.
    negative_opt = {'no-compile' : 'compile'}
+
+    def initialize_options(self):
+        self.build_lib = None
+        self.py_modules = None
+        self.package = None
+        self.package_data = None
+        self.package_dir = None
+        self.compile = 0
+        self.optimize = 0
+        self.force = None
+
    def finalize_options(self):
        """Resolve options that depend on the 'build' command and the
        distribution, and normalize 'optimize' to an int in 0..2.
        """
        self.set_undefined_options('build',
                                   ('build_lib', 'build_lib'),
                                   ('force', 'force'))

        # Get the distribution options that are aliases for build_py
        # options -- list of packages and list of modules.
        self.packages = self.distribution.packages
        self.py_modules = self.distribution.py_modules
        self.package_data = self.distribution.package_data
        self.package_dir = {}
        if self.distribution.package_dir:
            for name, path in self.distribution.package_dir.items():
                self.package_dir[name] = convert_path(path)
        self.data_files = self.get_data_files()

        # Ick, copied straight from install_lib.py (fancy_getopt needs a
        # type system!  Hell, *everything* needs a type system!!!)
        if not isinstance(self.optimize, int):
            try:
                self.optimize = int(self.optimize)
                assert 0 <= self.optimize <= 2
            except (ValueError, AssertionError):
                raise DistutilsOptionError("optimize must be 0, 1, or 2")
+
    def run(self):
        """Copy the distribution's pure Python modules and package data
        into the build directory, then byte-compile as requested.
        """
        # XXX copy_file by default preserves atime and mtime.  IMHO this is
        # the right thing to do, but perhaps it should be an option -- in
        # particular, a site administrator might want installed files to
        # reflect the time of installation rather than the last
        # modification time before the installed release.

        # XXX copy_file by default preserves mode, which appears to be the
        # wrong thing to do: if a file is read-only in the working
        # directory, we want it to be installed read/write so that the next
        # installation of the same module distribution can overwrite it
        # without problems.  (This might be a Unix-specific issue.)  Thus
        # we turn off 'preserve_mode' when copying to the build directory,
        # since the build directory is supposed to be exactly what the
        # installation will look like (ie. we preserve mode when
        # installing).

        # Two options control which modules will be installed: 'packages'
        # and 'py_modules'.  The former lets us work with whole packages, not
        # specifying individual modules at all; the latter is for
        # specifying modules one-at-a-time.

        if self.py_modules:
            self.build_modules()
        if self.packages:
            self.build_packages()
            self.build_package_data()

        # Byte-compile everything that was just copied.
        self.byte_compile(self.get_outputs(include_bytecode=0))
+
+    def get_data_files(self):
+        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
+        data = []
+        if not self.packages:
+            return data
+        for package in self.packages:
+            # Locate package source directory
+            src_dir = self.get_package_dir(package)
+
+            # Compute package build directory
+            build_dir = os.path.join(*([self.build_lib] + package.split('.')))
+
+            # Length of path to strip from found files
+            plen = 0
+            if src_dir:
+                plen = len(src_dir)+1
+
+            # Strip directory from globbed filenames
+            filenames = [
+                file[plen:] for file in self.find_data_files(package, src_dir)
+                ]
+            data.append((package, src_dir, build_dir, filenames))
+        return data
+
+    def find_data_files(self, package, src_dir):
+        """Return filenames for package's data files in 'src_dir'"""
+        globs = (self.package_data.get('', [])
+                 + self.package_data.get(package, []))
+        files = []
+        for pattern in globs:
+            # Each pattern has to be converted to a platform-specific path
+            filelist = glob(os.path.join(src_dir, convert_path(pattern)))
+            # Files that match more than one pattern are only added once
+            files.extend([fn for fn in filelist if fn not in files])
+        return files
+
+    def build_package_data(self):
+        """Copy data files into build directory"""
+        for package, src_dir, build_dir, filenames in self.data_files:
+            for filename in filenames:
+                target = os.path.join(build_dir, filename)
+                self.mkpath(os.path.dirname(target))
+                self.copy_file(os.path.join(src_dir, filename), target,
+                               preserve_mode=False)
+
    def get_package_dir(self, package):
        """Return the directory, relative to the top of the source
           distribution, where package 'package' should be found
           (at least according to the 'package_dir' option, if any)."""

        path = package.split('.')

        if not self.package_dir:
            # No explicit mapping: the package name implies the directory.
            if path:
                return os.path.join(*path)
            else:
                return ''
        else:
            # Walk from the most specific package name up to the root,
            # looking for an explicit mapping; 'tail' collects the path
            # components below the first mapped ancestor.
            tail = []
            while path:
                try:
                    pdir = self.package_dir['.'.join(path)]
                except KeyError:
                    tail.insert(0, path[-1])
                    del path[-1]
                else:
                    tail.insert(0, pdir)
                    return os.path.join(*tail)
            else:
                # Oops, got all the way through 'path' without finding a
                # match in package_dir.  If package_dir defines a directory
                # for the root (nameless) package, then fallback on it;
                # otherwise, we might as well have not consulted
                # package_dir at all, as we just use the directory implied
                # by 'tail' (which should be the same as the original value
                # of 'path' at this point).
                pdir = self.package_dir.get('')
                if pdir is not None:
                    tail.insert(0, pdir)

                if tail:
                    return os.path.join(*tail)
                else:
                    return ''
+
+    def check_package(self, package, package_dir):
+        """Check that 'package_dir' exists and is a directory, and return
+        the path of its __init__.py -- or None for the root package, or
+        when no __init__.py is present (a warning is logged in that case).
+        """
+        # Empty dir name means current directory, which we can probably
+        # assume exists.  Also, os.path.exists and isdir don't know about
+        # my "empty string means current dir" convention, so we have to
+        # circumvent them.
+        if package_dir != "":
+            if not os.path.exists(package_dir):
+                raise DistutilsFileError(
+                      "package directory '%s' does not exist" % package_dir)
+            if not os.path.isdir(package_dir):
+                raise DistutilsFileError(
+                       "supposed package directory '%s' exists, "
+                       "but is not a directory" % package_dir)
+
+        # Require __init__.py for all but the "root package"
+        if package:
+            init_py = os.path.join(package_dir, "__init__.py")
+            if os.path.isfile(init_py):
+                return init_py
+            else:
+                # Missing __init__.py is only a warning, not an error.
+                log.warn(("package init file '%s' not found " +
+                          "(or not a regular file)"), init_py)
+
+        # Either not in a package at all (__init__.py not expected), or
+        # __init__.py doesn't exist -- so don't return the filename.
+        return None
+
+    def check_module(self, module, module_file):
+        """Return true if 'module_file' exists as a regular file; warn
+        (naming 'module') and return false otherwise."""
+        if not os.path.isfile(module_file):
+            log.warn("file %s (for module %s) not found", module_file, module)
+            return False
+        else:
+            return True
+
+    def find_package_modules(self, package, package_dir):
+        """Glob 'package_dir' for .py files and return a list of
+        (package, module, filename) tuples, one per module found.
+        The setup script itself is never included."""
+        self.check_package(package, package_dir)
+        module_files = glob(os.path.join(package_dir, "*.py"))
+        modules = []
+        setup_script = os.path.abspath(self.distribution.script_name)
+
+        for f in module_files:
+            abs_f = os.path.abspath(f)
+            # Compare absolute paths so the setup script is excluded no
+            # matter how it was spelled on the command line.
+            if abs_f != setup_script:
+                module = os.path.splitext(os.path.basename(f))[0]
+                modules.append((package, module, f))
+            else:
+                self.debug_print("excluding %s" % setup_script)
+        return modules
+
+    def find_modules(self):
+        """Finds individually-specified Python modules, ie. those listed by
+        module name in 'self.py_modules'.  Returns a list of tuples (package,
+        module_base, filename): 'package' is a tuple of the path through
+        package-space to the module; 'module_base' is the bare (no
+        packages, no dots) module name, and 'filename' is the path to the
+        ".py" file (relative to the distribution root) that implements the
+        module.
+        """
+        # Map package names to tuples of useful info about the package:
+        #    (package_dir, checked)
+        # package_dir - the directory where we'll find source files for
+        #   this package
+        # checked - true if we have checked that the package directory
+        #   is valid (exists, contains __init__.py, ... ?)
+        packages = {}
+
+        # List of (package, module, filename) tuples to return
+        modules = []
+
+        # We treat modules-in-packages almost the same as toplevel modules,
+        # just the "package" for a toplevel is empty (either an empty
+        # string or empty list, depending on context).  Differences:
+        #   - don't check for __init__.py in directory for empty package
+        for module in self.py_modules:
+            path = module.split('.')
+            package = '.'.join(path[0:-1])
+            module_base = path[-1]
+
+            # Look up (or compute and cache) info for this package.
+            try:
+                (package_dir, checked) = packages[package]
+            except KeyError:
+                package_dir = self.get_package_dir(package)
+                checked = 0
+
+            if not checked:
+                # First module seen in this package: validate the package
+                # directory and record its __init__.py as a module too.
+                init_py = self.check_package(package, package_dir)
+                packages[package] = (package_dir, 1)
+                if init_py:
+                    modules.append((package, "__init__", init_py))
+
+            # XXX perhaps we should also check for just .pyc files
+            # (so greedy closed-source bastards can distribute Python
+            # modules too)
+            module_file = os.path.join(package_dir, module_base + ".py")
+            if not self.check_module(module, module_file):
+                # Missing file was already warned about; just skip it.
+                continue
+
+            modules.append((package, module_base, module_file))
+
+        return modules
+
+    def find_all_modules(self):
+        """Compute the list of all modules that will be built, whether
+        they are specified one-module-at-a-time ('self.py_modules') or
+        by whole packages ('self.packages').  Return a list of tuples
+        (package, module, module_file), just like 'find_modules()' and
+        'find_package_modules()' do."""
+        modules = []
+        # Individually-listed modules first, then package contents.
+        if self.py_modules:
+            modules.extend(self.find_modules())
+        if self.packages:
+            for package in self.packages:
+                package_dir = self.get_package_dir(package)
+                m = self.find_package_modules(package, package_dir)
+                modules.extend(m)
+        return modules
+
+    def get_source_files(self):
+        """Return the list of .py source filenames that will be built."""
+        # Each tuple from find_all_modules() ends with the filename.
+        return [module[-1] for module in self.find_all_modules()]
+
+    def get_module_outfile(self, build_dir, package, module):
+        """Return the output .py path for 'module' of 'package' (a
+        sequence of name components) under 'build_dir'."""
+        outfile_path = [build_dir] + list(package) + [module + ".py"]
+        return os.path.join(*outfile_path)
+
+    def get_outputs(self, include_bytecode=1):
+        """Return the list of files the build will produce: the copied
+        .py modules, optionally their .pyc/.pyo byte-code files (when
+        'include_bytecode' is true and the corresponding --compile /
+        --optimize options are set), plus all package data files."""
+        modules = self.find_all_modules()
+        outputs = []
+        for (package, module, module_file) in modules:
+            package = package.split('.')
+            filename = self.get_module_outfile(self.build_lib, package, module)
+            outputs.append(filename)
+            if include_bytecode:
+                # Byte-code files sit next to the source: foo.pyc / foo.pyo.
+                if self.compile:
+                    outputs.append(filename + "c")
+                if self.optimize > 0:
+                    outputs.append(filename + "o")
+
+        # Data files are reported at their build-directory destinations.
+        outputs += [
+            os.path.join(build_dir, filename)
+            for package, src_dir, build_dir, filenames in self.data_files
+            for filename in filenames
+            ]
+
+        return outputs
+
+    def build_module(self, module, module_file, package):
+        """Copy 'module_file' into the build directory as module 'module'
+        of 'package' (a dotted string, list, or tuple of name components).
+        Returns the result of 'copy_file()'."""
+        if isinstance(package, str):
+            package = package.split('.')
+        elif not isinstance(package, (list, tuple)):
+            raise TypeError(
+                  "'package' must be a string (dot-separated), list, or tuple")
+
+        # Now put the module source file into the "build" area -- this is
+        # easy, we just copy it somewhere under self.build_lib (the build
+        # directory for Python source).
+        outfile = self.get_module_outfile(self.build_lib, package, module)
+        dir = os.path.dirname(outfile)
+        self.mkpath(dir)
+        return self.copy_file(module_file, outfile, preserve_mode=0)
+
+    def build_modules(self):
+        """Copy each individually-listed module (self.py_modules) into
+        the build directory."""
+        modules = self.find_modules()
+        for (package, module, module_file) in modules:
+
+            # Now "build" the module -- ie. copy the source file to
+            # self.build_lib (the build directory for Python source).
+            # (Actually, it gets copied to the directory for this package
+            # under self.build_lib.)
+            self.build_module(module, module_file, package)
+
+    def build_packages(self):
+        """Copy every module of every listed package (self.packages) into
+        the build directory."""
+        for package in self.packages:
+
+            # Get list of (package, module, module_file) tuples based on
+            # scanning the package directory.  'package' is only included
+            # in the tuple so that 'find_modules()' and
+            # 'find_package_tuples()' have a consistent interface; it's
+            # ignored here (apart from a sanity check).  Also, 'module' is
+            # the *unqualified* module name (ie. no dots, no package -- we
+            # already know its package!), and 'module_file' is the path to
+            # the .py file, relative to the current directory
+            # (ie. including 'package_dir').
+            package_dir = self.get_package_dir(package)
+            modules = self.find_package_modules(package, package_dir)
+
+            # Now loop over the modules we found, "building" each one (just
+            # copy it to self.build_lib).
+            for (package_, module, module_file) in modules:
+                assert package == package_
+                self.build_module(module, module_file, package)
+
+    def byte_compile(self, files):
+        """Byte-compile 'files' (paths under self.build_lib) according to
+        the --compile and --optimize options; a no-op when the interpreter
+        was started with bytecode writing disabled."""
+        if sys.dont_write_bytecode:
+            self.warn('byte-compiling is disabled, skipping.')
+            return
+
+        from distutils.util import byte_compile
+        # 'prefix' is stripped from the source paths recorded inside the
+        # byte-code files; it must end with a path separator.
+        prefix = self.build_lib
+        if prefix[-1] != os.sep:
+            prefix = prefix + os.sep
+
+        # XXX this code is essentially the same as the 'byte_compile()
+        # method of the "install_lib" command, except for the determination
+        # of the 'prefix' string.  Hmmm.
+
+        if self.compile:
+            byte_compile(files, optimize=0,
+                         force=self.force, prefix=prefix, dry_run=self.dry_run)
+        if self.optimize > 0:
+            byte_compile(files, optimize=self.optimize,
+                         force=self.force, prefix=prefix, dry_run=self.dry_run)
diff --git a/lib/distutils/distutils/command/build_scripts.py b/lib/distutils/distutils/command/build_scripts.py
new file mode 100644
index 0000000..567df65
--- /dev/null
+++ b/lib/distutils/distutils/command/build_scripts.py
@@ -0,0 +1,131 @@
+"""distutils.command.build_scripts
+
+Implements the Distutils 'build_scripts' command."""
+
+__revision__ = "$Id$"
+
+import os, re
+from stat import ST_MODE
+from distutils.core import Command
+from distutils.dep_util import newer
+from distutils.util import convert_path
+from distutils import log
+
+# check if Python is called on the first line with this expression
+first_line_re = re.compile('^#!.*python[0-9.]*([ \t].*)?$')
+
+class build_scripts (Command):
+    """Copy the distribution's scripts into the build directory, adjusting
+    the "#!...python" first line of each to name the target interpreter."""
+
+    description = "\"build\" scripts (copy and fixup #! line)"
+
+    user_options = [
+        ('build-dir=', 'd', "directory to \"build\" (copy) to"),
+        # NOTE(review): help string below is missing its closing ")" --
+        # this matches upstream distutils.
+        ('force', 'f', "forcibly build everything (ignore file timestamps"),
+        ('executable=', 'e', "specify final destination interpreter path"),
+        ]
+
+    boolean_options = ['force']
+
+
+    def initialize_options (self):
+        self.build_dir = None
+        self.scripts = None
+        self.force = None
+        self.executable = None
+        self.outfiles = None
+
+    def finalize_options (self):
+        # Inherit unset options from the umbrella 'build' command.
+        self.set_undefined_options('build',
+                                   ('build_scripts', 'build_dir'),
+                                   ('force', 'force'),
+                                   ('executable', 'executable'))
+        self.scripts = self.distribution.scripts
+
+    def get_source_files(self):
+        """Return the scripts that would be copied (for sdist etc.)."""
+        return self.scripts
+
+    def run (self):
+        if not self.scripts:
+            return
+        self.copy_scripts()
+
+
+    def copy_scripts (self):
+        """Copy each script listed in 'self.scripts'; if it's marked as a
+        Python script in the Unix way (first line matches 'first_line_re',
+        ie. starts with "\#!" and contains "python"), then adjust the first
+        line to refer to the current Python interpreter as we copy.
+        """
+        # Import under a local name to avoid clashing with
+        # distutils.sysconfig.
+        _sysconfig = __import__('sysconfig')
+        self.mkpath(self.build_dir)
+        outfiles = []
+        for script in self.scripts:
+            adjust = 0
+            script = convert_path(script)
+            outfile = os.path.join(self.build_dir, os.path.basename(script))
+            outfiles.append(outfile)
+
+            if not self.force and not newer(script, outfile):
+                log.debug("not copying %s (up-to-date)", script)
+                continue
+
+            # Always open the file, but ignore failures in dry-run mode --
+            # that way, we'll get accurate feedback if we can read the
+            # script.
+            try:
+                f = open(script, "r")
+            except IOError:
+                if not self.dry_run:
+                    raise
+                f = None
+            else:
+                first_line = f.readline()
+                if not first_line:
+                    self.warn("%s is an empty file (skipping)" % script)
+                    continue
+
+                match = first_line_re.match(first_line)
+                if match:
+                    adjust = 1
+                    # Anything after the interpreter path on the #! line.
+                    post_interp = match.group(1) or ''
+
+            if adjust:
+                log.info("copying and adjusting %s -> %s", script,
+                         self.build_dir)
+                if not self.dry_run:
+                    outf = open(outfile, "w")
+                    if not _sysconfig.is_python_build():
+                        # Normal install: point at the configured executable.
+                        outf.write("#!%s%s\n" %
+                                   (self.executable,
+                                    post_interp))
+                    else:
+                        # Running from an uninstalled Python build tree:
+                        # synthesize the interpreter path from sysconfig.
+                        outf.write("#!%s%s\n" %
+                                   (os.path.join(
+                            _sysconfig.get_config_var("BINDIR"),
+                           "python%s%s" % (_sysconfig.get_config_var("VERSION"),
+                                           _sysconfig.get_config_var("EXE"))),
+                                    post_interp))
+                    outf.writelines(f.readlines())
+                    outf.close()
+                if f:
+                    f.close()
+            else:
+                # Not a recognized Python script: copy verbatim.
+                if f:
+                    f.close()
+                self.copy_file(script, outfile)
+
+        if os.name == 'posix':
+            # Make every copied script world-readable/executable
+            # (r-xr-xr-x minimum; Python 2 octal literals).
+            for file in outfiles:
+                if self.dry_run:
+                    log.info("changing mode of %s", file)
+                else:
+                    oldmode = os.stat(file)[ST_MODE] & 07777
+                    newmode = (oldmode | 0555) & 07777
+                    if newmode != oldmode:
+                        log.info("changing mode of %s from %o to %o",
+                                 file, oldmode, newmode)
+                        os.chmod(file, newmode)
+
+    # copy_scripts ()
+
+# class build_scripts
diff --git a/lib/distutils/distutils/command/check.py b/lib/distutils/distutils/command/check.py
new file mode 100644
index 0000000..152bf0d
--- /dev/null
+++ b/lib/distutils/distutils/command/check.py
@@ -0,0 +1,149 @@
+"""distutils.command.check
+
+Implements the Distutils 'check' command.
+"""
+__revision__ = "$Id$"
+
+from distutils.core import Command
+from distutils.dist import PKG_INFO_ENCODING
+from distutils.errors import DistutilsSetupError
+
+try:
+    # docutils is installed
+    from docutils.utils import Reporter
+    from docutils.parsers.rst import Parser
+    from docutils import frontend
+    from docutils import nodes
+    from StringIO import StringIO
+
+    class SilentReporter(Reporter):
+        """docutils Reporter that records system messages in
+        'self.messages' instead of writing them to a stream."""
+
+        def __init__(self, source, report_level, halt_level, stream=None,
+                     debug=0, encoding='ascii', error_handler='replace'):
+            # Collected (level, message, children, kwargs) tuples.
+            self.messages = []
+            Reporter.__init__(self, source, report_level, halt_level, stream,
+                              debug, encoding, error_handler)
+
+        def system_message(self, level, message, *children, **kwargs):
+            # Record the message, then return a node as the base class would.
+            self.messages.append((level, message, children, kwargs))
+            return nodes.system_message(message, level=level,
+                                        type=self.levels[level],
+                                        *children, **kwargs)
+
+    HAS_DOCUTILS = True
+except ImportError:
+    # docutils is not installed
+    HAS_DOCUTILS = False
+
+class check(Command):
+    """This command checks the meta-data of the package.
+    """
+    description = ("perform some checks on the package")
+    user_options = [('metadata', 'm', 'Verify meta-data'),
+                    ('restructuredtext', 'r',
+                     ('Checks if long string meta-data syntax '
+                      'are reStructuredText-compliant')),
+                    ('strict', 's',
+                     'Will exit with an error if a check fails')]
+
+    boolean_options = ['metadata', 'restructuredtext', 'strict']
+
+    def initialize_options(self):
+        """Sets default values for options."""
+        self.restructuredtext = 0
+        self.metadata = 1
+        self.strict = 0
+        # Running count of warnings issued; consulted in strict mode.
+        self._warnings = 0
+
+    def finalize_options(self):
+        pass
+
+    def warn(self, msg):
+        """Counts the number of warnings that occurs."""
+        self._warnings += 1
+        return Command.warn(self, msg)
+
+    def run(self):
+        """Runs the command."""
+        # perform the various tests
+        if self.metadata:
+            self.check_metadata()
+        if self.restructuredtext:
+            if HAS_DOCUTILS:
+                self.check_restructuredtext()
+            elif self.strict:
+                # reST checking was requested but docutils is unavailable.
+                raise DistutilsSetupError('The docutils package is needed.')
+
+        # let's raise an error in strict mode, if we have at least
+        # one warning
+        if self.strict and self._warnings > 0:
+            raise DistutilsSetupError('Please correct your package.')
+
+    def check_metadata(self):
+        """Ensures that all required elements of meta-data are supplied.
+
+        name, version, URL, (author and author_email) or
+        (maintainer and maintainer_email)).
+
+        Warns if any are missing.
+        """
+        metadata = self.distribution.metadata
+
+        missing = []
+        for attr in ('name', 'version', 'url'):
+            if not (hasattr(metadata, attr) and getattr(metadata, attr)):
+                missing.append(attr)
+
+        if missing:
+            self.warn("missing required meta-data: %s"  % ', '.join(missing))
+        # author/maintainer pairs must be supplied together.
+        if metadata.author:
+            if not metadata.author_email:
+                self.warn("missing meta-data: if 'author' supplied, " +
+                          "'author_email' must be supplied too")
+        elif metadata.maintainer:
+            if not metadata.maintainer_email:
+                self.warn("missing meta-data: if 'maintainer' supplied, " +
+                          "'maintainer_email' must be supplied too")
+        else:
+            self.warn("missing meta-data: either (author and author_email) " +
+                      "or (maintainer and maintainer_email) " +
+                      "must be supplied")
+
+    def check_restructuredtext(self):
+        """Checks if the long string fields are reST-compliant."""
+        data = self.distribution.get_long_description()
+        # Python 2: decode a byte string using the PKG-INFO encoding.
+        if not isinstance(data, unicode):
+            data = data.decode(PKG_INFO_ENCODING)
+        for warning in self._check_rst_data(data):
+            # Each warning is (level, message, children, kwargs); report
+            # the message, with the source line number when available.
+            line = warning[-1].get('line')
+            if line is None:
+                warning = warning[1]
+            else:
+                warning = '%s (line %s)' % (warning[1], line)
+            self.warn(warning)
+
+    def _check_rst_data(self, data):
+        """Returns warnings when the provided data doesn't compile."""
+        # A throwaway "source path" object; only the collected messages
+        # matter.
+        source_path = StringIO()
+        parser = Parser()
+        settings = frontend.OptionParser().get_default_values()
+        settings.tab_width = 4
+        settings.pep_references = None
+        settings.rfc_references = None
+        reporter = SilentReporter(source_path,
+                          settings.report_level,
+                          settings.halt_level,
+                          stream=settings.warning_stream,
+                          debug=settings.debug,
+                          encoding=settings.error_encoding,
+                          error_handler=settings.error_encoding_error_handler)
+
+        document = nodes.document(settings, reporter, source=source_path)
+        document.note_source(source_path, -1)
+        try:
+            parser.parse(data, document)
+        except AttributeError:
+            # Parsing blew up; surface it as a message rather than crash.
+            reporter.messages.append((-1, 'Could not finish the parsing.',
+                                      '', {}))
+
+        return reporter.messages
diff --git a/lib/distutils/distutils/command/clean.py b/lib/distutils/distutils/command/clean.py
new file mode 100644
index 0000000..90ef35f
--- /dev/null
+++ b/lib/distutils/distutils/command/clean.py
@@ -0,0 +1,80 @@
+"""distutils.command.clean
+
+Implements the Distutils 'clean' command."""
+
+# contributed by Bastian Kleineidam <[email protected]>, added 2000-03-18
+
+__revision__ = "$Id$"
+
+import os
+from distutils.core import Command
+from distutils.dir_util import remove_tree
+from distutils import log
+
+class clean(Command):
+    """Remove the byproducts of the 'build' command: always the temporary
+    build directory, and with --all the build output directories too."""
+
+    description = "clean up temporary files from 'build' command"
+    user_options = [
+        ('build-base=', 'b',
+         "base build directory (default: 'build.build-base')"),
+        ('build-lib=', None,
+         "build directory for all modules (default: 'build.build-lib')"),
+        ('build-temp=', 't',
+         "temporary build directory (default: 'build.build-temp')"),
+        ('build-scripts=', None,
+         "build directory for scripts (default: 'build.build-scripts')"),
+        ('bdist-base=', None,
+         "temporary directory for built distributions"),
+        ('all', 'a',
+         "remove all build output, not just temporary by-products")
+    ]
+
+    boolean_options = ['all']
+
+    def initialize_options(self):
+        self.build_base = None
+        self.build_lib = None
+        self.build_temp = None
+        self.build_scripts = None
+        self.bdist_base = None
+        self.all = None
+
+    def finalize_options(self):
+        # Borrow any unset directory options from 'build' and 'bdist'.
+        self.set_undefined_options('build',
+                                   ('build_base', 'build_base'),
+                                   ('build_lib', 'build_lib'),
+                                   ('build_scripts', 'build_scripts'),
+                                   ('build_temp', 'build_temp'))
+        self.set_undefined_options('bdist',
+                                   ('bdist_base', 'bdist_base'))
+
+    def run(self):
+        # remove the build/temp.<plat> directory (unless it's already
+        # gone)
+        if os.path.exists(self.build_temp):
+            remove_tree(self.build_temp, dry_run=self.dry_run)
+        else:
+            log.debug("'%s' does not exist -- can't clean it",
+                      self.build_temp)
+
+        if self.all:
+            # remove build directories
+            for directory in (self.build_lib,
+                              self.bdist_base,
+                              self.build_scripts):
+                if os.path.exists(directory):
+                    remove_tree(directory, dry_run=self.dry_run)
+                else:
+                    log.warn("'%s' does not exist -- can't clean it",
+                             directory)
+
+        # just for the heck of it, try to remove the base build directory:
+        # we might have emptied it right now, but if not we don't care
+        if not self.dry_run:
+            try:
+                os.rmdir(self.build_base)
+                log.info("removing '%s'", self.build_base)
+            except OSError:
+                # Non-empty or missing: deliberately ignored.
+                pass
+
+# class clean
diff --git a/lib/distutils/distutils/command/config.py b/lib/distutils/distutils/command/config.py
new file mode 100644
index 0000000..b084913
--- /dev/null
+++ b/lib/distutils/distutils/command/config.py
@@ -0,0 +1,357 @@
+"""distutils.command.config
+
+Implements the Distutils 'config' command, a (mostly) empty command class
+that exists mainly to be sub-classed by specific module distributions and
+applications.  The idea is that while every "config" command is different,
+at least they're all named the same, and users always see "config" in the
+list of standard commands.  Also, this is a good place to put common
+configure-like tasks: "try to compile this C code", or "figure out where
+this header file lives".
+"""
+
+__revision__ = "$Id$"
+
+import os
+import re
+
+from distutils.core import Command
+from distutils.errors import DistutilsExecError
+from distutils.sysconfig import customize_compiler
+from distutils import log
+
+LANG_EXT = {'c': '.c', 'c++': '.cxx'}
+
+class config(Command):
+
+    description = "prepare to build"
+
+    user_options = [
+        ('compiler=', None,
+         "specify the compiler type"),
+        ('cc=', None,
+         "specify the compiler executable"),
+        ('include-dirs=', 'I',
+         "list of directories to search for header files"),
+        ('define=', 'D',
+         "C preprocessor macros to define"),
+        ('undef=', 'U',
+         "C preprocessor macros to undefine"),
+        ('libraries=', 'l',
+         "external C libraries to link with"),
+        ('library-dirs=', 'L',
+         "directories to search for external C libraries"),
+
+        ('noisy', None,
+         "show every action (compile, link, run, ...) taken"),
+        ('dump-source', None,
+         "dump generated source files before attempting to compile them"),
+        ]
+
+
+    # The three standard command methods: since the "config" command
+    # does nothing by default, these are empty.
+
+    def initialize_options(self):
+        """Set every option to its default (None / maximal verbosity)."""
+        self.compiler = None
+        self.cc = None
+        self.include_dirs = None
+        self.libraries = None
+        self.library_dirs = None
+
+        # maximal output for now
+        self.noisy = 1
+        self.dump_source = 1
+
+        # list of temporary files generated along-the-way that we have
+        # to clean at some point
+        self.temp_files = []
+
+    def finalize_options(self):
+        """Normalize path/library options that may arrive as single
+        strings into lists."""
+        if self.include_dirs is None:
+            self.include_dirs = self.distribution.include_dirs or []
+        elif isinstance(self.include_dirs, str):
+            # Command-line value: split on the platform path separator.
+            self.include_dirs = self.include_dirs.split(os.pathsep)
+
+        if self.libraries is None:
+            self.libraries = []
+        elif isinstance(self.libraries, str):
+            self.libraries = [self.libraries]
+
+        if self.library_dirs is None:
+            self.library_dirs = []
+        elif isinstance(self.library_dirs, str):
+            self.library_dirs = self.library_dirs.split(os.pathsep)
+
+    def run(self):
+        # The base config command does nothing; subclasses override run().
+        pass
+
+
+    # Utility methods for actual "config" commands.  The interfaces are
+    # loosely based on Autoconf macros of similar names.  Sub-classes
+    # may use these freely.
+
+    def _check_compiler(self):
+        """Check that 'self.compiler' really is a CCompiler object;
+        if not, make it one.
+        """
+        # We do this late, and only on-demand, because this is an expensive
+        # import.
+        from distutils.ccompiler import CCompiler, new_compiler
+        if not isinstance(self.compiler, CCompiler):
+            # 'self.compiler' currently holds a compiler-type name (or
+            # None); replace it with a real, customized compiler object.
+            self.compiler = new_compiler(compiler=self.compiler,
+                                         dry_run=self.dry_run, force=1)
+            customize_compiler(self.compiler)
+            if self.include_dirs:
+                self.compiler.set_include_dirs(self.include_dirs)
+            if self.libraries:
+                self.compiler.set_libraries(self.libraries)
+            if self.library_dirs:
+                self.compiler.set_library_dirs(self.library_dirs)
+
+
+    def _gen_temp_sourcefile(self, body, headers, lang):
+        """Write '#include' lines for 'headers' plus 'body' to a temporary
+        source file named for 'lang' ('c' or 'c++'); return its filename."""
+        filename = "_configtest" + LANG_EXT[lang]
+        file = open(filename, "w")
+        if headers:
+            for header in headers:
+                file.write("#include <%s>\n" % header)
+            file.write("\n")
+        file.write(body)
+        # Make sure the file ends with a newline.
+        if body[-1] != "\n":
+            file.write("\n")
+        file.close()
+        return filename
+
+    def _preprocess(self, body, headers, include_dirs, lang):
+        """Generate a test source file and run it through the
+        preprocessor; return (source_filename, output_filename).
+        Both are registered for later cleanup."""
+        src = self._gen_temp_sourcefile(body, headers, lang)
+        out = "_configtest.i"
+        self.temp_files.extend([src, out])
+        self.compiler.preprocess(src, out, include_dirs=include_dirs)
+        return (src, out)
+
+    def _compile(self, body, headers, include_dirs, lang):
+        """Generate and compile a test source file; return
+        (source_filename, object_filename), both registered for cleanup."""
+        src = self._gen_temp_sourcefile(body, headers, lang)
+        if self.dump_source:
+            # Echo the generated source to the log before compiling.
+            dump_file(src, "compiling '%s':" % src)
+        (obj,) = self.compiler.object_filenames([src])
+        self.temp_files.extend([src, obj])
+        self.compiler.compile([src], include_dirs=include_dirs)
+        return (src, obj)
+
+    def _link(self, body, headers, include_dirs, libraries, library_dirs,
+              lang):
+        """Compile a test source file and link it into an executable;
+        return (source, object, program) filenames, with the program
+        registered for cleanup."""
+        (src, obj) = self._compile(body, headers, include_dirs, lang)
+        # Program name is the source file's base name without extension.
+        prog = os.path.splitext(os.path.basename(src))[0]
+        self.compiler.link_executable([obj], prog,
+                                      libraries=libraries,
+                                      library_dirs=library_dirs,
+                                      target_lang=lang)
+
+        if self.compiler.exe_extension is not None:
+            prog = prog + self.compiler.exe_extension
+        self.temp_files.append(prog)
+
+        return (src, obj, prog)
+
+    def _clean(self, *filenames):
+        """Remove the given files, or all accumulated temp files when
+        called with no arguments; removal errors are ignored."""
+        if not filenames:
+            filenames = self.temp_files
+            self.temp_files = []
+        log.info("removing: %s", ' '.join(filenames))
+        for filename in filenames:
+            try:
+                os.remove(filename)
+            except OSError:
+                # Best-effort cleanup; a missing file is fine.
+                pass
+
+
+    # XXX these ignore the dry-run flag: what to do, what to do? even if
+    # you want a dry-run build, you still need some sort of configuration
+    # info.  My inclination is to make it up to the real config command to
+    # consult 'dry_run', and assume a default (minimal) configuration if
+    # true.  The problem with trying to do it here is that you'd have to
+    # return either true or false from all the 'try' methods, neither of
+    # which is correct.
+
+    # XXX need access to the header search path and maybe default macros.
+
+    def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"):
+        """Construct a source file from 'body' (a string containing lines
+        of C/C++ code) and 'headers' (a list of header files to include)
+        and run it through the preprocessor.  Return true if the
+        preprocessor succeeded, false if there were any errors.
+        ('body' probably isn't of much use, but what the heck.)
+        """
+        # Local import keeps the expensive ccompiler module on-demand.
+        from distutils.ccompiler import CompileError
+        self._check_compiler()
+        ok = 1
+        try:
+            self._preprocess(body, headers, include_dirs, lang)
+        except CompileError:
+            ok = 0
+
+        self._clean()
+        return ok
+
+    def search_cpp(self, pattern, body=None, headers=None, include_dirs=None,
+                   lang="c"):
+        """Construct a source file (just like 'try_cpp()'), run it through
+        the preprocessor, and return true if any line of the output matches
+        'pattern'.  'pattern' should either be a compiled regex object or a
+        string containing a regex.  If both 'body' and 'headers' are None,
+        preprocesses an empty file -- which can be useful to determine the
+        symbols the preprocessor and compiler set by default.
+        """
+        self._check_compiler()
+        src, out = self._preprocess(body, headers, include_dirs, lang)
+
+        # Accept either a regex string or a precompiled pattern object.
+        if isinstance(pattern, str):
+            pattern = re.compile(pattern)
+
+        file = open(out)
+        match = 0
+        # Scan the preprocessor output line by line, stopping at the
+        # first match (or EOF, signalled by an empty read).
+        while 1:
+            line = file.readline()
+            if line == '':
+                break
+            if pattern.search(line):
+                match = 1
+                break
+
+        file.close()
+        self._clean()
+        return match
+
+    def try_compile(self, body, headers=None, include_dirs=None, lang="c"):
+        """Try to compile a source file built from 'body' and 'headers'.
+        Return true on success, false otherwise.
+        """
+        from distutils.ccompiler import CompileError
+        self._check_compiler()
+        try:
+            self._compile(body, headers, include_dirs, lang)
+            ok = 1
+        except CompileError:
+            ok = 0
+
+        log.info(ok and "success!" or "failure.")
+        # Remove the temporary source/object files before returning.
+        self._clean()
+        return ok
+
+    def try_link(self, body, headers=None, include_dirs=None, libraries=None,
+                 library_dirs=None, lang="c"):
+        """Try to compile and link a source file, built from 'body' and
+        'headers', to executable form.  Return true on success, false
+        otherwise.
+        """
+        from distutils.ccompiler import CompileError, LinkError
+        self._check_compiler()
+        try:
+            # Failure at either the compile or the link step counts as
+            # "not linkable".
+            self._link(body, headers, include_dirs,
+                       libraries, library_dirs, lang)
+            ok = 1
+        except (CompileError, LinkError):
+            ok = 0
+
+        log.info(ok and "success!" or "failure.")
+        self._clean()
+        return ok
+
+    def try_run(self, body, headers=None, include_dirs=None, libraries=None,
+                library_dirs=None, lang="c"):
+        """Try to compile, link to an executable, and run a program
+        built from 'body' and 'headers'.  Return true on success, false
+        otherwise.
+        """
+        from distutils.ccompiler import CompileError, LinkError
+        self._check_compiler()
+        try:
+            src, obj, exe = self._link(body, headers, include_dirs,
+                                       libraries, library_dirs, lang)
+            # Actually execute the freshly linked program; a non-zero exit
+            # status surfaces as DistutilsExecError from spawn().
+            self.spawn([exe])
+            ok = 1
+        except (CompileError, LinkError, DistutilsExecError):
+            ok = 0
+
+        log.info(ok and "success!" or "failure.")
+        self._clean()
+        return ok
+
+
+    # -- High-level methods --------------------------------------------
+    # (these are the ones that are actually likely to be useful
+    # when implementing a real-world config command!)
+
+    def check_func(self, func, headers=None, include_dirs=None,
+                   libraries=None, library_dirs=None, decl=0, call=0):
+
+        """Determine if function 'func' is available by constructing a
+        source file that refers to 'func', and compiles and links it.
+        If everything succeeds, returns true; otherwise returns false.
+
+        The constructed source file starts out by including the header
+        files listed in 'headers'.  If 'decl' is true, it then declares
+        'func' (as "int func()"); you probably shouldn't supply 'headers'
+        and set 'decl' true in the same call, or you might get errors about
+        a conflicting declarations for 'func'.  Finally, the constructed
+        'main()' function either references 'func' or (if 'call' is true)
+        calls it.  'libraries' and 'library_dirs' are used when
+        linking.
+        """
+
+        self._check_compiler()
+        # Build the probe program line by line, then join into one string.
+        body = []
+        if decl:
+            body.append("int %s ();" % func)
+        body.append("int main () {")
+        if call:
+            body.append("  %s();" % func)
+        else:
+            # A bare reference is enough to force the linker to resolve
+            # the symbol without actually invoking it.
+            body.append("  %s;" % func)
+        body.append("}")
+        body = "\n".join(body) + "\n"
+
+        return self.try_link(body, headers, include_dirs,
+                             libraries, library_dirs)
+
+    # check_func ()
+
+    def check_lib(self, library, library_dirs=None, headers=None,
+                  include_dirs=None, other_libraries=[]):
+        """Determine if 'library' is available to be linked against,
+        without actually checking that any particular symbols are provided
+        by it.  'headers' will be used in constructing the source file to
+        be compiled, but the only effect of this is to check if all the
+        header files listed are available.  Any libraries listed in
+        'other_libraries' will be included in the link, in case 'library'
+        has symbols that depend on other libraries.
+        """
+        # NOTE(review): the mutable default 'other_libraries=[]' is shared
+        # across calls, but it is only read here (never mutated), so it is
+        # harmless in practice.
+        self._check_compiler()
+        return self.try_link("int main (void) { }",
+                             headers, include_dirs,
+                             [library]+other_libraries, library_dirs)
+
+    def check_header(self, header, include_dirs=None, library_dirs=None,
+                     lang="c"):
+        """Determine if the system header file named by 'header'
+        exists and can be found by the preprocessor; return true if so,
+        false otherwise.
+        """
+        # 'library_dirs' and 'lang' are accepted for interface symmetry
+        # with the other check_* methods but are not forwarded to
+        # try_cpp() here.
+        return self.try_cpp(body="/* No body */", headers=[header],
+                            include_dirs=include_dirs)
+
+
+def dump_file(filename, head=None):
+    """Dumps a file content into log.info.
+
+    If head is not None, will be dumped before the file content.
+    """
+    # Header line: either the caller-supplied 'head' or the file name.
+    if head is None:
+        log.info('%s' % filename)
+    else:
+        log.info(head)
+    # Read the whole file and log it; close the handle even if the
+    # read fails.
+    file = open(filename)
+    try:
+        log.info(file.read())
+    finally:
+        file.close()
diff --git a/lib/distutils/distutils/command/install.py b/lib/distutils/distutils/command/install.py
new file mode 100644
index 0000000..b9f1c6c
--- /dev/null
+++ b/lib/distutils/distutils/command/install.py
@@ -0,0 +1,672 @@
+"""distutils.command.install
+
+Implements the Distutils 'install' command."""
+
+from distutils import log
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id$"
+
+import sys, os, string
+from types import *
+from distutils.core import Command
+from distutils.debug import DEBUG
+from distutils.sysconfig import get_config_vars
+from distutils.errors import DistutilsPlatformError
+from distutils.file_util import write_file
+from distutils.util import convert_path, subst_vars, change_root
+from distutils.util import get_platform
+from distutils.errors import DistutilsOptionError
+from site import USER_BASE
+from site import USER_SITE
+
+
+# NOTE(review): lexicographic comparison of sys.version strings is fragile
+# in general, but adequate for the 2.x versions this module targets.
+if sys.version < "2.2":
+    WINDOWS_SCHEME = {
+        'purelib': '$base',
+        'platlib': '$base',
+        'headers': '$base/Include/$dist_name',
+        'scripts': '$base/Scripts',
+        'data'   : '$base',
+    }
+else:
+    WINDOWS_SCHEME = {
+        'purelib': '$base/Lib/site-packages',
+        'platlib': '$base/Lib/site-packages',
+        'headers': '$base/Include/$dist_name',
+        'scripts': '$base/Scripts',
+        'data'   : '$base',
+    }
+
+# Map of scheme name -> {key -> '$var'-templated path}.  The '$'-variables
+# are substituted from install.config_vars (see finalize_options below).
+INSTALL_SCHEMES = {
+    'unix_prefix': {
+        'purelib': '$base/lib/python$py_version_short/site-packages',
+        'platlib': '$platbase/lib/python$py_version_short/site-packages',
+        'headers': '$base/include/python$py_version_short/$dist_name',
+        'scripts': '$base/bin',
+        'data'   : '$base',
+        },
+    'unix_home': {
+        'purelib': '$base/lib/python',
+        'platlib': '$base/lib/python',
+        'headers': '$base/include/python/$dist_name',
+        'scripts': '$base/bin',
+        'data'   : '$base',
+        },
+    'unix_user': {
+        'purelib': '$usersite',
+        'platlib': '$usersite',
+        'headers': '$userbase/include/python$py_version_short/$dist_name',
+        'scripts': '$userbase/bin',
+        'data'   : '$userbase',
+        },
+    'nt': WINDOWS_SCHEME,
+    'nt_user': {
+        'purelib': '$usersite',
+        'platlib': '$usersite',
+        'headers': '$userbase/Python$py_version_nodot/Include/$dist_name',
+        'scripts': '$userbase/Scripts',
+        'data'   : '$userbase',
+        },
+    'os2': {
+        'purelib': '$base/Lib/site-packages',
+        'platlib': '$base/Lib/site-packages',
+        'headers': '$base/Include/$dist_name',
+        'scripts': '$base/Scripts',
+        'data'   : '$base',
+        },
+    'os2_home': {
+        'purelib': '$usersite',
+        'platlib': '$usersite',
+        'headers': '$userbase/include/python$py_version_short/$dist_name',
+        'scripts': '$userbase/bin',
+        'data'   : '$userbase',
+        },
+    }
+
+# The keys to an installation scheme; if any new types of files are to be
+# installed, be sure to add an entry to every installation scheme above,
+# and to SCHEME_KEYS here.
+SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data')
+
+
+class install (Command):
+    """Distutils 'install' command.
+
+    Decides the final installation directories from a mix of user options
+    (--prefix, --home, --user, --install-*, --root) and the platform's
+    INSTALL_SCHEMES table, then delegates the actual file copying to the
+    'install_*' sub-commands listed in 'sub_commands'.
+    """
+
+    description = "install everything from build directory"
+
+    user_options = [
+        # Select installation scheme and set base director(y|ies)
+        ('prefix=', None,
+         "installation prefix"),
+        ('exec-prefix=', None,
+         "(Unix only) prefix for platform-specific files"),
+        ('home=', None,
+         "(Unix only) home directory to install under"),
+        ('user', None,
+         "install in user site-package '%s'" % USER_SITE),
+
+        # Or, just set the base director(y|ies)
+        ('install-base=', None,
+         "base installation directory (instead of --prefix or --home)"),
+        ('install-platbase=', None,
+         "base installation directory for platform-specific files " +
+         "(instead of --exec-prefix or --home)"),
+        ('root=', None,
+         "install everything relative to this alternate root directory"),
+
+        # Or, explicitly set the installation scheme
+        ('install-purelib=', None,
+         "installation directory for pure Python module distributions"),
+        ('install-platlib=', None,
+         "installation directory for non-pure module distributions"),
+        ('install-lib=', None,
+         "installation directory for all module distributions " +
+         "(overrides --install-purelib and --install-platlib)"),
+
+        ('install-headers=', None,
+         "installation directory for C/C++ headers"),
+        ('install-scripts=', None,
+         "installation directory for Python scripts"),
+        ('install-data=', None,
+         "installation directory for data files"),
+
+        # Byte-compilation options -- see install_lib.py for details, as
+        # these are duplicated from there (but only install_lib does
+        # anything with them).
+        ('compile', 'c', "compile .py to .pyc [default]"),
+        ('no-compile', None, "don't compile .py files"),
+        ('optimize=', 'O',
+         "also compile with optimization: -O1 for \"python -O\", "
+         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+
+        # Miscellaneous control options
+        ('force', 'f',
+         "force installation (overwrite any existing files)"),
+        ('skip-build', None,
+         "skip rebuilding everything (for testing/debugging)"),
+
+        # Where to install documentation (eventually!)
+        #('doc-format=', None, "format of documentation to generate"),
+        #('install-man=', None, "directory for Unix man pages"),
+        #('install-html=', None, "directory for HTML documentation"),
+        #('install-info=', None, "directory for GNU info files"),
+
+        ('record=', None,
+         "filename in which to record list of installed files"),
+        ]
+
+    boolean_options = ['compile', 'force', 'skip-build', 'user']
+    negative_opt = {'no-compile' : 'compile'}
+
+
+    def initialize_options (self):
+
+        # High-level options: these select both an installation base
+        # and scheme.
+        self.prefix = None
+        self.exec_prefix = None
+        self.home = None
+        self.user = 0
+
+        # These select only the installation base; it's up to the user to
+        # specify the installation scheme (currently, that means supplying
+        # the --install-{platlib,purelib,scripts,data} options).
+        self.install_base = None
+        self.install_platbase = None
+        self.root = None
+
+        # These options are the actual installation directories; if not
+        # supplied by the user, they are filled in using the installation
+        # scheme implied by prefix/exec-prefix/home and the contents of
+        # that installation scheme.
+        self.install_purelib = None     # for pure module distributions
+        self.install_platlib = None     # non-pure (dists w/ extensions)
+        self.install_headers = None     # for C/C++ headers
+        self.install_lib = None         # set to either purelib or platlib
+        self.install_scripts = None
+        self.install_data = None
+        self.install_userbase = USER_BASE
+        self.install_usersite = USER_SITE
+
+        self.compile = None
+        self.optimize = None
+
+        # These two are for putting non-packagized distributions into their
+        # own directory and creating a .pth file if it makes sense.
+        # 'extra_path' comes from the setup file; 'install_path_file' can
+        # be turned off if it makes no sense to install a .pth file.  (But
+        # better to install it uselessly than to guess wrong and not
+        # install it when it's necessary and would be used!)  Currently,
+        # 'install_path_file' is always true unless some outsider meddles
+        # with it.
+        self.extra_path = None
+        self.install_path_file = 1
+
+        # 'force' forces installation, even if target files are not
+        # out-of-date.  'skip_build' skips running the "build" command,
+        # handy if you know it's not necessary.  'warn_dir' (which is *not*
+        # a user option, it's just there so the bdist_* commands can turn
+        # it off) determines whether we warn about installing to a
+        # directory not in sys.path.
+        self.force = 0
+        self.skip_build = 0
+        self.warn_dir = 1
+
+        # These are only here as a conduit from the 'build' command to the
+        # 'install_*' commands that do the real work.  ('build_base' isn't
+        # actually used anywhere, but it might be useful in future.)  They
+        # are not user options, because if the user told the install
+        # command where the build directory is, that wouldn't affect the
+        # build command.
+        self.build_base = None
+        self.build_lib = None
+
+        # Not defined yet because we don't know anything about
+        # documentation yet.
+        #self.install_man = None
+        #self.install_html = None
+        #self.install_info = None
+
+        self.record = None
+
+
+    # -- Option finalizing methods -------------------------------------
+    # (This is rather more involved than for most commands,
+    # because this is where the policy for installing third-
+    # party Python modules on various platforms given a wide
+    # array of user input is decided.  Yes, it's quite complex!)
+
+    def finalize_options (self):
+
+        # This method (and its pliant slaves, like 'finalize_unix()',
+        # 'finalize_other()', and 'select_scheme()') is where the default
+        # installation directories for modules, extension modules, and
+        # anything else we care to install from a Python module
+        # distribution.  Thus, this code makes a pretty important policy
+        # statement about how third-party stuff is added to a Python
+        # installation!  Note that the actual work of installation is done
+        # by the relatively simple 'install_*' commands; they just take
+        # their orders from the installation directory options determined
+        # here.
+
+        # Check for errors/inconsistencies in the options; first, stuff
+        # that's wrong on any platform.
+
+        if ((self.prefix or self.exec_prefix or self.home) and
+            (self.install_base or self.install_platbase)):
+            raise DistutilsOptionError, \
+                  ("must supply either prefix/exec-prefix/home or " +
+                   "install-base/install-platbase -- not both")
+
+        if self.home and (self.prefix or self.exec_prefix):
+            raise DistutilsOptionError, \
+                  "must supply either home or prefix/exec-prefix -- not both"
+
+        if self.user and (self.prefix or self.exec_prefix or self.home or
+                self.install_base or self.install_platbase):
+            raise DistutilsOptionError("can't combine user with prefix, "
+                                       "exec_prefix/home, or install_(plat)base")
+
+        # Next, stuff that's wrong (or dubious) only on certain platforms.
+        if os.name != "posix":
+            if self.exec_prefix:
+                self.warn("exec-prefix option ignored on this platform")
+                self.exec_prefix = None
+
+        # Now the interesting logic -- so interesting that we farm it out
+        # to other methods.  The goal of these methods is to set the final
+        # values for the install_{lib,scripts,data,...}  options, using as
+        # input a heady brew of prefix, exec_prefix, home, install_base,
+        # install_platbase, user-supplied versions of
+        # install_{purelib,platlib,lib,scripts,data,...}, and the
+        # INSTALL_SCHEME dictionary above.  Phew!
+
+        self.dump_dirs("pre-finalize_{unix,other}")
+
+        if os.name == 'posix':
+            self.finalize_unix()
+        else:
+            self.finalize_other()
+
+        self.dump_dirs("post-finalize_{unix,other}()")
+
+        # Expand configuration variables, tilde, etc. in self.install_base
+        # and self.install_platbase -- that way, we can use $base or
+        # $platbase in the other installation directories and not worry
+        # about needing recursive variable expansion (shudder).
+
+        # py_version_short below is the first three characters of the
+        # version string (e.g. "2.7") -- assumes single-digit components.
+        py_version = (string.split(sys.version))[0]
+        (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix')
+        self.config_vars = {'dist_name': self.distribution.get_name(),
+                            'dist_version': self.distribution.get_version(),
+                            'dist_fullname': self.distribution.get_fullname(),
+                            'py_version': py_version,
+                            'py_version_short': py_version[0:3],
+                            'py_version_nodot': py_version[0] + py_version[2],
+                            'sys_prefix': prefix,
+                            'prefix': prefix,
+                            'sys_exec_prefix': exec_prefix,
+                            'exec_prefix': exec_prefix,
+                            'userbase': self.install_userbase,
+                            'usersite': self.install_usersite,
+                           }
+        self.expand_basedirs()
+
+        self.dump_dirs("post-expand_basedirs()")
+
+        # Now define config vars for the base directories so we can expand
+        # everything else.
+        self.config_vars['base'] = self.install_base
+        self.config_vars['platbase'] = self.install_platbase
+
+        if DEBUG:
+            from pprint import pprint
+            print "config vars:"
+            pprint(self.config_vars)
+
+        # Expand "~" and configuration variables in the installation
+        # directories.
+        self.expand_dirs()
+
+        self.dump_dirs("post-expand_dirs()")
+
+        # Create directories in the home dir:
+        if self.user:
+            self.create_home_path()
+
+        # Pick the actual directory to install all modules to: either
+        # install_purelib or install_platlib, depending on whether this
+        # module distribution is pure or not.  Of course, if the user
+        # already specified install_lib, use their selection.
+        if self.install_lib is None:
+            if self.distribution.ext_modules: # has extensions: non-pure
+                self.install_lib = self.install_platlib
+            else:
+                self.install_lib = self.install_purelib
+
+
+        # Convert directories from Unix /-separated syntax to the local
+        # convention.
+        self.convert_paths('lib', 'purelib', 'platlib',
+                           'scripts', 'data', 'headers',
+                           'userbase', 'usersite')
+
+        # Well, we're not actually fully completely finalized yet: we still
+        # have to deal with 'extra_path', which is the hack for allowing
+        # non-packagized module distributions (hello, Numerical Python!) to
+        # get their own directories.
+        self.handle_extra_path()
+        self.install_libbase = self.install_lib # needed for .pth file
+        self.install_lib = os.path.join(self.install_lib, self.extra_dirs)
+
+        # If a new root directory was supplied, make all the installation
+        # dirs relative to it.
+        if self.root is not None:
+            self.change_roots('libbase', 'lib', 'purelib', 'platlib',
+                              'scripts', 'data', 'headers')
+
+        self.dump_dirs("after prepending root")
+
+        # Find out the build directories, ie. where to install from.
+        self.set_undefined_options('build',
+                                   ('build_base', 'build_base'),
+                                   ('build_lib', 'build_lib'))
+
+        # Punt on doc directories for now -- after all, we're punting on
+        # documentation completely!
+
+    # finalize_options ()
+
+
+    def dump_dirs (self, msg):
+        # Debug helper: print every user option's current value, but only
+        # when DEBUG is set; otherwise a no-op.
+        if DEBUG:
+            from distutils.fancy_getopt import longopt_xlate
+            print msg + ":"
+            for opt in self.user_options:
+                opt_name = opt[0]
+                if opt_name[-1] == "=":
+                    opt_name = opt_name[0:-1]
+                if opt_name in self.negative_opt:
+                    opt_name = string.translate(self.negative_opt[opt_name],
+                                                longopt_xlate)
+                    val = not getattr(self, opt_name)
+                else:
+                    opt_name = string.translate(opt_name, longopt_xlate)
+                    val = getattr(self, opt_name)
+                print "  %s: %s" % (opt_name, val)
+
+
+    def finalize_unix (self):
+        # Resolve installation directories on POSIX platforms.
+
+        if self.install_base is not None or self.install_platbase is not None:
+            if ((self.install_lib is None and
+                 self.install_purelib is None and
+                 self.install_platlib is None) or
+                self.install_headers is None or
+                self.install_scripts is None or
+                self.install_data is None):
+                raise DistutilsOptionError, \
+                      ("install-base or install-platbase supplied, but "
+                      "installation scheme is incomplete")
+            return
+
+        if self.user:
+            if self.install_userbase is None:
+                raise DistutilsPlatformError(
+                    "User base directory is not specified")
+            self.install_base = self.install_platbase = self.install_userbase
+            self.select_scheme("unix_user")
+        elif self.home is not None:
+            self.install_base = self.install_platbase = self.home
+            self.select_scheme("unix_home")
+        else:
+            if self.prefix is None:
+                if self.exec_prefix is not None:
+                    raise DistutilsOptionError, \
+                          "must not supply exec-prefix without prefix"
+
+                self.prefix = os.path.normpath(sys.prefix)
+                self.exec_prefix = os.path.normpath(sys.exec_prefix)
+
+            else:
+                if self.exec_prefix is None:
+                    self.exec_prefix = self.prefix
+
+            self.install_base = self.prefix
+            self.install_platbase = self.exec_prefix
+            self.select_scheme("unix_prefix")
+
+    # finalize_unix ()
+
+
+    def finalize_other (self):          # Windows and Mac OS for now
+
+        if self.user:
+            if self.install_userbase is None:
+                raise DistutilsPlatformError(
+                    "User base directory is not specified")
+            self.install_base = self.install_platbase = self.install_userbase
+            self.select_scheme(os.name + "_user")
+        elif self.home is not None:
+            self.install_base = self.install_platbase = self.home
+            self.select_scheme("unix_home")
+        else:
+            if self.prefix is None:
+                self.prefix = os.path.normpath(sys.prefix)
+
+            self.install_base = self.install_platbase = self.prefix
+            try:
+                self.select_scheme(os.name)
+            except KeyError:
+                raise DistutilsPlatformError, \
+                      "I don't know how to install stuff on '%s'" % os.name
+
+    # finalize_other ()
+
+
+    def select_scheme (self, name):
+        # Fill in any still-unset install_* attributes from the named
+        # entry in INSTALL_SCHEMES; user-supplied values win.
+        # it's the caller's problem if they supply a bad name!
+        scheme = INSTALL_SCHEMES[name]
+        for key in SCHEME_KEYS:
+            attrname = 'install_' + key
+            if getattr(self, attrname) is None:
+                setattr(self, attrname, scheme[key])
+
+
+    def _expand_attrs (self, attrs):
+        # Expand "~" and $config_vars in each named attribute, in place.
+        for attr in attrs:
+            val = getattr(self, attr)
+            if val is not None:
+                if os.name == 'posix' or os.name == 'nt':
+                    val = os.path.expanduser(val)
+                val = subst_vars(val, self.config_vars)
+                setattr(self, attr, val)
+
+
+    def expand_basedirs (self):
+        self._expand_attrs(['install_base',
+                            'install_platbase',
+                            'root'])
+
+    def expand_dirs (self):
+        self._expand_attrs(['install_purelib',
+                            'install_platlib',
+                            'install_lib',
+                            'install_headers',
+                            'install_scripts',
+                            'install_data',])
+
+
+    def convert_paths (self, *names):
+        # Convert each install_<name> from Unix /-notation to the local
+        # path convention.
+        for name in names:
+            attr = "install_" + name
+            setattr(self, attr, convert_path(getattr(self, attr)))
+
+
+    def handle_extra_path (self):
+        # Resolve 'extra_path' into (path_file, extra_dirs): the .pth file
+        # name (or None) and the subdirectory appended to install_lib.
+
+        if self.extra_path is None:
+            self.extra_path = self.distribution.extra_path
+
+        if self.extra_path is not None:
+            if type(self.extra_path) is StringType:
+                self.extra_path = string.split(self.extra_path, ',')
+
+            if len(self.extra_path) == 1:
+                path_file = extra_dirs = self.extra_path[0]
+            elif len(self.extra_path) == 2:
+                (path_file, extra_dirs) = self.extra_path
+            else:
+                raise DistutilsOptionError, \
+                      ("'extra_path' option must be a list, tuple, or "
+                      "comma-separated string with 1 or 2 elements")
+
+            # convert to local form in case Unix notation used (as it
+            # should be in setup scripts)
+            extra_dirs = convert_path(extra_dirs)
+
+        else:
+            path_file = None
+            extra_dirs = ''
+
+        # XXX should we warn if path_file and not extra_dirs? (in which
+        # case the path file would be harmless but pointless)
+        self.path_file = path_file
+        self.extra_dirs = extra_dirs
+
+    # handle_extra_path ()
+
+
+    def change_roots (self, *names):
+        # Re-root each install_<name> directory under self.root.
+        for name in names:
+            attr = "install_" + name
+            setattr(self, attr, change_root(self.root, getattr(self, attr)))
+
+    def create_home_path(self):
+        """Create directories under ~
+        """
+        if not self.user:
+            return
+        home = convert_path(os.path.expanduser("~"))
+        for name, path in self.config_vars.iteritems():
+            if path.startswith(home) and not os.path.isdir(path):
+                self.debug_print("os.makedirs('%s', 0700)" % path)
+                # 0700 is a Python 2 octal literal: owner-only permissions.
+                os.makedirs(path, 0700)
+
+    # -- Command execution methods -------------------------------------
+
+    def run (self):
+
+        # Obviously have to build before we can install
+        if not self.skip_build:
+            self.run_command('build')
+            # If we built for any other platform, we can't install.
+            build_plat = self.distribution.get_command_obj('build').plat_name
+            # check warn_dir - it is a clue that the 'install' is happening
+            # internally, and not to sys.path, so we don't check the platform
+            # matches what we are running.
+            if self.warn_dir and build_plat != get_platform():
+                raise DistutilsPlatformError("Can't install when "
+                                             "cross-compiling")
+
+        # Run all sub-commands (at least those that need to be run)
+        for cmd_name in self.get_sub_commands():
+            self.run_command(cmd_name)
+
+        if self.path_file:
+            self.create_path_file()
+
+        # write list of installed files, if requested.
+        if self.record:
+            outputs = self.get_outputs()
+            if self.root:               # strip any package prefix
+                root_len = len(self.root)
+                for counter in xrange(len(outputs)):
+                    outputs[counter] = outputs[counter][root_len:]
+            self.execute(write_file,
+                         (self.record, outputs),
+                         "writing list of installed files to '%s'" %
+                         self.record)
+
+        # Warn (via log.debug) if we installed modules somewhere Python
+        # won't find them, unless a .pth file takes care of it.
+        sys_path = map(os.path.normpath, sys.path)
+        sys_path = map(os.path.normcase, sys_path)
+        install_lib = os.path.normcase(os.path.normpath(self.install_lib))
+        if (self.warn_dir and
+            not (self.path_file and self.install_path_file) and
+            install_lib not in sys_path):
+            log.debug(("modules installed to '%s', which is not in "
+                       "Python's module search path (sys.path) -- "
+                       "you'll have to change the search path yourself"),
+                       self.install_lib)
+
+    # run ()
+
+    def create_path_file (self):
+        # Write <path_file>.pth (containing extra_dirs) into
+        # install_libbase, unless install_path_file was turned off.
+        filename = os.path.join(self.install_libbase,
+                                self.path_file + ".pth")
+        if self.install_path_file:
+            self.execute(write_file,
+                         (filename, [self.extra_dirs]),
+                         "creating %s" % filename)
+        else:
+            self.warn("path file '%s' not created" % filename)
+
+
+    # -- Reporting methods ---------------------------------------------
+
+    def get_outputs (self):
+        # Assemble the outputs of all the sub-commands.
+        outputs = []
+        for cmd_name in self.get_sub_commands():
+            cmd = self.get_finalized_command(cmd_name)
+            # Add the contents of cmd.get_outputs(), ensuring
+            # that outputs doesn't contain duplicate entries
+            for filename in cmd.get_outputs():
+                if filename not in outputs:
+                    outputs.append(filename)
+
+        if self.path_file and self.install_path_file:
+            outputs.append(os.path.join(self.install_libbase,
+                                        self.path_file + ".pth"))
+
+        return outputs
+
+    def get_inputs (self):
+        # XXX gee, this looks familiar ;-(
+        inputs = []
+        for cmd_name in self.get_sub_commands():
+            cmd = self.get_finalized_command(cmd_name)
+            inputs.extend(cmd.get_inputs())
+
+        return inputs
+
+
+    # -- Predicates for sub-command list -------------------------------
+
+    def has_lib (self):
+        """Return true if the current distribution has any Python
+        modules to install."""
+        return (self.distribution.has_pure_modules() or
+                self.distribution.has_ext_modules())
+
+    def has_headers (self):
+        return self.distribution.has_headers()
+
+    def has_scripts (self):
+        return self.distribution.has_scripts()
+
+    def has_data (self):
+        return self.distribution.has_data_files()
+
+
+    # 'sub_commands': a list of commands this command might have to run to
+    # get its work done.  See cmd.py for more info.
+    sub_commands = [('install_lib',     has_lib),
+                    ('install_headers', has_headers),
+                    ('install_scripts', has_scripts),
+                    ('install_data',    has_data),
+                    ('install_egg_info', lambda self:True),
+                   ]
+
+# class install
diff --git a/lib/distutils/distutils/command/install_data.py b/lib/distutils/distutils/command/install_data.py
new file mode 100644
index 0000000..ab40797
--- /dev/null
+++ b/lib/distutils/distutils/command/install_data.py
@@ -0,0 +1,81 @@
+"""distutils.command.install_data
+
+Implements the Distutils 'install_data' command, for installing
+platform-independent data files."""
+
+# contributed by Bastian Kleineidam
+
+__revision__ = "$Id$"
+
+import os
+from distutils.core import Command
+from distutils.util import change_root, convert_path
+
+class install_data(Command):
+
+    description = "install data files"
+
+    user_options = [
+        ('install-dir=', 'd',
+         "base directory for installing data files "
+         "(default: installation base dir)"),
+        ('root=', None,
+         "install everything relative to this alternate root directory"),
+        ('force', 'f', "force installation (overwrite existing files)"),
+        ]
+
+    boolean_options = ['force']
+
+    def initialize_options(self):
+        self.install_dir = None
+        self.outfiles = []
+        self.root = None
+        self.force = 0
+        self.data_files = self.distribution.data_files
+        self.warn_dir = 1
+
+    def finalize_options(self):
+        self.set_undefined_options('install',
+                                   ('install_data', 'install_dir'),
+                                   ('root', 'root'),
+                                   ('force', 'force'),
+                                  )
+
+    def run(self):
+        self.mkpath(self.install_dir)
+        for f in self.data_files:
+            if isinstance(f, str):
+                # it's a simple file, so copy it
+                f = convert_path(f)
+                if self.warn_dir:
+                    self.warn("setup script did not provide a directory for "
+                              "'%s' -- installing right in '%s'" %
+                              (f, self.install_dir))
+                (out, _) = self.copy_file(f, self.install_dir)
+                self.outfiles.append(out)
+            else:
+                # it's a tuple with path to install to and a list of files
+                dir = convert_path(f[0])
+                if not os.path.isabs(dir):
+                    dir = os.path.join(self.install_dir, dir)
+                elif self.root:
+                    dir = change_root(self.root, dir)
+                self.mkpath(dir)
+
+                if f[1] == []:
+                    # If there are no files listed, the user must be
+                    # trying to create an empty directory, so add the
+                    # directory to the list of output files.
+                    self.outfiles.append(dir)
+                else:
+                    # Copy files, adding them to the list of output files.
+                    for data in f[1]:
+                        data = convert_path(data)
+                        (out, _) = self.copy_file(data, dir)
+                        self.outfiles.append(out)
+
+    def get_inputs(self):
+        return self.data_files or []
+
+    def get_outputs(self):
+        return self.outfiles
diff --git a/lib/distutils/distutils/command/install_egg_info.py b/lib/distutils/distutils/command/install_egg_info.py
new file mode 100644
index 0000000..c888031
--- /dev/null
+++ b/lib/distutils/distutils/command/install_egg_info.py
@@ -0,0 +1,78 @@
+"""distutils.command.install_egg_info
+
+Implements the Distutils 'install_egg_info' command, for installing
+a package's PKG-INFO metadata."""
+
+
+from distutils.cmd import Command
+from distutils import log, dir_util
+import os, sys, re
+
+class install_egg_info(Command):
+    """Install an .egg-info file for the package"""
+
+    description = "Install package's PKG-INFO metadata as an .egg-info file"
+    user_options = [
+        ('install-dir=', 'd', "directory to install to"),
+    ]
+
+    def initialize_options(self):
+        self.install_dir = None
+
+    def finalize_options(self):
+        self.set_undefined_options('install_lib',('install_dir','install_dir'))
+        basename = "%s-%s-py%s.egg-info" % (
+            to_filename(safe_name(self.distribution.get_name())),
+            to_filename(safe_version(self.distribution.get_version())),
+            sys.version[:3]
+        )
+        self.target = os.path.join(self.install_dir, basename)
+        self.outputs = [self.target]
+
+    def run(self):
+        target = self.target
+        if os.path.isdir(target) and not os.path.islink(target):
+            dir_util.remove_tree(target, dry_run=self.dry_run)
+        elif os.path.exists(target):
+            self.execute(os.unlink,(self.target,),"Removing "+target)
+        elif not os.path.isdir(self.install_dir):
+            self.execute(os.makedirs, (self.install_dir,),
+                         "Creating "+self.install_dir)
+        log.info("Writing %s", target)
+        if not self.dry_run:
+            f = open(target, 'w')
+            self.distribution.metadata.write_pkg_file(f)
+            f.close()
+
+    def get_outputs(self):
+        return self.outputs
+
+
+# The following routines are taken from setuptools' pkg_resources module and
+# can be replaced by importing them from pkg_resources once it is included
+# in the stdlib.
+
+def safe_name(name):
+    """Convert an arbitrary string to a standard distribution name
+
+    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+    """
+    return re.sub('[^A-Za-z0-9.]+', '-', name)
+
+
+def safe_version(version):
+    """Convert an arbitrary string to a standard version string
+
+    Spaces become dots, and all other non-alphanumeric characters become
+    dashes, with runs of multiple dashes condensed to a single dash.
+    """
+    version = version.replace(' ','.')
+    return re.sub('[^A-Za-z0-9.]+', '-', version)
+
+
+def to_filename(name):
+    """Convert a project or version name to its filename-escaped form
+
+    Any '-' characters are currently replaced with '_'.
+    """
+    return name.replace('-','_')
diff --git a/lib/distutils/distutils/command/install_headers.py b/lib/distutils/distutils/command/install_headers.py
new file mode 100644
index 0000000..d892416
--- /dev/null
+++ b/lib/distutils/distutils/command/install_headers.py
@@ -0,0 +1,51 @@
+"""distutils.command.install_headers
+
+Implements the Distutils 'install_headers' command, to install C/C++ header
+files to the Python include directory."""
+
+__revision__ = "$Id$"
+
+from distutils.core import Command
+
+
+# XXX force is never used
+class install_headers(Command):
+
+    description = "install C/C++ header files"
+
+    user_options = [('install-dir=', 'd',
+                     "directory to install header files to"),
+                    ('force', 'f',
+                     "force installation (overwrite existing files)"),
+                   ]
+
+    boolean_options = ['force']
+
+    def initialize_options(self):
+        self.install_dir = None
+        self.force = 0
+        self.outfiles = []
+
+    def finalize_options(self):
+        self.set_undefined_options('install',
+                                   ('install_headers', 'install_dir'),
+                                   ('force', 'force'))
+
+
+    def run(self):
+        headers = self.distribution.headers
+        if not headers:
+            return
+
+        self.mkpath(self.install_dir)
+        for header in headers:
+            (out, _) = self.copy_file(header, self.install_dir)
+            self.outfiles.append(out)
+
+    def get_inputs(self):
+        return self.distribution.headers or []
+
+    def get_outputs(self):
+        return self.outfiles
+
+# class install_headers
diff --git a/lib/distutils/distutils/command/install_lib.py b/lib/distutils/distutils/command/install_lib.py
new file mode 100644
index 0000000..043e8b6
--- /dev/null
+++ b/lib/distutils/distutils/command/install_lib.py
@@ -0,0 +1,219 @@
+"""distutils.command.install_lib
+
+Implements the Distutils 'install_lib' command
+(install all Python modules)."""
+
+__revision__ = "$Id$"
+
+import os
+import sys
+
+from distutils.core import Command
+from distutils.errors import DistutilsOptionError
+
+
+# Extension for Python source files.
+if hasattr(os, 'extsep'):
+    PYTHON_SOURCE_EXTENSION = os.extsep + "py"
+else:
+    PYTHON_SOURCE_EXTENSION = ".py"
+
+class install_lib(Command):
+
+    description = "install all Python modules (extensions and pure Python)"
+
+    # The byte-compilation options are a tad confusing.  Here are the
+    # possible scenarios:
+    #   1) no compilation at all (--no-compile --no-optimize)
+    #   2) compile .pyc only (--compile --no-optimize; default)
+    #   3) compile .pyc and "level 1" .pyo (--compile --optimize)
+    #   4) compile "level 1" .pyo only (--no-compile --optimize)
+    #   5) compile .pyc and "level 2" .pyo (--compile --optimize-more)
+    #   6) compile "level 2" .pyo only (--no-compile --optimize-more)
+    #
+    # The UI for this is two options, 'compile' and 'optimize'.
+    # 'compile' is strictly boolean, and only decides whether to
+    # generate .pyc files.  'optimize' is three-way (0, 1, or 2), and
+    # decides both whether to generate .pyo files and what level of
+    # optimization to use.
+
+    user_options = [
+        ('install-dir=', 'd', "directory to install to"),
+        ('build-dir=','b', "build directory (where to install from)"),
+        ('force', 'f', "force installation (overwrite existing files)"),
+        ('compile', 'c', "compile .py to .pyc [default]"),
+        ('no-compile', None, "don't compile .py files"),
+        ('optimize=', 'O',
+         "also compile with optimization: -O1 for \"python -O\", "
+         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+        ('skip-build', None, "skip the build steps"),
+        ]
+
+    boolean_options = ['force', 'compile', 'skip-build']
+    negative_opt = {'no-compile' : 'compile'}
+
+    def initialize_options(self):
+        # let the 'install' command dictate our installation directory
+        self.install_dir = None
+        self.build_dir = None
+        self.force = 0
+        self.compile = None
+        self.optimize = None
+        self.skip_build = None
+
+    def finalize_options(self):
+        # Get all the information we need to install pure Python modules
+        # from the umbrella 'install' command -- build (source) directory,
+        # install (target) directory, and whether to compile .py files.
+        self.set_undefined_options('install',
+                                   ('build_lib', 'build_dir'),
+                                   ('install_lib', 'install_dir'),
+                                   ('force', 'force'),
+                                   ('compile', 'compile'),
+                                   ('optimize', 'optimize'),
+                                   ('skip_build', 'skip_build'),
+                                  )
+
+        if self.compile is None:
+            self.compile = 1
+        if self.optimize is None:
+            self.optimize = 0
+
+        if not isinstance(self.optimize, int):
+            try:
+                self.optimize = int(self.optimize)
+                if self.optimize not in (0, 1, 2):
+                    raise AssertionError
+            except (ValueError, AssertionError):
+                raise DistutilsOptionError, "optimize must be 0, 1, or 2"
+
+    def run(self):
+        # Make sure we have built everything we need first
+        self.build()
+
+        # Install everything: simply dump the entire contents of the build
+        # directory to the installation directory (that's the beauty of
+        # having a build directory!)
+        outfiles = self.install()
+
+        # (Optionally) compile .py to .pyc
+        if outfiles is not None and self.distribution.has_pure_modules():
+            self.byte_compile(outfiles)
+
+    # -- Top-level worker functions ------------------------------------
+    # (called from 'run()')
+
+    def build(self):
+        if not self.skip_build:
+            if self.distribution.has_pure_modules():
+                self.run_command('build_py')
+            if self.distribution.has_ext_modules():
+                self.run_command('build_ext')
+
+    def install(self):
+        if os.path.isdir(self.build_dir):
+            outfiles = self.copy_tree(self.build_dir, self.install_dir)
+        else:
+            self.warn("'%s' does not exist -- no Python modules to install" %
+                      self.build_dir)
+            return
+        return outfiles
+
+    def byte_compile(self, files):
+        if sys.dont_write_bytecode:
+            self.warn('byte-compiling is disabled, skipping.')
+            return
+
+        from distutils.util import byte_compile
+
+        # Get the "--root" directory supplied to the "install" command,
+        # and use it as a prefix to strip off the purported filename
+        # encoded in bytecode files.  This is far from complete, but it
+        # should at least generate usable bytecode in RPM distributions.
+        install_root = self.get_finalized_command('install').root
+
+        if self.compile:
+            byte_compile(files, optimize=0,
+                         force=self.force, prefix=install_root,
+                         dry_run=self.dry_run)
+        if self.optimize > 0:
+            byte_compile(files, optimize=self.optimize,
+                         force=self.force, prefix=install_root,
+                         verbose=self.verbose, dry_run=self.dry_run)
+
+
+    # -- Utility methods -----------------------------------------------
+
+    def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
+        if not has_any:
+            return []
+
+        build_cmd = self.get_finalized_command(build_cmd)
+        build_files = build_cmd.get_outputs()
+        build_dir = getattr(build_cmd, cmd_option)
+
+        prefix_len = len(build_dir) + len(os.sep)
+        outputs = []
+        for file in build_files:
+            outputs.append(os.path.join(output_dir, file[prefix_len:]))
+
+        return outputs
+
+    def _bytecode_filenames(self, py_filenames):
+        bytecode_files = []
+        for py_file in py_filenames:
+            # Since build_py handles package data installation, the
+            # list of outputs can contain more than just .py files.
+            # Make sure we only report bytecode for the .py files.
+            ext = os.path.splitext(os.path.normcase(py_file))[1]
+            if ext != PYTHON_SOURCE_EXTENSION:
+                continue
+            if self.compile:
+                bytecode_files.append(py_file + "c")
+            if self.optimize > 0:
+                bytecode_files.append(py_file + "o")
+
+        return bytecode_files
+
+
+    # -- External interface --------------------------------------------
+    # (called by outsiders)
+
+    def get_outputs(self):
+        """Return the list of files that would be installed if this command
+        were actually run.  Not affected by the "dry-run" flag or whether
+        modules have actually been built yet.
+        """
+        pure_outputs = \
+            self._mutate_outputs(self.distribution.has_pure_modules(),
+                                 'build_py', 'build_lib',
+                                 self.install_dir)
+        if self.compile:
+            bytecode_outputs = self._bytecode_filenames(pure_outputs)
+        else:
+            bytecode_outputs = []
+
+        ext_outputs = \
+            self._mutate_outputs(self.distribution.has_ext_modules(),
+                                 'build_ext', 'build_lib',
+                                 self.install_dir)
+
+        return pure_outputs + bytecode_outputs + ext_outputs
+
+    def get_inputs(self):
+        """Get the list of files that are input to this command, ie. the
+        files that get installed as they are named in the build tree.
+        The files in this list correspond one-to-one to the output
+        filenames returned by 'get_outputs()'.
+        """
+        inputs = []
+
+        if self.distribution.has_pure_modules():
+            build_py = self.get_finalized_command('build_py')
+            inputs.extend(build_py.get_outputs())
+
+        if self.distribution.has_ext_modules():
+            build_ext = self.get_finalized_command('build_ext')
+            inputs.extend(build_ext.get_outputs())
+
+        return inputs
diff --git a/lib/distutils/distutils/command/install_scripts.py b/lib/distutils/distutils/command/install_scripts.py
new file mode 100644
index 0000000..29cd9e7
--- /dev/null
+++ b/lib/distutils/distutils/command/install_scripts.py
@@ -0,0 +1,64 @@
+"""distutils.command.install_scripts
+
+Implements the Distutils 'install_scripts' command, for installing
+Python scripts."""
+
+# contributed by Bastian Kleineidam
+
+__revision__ = "$Id$"
+
+import os
+from distutils.core import Command
+from distutils import log
+from stat import ST_MODE
+
+class install_scripts (Command):
+
+    description = "install scripts (Python or otherwise)"
+
+    user_options = [
+        ('install-dir=', 'd', "directory to install scripts to"),
+        ('build-dir=','b', "build directory (where to install from)"),
+        ('force', 'f', "force installation (overwrite existing files)"),
+        ('skip-build', None, "skip the build steps"),
+    ]
+
+    boolean_options = ['force', 'skip-build']
+
+
+    def initialize_options (self):
+        self.install_dir = None
+        self.force = 0
+        self.build_dir = None
+        self.skip_build = None
+
+    def finalize_options (self):
+        self.set_undefined_options('build', ('build_scripts', 'build_dir'))
+        self.set_undefined_options('install',
+                                   ('install_scripts', 'install_dir'),
+                                   ('force', 'force'),
+                                   ('skip_build', 'skip_build'),
+                                  )
+
+    def run (self):
+        if not self.skip_build:
+            self.run_command('build_scripts')
+        self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
+        if os.name == 'posix':
+            # Set the executable bits (owner, group, and world) on
+            # all the scripts we just installed.
+            for file in self.get_outputs():
+                if self.dry_run:
+                    log.info("changing mode of %s", file)
+                else:
+                    mode = ((os.stat(file)[ST_MODE]) | 0555) & 07777
+                    log.info("changing mode of %s to %o", file, mode)
+                    os.chmod(file, mode)
+
+    def get_inputs (self):
+        return self.distribution.scripts or []
+
+    def get_outputs(self):
+        return self.outfiles or []
+
+# class install_scripts
diff --git a/lib/distutils/distutils/command/register.py b/lib/distutils/distutils/command/register.py
new file mode 100644
index 0000000..edb42b9
--- /dev/null
+++ b/lib/distutils/distutils/command/register.py
@@ -0,0 +1,315 @@
+"""distutils.command.register
+
+Implements the Distutils 'register' command (register with the repository).
+"""
+
+# created 2002/10/21, Richard Jones
+
+__revision__ = "$Id$"
+
+import urllib2
+import getpass
+import urlparse
+from warnings import warn
+
+from distutils.core import PyPIRCCommand
+from distutils import log
+
+class register(PyPIRCCommand):
+
+    description = ("register the distribution with the Python package index")
+    user_options = PyPIRCCommand.user_options + [
+        ('list-classifiers', None,
+         'list the valid Trove classifiers'),
+        ('strict', None ,
+         'Will stop the registering if the meta-data are not fully compliant')
+        ]
+    boolean_options = PyPIRCCommand.boolean_options + [
+        'verify', 'list-classifiers', 'strict']
+
+    sub_commands = [('check', lambda self: True)]
+
+    def initialize_options(self):
+        PyPIRCCommand.initialize_options(self)
+        self.list_classifiers = 0
+        self.strict = 0
+
+    def finalize_options(self):
+        PyPIRCCommand.finalize_options(self)
+        # setting options for the `check` subcommand
+        check_options = {'strict': ('register', self.strict),
+                         'restructuredtext': ('register', 1)}
+        self.distribution.command_options['check'] = check_options
+
+    def run(self):
+        self.finalize_options()
+        self._set_config()
+
+        # Run sub commands
+        for cmd_name in self.get_sub_commands():
+            self.run_command(cmd_name)
+
+        if self.dry_run:
+            self.verify_metadata()
+        elif self.list_classifiers:
+            self.classifiers()
+        else:
+            self.send_metadata()
+
+    def check_metadata(self):
+        """Deprecated API."""
+        warn("distutils.command.register.check_metadata is deprecated, \
+              use the check command instead", PendingDeprecationWarning)
+        check = self.distribution.get_command_obj('check')
+        check.ensure_finalized()
+        check.strict = self.strict
+        check.restructuredtext = 1
+        check.run()
+
+    def _set_config(self):
+        ''' Reads the configuration file and sets attributes.
+        '''
+        config = self._read_pypirc()
+        if config != {}:
+            self.username = config['username']
+            self.password = config['password']
+            self.repository = config['repository']
+            self.realm = config['realm']
+            self.has_config = True
+        else:
+            if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
+                raise ValueError('%s not found in .pypirc' % self.repository)
+            if self.repository == 'pypi':
+                self.repository = self.DEFAULT_REPOSITORY
+            self.has_config = False
+
+    def classifiers(self):
+        ''' Fetch the list of classifiers from the server.
+        '''
+        response = urllib2.urlopen(self.repository+'?:action=list_classifiers')
+        log.info(response.read())
+
+    def verify_metadata(self):
+        ''' Send the metadata to the package index server to be checked.
+        '''
+        # send the info to the server and report the result
+        (code, result) = self.post_to_server(self.build_post_data('verify'))
+        log.info('Server response (%s): %s' % (code, result))
+
+
+    def send_metadata(self):
+        ''' Send the metadata to the package index server.
+
+            Well, do the following:
+            1. figure who the user is, and then
+            2. send the data as a Basic auth'ed POST.
+
+            First we try to read the username/password from $HOME/.pypirc,
+            which is a ConfigParser-formatted file with a section
+            [distutils] containing username and password entries (both
+            in clear text). Eg:
+
+                [distutils]
+                index-servers =
+                    pypi
+
+                [pypi]
+                username: fred
+                password: sekrit
+
+            Otherwise, to figure who the user is, we offer the user three
+            choices:
+
+             1. use existing login,
+             2. register as a new user, or
+             3. set the password to a random string and email the user.
+
+        '''
+        # see if we can short-cut and get the username/password from the
+        # config
+        if self.has_config:
+            choice = '1'
+            username = self.username
+            password = self.password
+        else:
+            choice = 'x'
+            username = password = ''
+
+        # get the user's login info
+        choices = '1 2 3 4'.split()
+        while choice not in choices:
+            self.announce('''\
+We need to know who you are, so please choose either:
+ 1. use your existing login,
+ 2. register as a new user,
+ 3. have the server generate a new password for you (and email it to you), or
+ 4. quit
+Your selection [default 1]: ''', log.INFO)
+
+            choice = raw_input()
+            if not choice:
+                choice = '1'
+            elif choice not in choices:
+                print 'Please choose one of the four options!'
+
+        if choice == '1':
+            # get the username and password
+            while not username:
+                username = raw_input('Username: ')
+            while not password:
+                password = getpass.getpass('Password: ')
+
+            # set up the authentication
+            auth = urllib2.HTTPPasswordMgr()
+            host = urlparse.urlparse(self.repository)[1]
+            auth.add_password(self.realm, host, username, password)
+            # send the info to the server and report the result
+            code, result = self.post_to_server(self.build_post_data('submit'),
+                auth)
+            self.announce('Server response (%s): %s' % (code, result),
+                          log.INFO)
+
+            # possibly save the login
+            if code == 200:
+                if self.has_config:
+                    # sharing the password in the distribution instance
+                    # so the upload command can reuse it
+                    self.distribution.password = password
+                else:
+                    self.announce(('I can store your PyPI login so future '
+                                   'submissions will be faster.'), log.INFO)
+                    self.announce('(the login will be stored in %s)' % \
+                                  self._get_rc_file(), log.INFO)
+                    choice = 'X'
+                    while choice.lower() not in 'yn':
+                        choice = raw_input('Save your login (y/N)?')
+                        if not choice:
+                            choice = 'n'
+                    if choice.lower() == 'y':
+                        self._store_pypirc(username, password)
+
+        elif choice == '2':
+            data = {':action': 'user'}
+            data['name'] = data['password'] = data['email'] = ''
+            data['confirm'] = None
+            while not data['name']:
+                data['name'] = raw_input('Username: ')
+            while data['password'] != data['confirm']:
+                while not data['password']:
+                    data['password'] = getpass.getpass('Password: ')
+                while not data['confirm']:
+                    data['confirm'] = getpass.getpass(' Confirm: ')
+                if data['password'] != data['confirm']:
+                    data['password'] = ''
+                    data['confirm'] = None
+                    print "Password and confirm don't match!"
+            while not data['email']:
+                data['email'] = raw_input('   EMail: ')
+            code, result = self.post_to_server(data)
+            if code != 200:
+                log.info('Server response (%s): %s' % (code, result))
+            else:
+                log.info('You will receive an email shortly.')
+                log.info(('Follow the instructions in it to '
+                          'complete registration.'))
+        elif choice == '3':
+            data = {':action': 'password_reset'}
+            data['email'] = ''
+            while not data['email']:
+                data['email'] = raw_input('Your email address: ')
+            code, result = self.post_to_server(data)
+            log.info('Server response (%s): %s' % (code, result))
+
+    def build_post_data(self, action):
+        # figure the data to send - the metadata plus some additional
+        # information used by the package server
+        meta = self.distribution.metadata
+        data = {
+            ':action': action,
+            'metadata_version' : '1.0',
+            'name': meta.get_name(),
+            'version': meta.get_version(),
+            'summary': meta.get_description(),
+            'home_page': meta.get_url(),
+            'author': meta.get_contact(),
+            'author_email': meta.get_contact_email(),
+            'license': meta.get_licence(),
+            'description': meta.get_long_description(),
+            'keywords': meta.get_keywords(),
+            'platform': meta.get_platforms(),
+            'classifiers': meta.get_classifiers(),
+            'download_url': meta.get_download_url(),
+            # PEP 314
+            'provides': meta.get_provides(),
+            'requires': meta.get_requires(),
+            'obsoletes': meta.get_obsoletes(),
+        }
+        if data['provides'] or data['requires'] or data['obsoletes']:
+            data['metadata_version'] = '1.1'
+        return data
+
+    def post_to_server(self, data, auth=None):
+        ''' Post a query to the server, and return a string response.
+        '''
+        if 'name' in data:
+            self.announce('Registering %s to %s' % (data['name'],
+                                                   self.repository),
+                                                   log.INFO)
+        # Build up the MIME payload for the urllib2 POST data
+        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+        sep_boundary = '\n--' + boundary
+        end_boundary = sep_boundary + '--'
+        chunks = []
+        for key, value in data.items():
+            # handle multiple entries for the same name
+            if type(value) not in (type([]), type( () )):
+                value = [value]
+            for value in value:
+                chunks.append(sep_boundary)
+                chunks.append('\nContent-Disposition: form-data; name="%s"'%key)
+                chunks.append("\n\n")
+                chunks.append(value)
+                if value and value[-1] == '\r':
+                    chunks.append('\n')  # write an extra newline (lurve Macs)
+        chunks.append(end_boundary)
+        chunks.append("\n")
+
+        # chunks may be bytes (str) or unicode objects that we need to encode
+        body = []
+        for chunk in chunks:
+            if isinstance(chunk, unicode):
+                body.append(chunk.encode('utf-8'))
+            else:
+                body.append(chunk)
+
+        body = ''.join(body)
+
+        # build the Request
+        headers = {
+            'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary,
+            'Content-length': str(len(body))
+        }
+        req = urllib2.Request(self.repository, body, headers)
+
+        # handle HTTP and include the Basic Auth handler
+        opener = urllib2.build_opener(
+            urllib2.HTTPBasicAuthHandler(password_mgr=auth)
+        )
+        data = ''
+        try:
+            result = opener.open(req)
+        except urllib2.HTTPError, e:
+            if self.show_response:
+                data = e.fp.read()
+            result = e.code, e.msg
+        except urllib2.URLError, e:
+            result = 500, str(e)
+        else:
+            if self.show_response:
+                data = result.read()
+            result = 200, 'OK'
+        if self.show_response:
+            dashes = '-' * 75
+            self.announce('%s%s%s' % (dashes, data, dashes))
+
+        return result
diff --git a/lib/distutils/distutils/command/sdist.py b/lib/distutils/distutils/command/sdist.py
new file mode 100644
index 0000000..821420d
--- /dev/null
+++ b/lib/distutils/distutils/command/sdist.py
@@ -0,0 +1,477 @@
+"""distutils.command.sdist
+
+Implements the Distutils 'sdist' command (create a source distribution)."""
+
+__revision__ = "$Id$"
+
+import os
+import string
+import sys
+from glob import glob
+from warnings import warn
+
+from distutils.core import Command
+from distutils import dir_util, dep_util, file_util, archive_util
+from distutils.text_file import TextFile
+from distutils.errors import (DistutilsPlatformError, DistutilsOptionError,
+                              DistutilsTemplateError)
+from distutils.filelist import FileList
+from distutils import log
+from distutils.util import convert_path
+
+def show_formats():
+    """Print all possible values for the 'formats' option (used by
+    the "--help-formats" command-line option).
+    """
+    from distutils.fancy_getopt import FancyGetopt
+    from distutils.archive_util import ARCHIVE_FORMATS
+    formats = []
+    for format in ARCHIVE_FORMATS.keys():
+        # each ARCHIVE_FORMATS entry's third element is the human-readable
+        # description used as the option's help text
+        formats.append(("formats=" + format, None,
+                        ARCHIVE_FORMATS[format][2]))
+    formats.sort()
+    FancyGetopt(formats).print_help(
+        "List of available source distribution formats:")
+
+class sdist(Command):
+
+    description = "create a source distribution (tarball, zip file, etc.)"
+
+    def checking_metadata(self):
+        """Callable used for the check sub-command.
+
+        Placed here so user_options can view it"""
+        return self.metadata_check
+
+    # option table entries are (long name, short name, help text)
+    user_options = [
+        ('template=', 't',
+         "name of manifest template file [default: MANIFEST.in]"),
+        ('manifest=', 'm',
+         "name of manifest file [default: MANIFEST]"),
+        ('use-defaults', None,
+         "include the default file set in the manifest "
+         "[default; disable with --no-defaults]"),
+        ('no-defaults', None,
+         "don't include the default file set"),
+        ('prune', None,
+         "specifically exclude files/directories that should not be "
+         "distributed (build tree, RCS/CVS dirs, etc.) "
+         "[default; disable with --no-prune]"),
+        ('no-prune', None,
+         "don't automatically exclude anything"),
+        ('manifest-only', 'o',
+         "just regenerate the manifest and then stop "
+         "(implies --force-manifest)"),
+        ('force-manifest', 'f',
+         "forcibly regenerate the manifest and carry on as usual. "
+         "Deprecated: now the manifest is always regenerated."),
+        ('formats=', None,
+         "formats for source distribution (comma-separated list)"),
+        ('keep-temp', 'k',
+         "keep the distribution tree around after creating " +
+         "archive file(s)"),
+        ('dist-dir=', 'd',
+         "directory to put the source distribution archive(s) in "
+         "[default: dist]"),
+        ('metadata-check', None,
+         "Ensure that all required elements of meta-data "
+         "are supplied. Warn if any missing. [default]"),
+        ('owner=', 'u',
+         "Owner name used when creating a tar file [default: current user]"),
+        ('group=', 'g',
+         "Group name used when creating a tar file [default: current group]"),
+        ]
+
+    boolean_options = ['use-defaults', 'prune',
+                       'manifest-only', 'force-manifest',
+                       'keep-temp', 'metadata-check']
+
+    help_options = [
+        ('help-formats', None,
+         "list available distribution formats", show_formats),
+        ]
+
+    # maps each --no-X option to the boolean option it negates
+    negative_opt = {'no-defaults': 'use-defaults',
+                    'no-prune': 'prune' }
+
+    # default archive format keyed by os.name
+    default_format = {'posix': 'gztar',
+                      'nt': 'zip' }
+
+    # 'check' runs before sdist itself; gated on --metadata-check
+    sub_commands = [('check', checking_metadata)]
+
+    def initialize_options(self):
+        """Set every option to its pre-finalization default."""
+        # 'template' and 'manifest' are, respectively, the names of
+        # the manifest template and manifest file.
+        self.template = None
+        self.manifest = None
+
+        # 'use_defaults': if true, we will include the default file set
+        # in the manifest
+        self.use_defaults = 1
+        self.prune = 1
+
+        self.manifest_only = 0
+        self.force_manifest = 0
+
+        self.formats = None
+        self.keep_temp = 0
+        self.dist_dir = None
+
+        # filled in by make_distribution(); read by get_archive_files()
+        self.archive_files = None
+        self.metadata_check = 1
+        # owner/group names are passed through to make_archive for tar files
+        self.owner = None
+        self.group = None
+
+    def finalize_options(self):
+        """Resolve defaults and validate the requested archive formats.
+
+        Raises DistutilsPlatformError if no default format is known for
+        this platform, and DistutilsOptionError for an unknown format.
+        """
+        if self.manifest is None:
+            self.manifest = "MANIFEST"
+        if self.template is None:
+            self.template = "MANIFEST.in"
+
+        # 'formats' may arrive as a comma-separated string; normalize it
+        self.ensure_string_list('formats')
+        if self.formats is None:
+            try:
+                self.formats = [self.default_format[os.name]]
+            except KeyError:
+                raise DistutilsPlatformError, \
+                      "don't know how to create source distributions " + \
+                      "on platform %s" % os.name
+
+        bad_format = archive_util.check_archive_formats(self.formats)
+        if bad_format:
+            raise DistutilsOptionError, \
+                  "unknown archive format '%s'" % bad_format
+
+        if self.dist_dir is None:
+            self.dist_dir = "dist"
+
+    def run(self):
+        """Build the file list and (unless --manifest-only) the archives."""
+        # 'filelist' contains the list of files that will make up the
+        # manifest
+        self.filelist = FileList()
+
+        # Run sub commands (currently just 'check', per sub_commands)
+        for cmd_name in self.get_sub_commands():
+            self.run_command(cmd_name)
+
+        # Do whatever it takes to get the list of files to process
+        # (process the manifest template, read an existing manifest,
+        # whatever).  File list is accumulated in 'self.filelist'.
+        self.get_file_list()
+
+        # If user just wanted us to regenerate the manifest, stop now.
+        if self.manifest_only:
+            return
+
+        # Otherwise, go ahead and create the source distribution tarball,
+        # or zipfile, or whatever.
+        self.make_distribution()
+
+    def check_metadata(self):
+        """Deprecated API.  Delegates to the 'check' command."""
+        warn("distutils.command.sdist.check_metadata is deprecated, \
+              use the check command instead", PendingDeprecationWarning)
+        check = self.distribution.get_command_obj('check')
+        check.ensure_finalized()
+        check.run()
+
+    def get_file_list(self):
+        """Figure out the list of files to include in the source
+        distribution, and put it in 'self.filelist'.  This might involve
+        reading the manifest template (and writing the manifest), or just
+        reading the manifest, or just using the default file set -- it all
+        depends on the user's options.
+        """
+        # new behavior when using a template:
+        # the file list is recalculated every time because
+        # even if MANIFEST.in or setup.py are not changed
+        # the user might have added some files in the tree that
+        # need to be included.
+        #
+        #  This makes --force the default and only behavior with templates.
+        template_exists = os.path.isfile(self.template)
+        # a manually-maintained MANIFEST (no template, no generated marker)
+        # is reused as-is instead of being regenerated
+        if not template_exists and self._manifest_is_not_generated():
+            self.read_manifest()
+            self.filelist.sort()
+            self.filelist.remove_duplicates()
+            return
+
+        if not template_exists:
+            self.warn(("manifest template '%s' does not exist " +
+                        "(using default file list)") %
+                        self.template)
+        self.filelist.findall()
+
+        if self.use_defaults:
+            self.add_defaults()
+
+        if template_exists:
+            self.read_template()
+
+        if self.prune:
+            self.prune_file_list()
+
+        self.filelist.sort()
+        self.filelist.remove_duplicates()
+        self.write_manifest()
+
+    def add_defaults(self):
+        """Add all the default files to self.filelist:
+          - README or README.txt
+          - setup.py
+          - test/test*.py
+          - all pure Python modules mentioned in setup script
+          - all files pointed by package_data (build_py)
+          - all files defined in data_files.
+          - all files defined as scripts.
+          - all C sources listed as part of extensions or C libraries
+            in the setup script (doesn't catch C headers!)
+        Warns if (README or README.txt) or setup.py are missing; everything
+        else is optional.
+        """
+
+        # a tuple entry means "any one of these alternatives satisfies it"
+        standards = [('README', 'README.txt'), self.distribution.script_name]
+        for fn in standards:
+            if isinstance(fn, tuple):
+                alts = fn
+                got_it = 0
+                # NOTE: 'fn' is deliberately reused as the inner loop variable
+                for fn in alts:
+                    if os.path.exists(fn):
+                        got_it = 1
+                        self.filelist.append(fn)
+                        break
+
+                if not got_it:
+                    self.warn("standard file not found: should have one of " +
+                              string.join(alts, ', '))
+            else:
+                if os.path.exists(fn):
+                    self.filelist.append(fn)
+                else:
+                    self.warn("standard file '%s' not found" % fn)
+
+        # optional patterns produce no warning when nothing matches
+        optional = ['test/test*.py', 'setup.cfg']
+        for pattern in optional:
+            files = filter(os.path.isfile, glob(pattern))
+            if files:
+                self.filelist.extend(files)
+
+        # build_py is used to get:
+        #  - python modules
+        #  - files defined in package_data
+        build_py = self.get_finalized_command('build_py')
+
+        # getting python files
+        if self.distribution.has_pure_modules():
+            self.filelist.extend(build_py.get_source_files())
+
+        # getting package_data files
+        # (computed in build_py.data_files by build_py.finalize_options)
+        for pkg, src_dir, build_dir, filenames in build_py.data_files:
+            for filename in filenames:
+                self.filelist.append(os.path.join(src_dir, filename))
+
+        # getting distribution.data_files
+        if self.distribution.has_data_files():
+            for item in self.distribution.data_files:
+                if isinstance(item, str): # plain file
+                    item = convert_path(item)
+                    if os.path.isfile(item):
+                        self.filelist.append(item)
+                else:    # a (dirname, filenames) tuple
+                    dirname, filenames = item
+                    for f in filenames:
+                        f = convert_path(f)
+                        if os.path.isfile(f):
+                            self.filelist.append(f)
+
+        if self.distribution.has_ext_modules():
+            build_ext = self.get_finalized_command('build_ext')
+            self.filelist.extend(build_ext.get_source_files())
+
+        if self.distribution.has_c_libraries():
+            build_clib = self.get_finalized_command('build_clib')
+            self.filelist.extend(build_clib.get_source_files())
+
+        if self.distribution.has_scripts():
+            build_scripts = self.get_finalized_command('build_scripts')
+            self.filelist.extend(build_scripts.get_source_files())
+
+    def read_template(self):
+        """Read and parse manifest template file named by self.template.
+
+        (usually "MANIFEST.in") The parsing and processing is done by
+        'self.filelist', which updates itself accordingly.
+        """
+        log.info("reading manifest template '%s'", self.template)
+        # TextFile handles comment stripping, blank-line skipping and
+        # continuation-line joining for us (see the keyword flags)
+        template = TextFile(self.template,
+                            strip_comments=1,
+                            skip_blanks=1,
+                            join_lines=1,
+                            lstrip_ws=1,
+                            rstrip_ws=1,
+                            collapse_join=1)
+
+        try:
+            while 1:
+                line = template.readline()
+                if line is None:            # end of file
+                    break
+
+                try:
+                    self.filelist.process_template_line(line)
+                # the call above can raise a DistutilsTemplateError for
+                # malformed lines, or a ValueError from the lower-level
+                # convert_path function
+                except (DistutilsTemplateError, ValueError) as msg:
+                    self.warn("%s, line %d: %s" % (template.filename,
+                                                   template.current_line,
+                                                   msg))
+        finally:
+            template.close()
+
+    def prune_file_list(self):
+        """Prune off branches that might slip into the file list as created
+        by 'read_template()', but really don't belong there:
+          * the build tree (typically "build")
+          * the release tree itself (only an issue if we ran "sdist"
+            previously with --keep-temp, or it aborted)
+          * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
+        """
+        build = self.get_finalized_command('build')
+        base_dir = self.distribution.get_fullname()
+
+        # pattern=None with a prefix excludes everything under that prefix
+        self.filelist.exclude_pattern(None, prefix=build.build_base)
+        self.filelist.exclude_pattern(None, prefix=base_dir)
+
+        # pruning out vcs directories
+        # both separators are used under win32
+        if sys.platform == 'win32':
+            seps = r'/|\\'
+        else:
+            seps = '/'
+
+        vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr',
+                    '_darcs']
+        vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)
+        self.filelist.exclude_pattern(vcs_ptrn, is_regex=1)
+
+    def write_manifest(self):
+        """Write the file list in 'self.filelist' (presumably as filled in
+        by 'add_defaults()' and 'read_template()') to the manifest file
+        named by 'self.manifest'.
+        """
+        if self._manifest_is_not_generated():
+            log.info("not writing to manually maintained "
+                     "manifest file '%s'" % self.manifest)
+            return
+
+        content = self.filelist.files[:]
+        # the leading comment line marks the manifest as auto-generated,
+        # which is what _manifest_is_not_generated() checks for
+        content.insert(0, '# file GENERATED by distutils, do NOT edit')
+        self.execute(file_util.write_file, (self.manifest, content),
+                     "writing manifest file '%s'" % self.manifest)
+
+    def _manifest_is_not_generated(self):
+        """Return True if a manifest exists and lacks the generated marker.
+
+        A missing manifest returns False.
+        """
+        # check for special comment used in 2.7.1 and higher
+        if not os.path.isfile(self.manifest):
+            return False
+
+        # 'rU' = universal-newline read mode (Python 2)
+        fp = open(self.manifest, 'rU')
+        try:
+            first_line = fp.readline()
+        finally:
+            fp.close()
+        return first_line != '# file GENERATED by distutils, do NOT edit\n'
+
+    def read_manifest(self):
+        """Read the manifest file (named by 'self.manifest') and use it to
+        fill in 'self.filelist', the list of files to include in the source
+        distribution.
+        """
+        log.info("reading manifest file '%s'", self.manifest)
+        manifest = open(self.manifest)
+        for line in manifest:
+            # ignore comments and blank lines
+            line = line.strip()
+            if line.startswith('#') or not line:
+                continue
+            self.filelist.append(line)
+        manifest.close()
+
+    def make_release_tree(self, base_dir, files):
+        """Create the directory tree that will become the source
+        distribution archive.  All directories implied by the filenames in
+        'files' are created under 'base_dir', and then we hard link or copy
+        (if hard linking is unavailable) those files into place.
+        Essentially, this duplicates the developer's source tree, but in a
+        directory named after the distribution, containing only the files
+        to be distributed.
+        """
+        # Create all the directories under 'base_dir' necessary to
+        # put 'files' there; the 'mkpath()' is just so we don't die
+        # if the manifest happens to be empty.
+        self.mkpath(base_dir)
+        dir_util.create_tree(base_dir, files, dry_run=self.dry_run)
+
+        # And walk over the list of files, either making a hard link (if
+        # os.link exists) to each one that doesn't already exist in its
+        # corresponding location under 'base_dir', or copying each file
+        # that's out-of-date in 'base_dir'.  (Usually, all files will be
+        # out-of-date, because by default we blow away 'base_dir' when
+        # we're done making the distribution archives.)
+
+        if hasattr(os, 'link'):        # can make hard links on this system
+            link = 'hard'
+            msg = "making hard links in %s..." % base_dir
+        else:                           # nope, have to copy
+            link = None
+            msg = "copying files to %s..." % base_dir
+
+        if not files:
+            log.warn("no files to distribute -- empty manifest?")
+        else:
+            log.info(msg)
+        for file in files:
+            if not os.path.isfile(file):
+                log.warn("'%s' not a regular file -- skipping" % file)
+            else:
+                dest = os.path.join(base_dir, file)
+                self.copy_file(file, dest, link=link)
+
+        # add the package metadata (PKG-INFO) to the release tree
+        self.distribution.metadata.write_pkg_info(base_dir)
+
+    def make_distribution(self):
+        """Create the source distribution(s).  First, we create the release
+        tree with 'make_release_tree()'; then, we create all required
+        archive files (according to 'self.formats') from the release tree.
+        Finally, we clean up by blowing away the release tree (unless
+        'self.keep_temp' is true).  The list of archive files created is
+        stored so it can be retrieved later by 'get_archive_files()'.
+        """
+        # Don't warn about missing meta-data here -- should be (and is!)
+        # done elsewhere.
+        base_dir = self.distribution.get_fullname()
+        base_name = os.path.join(self.dist_dir, base_dir)
+
+        self.make_release_tree(base_dir, self.filelist.files)
+        archive_files = []              # remember names of files we create
+        # tar archive must be created last to avoid overwrite and remove
+        if 'tar' in self.formats:
+            self.formats.append(self.formats.pop(self.formats.index('tar')))
+
+        for fmt in self.formats:
+            file = self.make_archive(base_name, fmt, base_dir=base_dir,
+                                     owner=self.owner, group=self.group)
+            archive_files.append(file)
+            # register with the distribution so e.g. 'upload' can find it
+            self.distribution.dist_files.append(('sdist', '', file))
+
+        # stored for later retrieval via get_archive_files()
+        self.archive_files = archive_files
+
+        if not self.keep_temp:
+            dir_util.remove_tree(base_dir, dry_run=self.dry_run)
+
+    def get_archive_files(self):
+        """Return the list of archive files created when the command
+        was run, or None if the command hasn't run yet.
+        """
+        return self.archive_files
diff --git a/lib/distutils/distutils/command/upload.py b/lib/distutils/distutils/command/upload.py
new file mode 100644
index 0000000..d013335
--- /dev/null
+++ b/lib/distutils/distutils/command/upload.py
@@ -0,0 +1,194 @@
+"""distutils.command.upload
+
+Implements the Distutils 'upload' subcommand (upload package to PyPI)."""
+import os
+import socket
+import platform
+from urllib2 import urlopen, Request, HTTPError
+from base64 import standard_b64encode
+import urlparse
+import cStringIO as StringIO
+from hashlib import md5
+
+from distutils.errors import DistutilsOptionError
+from distutils.core import PyPIRCCommand
+from distutils.spawn import spawn
+from distutils import log
+
+class upload(PyPIRCCommand):
+
+    description = "upload binary package to PyPI"
+
+    # extend the inherited option table with GPG-signing options
+    user_options = PyPIRCCommand.user_options + [
+        ('sign', 's',
+         'sign files to upload using gpg'),
+        ('identity=', 'i', 'GPG identity used to sign files'),
+        ]
+
+    boolean_options = PyPIRCCommand.boolean_options + ['sign']
+
+    def initialize_options(self):
+        """Set defaults; credentials are filled in by finalize_options."""
+        PyPIRCCommand.initialize_options(self)
+        self.username = ''
+        self.password = ''
+        self.show_response = 0
+        self.sign = False
+        self.identity = None
+
+    def finalize_options(self):
+        """Validate option combinations and pull credentials from .pypirc.
+
+        Raises DistutilsOptionError if --identity is given without --sign.
+        """
+        PyPIRCCommand.finalize_options(self)
+        if self.identity and not self.sign:
+            raise DistutilsOptionError(
+                "Must use --sign for --identity to have meaning"
+            )
+        config = self._read_pypirc()
+        if config != {}:
+            self.username = config['username']
+            self.password = config['password']
+            self.repository = config['repository']
+            self.realm = config['realm']
+
+        # getting the password from the distribution
+        # if previously set by the register command
+        if not self.password and self.distribution.password:
+            self.password = self.distribution.password
+
+    def run(self):
+        """Upload every file recorded in distribution.dist_files.
+
+        Raises DistutilsOptionError if no earlier command produced a file.
+        """
+        if not self.distribution.dist_files:
+            raise DistutilsOptionError("No dist file created in earlier command")
+        for command, pyversion, filename in self.distribution.dist_files:
+            self.upload_file(command, pyversion, filename)
+
+    def upload_file(self, command, pyversion, filename):
+        # Makes sure the repository URL is compliant
+        schema, netloc, url, params, query, fragments = \
+            urlparse.urlparse(self.repository)
+        if params or query or fragments:
+            raise AssertionError("Incompatible url %s" % self.repository)
+
+        if schema not in ('http', 'https'):
+            raise AssertionError("unsupported schema " + schema)
+
+        # Sign if requested
+        if self.sign:
+            gpg_args = ["gpg", "--detach-sign", "-a", filename]
+            if self.identity:
+                gpg_args[2:2] = ["--local-user", self.identity]
+            spawn(gpg_args,
+                  dry_run=self.dry_run)
+
+        # Fill in the data - send all the meta-data in case we need to
+        # register a new release
+        f = open(filename,'rb')
+        try:
+            content = f.read()
+        finally:
+            f.close()
+        meta = self.distribution.metadata
+        data = {
+            # action
+            ':action': 'file_upload',
+            'protcol_version': '1',
+
+            # identify release
+            'name': meta.get_name(),
+            'version': meta.get_version(),
+
+            # file content
+            'content': (os.path.basename(filename),content),
+            'filetype': command,
+            'pyversion': pyversion,
+            'md5_digest': md5(content).hexdigest(),
+
+            # additional meta-data
+            'metadata_version' : '1.0',
+            'summary': meta.get_description(),
+            'home_page': meta.get_url(),
+            'author': meta.get_contact(),
+            'author_email': meta.get_contact_email(),
+            'license': meta.get_licence(),
+            'description': meta.get_long_description(),
+            'keywords': meta.get_keywords(),
+            'platform': meta.get_platforms(),
+            'classifiers': meta.get_classifiers(),
+            'download_url': meta.get_download_url(),
+            # PEP 314
+            'provides': meta.get_provides(),
+            'requires': meta.get_requires(),
+            'obsoletes': meta.get_obsoletes(),
+            }
+        comment = ''
+        if command == 'bdist_rpm':
+            dist, version, id = platform.dist()
+            if dist:
+                comment = 'built for %s %s' % (dist, version)
+        elif command == 'bdist_dumb':
+            comment = 'built for %s' % platform.platform(terse=1)
+        data['comment'] = comment
+
+        if self.sign:
+            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
+                                     open(filename+".asc").read())
+
+        # set up the authentication
+        auth = "Basic " + standard_b64encode(self.username + ":" +
+                                             self.password)
+
+        # Build up the MIME payload for the POST data
+        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+        sep_boundary = '\n--' + boundary
+        end_boundary = sep_boundary + '--'
+        body = StringIO.StringIO()
+        for key, value in data.items():
+            # handle multiple entries for the same name
+            if not isinstance(value, list):
+                value = [value]
+            for value in value:
+                if isinstance(value, tuple):
+                    fn = ';filename="%s"' % value[0]
+                    value = value[1]
+                else:
+                    fn = ""
+
+                body.write(sep_boundary)
+                body.write('\nContent-Disposition: form-data; name="%s"'%key)
+                body.write(fn)
+                body.write("\n\n")
+                body.write(value)
+                if value and value[-1] == '\r':
+                    body.write('\n')  # write an extra newline (lurve Macs)
+        body.write(end_boundary)
+        body.write("\n")
+        body = body.getvalue()
+
+        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)
+
+        # build the Request
+        headers = {'Content-type':
+                        'multipart/form-data; boundary=%s' % boundary,
+                   'Content-length': str(len(body)),
+                   'Authorization': auth}
+
+        request = Request(self.repository, data=body,
+                          headers=headers)
+        # send the data
+        try:
+            result = urlopen(request)
+            status = result.getcode()
+            reason = result.msg
+            if self.show_response:
+                msg = '\n'.join(('-' * 75, r.read(), '-' * 75))
+                self.announce(msg, log.INFO)
+        except socket.error, e:
+            self.announce(str(e), log.ERROR)
+            return
+        except HTTPError, e:
+            status = e.code
+            reason = e.msg
+
+        if status == 200:
+            self.announce('Server response (%s): %s' % (status, reason),
+                          log.INFO)
+        else:
+            self.announce('Upload failed (%s): %s' % (status, reason),
+                          log.ERROR)
diff --git a/lib/distutils/distutils/config.py b/lib/distutils/distutils/config.py
index afa403f..1d32714 100644
--- a/lib/distutils/distutils/config.py
+++ b/lib/distutils/distutils/config.py
@@ -42,16 +42,11 @@
     def _store_pypirc(self, username, password):
         """Creates a default .pypirc file."""
         rc = self._get_rc_file()
-        f = open(rc, 'w')
+        f = os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0600), 'w')
         try:
             f.write(DEFAULT_PYPIRC % (username, password))
         finally:
             f.close()
-        try:
-            os.chmod(rc, 0600)
-        except OSError:
-            # should do something better here
-            pass
 
     def _read_pypirc(self):
         """Reads the .pypirc file."""
diff --git a/lib/distutils/distutils/cygwinccompiler.py b/lib/distutils/distutils/cygwinccompiler.py
index a1ee815..5d11687 100644
--- a/lib/distutils/distutils/cygwinccompiler.py
+++ b/lib/distutils/distutils/cygwinccompiler.py
@@ -319,13 +319,18 @@
         else:
             entry_point = ''
 
-        self.set_executables(compiler='gcc -mno-cygwin -O -Wall',
-                             compiler_so='gcc -mno-cygwin -mdll -O -Wall',
-                             compiler_cxx='g++ -mno-cygwin -O -Wall',
-                             linker_exe='gcc -mno-cygwin',
-                             linker_so='%s -mno-cygwin %s %s'
-                                        % (self.linker_dll, shared_option,
-                                           entry_point))
+        if self.gcc_version < '4' or is_cygwingcc():
+            no_cygwin = ' -mno-cygwin'
+        else:
+            no_cygwin = ''
+
+        self.set_executables(compiler='gcc%s -O -Wall' % no_cygwin,
+                             compiler_so='gcc%s -mdll -O -Wall' % no_cygwin,
+                             compiler_cxx='g++%s -O -Wall' % no_cygwin,
+                             linker_exe='gcc%s' % no_cygwin,
+                             linker_so='%s%s %s %s'
+                                    % (self.linker_dll, no_cygwin,
+                                       shared_option, entry_point))
         # Maybe we should also append -mthreads, but then the finished
         # dlls need another dll (mingwm10.dll see Mingw32 docs)
         # (-mthreads: Support thread-safe exception handling on `Mingw32')
@@ -447,3 +452,12 @@
     else:
         dllwrap_version = None
     return (gcc_version, ld_version, dllwrap_version)
+
+def is_cygwingcc():
+    '''Try to determine if the gcc that would be used is from cygwin.'''
+    out = os.popen('gcc -dumpmachine', 'r')
+    out_string = out.read()
+    out.close()
+    # out_string is the target triplet cpu-vendor-os
+    # Cygwin's gcc sets the os to 'cygwin'
+    # (if gcc is not on PATH the output is empty and we return False)
+    return out_string.strip().endswith('cygwin')
diff --git a/lib/distutils/distutils/dep_util.py b/lib/distutils/distutils/dep_util.py
index 4e40df6..2b75905 100644
--- a/lib/distutils/distutils/dep_util.py
+++ b/lib/distutils/distutils/dep_util.py
@@ -7,6 +7,7 @@
 __revision__ = "$Id$"
 
 import os
+from stat import ST_MTIME
 from distutils.errors import DistutilsFileError
 
 def newer(source, target):
@@ -27,7 +28,7 @@
     if not os.path.exists(target):
         return True
 
-    return os.stat(source).st_mtime > os.stat(target).st_mtime
+    return os.stat(source)[ST_MTIME] > os.stat(target)[ST_MTIME]
 
 def newer_pairwise(sources, targets):
     """Walk two filename lists in parallel, testing if each source is newer
@@ -71,7 +72,7 @@
     # is more recent than 'target', then 'target' is out-of-date and
     # we can immediately return true.  If we fall through to the end
     # of the loop, then 'target' is up-to-date and we return false.
-    target_mtime = os.stat(target).st_mtime
+    target_mtime = os.stat(target)[ST_MTIME]
 
     for source in sources:
         if not os.path.exists(source):
@@ -82,7 +83,7 @@
             elif missing == 'newer':    # missing source means target is
                 return True             #  out-of-date
 
-        if os.stat(source).st_mtime > target_mtime:
+        if os.stat(source)[ST_MTIME] > target_mtime:
             return True
 
     return False
diff --git a/lib/distutils/distutils/dir_util.py b/lib/distutils/distutils/dir_util.py
index 9c5cf33..5026e24 100644
--- a/lib/distutils/distutils/dir_util.py
+++ b/lib/distutils/distutils/dir_util.py
@@ -144,6 +144,10 @@
         src_name = os.path.join(src, n)
         dst_name = os.path.join(dst, n)
 
+        if n.startswith('.nfs'):
+            # skip NFS rename files
+            continue
+
         if preserve_symlinks and os.path.islink(src_name):
             link_dest = os.readlink(src_name)
             if verbose >= 1:
diff --git a/lib/distutils/distutils/dist.py b/lib/distutils/distutils/dist.py
index 597909e..e025313 100644
--- a/lib/distutils/distutils/dist.py
+++ b/lib/distutils/distutils/dist.py
@@ -1111,7 +1111,8 @@
         """Write the PKG-INFO format data to a file object.
         """
         version = '1.0'
-        if self.provides or self.requires or self.obsoletes:
+        if (self.provides or self.requires or self.obsoletes or
+            self.classifiers or self.download_url):
             version = '1.1'
 
         self._write_field(file, 'Metadata-Version', version)
diff --git a/lib/distutils/distutils/filelist.py b/lib/distutils/distutils/filelist.py
index 4aac6d3..2f1c457 100644
--- a/lib/distutils/distutils/filelist.py
+++ b/lib/distutils/distutils/filelist.py
@@ -210,6 +210,7 @@
 
         Return 1 if files are found.
         """
+        # XXX docstring lying about what the special chars are?
         files_found = 0
         pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
         self.debug_print("include_pattern: applying regex r'%s'" %
@@ -297,11 +298,14 @@
     # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
     # and by extension they shouldn't match such "special characters" under
     # any OS.  So change all non-escaped dots in the RE to match any
-    # character except the special characters.
-    # XXX currently the "special characters" are just slash -- i.e. this is
-    # Unix-only.
-    pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', r'\1[^/]', pattern_re)
-
+    # character except the special characters (currently: just os.sep).
+    sep = os.sep
+    if os.sep == '\\':
+        # we're using a regex to manipulate a regex, so we need
+        # to escape the backslash twice
+        sep = r'\\\\'
+    escaped = r'\1[^%s]' % sep
+    pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
     return pattern_re
 
 
@@ -328,7 +332,10 @@
         # ditch end of pattern character
         empty_pattern = glob_to_re('')
         prefix_re = glob_to_re(prefix)[:-len(empty_pattern)]
-        pattern_re = "^" + os.path.join(prefix_re, ".*" + pattern_re)
+        sep = os.sep
+        if os.sep == '\\':
+            sep = r'\\'
+        pattern_re = "^" + sep.join((prefix_re, ".*" + pattern_re))
     else:                               # no prefix -- respect anchor flag
         if anchor:
             pattern_re = "^" + pattern_re
diff --git a/lib/distutils/distutils/msvc9compiler.py b/lib/distutils/distutils/msvc9compiler.py
index bf85ac7..7ec9b92 100644
--- a/lib/distutils/distutils/msvc9compiler.py
+++ b/lib/distutils/distutils/msvc9compiler.py
@@ -640,15 +640,7 @@
                     self.library_filename(dll_name))
                 ld_args.append ('/IMPLIB:' + implib_file)
 
-            # Embedded manifests are recommended - see MSDN article titled
-            # "How to: Embed a Manifest Inside a C/C++ Application"
-            # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx)
-            # Ask the linker to generate the manifest in the temp dir, so
-            # we can embed it later.
-            temp_manifest = os.path.join(
-                    build_temp,
-                    os.path.basename(output_filename) + ".manifest")
-            ld_args.append('/MANIFESTFILE:' + temp_manifest)
+            self.manifest_setup_ldargs(output_filename, build_temp, ld_args)
 
             if extra_preargs:
                 ld_args[:0] = extra_preargs
@@ -666,20 +658,54 @@
             # will still consider the DLL up-to-date, but it will not have a
             # manifest.  Maybe we should link to a temp file?  OTOH, that
             # implies a build environment error that shouldn't go undetected.
-            if target_desc == CCompiler.EXECUTABLE:
-                mfid = 1
-            else:
-                mfid = 2
-                self._remove_visual_c_ref(temp_manifest)
-            out_arg = '-outputresource:%s;%s' % (output_filename, mfid)
-            try:
-                self.spawn(['mt.exe', '-nologo', '-manifest',
-                            temp_manifest, out_arg])
-            except DistutilsExecError, msg:
-                raise LinkError(msg)
+            mfinfo = self.manifest_get_embed_info(target_desc, ld_args)
+            if mfinfo is not None:
+                mffilename, mfid = mfinfo
+                out_arg = '-outputresource:%s;%s' % (output_filename, mfid)
+                try:
+                    self.spawn(['mt.exe', '-nologo', '-manifest',
+                                mffilename, out_arg])
+                except DistutilsExecError, msg:
+                    raise LinkError(msg)
         else:
             log.debug("skipping %s (up-to-date)", output_filename)
 
+    def manifest_setup_ldargs(self, output_filename, build_temp, ld_args):
+        # If we need a manifest at all, an embedded manifest is recommended.
+        # See MSDN article titled
+        # "How to: Embed a Manifest Inside a C/C++ Application"
+        # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx)
+        # Ask the linker to generate the manifest in the temp dir, so
+        # we can check it, and possibly embed it, later.
+        temp_manifest = os.path.join(
+                build_temp,
+                os.path.basename(output_filename) + ".manifest")
+        ld_args.append('/MANIFESTFILE:' + temp_manifest)
+
+    def manifest_get_embed_info(self, target_desc, ld_args):
+        # If a manifest should be embedded, return a tuple of
+        # (manifest_filename, resource_id).  Returns None if no manifest
+        # should be embedded.  See http://bugs.python.org/issue7833 for why
+        # we want to avoid any manifest for extension modules if we can)
+        for arg in ld_args:
+            if arg.startswith("/MANIFESTFILE:"):
+                temp_manifest = arg.split(":", 1)[1]
+                break
+        else:
+            # no /MANIFESTFILE so nothing to do.
+            return None
+        if target_desc == CCompiler.EXECUTABLE:
+            # by default, executables always get the manifest with the
+            # CRT referenced.
+            mfid = 1
+        else:
+            # Extension modules try and avoid any manifest if possible.
+            mfid = 2
+            temp_manifest = self._remove_visual_c_ref(temp_manifest)
+        if temp_manifest is None:
+            return None
+        return temp_manifest, mfid
+
     def _remove_visual_c_ref(self, manifest_file):
         try:
             # Remove references to the Visual C runtime, so they will
@@ -688,6 +714,8 @@
             # runtimes are not in WinSxS folder, but in Python's own
             # folder), the runtimes do not need to be in every folder
             # with .pyd's.
+            # Returns either the filename of the modified manifest or
+            # None if no manifest should be embedded.
             manifest_f = open(manifest_file)
             try:
                 manifest_buf = manifest_f.read()
@@ -700,9 +728,18 @@
             manifest_buf = re.sub(pattern, "", manifest_buf)
             pattern = "<dependentAssembly>\s*</dependentAssembly>"
             manifest_buf = re.sub(pattern, "", manifest_buf)
+            # Now see if any other assemblies are referenced - if not, we
+            # don't want a manifest embedded.
+            pattern = re.compile(
+                r"""<assemblyIdentity.*?name=(?:"|')(.+?)(?:"|')"""
+                r""".*?(?:/>|</assemblyIdentity>)""", re.DOTALL)
+            if re.search(pattern, manifest_buf) is None:
+                return None
+
             manifest_f = open(manifest_file, 'w')
             try:
                 manifest_f.write(manifest_buf)
+                return manifest_file
             finally:
                 manifest_f.close()
         except IOError:
diff --git a/lib/distutils/distutils/spawn.py b/lib/distutils/distutils/spawn.py
index 5c014c4..7306099 100644
--- a/lib/distutils/distutils/spawn.py
+++ b/lib/distutils/distutils/spawn.py
@@ -96,17 +96,43 @@
             raise DistutilsExecError, \
                   "command '%s' failed with exit status %d" % (cmd[0], rc)
 
+if sys.platform == 'darwin':
+    from distutils import sysconfig
+    _cfg_target = None
+    _cfg_target_split = None
 
 def _spawn_posix(cmd, search_path=1, verbose=0, dry_run=0):
     log.info(' '.join(cmd))
     if dry_run:
         return
     exec_fn = search_path and os.execvp or os.execv
+    exec_args = [cmd[0], cmd]
+    if sys.platform == 'darwin':
+        global _cfg_target, _cfg_target_split
+        if _cfg_target is None:
+            _cfg_target = sysconfig.get_config_var(
+                                  'MACOSX_DEPLOYMENT_TARGET') or ''
+            if _cfg_target:
+                _cfg_target_split = [int(x) for x in _cfg_target.split('.')]
+        if _cfg_target:
+            # ensure that the deployment target of build process is not less
+            # than that used when the interpreter was built. This ensures
+            # extension modules are built with correct compatibility values
+            cur_target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', _cfg_target)
+            if _cfg_target_split > [int(x) for x in cur_target.split('.')]:
+                my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: '
+                          'now "%s" but "%s" during configure'
+                                % (cur_target, _cfg_target))
+                raise DistutilsPlatformError(my_msg)
+            env = dict(os.environ,
+                       MACOSX_DEPLOYMENT_TARGET=cur_target)
+            exec_fn = search_path and os.execvpe or os.execve
+            exec_args.append(env)
     pid = os.fork()
 
     if pid == 0:  # in the child
         try:
-            exec_fn(cmd[0], cmd)
+            exec_fn(*exec_args)
         except OSError, e:
             sys.stderr.write("unable to execute %s: %s\n" %
                              (cmd[0], e.strerror))
diff --git a/lib/distutils/distutils/sysconfig.py b/lib/distutils/distutils/sysconfig.py
index d206e0c..4aa9334 100644
--- a/lib/distutils/distutils/sysconfig.py
+++ b/lib/distutils/distutils/sysconfig.py
@@ -37,6 +37,11 @@
     project_base = os.path.abspath(os.path.join(project_base, os.path.pardir,
                                                 os.path.pardir))
 
+# set for cross builds
+if "_PYTHON_PROJECT_BASE" in os.environ:
+    # this is the build directory, at least for posix
+    project_base = os.path.normpath(os.environ["_PYTHON_PROJECT_BASE"])
+
 # python_build: (Boolean) if true, we're either building Python or
 # building an extension with an un-installed Python, so we use
 # different (hard-wired) directories.
@@ -142,6 +147,7 @@
             "on platform '%s'" % os.name)
 
 
+
 def customize_compiler(compiler):
     """Do any platform-specific customization of a CCompiler instance.
 
@@ -149,12 +155,35 @@
     varies across Unices and is stored in Python's Makefile.
     """
     if compiler.compiler_type == "unix":
-        (cc, cxx, opt, cflags, ccshared, ldshared, so_ext) = \
+        if sys.platform == "darwin":
+            # Perform first-time customization of compiler-related
+            # config vars on OS X now that we know we need a compiler.
+            # This is primarily to support Pythons from binary
+            # installers.  The kind and paths to build tools on
+            # the user system may vary significantly from the system
+            # that Python itself was built on.  Also the user OS
+            # version and build tools may not support the same set
+            # of CPU architectures for universal builds.
+            global _config_vars
+            if not _config_vars.get('CUSTOMIZED_OSX_COMPILER', ''):
+                import _osx_support
+                _osx_support.customize_compiler(_config_vars)
+                _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'
+
+        (cc, cxx, opt, cflags, ccshared, ldshared, so_ext, ar, ar_flags) = \
             get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
-                            'CCSHARED', 'LDSHARED', 'SO')
+                            'CCSHARED', 'LDSHARED', 'SO', 'AR',
+                            'ARFLAGS')
 
         if 'CC' in os.environ:
-            cc = os.environ['CC']
+            newcc = os.environ['CC']
+            if (sys.platform == 'darwin'
+                    and 'LDSHARED' not in os.environ
+                    and ldshared.startswith(cc)):
+                # On OS X, if CC is overridden, use that as the default
+                #       command for LDSHARED as well
+                ldshared = newcc + ldshared[len(cc):]
+            cc = newcc
         if 'CXX' in os.environ:
             cxx = os.environ['CXX']
         if 'LDSHARED' in os.environ:
@@ -172,6 +201,12 @@
             cpp = cpp + ' ' + os.environ['CPPFLAGS']
             cflags = cflags + ' ' + os.environ['CPPFLAGS']
             ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
+        if 'AR' in os.environ:
+            ar = os.environ['AR']
+        if 'ARFLAGS' in os.environ:
+            archiver = ar + ' ' + os.environ['ARFLAGS']
+        else:
+            archiver = ar + ' ' + ar_flags
 
         cc_cmd = cc + ' ' + cflags
         compiler.set_executables(
@@ -180,7 +215,8 @@
             compiler_so=cc_cmd + ' ' + ccshared,
             compiler_cxx=cxx,
             linker_so=ldshared,
-            linker_exe=cc)
+            linker_exe=cc,
+            archiver=archiver)
 
         compiler.shared_lib_extension = so_ext
 
@@ -205,7 +241,7 @@
 def get_makefile_filename():
     """Return full pathname of installed Makefile from the Python build."""
     if python_build:
-        return os.path.join(os.path.dirname(sys.executable), "Makefile")
+        return os.path.join(project_base, "Makefile")
     lib_dir = get_python_lib(plat_specific=1, standard_lib=1)
     return os.path.join(lib_dir, "config", "Makefile")
 
@@ -357,81 +393,11 @@
 
 def _init_posix():
     """Initialize the module as appropriate for POSIX systems."""
-    g = {}
-    # load the installed Makefile:
-    try:
-        filename = get_makefile_filename()
-        parse_makefile(filename, g)
-    except IOError, msg:
-        my_msg = "invalid Python installation: unable to open %s" % filename
-        if hasattr(msg, "strerror"):
-            my_msg = my_msg + " (%s)" % msg.strerror
-
-        raise DistutilsPlatformError(my_msg)
-
-    # load the installed pyconfig.h:
-    try:
-        filename = get_config_h_filename()
-        parse_config_h(file(filename), g)
-    except IOError, msg:
-        my_msg = "invalid Python installation: unable to open %s" % filename
-        if hasattr(msg, "strerror"):
-            my_msg = my_msg + " (%s)" % msg.strerror
-
-        raise DistutilsPlatformError(my_msg)
-
-    # On MacOSX we need to check the setting of the environment variable
-    # MACOSX_DEPLOYMENT_TARGET: configure bases some choices on it so
-    # it needs to be compatible.
-    # If it isn't set we set it to the configure-time value
-    if sys.platform == 'darwin' and 'MACOSX_DEPLOYMENT_TARGET' in g:
-        cfg_target = g['MACOSX_DEPLOYMENT_TARGET']
-        cur_target = os.getenv('MACOSX_DEPLOYMENT_TARGET', '')
-        if cur_target == '':
-            cur_target = cfg_target
-            os.environ['MACOSX_DEPLOYMENT_TARGET'] = cfg_target
-        elif map(int, cfg_target.split('.')) > map(int, cur_target.split('.')):
-            my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: now "%s" but "%s" during configure'
-                % (cur_target, cfg_target))
-            raise DistutilsPlatformError(my_msg)
-
-    # On AIX, there are wrong paths to the linker scripts in the Makefile
-    # -- these paths are relative to the Python source, but when installed
-    # the scripts are in another directory.
-    if python_build:
-        g['LDSHARED'] = g['BLDSHARED']
-
-    elif get_python_version() < '2.1':
-        # The following two branches are for 1.5.2 compatibility.
-        if sys.platform == 'aix4':          # what about AIX 3.x ?
-            # Linker script is in the config directory, not in Modules as the
-            # Makefile says.
-            python_lib = get_python_lib(standard_lib=1)
-            ld_so_aix = os.path.join(python_lib, 'config', 'ld_so_aix')
-            python_exp = os.path.join(python_lib, 'config', 'python.exp')
-
-            g['LDSHARED'] = "%s %s -bI:%s" % (ld_so_aix, g['CC'], python_exp)
-
-        elif sys.platform == 'beos':
-            # Linker script is in the config directory.  In the Makefile it is
-            # relative to the srcdir, which after installation no longer makes
-            # sense.
-            python_lib = get_python_lib(standard_lib=1)
-            linkerscript_path = string.split(g['LDSHARED'])[0]
-            linkerscript_name = os.path.basename(linkerscript_path)
-            linkerscript = os.path.join(python_lib, 'config',
-                                        linkerscript_name)
-
-            # XXX this isn't the right place to do this: adding the Python
-            # library to the link, if needed, should be in the "build_ext"
-            # command.  (It's also needed for non-MS compilers on Windows, and
-            # it's taken care of for them by the 'build_ext.get_libraries()'
-            # method.)
-            g['LDSHARED'] = ("%s -L%s/lib -lpython%s" %
-                             (linkerscript, PREFIX, get_python_version()))
-
+    # _sysconfigdata is generated at build time, see the sysconfig module
+    from _sysconfigdata import build_time_vars
     global _config_vars
-    _config_vars = g
+    _config_vars = {}
+    _config_vars.update(build_time_vars)
 
 
 def _init_nt():
@@ -494,66 +460,11 @@
         _config_vars['prefix'] = PREFIX
         _config_vars['exec_prefix'] = EXEC_PREFIX
 
+        # OS X platforms require special customization to handle
+        # multi-architecture, multi-os-version installers
         if sys.platform == 'darwin':
-            kernel_version = os.uname()[2] # Kernel version (8.4.3)
-            major_version = int(kernel_version.split('.')[0])
-
-            if major_version < 8:
-                # On Mac OS X before 10.4, check if -arch and -isysroot
-                # are in CFLAGS or LDFLAGS and remove them if they are.
-                # This is needed when building extensions on a 10.3 system
-                # using a universal build of python.
-                for key in ('LDFLAGS', 'BASECFLAGS', 'LDSHARED',
-                        # a number of derived variables. These need to be
-                        # patched up as well.
-                        'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
-                    flags = _config_vars[key]
-                    flags = re.sub('-arch\s+\w+\s', ' ', flags)
-                    flags = re.sub('-isysroot [^ \t]*', ' ', flags)
-                    _config_vars[key] = flags
-
-            else:
-
-                # Allow the user to override the architecture flags using
-                # an environment variable.
-                # NOTE: This name was introduced by Apple in OSX 10.5 and
-                # is used by several scripting languages distributed with
-                # that OS release.
-
-                if 'ARCHFLAGS' in os.environ:
-                    arch = os.environ['ARCHFLAGS']
-                    for key in ('LDFLAGS', 'BASECFLAGS', 'LDSHARED',
-                        # a number of derived variables. These need to be
-                        # patched up as well.
-                        'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
-
-                        flags = _config_vars[key]
-                        flags = re.sub('-arch\s+\w+\s', ' ', flags)
-                        flags = flags + ' ' + arch
-                        _config_vars[key] = flags
-
-                # If we're on OSX 10.5 or later and the user tries to
-                # compiles an extension using an SDK that is not present
-                # on the current machine it is better to not use an SDK
-                # than to fail.
-                #
-                # The major usecase for this is users using a Python.org
-                # binary installer  on OSX 10.6: that installer uses
-                # the 10.4u SDK, but that SDK is not installed by default
-                # when you install Xcode.
-                #
-                m = re.search('-isysroot\s+(\S+)', _config_vars['CFLAGS'])
-                if m is not None:
-                    sdk = m.group(1)
-                    if not os.path.exists(sdk):
-                        for key in ('LDFLAGS', 'BASECFLAGS', 'LDSHARED',
-                             # a number of derived variables. These need to be
-                             # patched up as well.
-                            'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
-
-                            flags = _config_vars[key]
-                            flags = re.sub('-isysroot\s+\S+(\s|$)', ' ', flags)
-                            _config_vars[key] = flags
+            import _osx_support
+            _osx_support.customize_config_vars(_config_vars)
 
     if args:
         vals = []
diff --git a/lib/distutils/distutils/unixccompiler.py b/lib/distutils/distutils/unixccompiler.py
index c49ac9b..2aa1cb1 100644
--- a/lib/distutils/distutils/unixccompiler.py
+++ b/lib/distutils/distutils/unixccompiler.py
@@ -26,6 +26,9 @@
      DistutilsExecError, CompileError, LibError, LinkError
 from distutils import log
 
+if sys.platform == 'darwin':
+    import _osx_support
+
 # XXX Things not currently handled:
 #   * optimization/debug/warning flags; we just use whatever's in Python's
 #     Makefile and live with it.  Is this adequate?  If not, we might
@@ -41,68 +44,6 @@
 #     should just happily stuff them into the preprocessor/compiler/linker
 #     options and carry on.
 
-def _darwin_compiler_fixup(compiler_so, cc_args):
-    """
-    This function will strip '-isysroot PATH' and '-arch ARCH' from the
-    compile flags if the user has specified one them in extra_compile_flags.
-
-    This is needed because '-arch ARCH' adds another architecture to the
-    build, without a way to remove an architecture. Furthermore GCC will
-    barf if multiple '-isysroot' arguments are present.
-    """
-    stripArch = stripSysroot = 0
-
-    compiler_so = list(compiler_so)
-    kernel_version = os.uname()[2] # 8.4.3
-    major_version = int(kernel_version.split('.')[0])
-
-    if major_version < 8:
-        # OSX before 10.4.0, these don't support -arch and -isysroot at
-        # all.
-        stripArch = stripSysroot = True
-    else:
-        stripArch = '-arch' in cc_args
-        stripSysroot = '-isysroot' in cc_args
-
-    if stripArch or 'ARCHFLAGS' in os.environ:
-        while 1:
-            try:
-                index = compiler_so.index('-arch')
-                # Strip this argument and the next one:
-                del compiler_so[index:index+2]
-            except ValueError:
-                break
-
-    if 'ARCHFLAGS' in os.environ and not stripArch:
-        # User specified different -arch flags in the environ,
-        # see also distutils.sysconfig
-        compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()
-
-    if stripSysroot:
-        try:
-            index = compiler_so.index('-isysroot')
-            # Strip this argument and the next one:
-            del compiler_so[index:index+2]
-        except ValueError:
-            pass
-
-    # Check if the SDK that is used during compilation actually exists,
-    # the universal build requires the usage of a universal SDK and not all
-    # users have that installed by default.
-    sysroot = None
-    if '-isysroot' in cc_args:
-        idx = cc_args.index('-isysroot')
-        sysroot = cc_args[idx+1]
-    elif '-isysroot' in compiler_so:
-        idx = compiler_so.index('-isysroot')
-        sysroot = compiler_so[idx+1]
-
-    if sysroot and not os.path.isdir(sysroot):
-        log.warn("Compiling with an SDK that doesn't seem to exist: %s",
-                sysroot)
-        log.warn("Please check your Xcode installation")
-
-    return compiler_so
 
 class UnixCCompiler(CCompiler):
 
@@ -172,7 +113,8 @@
     def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
         compiler_so = self.compiler_so
         if sys.platform == 'darwin':
-            compiler_so = _darwin_compiler_fixup(compiler_so, cc_args + extra_postargs)
+            compiler_so = _osx_support.compiler_fixup(compiler_so,
+                                                    cc_args + extra_postargs)
         try:
             self.spawn(compiler_so + cc_args + [src, '-o', obj] +
                        extra_postargs)
@@ -251,7 +193,7 @@
                     linker[i] = self.compiler_cxx[i]
 
                 if sys.platform == 'darwin':
-                    linker = _darwin_compiler_fixup(linker, ld_args)
+                    linker = _osx_support.compiler_fixup(linker, ld_args)
 
                 self.spawn(linker + ld_args)
             except DistutilsExecError, msg:
diff --git a/lib/distutils/distutils/util.py b/lib/distutils/distutils/util.py
index 6c49f0b..ea6ed8a 100644
--- a/lib/distutils/distutils/util.py
+++ b/lib/distutils/distutils/util.py
@@ -51,6 +51,10 @@
             return 'win-ia64'
         return sys.platform
 
+    # Set for cross builds explicitly
+    if "_PYTHON_HOST_PLATFORM" in os.environ:
+        return os.environ["_PYTHON_HOST_PLATFORM"]
+
     if os.name != "posix" or not hasattr(os, 'uname'):
         # XXX what about the architecture? NT is Intel or Alpha,
         # Mac OS is M68k or PPC, etc.
@@ -76,6 +80,11 @@
         if release[0] >= "5":           # SunOS 5 == Solaris 2
             osname = "solaris"
             release = "%d.%s" % (int(release[0]) - 3, release[2:])
+            # We can't use "platform.architecture()[0]" because a
+            # bootstrap problem. We use a dict to get an error
+            # if some suspicious happens.
+            bitness = {2147483647:"32bit", 9223372036854775807:"64bit"}
+            machine += ".%s" % bitness[sys.maxint]
         # fall through to standard osname-release-machine representation
     elif osname[:4] == "irix":              # could be "irix64"!
         return "%s-%s" % (osname, release)
@@ -88,94 +97,10 @@
         if m:
             release = m.group()
     elif osname[:6] == "darwin":
-        #
-        # For our purposes, we'll assume that the system version from
-        # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
-        # to. This makes the compatibility story a bit more sane because the
-        # machine is going to compile and link as if it were
-        # MACOSX_DEPLOYMENT_TARGET.
-        from distutils.sysconfig import get_config_vars
-        cfgvars = get_config_vars()
-
-        macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
-
-        if 1:
-            # Always calculate the release of the running machine,
-            # needed to determine if we can build fat binaries or not.
-
-            macrelease = macver
-            # Get the system version. Reading this plist is a documented
-            # way to get the system version (see the documentation for
-            # the Gestalt Manager)
-            try:
-                f = open('/System/Library/CoreServices/SystemVersion.plist')
-            except IOError:
-                # We're on a plain darwin box, fall back to the default
-                # behaviour.
-                pass
-            else:
-                try:
-                    m = re.search(
-                            r'<key>ProductUserVisibleVersion</key>\s*' +
-                            r'<string>(.*?)</string>', f.read())
-                    if m is not None:
-                        macrelease = '.'.join(m.group(1).split('.')[:2])
-                    # else: fall back to the default behaviour
-                finally:
-                    f.close()
-
-        if not macver:
-            macver = macrelease
-
-        if macver:
-            from distutils.sysconfig import get_config_vars
-            release = macver
-            osname = "macosx"
-
-            if (macrelease + '.') >= '10.4.' and \
-                    '-arch' in get_config_vars().get('CFLAGS', '').strip():
-                # The universal build will build fat binaries, but not on
-                # systems before 10.4
-                #
-                # Try to detect 4-way universal builds, those have machine-type
-                # 'universal' instead of 'fat'.
-
-                machine = 'fat'
-                cflags = get_config_vars().get('CFLAGS')
-
-                archs = re.findall('-arch\s+(\S+)', cflags)
-                archs = tuple(sorted(set(archs)))
-
-                if len(archs) == 1:
-                    machine = archs[0]
-                elif archs == ('i386', 'ppc'):
-                    machine = 'fat'
-                elif archs == ('i386', 'x86_64'):
-                    machine = 'intel'
-                elif archs == ('i386', 'ppc', 'x86_64'):
-                    machine = 'fat3'
-                elif archs == ('ppc64', 'x86_64'):
-                    machine = 'fat64'
-                elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
-                    machine = 'universal'
-                else:
-                    raise ValueError(
-                       "Don't know machine value for archs=%r"%(archs,))
-
-            elif machine == 'i386':
-                # On OSX the machine type returned by uname is always the
-                # 32-bit variant, even if the executable architecture is
-                # the 64-bit variant
-                if sys.maxint >= 2**32:
-                    machine = 'x86_64'
-
-            elif machine in ('PowerPC', 'Power_Macintosh'):
-                # Pick a sane name for the PPC architecture.
-                machine = 'ppc'
-
-                # See 'i386' case
-                if sys.maxint >= 2**32:
-                    machine = 'ppc64'
+        import _osx_support, distutils.sysconfig
+        osname, release, machine = _osx_support.get_platform_osx(
+                                        distutils.sysconfig.get_config_vars(),
+                                        osname, release, machine)
 
     return "%s-%s-%s" % (osname, release, machine)
 
diff --git a/lib/grizzled/grizzled/net/ftp/parse.py b/lib/grizzled/grizzled/net/ftp/parse.py
index 7c2e7ec..47967f4 100644
--- a/lib/grizzled/grizzled/net/ftp/parse.py
+++ b/lib/grizzled/grizzled/net/ftp/parse.py
@@ -44,7 +44,7 @@
 # ---------------------------------------------------------------------------
 
 import time
-from enum import Enum
+from deprecated_enum import Enum
 
 # ---------------------------------------------------------------------------
 # Exports
diff --git a/lib/mox/mox_test.py b/lib/mox/mox_test.py
index 9849339..accabf4 100644
--- a/lib/mox/mox_test.py
+++ b/lib/mox/mox_test.py
@@ -284,7 +284,7 @@
     str_list = ["abc", "def"]
     self.assert_(isa_list == str_list)
 
-  def testEquailtyInListInvalid(self):
+  def testEqualityInListInvalid(self):
     """Verify list contents are properly compared."""
     isa_list = [mox.IsA(str),mox.IsA(str)]
     mixed_list = ["abc", 123]
diff --git a/lib/protorpc-1.0/protorpc/definition.py b/lib/protorpc-1.0/protorpc/definition.py
index c0344d4..6029087 100644
--- a/lib/protorpc-1.0/protorpc/definition.py
+++ b/lib/protorpc-1.0/protorpc/definition.py
@@ -21,7 +21,6 @@
 
 import new
 import sys
-import urllib2
 
 from . import descriptor
 from . import message_types
diff --git a/lib/sqlcmd/sqlcmd/__init__.py b/lib/sqlcmd/sqlcmd/__init__.py
index f730ea4..a5ce376 100644
--- a/lib/sqlcmd/sqlcmd/__init__.py
+++ b/lib/sqlcmd/sqlcmd/__init__.py
@@ -74,7 +74,7 @@
 from grizzled.misc import str2bool
 from grizzled import history
 
-from enum import Enum
+from deprecated_enum import Enum
 
 from sqlcmd.config import SQLCmdConfig
 from sqlcmd.exception import *
diff --git a/php/sdk/google/appengine/datastore/datastore_v3_pb.php b/php/sdk/google/appengine/datastore/datastore_v3_pb.php
index d750fdd..0d92f27 100644
--- a/php/sdk/google/appengine/datastore/datastore_v3_pb.php
+++ b/php/sdk/google/appengine/datastore/datastore_v3_pb.php
@@ -5950,6 +5950,7 @@
   class DeleteRequest extends \google\net\ProtocolMessage {
     private $key = array();
     private $snapshot = array();
+    private $composite_index = array();
     public function getTrusted() {
       if (!isset($this->trusted)) {
         return false;
@@ -6107,6 +6108,37 @@
     public function hasHeader() {
       return isset($this->header);
     }
+    public function getCompositeIndexSize() {
+      return sizeof($this->composite_index);
+    }
+    public function getCompositeIndexList() {
+      return $this->composite_index;
+    }
+    public function mutableCompositeIndex($idx) {
+      if (!isset($this->composite_index[$idx])) {
+        $val = new \storage_onestore_v3\CompositeIndex();
+        $this->composite_index[$idx] = $val;
+        return $val;
+      }
+      return $this->composite_index[$idx];
+    }
+    public function getCompositeIndex($idx) {
+      if (isset($this->composite_index[$idx])) {
+        return $this->composite_index[$idx];
+      }
+      if ($idx >= end(array_keys($this->composite_index))) {
+        throw new \OutOfRangeException('index out of range: ' + $idx);
+      }
+      return new \storage_onestore_v3\CompositeIndex();
+    }
+    public function addCompositeIndex() {
+      $val = new \storage_onestore_v3\CompositeIndex();
+      $this->composite_index[] = $val;
+      return $val;
+    }
+    public function clearCompositeIndex() {
+      $this->composite_index = array();
+    }
     public function clear() {
       $this->clearTrusted();
       $this->clearTransaction();
@@ -6115,6 +6147,7 @@
       $this->clearMarkChanges();
       $this->clearSnapshot();
       $this->clearHeader();
+      $this->clearCompositeIndex();
     }
     public function byteSizePartial() {
       $res = 0;
@@ -6145,6 +6178,11 @@
         $res += 1;
         $res += $this->lengthString($this->header->byteSizePartial());
       }
+      $this->checkProtoArray($this->composite_index);
+      $res += 1 * sizeof($this->composite_index);
+      foreach ($this->composite_index as $value) {
+        $res += $this->lengthString($value->byteSizePartial());
+      }
       return $res;
     }
     public function outputPartial($out) {
@@ -6182,6 +6220,12 @@
         $out->putVarInt32($this->header->byteSizePartial());
         $this->header->outputPartial($out);
       }
+      $this->checkProtoArray($this->composite_index);
+      foreach ($this->composite_index as $value) {
+        $out->putVarInt32(90);
+        $out->putVarInt32($value->byteSizePartial());
+        $value->outputPartial($out);
+      }
     }
     public function tryMerge($d) {
       while($d->avail() > 0) {
@@ -6220,6 +6264,12 @@
             $d->skip($length);
             $this->mutableHeader()->tryMerge($tmp);
             break;
+          case 90:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->addCompositeIndex()->tryMerge($tmp);
+            break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
             break;
@@ -6237,6 +6287,9 @@
         if (!$value->isInitialized()) return 'snapshot';
       }
       if (isset($this->header) && (!$this->header->isInitialized())) return 'header';
+      foreach ($this->composite_index as $value) {
+        if (!$value->isInitialized()) return 'composite_index';
+      }
       return null;
     }
     public function mergeFrom($x) {
@@ -6262,6 +6315,9 @@
       if ($x->hasHeader()) {
         $this->mutableHeader()->mergeFrom($x->getHeader());
       }
+      foreach ($x->getCompositeIndexList() as $v) {
+        $this->addCompositeIndex()->copyFrom($v);
+      }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
@@ -6283,6 +6339,10 @@
       }
       if (isset($this->header) !== isset($x->header)) return false;
       if (isset($this->header) && !$this->header->equals($x->header)) return false;
+      if (sizeof($this->composite_index) !== sizeof($x->composite_index)) return false;
+      foreach (array_map(null, $this->composite_index, $x->composite_index) as $v) {
+        if (!$v[0]->equals($v[1])) return false;
+      }
       return true;
     }
     public function shortDebugString($prefix = "") {
@@ -6308,6 +6368,9 @@
       if (isset($this->header)) {
         $res .= $prefix . "header <\n" . $this->header->shortDebugString($prefix . "  ") . $prefix . ">\n";
       }
+      foreach ($this->composite_index as $value) {
+        $res .= $prefix . "composite_index <\n" . $value->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
       return $res;
     }
   }
diff --git a/php/sdk/google/appengine/datastore/datastore_v4_pb.php b/php/sdk/google/appengine/datastore/datastore_v4_pb.php
index bfa1391..fe8f371 100644
--- a/php/sdk/google/appengine/datastore/datastore_v4_pb.php
+++ b/php/sdk/google/appengine/datastore/datastore_v4_pb.php
@@ -1153,9 +1153,55 @@
     public function hasPropertyFilter() {
       return isset($this->property_filter);
     }
+    public function getBoundingCircleFilter() {
+      if (!isset($this->bounding_circle_filter)) {
+        return new \google\appengine\datastore\v4\BoundingCircleFilter();
+      }
+      return $this->bounding_circle_filter;
+    }
+    public function mutableBoundingCircleFilter() {
+      if (!isset($this->bounding_circle_filter)) {
+        $res = new \google\appengine\datastore\v4\BoundingCircleFilter();
+        $this->bounding_circle_filter = $res;
+        return $res;
+      }
+      return $this->bounding_circle_filter;
+    }
+    public function clearBoundingCircleFilter() {
+      if (isset($this->bounding_circle_filter)) {
+        unset($this->bounding_circle_filter);
+      }
+    }
+    public function hasBoundingCircleFilter() {
+      return isset($this->bounding_circle_filter);
+    }
+    public function getBoundingBoxFilter() {
+      if (!isset($this->bounding_box_filter)) {
+        return new \google\appengine\datastore\v4\BoundingBoxFilter();
+      }
+      return $this->bounding_box_filter;
+    }
+    public function mutableBoundingBoxFilter() {
+      if (!isset($this->bounding_box_filter)) {
+        $res = new \google\appengine\datastore\v4\BoundingBoxFilter();
+        $this->bounding_box_filter = $res;
+        return $res;
+      }
+      return $this->bounding_box_filter;
+    }
+    public function clearBoundingBoxFilter() {
+      if (isset($this->bounding_box_filter)) {
+        unset($this->bounding_box_filter);
+      }
+    }
+    public function hasBoundingBoxFilter() {
+      return isset($this->bounding_box_filter);
+    }
     public function clear() {
       $this->clearCompositeFilter();
       $this->clearPropertyFilter();
+      $this->clearBoundingCircleFilter();
+      $this->clearBoundingBoxFilter();
     }
     public function byteSizePartial() {
       $res = 0;
@@ -1167,6 +1213,14 @@
         $res += 1;
         $res += $this->lengthString($this->property_filter->byteSizePartial());
       }
+      if (isset($this->bounding_circle_filter)) {
+        $res += 1;
+        $res += $this->lengthString($this->bounding_circle_filter->byteSizePartial());
+      }
+      if (isset($this->bounding_box_filter)) {
+        $res += 1;
+        $res += $this->lengthString($this->bounding_box_filter->byteSizePartial());
+      }
       return $res;
     }
     public function outputPartial($out) {
@@ -1180,6 +1234,16 @@
         $out->putVarInt32($this->property_filter->byteSizePartial());
         $this->property_filter->outputPartial($out);
       }
+      if (isset($this->bounding_circle_filter)) {
+        $out->putVarInt32(26);
+        $out->putVarInt32($this->bounding_circle_filter->byteSizePartial());
+        $this->bounding_circle_filter->outputPartial($out);
+      }
+      if (isset($this->bounding_box_filter)) {
+        $out->putVarInt32(34);
+        $out->putVarInt32($this->bounding_box_filter->byteSizePartial());
+        $this->bounding_box_filter->outputPartial($out);
+      }
     }
     public function tryMerge($d) {
       while($d->avail() > 0) {
@@ -1197,6 +1261,18 @@
             $d->skip($length);
             $this->mutablePropertyFilter()->tryMerge($tmp);
             break;
+          case 26:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableBoundingCircleFilter()->tryMerge($tmp);
+            break;
+          case 34:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableBoundingBoxFilter()->tryMerge($tmp);
+            break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
             break;
@@ -1208,6 +1284,8 @@
     public function checkInitialized() {
       if (isset($this->composite_filter) && (!$this->composite_filter->isInitialized())) return 'composite_filter';
       if (isset($this->property_filter) && (!$this->property_filter->isInitialized())) return 'property_filter';
+      if (isset($this->bounding_circle_filter) && (!$this->bounding_circle_filter->isInitialized())) return 'bounding_circle_filter';
+      if (isset($this->bounding_box_filter) && (!$this->bounding_box_filter->isInitialized())) return 'bounding_box_filter';
       return null;
     }
     public function mergeFrom($x) {
@@ -1218,6 +1296,12 @@
       if ($x->hasPropertyFilter()) {
         $this->mutablePropertyFilter()->mergeFrom($x->getPropertyFilter());
       }
+      if ($x->hasBoundingCircleFilter()) {
+        $this->mutableBoundingCircleFilter()->mergeFrom($x->getBoundingCircleFilter());
+      }
+      if ($x->hasBoundingBoxFilter()) {
+        $this->mutableBoundingBoxFilter()->mergeFrom($x->getBoundingBoxFilter());
+      }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
@@ -1225,6 +1309,10 @@
       if (isset($this->composite_filter) && !$this->composite_filter->equals($x->composite_filter)) return false;
       if (isset($this->property_filter) !== isset($x->property_filter)) return false;
       if (isset($this->property_filter) && !$this->property_filter->equals($x->property_filter)) return false;
+      if (isset($this->bounding_circle_filter) !== isset($x->bounding_circle_filter)) return false;
+      if (isset($this->bounding_circle_filter) && !$this->bounding_circle_filter->equals($x->bounding_circle_filter)) return false;
+      if (isset($this->bounding_box_filter) !== isset($x->bounding_box_filter)) return false;
+      if (isset($this->bounding_box_filter) && !$this->bounding_box_filter->equals($x->bounding_box_filter)) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
@@ -1235,6 +1323,12 @@
       if (isset($this->property_filter)) {
         $res .= $prefix . "property_filter <\n" . $this->property_filter->shortDebugString($prefix . "  ") . $prefix . ">\n";
       }
+      if (isset($this->bounding_circle_filter)) {
+        $res .= $prefix . "bounding_circle_filter <\n" . $this->bounding_circle_filter->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
+      if (isset($this->bounding_box_filter)) {
+        $res .= $prefix . "bounding_box_filter <\n" . $this->bounding_box_filter->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
       return $res;
     }
   }
@@ -1564,6 +1658,354 @@
   }
 }
 namespace google\appengine\datastore\v4 {
+  class BoundingCircleFilter extends \google\net\ProtocolMessage {
+    public function getProperty() {
+      if (!isset($this->property)) {
+        return new \google\appengine\datastore\v4\PropertyReference();
+      }
+      return $this->property;
+    }
+    public function mutableProperty() {
+      if (!isset($this->property)) {
+        $res = new \google\appengine\datastore\v4\PropertyReference();
+        $this->property = $res;
+        return $res;
+      }
+      return $this->property;
+    }
+    public function clearProperty() {
+      if (isset($this->property)) {
+        unset($this->property);
+      }
+    }
+    public function hasProperty() {
+      return isset($this->property);
+    }
+    public function getCenter() {
+      if (!isset($this->center)) {
+        return new \google\appengine\datastore\v4\GeoPoint();
+      }
+      return $this->center;
+    }
+    public function mutableCenter() {
+      if (!isset($this->center)) {
+        $res = new \google\appengine\datastore\v4\GeoPoint();
+        $this->center = $res;
+        return $res;
+      }
+      return $this->center;
+    }
+    public function clearCenter() {
+      if (isset($this->center)) {
+        unset($this->center);
+      }
+    }
+    public function hasCenter() {
+      return isset($this->center);
+    }
+    public function getRadiusMeters() {
+      if (!isset($this->radius_meters)) {
+        return 0.0;
+      }
+      return $this->radius_meters;
+    }
+    public function setRadiusMeters($val) {
+      $this->radius_meters = $val;
+      return $this;
+    }
+    public function clearRadiusMeters() {
+      unset($this->radius_meters);
+      return $this;
+    }
+    public function hasRadiusMeters() {
+      return isset($this->radius_meters);
+    }
+    public function clear() {
+      $this->clearProperty();
+      $this->clearCenter();
+      $this->clearRadiusMeters();
+    }
+    public function byteSizePartial() {
+      $res = 0;
+      if (isset($this->property)) {
+        $res += 1;
+        $res += $this->lengthString($this->property->byteSizePartial());
+      }
+      if (isset($this->center)) {
+        $res += 1;
+        $res += $this->lengthString($this->center->byteSizePartial());
+      }
+      if (isset($this->radius_meters)) {
+        $res += 9;
+      }
+      return $res;
+    }
+    public function outputPartial($out) {
+      if (isset($this->property)) {
+        $out->putVarInt32(10);
+        $out->putVarInt32($this->property->byteSizePartial());
+        $this->property->outputPartial($out);
+      }
+      if (isset($this->center)) {
+        $out->putVarInt32(18);
+        $out->putVarInt32($this->center->byteSizePartial());
+        $this->center->outputPartial($out);
+      }
+      if (isset($this->radius_meters)) {
+        $out->putVarInt32(25);
+        $out->putDouble($this->radius_meters);
+      }
+    }
+    public function tryMerge($d) {
+      while($d->avail() > 0) {
+        $tt = $d->getVarInt32();
+        switch ($tt) {
+          case 10:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableProperty()->tryMerge($tmp);
+            break;
+          case 18:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableCenter()->tryMerge($tmp);
+            break;
+          case 25:
+            $this->setRadiusMeters($d->getDouble());
+            break;
+          case 0:
+            throw new \google\net\ProtocolBufferDecodeError();
+            break;
+          default:
+            $d->skipData($tt);
+        }
+      };
+    }
+    public function checkInitialized() {
+      if ((!isset($this->property)) || (!$this->property->isInitialized())) return 'property';
+      if ((!isset($this->center)) || (!$this->center->isInitialized())) return 'center';
+      if (!isset($this->radius_meters)) return 'radius_meters';
+      return null;
+    }
+    public function mergeFrom($x) {
+      if ($x === $this) { throw new \IllegalArgumentException('Cannot copy message to itself'); }
+      if ($x->hasProperty()) {
+        $this->mutableProperty()->mergeFrom($x->getProperty());
+      }
+      if ($x->hasCenter()) {
+        $this->mutableCenter()->mergeFrom($x->getCenter());
+      }
+      if ($x->hasRadiusMeters()) {
+        $this->setRadiusMeters($x->getRadiusMeters());
+      }
+    }
+    public function equals($x) {
+      if ($x === $this) { return true; }
+      if (isset($this->property) !== isset($x->property)) return false;
+      if (isset($this->property) && !$this->property->equals($x->property)) return false;
+      if (isset($this->center) !== isset($x->center)) return false;
+      if (isset($this->center) && !$this->center->equals($x->center)) return false;
+      if (isset($this->radius_meters) !== isset($x->radius_meters)) return false;
+      if (isset($this->radius_meters) && $this->radius_meters !== $x->radius_meters) return false;
+      return true;
+    }
+    public function shortDebugString($prefix = "") {
+      $res = '';
+      if (isset($this->property)) {
+        $res .= $prefix . "property <\n" . $this->property->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
+      if (isset($this->center)) {
+        $res .= $prefix . "center <\n" . $this->center->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
+      if (isset($this->radius_meters)) {
+        $res .= $prefix . "radius_meters: " . $this->debugFormatDouble($this->radius_meters) . "\n";
+      }
+      return $res;
+    }
+  }
+}
+namespace google\appengine\datastore\v4 {
+  class BoundingBoxFilter extends \google\net\ProtocolMessage {
+    public function getProperty() {
+      if (!isset($this->property)) {
+        return new \google\appengine\datastore\v4\PropertyReference();
+      }
+      return $this->property;
+    }
+    public function mutableProperty() {
+      if (!isset($this->property)) {
+        $res = new \google\appengine\datastore\v4\PropertyReference();
+        $this->property = $res;
+        return $res;
+      }
+      return $this->property;
+    }
+    public function clearProperty() {
+      if (isset($this->property)) {
+        unset($this->property);
+      }
+    }
+    public function hasProperty() {
+      return isset($this->property);
+    }
+    public function getSouthwest() {
+      if (!isset($this->southwest)) {
+        return new \google\appengine\datastore\v4\GeoPoint();
+      }
+      return $this->southwest;
+    }
+    public function mutableSouthwest() {
+      if (!isset($this->southwest)) {
+        $res = new \google\appengine\datastore\v4\GeoPoint();
+        $this->southwest = $res;
+        return $res;
+      }
+      return $this->southwest;
+    }
+    public function clearSouthwest() {
+      if (isset($this->southwest)) {
+        unset($this->southwest);
+      }
+    }
+    public function hasSouthwest() {
+      return isset($this->southwest);
+    }
+    public function getNortheast() {
+      if (!isset($this->northeast)) {
+        return new \google\appengine\datastore\v4\GeoPoint();
+      }
+      return $this->northeast;
+    }
+    public function mutableNortheast() {
+      if (!isset($this->northeast)) {
+        $res = new \google\appengine\datastore\v4\GeoPoint();
+        $this->northeast = $res;
+        return $res;
+      }
+      return $this->northeast;
+    }
+    public function clearNortheast() {
+      if (isset($this->northeast)) {
+        unset($this->northeast);
+      }
+    }
+    public function hasNortheast() {
+      return isset($this->northeast);
+    }
+    public function clear() {
+      $this->clearProperty();
+      $this->clearSouthwest();
+      $this->clearNortheast();
+    }
+    public function byteSizePartial() {
+      $res = 0;
+      if (isset($this->property)) {
+        $res += 1;
+        $res += $this->lengthString($this->property->byteSizePartial());
+      }
+      if (isset($this->southwest)) {
+        $res += 1;
+        $res += $this->lengthString($this->southwest->byteSizePartial());
+      }
+      if (isset($this->northeast)) {
+        $res += 1;
+        $res += $this->lengthString($this->northeast->byteSizePartial());
+      }
+      return $res;
+    }
+    public function outputPartial($out) {
+      if (isset($this->property)) {
+        $out->putVarInt32(10);
+        $out->putVarInt32($this->property->byteSizePartial());
+        $this->property->outputPartial($out);
+      }
+      if (isset($this->southwest)) {
+        $out->putVarInt32(18);
+        $out->putVarInt32($this->southwest->byteSizePartial());
+        $this->southwest->outputPartial($out);
+      }
+      if (isset($this->northeast)) {
+        $out->putVarInt32(26);
+        $out->putVarInt32($this->northeast->byteSizePartial());
+        $this->northeast->outputPartial($out);
+      }
+    }
+    public function tryMerge($d) {
+      while($d->avail() > 0) {
+        $tt = $d->getVarInt32();
+        switch ($tt) {
+          case 10:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableProperty()->tryMerge($tmp);
+            break;
+          case 18:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableSouthwest()->tryMerge($tmp);
+            break;
+          case 26:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutableNortheast()->tryMerge($tmp);
+            break;
+          case 0:
+            throw new \google\net\ProtocolBufferDecodeError();
+            break;
+          default:
+            $d->skipData($tt);
+        }
+      };
+    }
+    public function checkInitialized() {
+      if ((!isset($this->property)) || (!$this->property->isInitialized())) return 'property';
+      if ((!isset($this->southwest)) || (!$this->southwest->isInitialized())) return 'southwest';
+      if ((!isset($this->northeast)) || (!$this->northeast->isInitialized())) return 'northeast';
+      return null;
+    }
+    public function mergeFrom($x) {
+      if ($x === $this) { throw new \IllegalArgumentException('Cannot copy message to itself'); }
+      if ($x->hasProperty()) {
+        $this->mutableProperty()->mergeFrom($x->getProperty());
+      }
+      if ($x->hasSouthwest()) {
+        $this->mutableSouthwest()->mergeFrom($x->getSouthwest());
+      }
+      if ($x->hasNortheast()) {
+        $this->mutableNortheast()->mergeFrom($x->getNortheast());
+      }
+    }
+    public function equals($x) {
+      if ($x === $this) { return true; }
+      if (isset($this->property) !== isset($x->property)) return false;
+      if (isset($this->property) && !$this->property->equals($x->property)) return false;
+      if (isset($this->southwest) !== isset($x->southwest)) return false;
+      if (isset($this->southwest) && !$this->southwest->equals($x->southwest)) return false;
+      if (isset($this->northeast) !== isset($x->northeast)) return false;
+      if (isset($this->northeast) && !$this->northeast->equals($x->northeast)) return false;
+      return true;
+    }
+    public function shortDebugString($prefix = "") {
+      $res = '';
+      if (isset($this->property)) {
+        $res .= $prefix . "property <\n" . $this->property->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
+      if (isset($this->southwest)) {
+        $res .= $prefix . "southwest <\n" . $this->southwest->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
+      if (isset($this->northeast)) {
+        $res .= $prefix . "northeast <\n" . $this->northeast->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
+      return $res;
+    }
+  }
+}
+namespace google\appengine\datastore\v4 {
   class GqlQuery extends \google\net\ProtocolMessage {
     private $name_arg = array();
     private $number_arg = array();
diff --git a/php/sdk/google/appengine/datastore/entity_pb.php b/php/sdk/google/appengine/datastore/entity_pb.php
index 1022a58..1fd6566 100644
--- a/php/sdk/google/appengine/datastore/entity_pb.php
+++ b/php/sdk/google/appengine/datastore/entity_pb.php
@@ -2780,10 +2780,17 @@
 }
 namespace storage_onestore_v3\Index\Property {
   class Direction {
+    const DIRECTION_UNSPECIFIED = 0;
     const ASCENDING = 1;
     const DESCENDING = 2;
   }
 }
+namespace storage_onestore_v3\Index\Property {
+  class Mode {
+    const MODE_UNSPECIFIED = 0;
+    const GEOSPATIAL = 3;
+  }
+}
 namespace storage_onestore_v3\Index {
   class Property extends \google\net\ProtocolMessage {
     public function getName() {
@@ -2820,9 +2827,27 @@
     public function hasDirection() {
       return isset($this->direction);
     }
+    public function getMode() {
+      if (!isset($this->mode)) {
+        return 0;
+      }
+      return $this->mode;
+    }
+    public function setMode($val) {
+      $this->mode = $val;
+      return $this;
+    }
+    public function clearMode() {
+      unset($this->mode);
+      return $this;
+    }
+    public function hasMode() {
+      return isset($this->mode);
+    }
     public function clear() {
       $this->clearName();
       $this->clearDirection();
+      $this->clearMode();
     }
     public function byteSizePartial() {
       $res = 0;
@@ -2834,6 +2859,10 @@
         $res += 1;
         $res += $this->lengthVarInt64($this->direction);
       }
+      if (isset($this->mode)) {
+        $res += 1;
+        $res += $this->lengthVarInt64($this->mode);
+      }
       return $res;
     }
     public function outputPartial($out) {
@@ -2845,6 +2874,10 @@
         $out->putVarInt32(32);
         $out->putVarInt32($this->direction);
       }
+      if (isset($this->mode)) {
+        $out->putVarInt32(48);
+        $out->putVarInt32($this->mode);
+      }
     }
     public function tryMerge($d) {
       while($d->avail() > 0) {
@@ -2859,6 +2892,9 @@
           case 32:
             $this->setDirection($d->getVarInt32());
             break;
+          case 48:
+            $this->setMode($d->getVarInt32());
+            break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
             break;
@@ -2879,6 +2915,9 @@
       if ($x->hasDirection()) {
         $this->setDirection($x->getDirection());
       }
+      if ($x->hasMode()) {
+        $this->setMode($x->getMode());
+      }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
@@ -2886,6 +2925,8 @@
       if (isset($this->name) && $this->name !== $x->name) return false;
       if (isset($this->direction) !== isset($x->direction)) return false;
       if (isset($this->direction) && $this->direction !== $x->direction) return false;
+      if (isset($this->mode) !== isset($x->mode)) return false;
+      if (isset($this->mode) && $this->mode !== $x->mode) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
@@ -2896,6 +2937,9 @@
       if (isset($this->direction)) {
         $res .= $prefix . "direction: " . ($this->direction) . "\n";
       }
+      if (isset($this->mode)) {
+        $res .= $prefix . "mode: " . ($this->mode) . "\n";
+      }
       return $res;
     }
   }
@@ -3085,6 +3129,7 @@
 }
 namespace storage_onestore_v3 {
   class CompositeIndex extends \google\net\ProtocolMessage {
+    private $read_division_family = array();
     public function getAppId() {
       if (!isset($this->app_id)) {
         return '';
@@ -3179,12 +3224,51 @@
     public function hasOnlyUseIfRequired() {
       return isset($this->only_use_if_required);
     }
+    public function getReadDivisionFamilySize() {
+      return sizeof($this->read_division_family);
+    }
+    public function getReadDivisionFamilyList() {
+      return $this->read_division_family;
+    }
+    public function getReadDivisionFamily($idx) {
+      return $this->read_division_family[$idx];
+    }
+    public function setReadDivisionFamily($idx, $val) {
+      $this->read_division_family[$idx] = $val;
+      return $this;
+    }
+    public function addReadDivisionFamily($val) {
+      $this->read_division_family[] = $val;
+      return $this;
+    }
+    public function clearReadDivisionFamily() {
+      $this->read_division_family = array();
+    }
+    public function getWriteDivisionFamily() {
+      if (!isset($this->write_division_family)) {
+        return '';
+      }
+      return $this->write_division_family;
+    }
+    public function setWriteDivisionFamily($val) {
+      $this->write_division_family = $val;
+      return $this;
+    }
+    public function clearWriteDivisionFamily() {
+      unset($this->write_division_family);
+      return $this;
+    }
+    public function hasWriteDivisionFamily() {
+      return isset($this->write_division_family);
+    }
     public function clear() {
       $this->clearAppId();
       $this->clearId();
       $this->clearDefinition();
       $this->clearState();
       $this->clearOnlyUseIfRequired();
+      $this->clearReadDivisionFamily();
+      $this->clearWriteDivisionFamily();
     }
     public function byteSizePartial() {
       $res = 0;
@@ -3207,6 +3291,15 @@
       if (isset($this->only_use_if_required)) {
         $res += 2;
       }
+      $this->checkProtoArray($this->read_division_family);
+      $res += 1 * sizeof($this->read_division_family);
+      foreach ($this->read_division_family as $value) {
+        $res += $this->lengthString(strlen($value));
+      }
+      if (isset($this->write_division_family)) {
+        $res += 1;
+        $res += $this->lengthString(strlen($this->write_division_family));
+      }
       return $res;
     }
     public function outputPartial($out) {
@@ -3231,6 +3324,15 @@
         $out->putVarInt32(48);
         $out->putBoolean($this->only_use_if_required);
       }
+      $this->checkProtoArray($this->read_division_family);
+      foreach ($this->read_division_family as $value) {
+        $out->putVarInt32(58);
+        $out->putPrefixedString($value);
+      }
+      if (isset($this->write_division_family)) {
+        $out->putVarInt32(66);
+        $out->putPrefixedString($this->write_division_family);
+      }
     }
     public function tryMerge($d) {
       while($d->avail() > 0) {
@@ -3256,6 +3358,16 @@
           case 48:
             $this->setOnlyUseIfRequired($d->getBoolean());
             break;
+          case 58:
+            $length = $d->getVarInt32();
+            $this->addReadDivisionFamily(substr($d->buffer(), $d->pos(), $length));
+            $d->skip($length);
+            break;
+          case 66:
+            $length = $d->getVarInt32();
+            $this->setWriteDivisionFamily(substr($d->buffer(), $d->pos(), $length));
+            $d->skip($length);
+            break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
             break;
@@ -3288,6 +3400,12 @@
       if ($x->hasOnlyUseIfRequired()) {
         $this->setOnlyUseIfRequired($x->getOnlyUseIfRequired());
       }
+      foreach ($x->getReadDivisionFamilyList() as $v) {
+        $this->addReadDivisionFamily($v);
+      }
+      if ($x->hasWriteDivisionFamily()) {
+        $this->setWriteDivisionFamily($x->getWriteDivisionFamily());
+      }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
@@ -3301,6 +3419,12 @@
       if (isset($this->state) && $this->state !== $x->state) return false;
       if (isset($this->only_use_if_required) !== isset($x->only_use_if_required)) return false;
       if (isset($this->only_use_if_required) && $this->only_use_if_required !== $x->only_use_if_required) return false;
+      if (sizeof($this->read_division_family) !== sizeof($x->read_division_family)) return false;
+      foreach (array_map(null, $this->read_division_family, $x->read_division_family) as $v) {
+        if ($v[0] !== $v[1]) return false;
+      }
+      if (isset($this->write_division_family) !== isset($x->write_division_family)) return false;
+      if (isset($this->write_division_family) && $this->write_division_family !== $x->write_division_family) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
@@ -3320,6 +3444,346 @@
       if (isset($this->only_use_if_required)) {
         $res .= $prefix . "only_use_if_required: " . $this->debugFormatBool($this->only_use_if_required) . "\n";
       }
+      foreach ($this->read_division_family as $value) {
+        $res .= $prefix . "read_division_family: " . $this->debugFormatString($value) . "\n";
+      }
+      if (isset($this->write_division_family)) {
+        $res .= $prefix . "write_division_family: " . $this->debugFormatString($this->write_division_family) . "\n";
+      }
+      return $res;
+    }
+  }
+}
+namespace storage_onestore_v3 {
+  class SearchIndexEntry extends \google\net\ProtocolMessage {
+    private $division_family = array();
+    public function getIndexId() {
+      if (!isset($this->index_id)) {
+        return "0";
+      }
+      return $this->index_id;
+    }
+    public function setIndexId($val) {
+      if (is_double($val)) {
+        $this->index_id = sprintf('%0.0F', $val);
+      } else {
+        $this->index_id = $val;
+      }
+      return $this;
+    }
+    public function clearIndexId() {
+      unset($this->index_id);
+      return $this;
+    }
+    public function hasIndexId() {
+      return isset($this->index_id);
+    }
+    public function getDivisionFamilySize() {
+      return sizeof($this->division_family);
+    }
+    public function getDivisionFamilyList() {
+      return $this->division_family;
+    }
+    public function getDivisionFamily($idx) {
+      return $this->division_family[$idx];
+    }
+    public function setDivisionFamily($idx, $val) {
+      $this->division_family[$idx] = $val;
+      return $this;
+    }
+    public function addDivisionFamily($val) {
+      $this->division_family[] = $val;
+      return $this;
+    }
+    public function clearDivisionFamily() {
+      $this->division_family = array();
+    }
+    public function getFingerprint1999() {
+      if (!isset($this->fingerprint_1999)) {
+        return "0";
+      }
+      return $this->fingerprint_1999;
+    }
+    public function setFingerprint1999($val) {
+      if (is_double($val)) {
+        $this->fingerprint_1999 = sprintf('%0.0F', $val);
+      } else {
+        $this->fingerprint_1999 = $val;
+      }
+      return $this;
+    }
+    public function clearFingerprint1999() {
+      unset($this->fingerprint_1999);
+      return $this;
+    }
+    public function hasFingerprint1999() {
+      return isset($this->fingerprint_1999);
+    }
+    public function getFingerprint2011() {
+      if (!isset($this->fingerprint_2011)) {
+        return "0";
+      }
+      return $this->fingerprint_2011;
+    }
+    public function setFingerprint2011($val) {
+      if (is_double($val)) {
+        $this->fingerprint_2011 = sprintf('%0.0F', $val);
+      } else {
+        $this->fingerprint_2011 = $val;
+      }
+      return $this;
+    }
+    public function clearFingerprint2011() {
+      unset($this->fingerprint_2011);
+      return $this;
+    }
+    public function hasFingerprint2011() {
+      return isset($this->fingerprint_2011);
+    }
+    public function clear() {
+      $this->clearIndexId();
+      $this->clearDivisionFamily();
+      $this->clearFingerprint1999();
+      $this->clearFingerprint2011();
+    }
+    public function byteSizePartial() {
+      $res = 0;
+      if (isset($this->index_id)) {
+        $res += 1;
+        $res += $this->lengthVarInt64($this->index_id);
+      }
+      $this->checkProtoArray($this->division_family);
+      $res += 1 * sizeof($this->division_family);
+      foreach ($this->division_family as $value) {
+        $res += $this->lengthString(strlen($value));
+      }
+      if (isset($this->fingerprint_1999)) {
+        $res += 9;
+      }
+      if (isset($this->fingerprint_2011)) {
+        $res += 9;
+      }
+      return $res;
+    }
+    public function outputPartial($out) {
+      if (isset($this->index_id)) {
+        $out->putVarInt32(8);
+        $out->putVarInt64($this->index_id);
+      }
+      $this->checkProtoArray($this->division_family);
+      foreach ($this->division_family as $value) {
+        $out->putVarInt32(18);
+        $out->putPrefixedString($value);
+      }
+      if (isset($this->fingerprint_1999)) {
+        $out->putVarInt32(25);
+        $out->put64($this->fingerprint_1999);
+      }
+      if (isset($this->fingerprint_2011)) {
+        $out->putVarInt32(33);
+        $out->put64($this->fingerprint_2011);
+      }
+    }
+    public function tryMerge($d) {
+      while($d->avail() > 0) {
+        $tt = $d->getVarInt32();
+        switch ($tt) {
+          case 8:
+            $this->setIndexId($d->getVarInt64());
+            break;
+          case 18:
+            $length = $d->getVarInt32();
+            $this->addDivisionFamily(substr($d->buffer(), $d->pos(), $length));
+            $d->skip($length);
+            break;
+          case 25:
+            $this->setFingerprint1999($d->getFixed64());
+            break;
+          case 33:
+            $this->setFingerprint2011($d->getFixed64());
+            break;
+          case 0:
+            throw new \google\net\ProtocolBufferDecodeError();
+            break;
+          default:
+            $d->skipData($tt);
+        }
+      };
+    }
+    public function checkInitialized() {
+      if (!isset($this->index_id)) return 'index_id';
+      return null;
+    }
+    public function mergeFrom($x) {
+      if ($x === $this) { throw new \IllegalArgumentException('Cannot copy message to itself'); }
+      if ($x->hasIndexId()) {
+        $this->setIndexId($x->getIndexId());
+      }
+      foreach ($x->getDivisionFamilyList() as $v) {
+        $this->addDivisionFamily($v);
+      }
+      if ($x->hasFingerprint1999()) {
+        $this->setFingerprint1999($x->getFingerprint1999());
+      }
+      if ($x->hasFingerprint2011()) {
+        $this->setFingerprint2011($x->getFingerprint2011());
+      }
+    }
+    public function equals($x) {
+      if ($x === $this) { return true; }
+      if (isset($this->index_id) !== isset($x->index_id)) return false;
+      if (isset($this->index_id) && !$this->integerEquals($this->index_id, $x->index_id)) return false;
+      if (sizeof($this->division_family) !== sizeof($x->division_family)) return false;
+      foreach (array_map(null, $this->division_family, $x->division_family) as $v) {
+        if ($v[0] !== $v[1]) return false;
+      }
+      if (isset($this->fingerprint_1999) !== isset($x->fingerprint_1999)) return false;
+      if (isset($this->fingerprint_1999) && !$this->integerEquals($this->fingerprint_1999, $x->fingerprint_1999)) return false;
+      if (isset($this->fingerprint_2011) !== isset($x->fingerprint_2011)) return false;
+      if (isset($this->fingerprint_2011) && !$this->integerEquals($this->fingerprint_2011, $x->fingerprint_2011)) return false;
+      return true;
+    }
+    public function shortDebugString($prefix = "") {
+      $res = '';
+      if (isset($this->index_id)) {
+        $res .= $prefix . "index_id: " . $this->debugFormatInt64($this->index_id) . "\n";
+      }
+      foreach ($this->division_family as $value) {
+        $res .= $prefix . "division_family: " . $this->debugFormatString($value) . "\n";
+      }
+      if (isset($this->fingerprint_1999)) {
+        $res .= $prefix . "fingerprint_1999: " . $this->debugFormatFixed64($this->fingerprint_1999) . "\n";
+      }
+      if (isset($this->fingerprint_2011)) {
+        $res .= $prefix . "fingerprint_2011: " . $this->debugFormatFixed64($this->fingerprint_2011) . "\n";
+      }
+      return $res;
+    }
+  }
+}
+namespace storage_onestore_v3 {
+  class SearchIndexExternalId extends \google\net\ProtocolMessage {
+    public function getIndexId() {
+      if (!isset($this->index_id)) {
+        return "0";
+      }
+      return $this->index_id;
+    }
+    public function setIndexId($val) {
+      if (is_double($val)) {
+        $this->index_id = sprintf('%0.0F', $val);
+      } else {
+        $this->index_id = $val;
+      }
+      return $this;
+    }
+    public function clearIndexId() {
+      unset($this->index_id);
+      return $this;
+    }
+    public function hasIndexId() {
+      return isset($this->index_id);
+    }
+    public function getPrimaryKey() {
+      if (!isset($this->primary_key)) {
+        return new \storage_onestore_v3\Reference();
+      }
+      return $this->primary_key;
+    }
+    public function mutablePrimaryKey() {
+      if (!isset($this->primary_key)) {
+        $res = new \storage_onestore_v3\Reference();
+        $this->primary_key = $res;
+        return $res;
+      }
+      return $this->primary_key;
+    }
+    public function clearPrimaryKey() {
+      if (isset($this->primary_key)) {
+        unset($this->primary_key);
+      }
+    }
+    public function hasPrimaryKey() {
+      return isset($this->primary_key);
+    }
+    public function clear() {
+      $this->clearIndexId();
+      $this->clearPrimaryKey();
+    }
+    public function byteSizePartial() {
+      $res = 0;
+      if (isset($this->index_id)) {
+        $res += 1;
+        $res += $this->lengthVarInt64($this->index_id);
+      }
+      if (isset($this->primary_key)) {
+        $res += 1;
+        $res += $this->lengthString($this->primary_key->byteSizePartial());
+      }
+      return $res;
+    }
+    public function outputPartial($out) {
+      if (isset($this->index_id)) {
+        $out->putVarInt32(8);
+        $out->putVarInt64($this->index_id);
+      }
+      if (isset($this->primary_key)) {
+        $out->putVarInt32(18);
+        $out->putVarInt32($this->primary_key->byteSizePartial());
+        $this->primary_key->outputPartial($out);
+      }
+    }
+    public function tryMerge($d) {
+      while($d->avail() > 0) {
+        $tt = $d->getVarInt32();
+        switch ($tt) {
+          case 8:
+            $this->setIndexId($d->getVarInt64());
+            break;
+          case 18:
+            $length = $d->getVarInt32();
+            $tmp = new \google\net\Decoder($d->buffer(), $d->pos(), $d->pos() + $length);
+            $d->skip($length);
+            $this->mutablePrimaryKey()->tryMerge($tmp);
+            break;
+          case 0:
+            throw new \google\net\ProtocolBufferDecodeError();
+            break;
+          default:
+            $d->skipData($tt);
+        }
+      };
+    }
+    public function checkInitialized() {
+      if (!isset($this->index_id)) return 'index_id';
+      if ((!isset($this->primary_key)) || (!$this->primary_key->isInitialized())) return 'primary_key';
+      return null;
+    }
+    public function mergeFrom($x) {
+      if ($x === $this) { throw new \IllegalArgumentException('Cannot copy message to itself'); }
+      if ($x->hasIndexId()) {
+        $this->setIndexId($x->getIndexId());
+      }
+      if ($x->hasPrimaryKey()) {
+        $this->mutablePrimaryKey()->mergeFrom($x->getPrimaryKey());
+      }
+    }
+    public function equals($x) {
+      if ($x === $this) { return true; }
+      if (isset($this->index_id) !== isset($x->index_id)) return false;
+      if (isset($this->index_id) && !$this->integerEquals($this->index_id, $x->index_id)) return false;
+      if (isset($this->primary_key) !== isset($x->primary_key)) return false;
+      if (isset($this->primary_key) && !$this->primary_key->equals($x->primary_key)) return false;
+      return true;
+    }
+    public function shortDebugString($prefix = "") {
+      $res = '';
+      if (isset($this->index_id)) {
+        $res .= $prefix . "index_id: " . $this->debugFormatInt64($this->index_id) . "\n";
+      }
+      if (isset($this->primary_key)) {
+        $res .= $prefix . "primary_key <\n" . $this->primary_key->shortDebugString($prefix . "  ") . $prefix . ">\n";
+      }
       return $res;
     }
   }
diff --git a/php/sdk/google/appengine/runtime/autoloader.php b/php/sdk/google/appengine/runtime/autoloader.php
index c06c318..c120ddb 100644
--- a/php/sdk/google/appengine/runtime/autoloader.php
+++ b/php/sdk/google/appengine/runtime/autoloader.php
@@ -90,10 +90,13 @@
         'storage_onestore_v3\entityproto' => 'google/appengine/datastore/entity_pb.php',
         'storage_onestore_v3\compositeproperty' => 'google/appengine/datastore/entity_pb.php',
         'storage_onestore_v3\index\property\direction' => 'google/appengine/datastore/entity_pb.php',
+        'storage_onestore_v3\index\property\mode' => 'google/appengine/datastore/entity_pb.php',
         'storage_onestore_v3\index\property' => 'google/appengine/datastore/entity_pb.php',
         'storage_onestore_v3\index' => 'google/appengine/datastore/entity_pb.php',
         'storage_onestore_v3\compositeindex\state' => 'google/appengine/datastore/entity_pb.php',
         'storage_onestore_v3\compositeindex' => 'google/appengine/datastore/entity_pb.php',
+        'storage_onestore_v3\searchindexentry' => 'google/appengine/datastore/entity_pb.php',
+        'storage_onestore_v3\searchindexexternalid' => 'google/appengine/datastore/entity_pb.php',
         'storage_onestore_v3\indexpostfix\indexvalue' => 'google/appengine/datastore/entity_pb.php',
         'storage_onestore_v3\indexpostfix' => 'google/appengine/datastore/entity_pb.php',
         'storage_onestore_v3\indexposition' => 'google/appengine/datastore/entity_pb.php',
@@ -113,6 +116,8 @@
         'google\appengine\datastore\v4\compositefilter' => 'google/appengine/datastore/datastore_v4_pb.php',
         'google\appengine\datastore\v4\propertyfilter\operator' => 'google/appengine/datastore/datastore_v4_pb.php',
         'google\appengine\datastore\v4\propertyfilter' => 'google/appengine/datastore/datastore_v4_pb.php',
+        'google\appengine\datastore\v4\boundingcirclefilter' => 'google/appengine/datastore/datastore_v4_pb.php',
+        'google\appengine\datastore\v4\boundingboxfilter' => 'google/appengine/datastore/datastore_v4_pb.php',
         'google\appengine\datastore\v4\gqlquery' => 'google/appengine/datastore/datastore_v4_pb.php',
         'google\appengine\datastore\v4\gqlqueryarg' => 'google/appengine/datastore/datastore_v4_pb.php',
         'google\appengine\datastore\v4\queryresultbatch\moreresultstype' => 'google/appengine/datastore/datastore_v4_pb.php',
diff --git a/wrapper_util.py b/wrapper_util.py
index 7ba646a..70e0bad 100644
--- a/wrapper_util.py
+++ b/wrapper_util.py
@@ -152,7 +152,7 @@
 
 
     self.google_sql_extra_paths = self.oauth_client_extra_paths + [
-        os.path.join(dir_path, 'lib', 'enum'),
+        os.path.join(dir_path, 'lib', 'deprecated_enum'),
         os.path.join(dir_path, 'lib', 'grizzled'),
         os.path.join(dir_path, 'lib', 'oauth2'),
         os.path.join(dir_path, 'lib', 'prettytable'),