hexsha
stringlengths 40
40
| size
int64 10
805k
| ext
stringclasses 6
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
176
| max_stars_repo_name
stringlengths 7
114
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
176
| max_issues_repo_name
stringlengths 7
114
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
48.5k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
176
| max_forks_repo_name
stringlengths 7
114
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 10
805k
| avg_line_length
float64 5.53
11k
| max_line_length
int64 10
129k
| alphanum_fraction
float64 0.13
0.93
| content_no_comment
stringlengths 0
449k
| is_comment_constant_removed
bool 2
classes | is_sharp_comment_removed
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f71a7ea56d84335a1f6f15af7d71e033e8ced3a1
| 996
|
py
|
Python
|
tests/test_inference.py
|
weiyx16/mmsegmentation
|
6d35d76195f173fbc6b119a7d7815e67d78024c6
|
[
"Apache-2.0"
] | 21
|
2022-01-11T14:06:25.000Z
|
2022-03-29T06:42:13.000Z
|
tests/test_inference.py
|
weiyx16/mmsegmentation
|
6d35d76195f173fbc6b119a7d7815e67d78024c6
|
[
"Apache-2.0"
] | 13
|
2022-02-15T20:05:18.000Z
|
2022-02-15T20:05:21.000Z
|
tests/test_inference.py
|
weiyx16/mmsegmentation
|
6d35d76195f173fbc6b119a7d7815e67d78024c6
|
[
"Apache-2.0"
] | 11
|
2022-01-11T16:05:24.000Z
|
2022-03-17T01:58:52.000Z
|
# Copyright (c) OpenMMLab. All rights reserved.
import os.path as osp
import mmcv
from mmseg.apis import inference_segmentor, init_segmentor
def test_test_time_augmentation_on_cpu():
    """Smoke-test flip-based test-time augmentation with PSPNet on CPU."""
    cfg = mmcv.Config.fromfile(
        'configs/pspnet/pspnet_r50-d8_512x1024_40k_cityscapes.py')
    # Skip the pretrained-weight download during testing.
    cfg.model.pretrained = None
    # SyncBN needs distributed GPU execution; swap in plain BN so the model
    # can run inference on CPU.
    bn_cfg = dict(type='BN', requires_grad=True)
    for module_cfg in (cfg.model.backbone, cfg.model.decode_head,
                       cfg.model.auxiliary_head):
        module_cfg.norm_cfg = bn_cfg
    # Turn on flip augmentation at test time.
    cfg.data.test.pipeline[1].flip = True

    model = init_segmentor(cfg, None, device='cpu')
    image = mmcv.imread(
        osp.join(osp.dirname(__file__), 'data/color.jpg'), 'color')
    seg_result = inference_segmentor(model, image)
    assert seg_result[0].shape == (288, 512)
| 32.129032
| 75
| 0.73494
|
import os.path as osp
import mmcv
from mmseg.apis import inference_segmentor, init_segmentor
def test_test_time_augmentation_on_cpu():
config_file = 'configs/pspnet/pspnet_r50-d8_512x1024_40k_cityscapes.py'
config = mmcv.Config.fromfile(config_file)
config.model.pretrained = None
norm_cfg = dict(type='BN', requires_grad=True)
config.model.backbone.norm_cfg = norm_cfg
config.model.decode_head.norm_cfg = norm_cfg
config.model.auxiliary_head.norm_cfg = norm_cfg
config.data.test.pipeline[1].flip = True
checkpoint_file = None
model = init_segmentor(config, checkpoint_file, device='cpu')
img = mmcv.imread(
osp.join(osp.dirname(__file__), 'data/color.jpg'), 'color')
result = inference_segmentor(model, img)
assert result[0].shape == (288, 512)
| true
| true
|
f71a801eee241a74789c0995cf4813e2cdb9335f
| 19,958
|
py
|
Python
|
sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio.py
|
RusOr10n/beam
|
ede14d4aa7d239f74d5565a28a7c4433eaaa7d47
|
[
"Apache-2.0"
] | 1
|
2019-12-05T04:36:46.000Z
|
2019-12-05T04:36:46.000Z
|
sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio.py
|
RusOr10n/beam
|
ede14d4aa7d239f74d5565a28a7c4433eaaa7d47
|
[
"Apache-2.0"
] | 14
|
2020-02-12T22:20:41.000Z
|
2021-11-09T19:41:23.000Z
|
sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio.py
|
violalyu/beam
|
dd605e568d70b1a6ebea60c15b2aec3e240f3914
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
A connector for reading from and writing to Google Cloud Datastore.
Please use this module for Datastore I/O since
``apache_beam.io.gcp.datastore.v1.datastoreio`` will be deprecated in the
next Beam major release.
This module uses the newer google-cloud-datastore package. Its API was different
enough to require extensive changes to this and associated modules.
This module is experimental, no backwards compatibility guarantees.
"""
from __future__ import absolute_import
from __future__ import division
import logging
import time
from builtins import round
from apache_beam import typehints
from apache_beam.io.gcp.datastore.v1 import util
from apache_beam.io.gcp.datastore.v1.adaptive_throttler import AdaptiveThrottler
from apache_beam.io.gcp.datastore.v1new import helper
from apache_beam.io.gcp.datastore.v1new import query_splitter
from apache_beam.io.gcp.datastore.v1new import types
from apache_beam.metrics.metric import Metrics
from apache_beam.transforms import Create
from apache_beam.transforms import DoFn
from apache_beam.transforms import ParDo
from apache_beam.transforms import PTransform
from apache_beam.transforms import Reshuffle
from apache_beam.utils import retry
__all__ = ['ReadFromDatastore', 'WriteToDatastore', 'DeleteFromDatastore']
@typehints.with_output_types(types.Entity)
class ReadFromDatastore(PTransform):
  """A ``PTransform`` for querying Google Cloud Datastore.

  To read a ``PCollection[Entity]`` from a Cloud Datastore ``Query``, use
  the ``ReadFromDatastore`` transform by providing a `query` to
  read from. The project and optional namespace are set in the query.

  The query will be split into multiple queries to allow for parallelism. The
  degree of parallelism is automatically determined, but can be overridden by
  setting `num_splits` to a value of 1 or greater.

  Note: Normally, a runner will read from Cloud Datastore in parallel across
  many workers. However, when the `query` is configured with a `limit` or if the
  query contains inequality filters like `GREATER_THAN, LESS_THAN` etc., then
  all the returned results will be read by a single worker in order to ensure
  correct data. Since data is read from a single worker, this could have
  significant impact on the performance of the job. Using a
  :class:`~apache_beam.transforms.util.Reshuffle` transform after the read in
  this case might be beneficial for parallelizing work across workers.

  The semantics for query splitting is defined below:

  1. If `num_splits` is equal to 0, then the number of splits will be chosen
  dynamically at runtime based on the query data size.

  2. Any value of `num_splits` greater than
  `ReadFromDatastore._NUM_QUERY_SPLITS_MAX` will be capped at that value.

  3. If the `query` has a user limit set, or contains inequality filters, then
  `num_splits` will be ignored and no split will be performed.

  4. Under certain cases Cloud Datastore is unable to split query to the
  requested number of splits. In such cases we just use whatever Cloud
  Datastore returns.

  See https://developers.google.com/datastore/ for more details on Google Cloud
  Datastore.
  """

  # An upper bound on the number of splits for a query.
  _NUM_QUERY_SPLITS_MAX = 50000
  # A lower bound on the number of splits for a query. This is to ensure that
  # we parallelize the query even when Datastore statistics are not available.
  _NUM_QUERY_SPLITS_MIN = 12
  # Default bundle size of 64MB.
  _DEFAULT_BUNDLE_SIZE_BYTES = 64 * 1024 * 1024

  def __init__(self, query, num_splits=0):
    """Initialize the `ReadFromDatastore` transform.

    This transform outputs elements of type
    :class:`~apache_beam.io.gcp.datastore.v1new.types.Entity`.

    Args:
      query: (:class:`~apache_beam.io.gcp.datastore.v1new.types.Query`) query
        used to fetch entities.
      num_splits: (:class:`int`) (optional) Number of splits for the query.

    Raises:
      ValueError: if `query` is falsy, `query.project` is empty, or
        `num_splits` is negative.
    """
    super(ReadFromDatastore, self).__init__()
    # Validate `query` itself before touching its attributes. The previous
    # ordering read `query.project` first, so a None query raised
    # AttributeError instead of the intended ValueError.
    if not query:
      raise ValueError("query cannot be empty")
    if not query.project:
      raise ValueError("query.project cannot be empty")
    if num_splits < 0:
      raise ValueError("num_splits must be greater than or equal 0")

    self._project = query.project
    # using _namespace conflicts with DisplayData._namespace
    self._datastore_namespace = query.namespace
    self._query = query
    self._num_splits = num_splits

  def expand(self, pcoll):
    # This is a composite transform that involves the following:
    #   1. Create a singleton of the user provided `query` and apply a
    #   ``ParDo`` that splits the query into `num_splits` queries if possible.
    #
    #   If the value of `num_splits` is 0, the number of splits will be
    #   computed dynamically based on the size of the data for the `query`.
    #
    #   2. The resulting ``PCollection`` is sharded across workers using a
    #   ``Reshuffle`` operation.
    #
    #   3. In the third step, a ``ParDo`` reads entities for each query and
    #   outputs a ``PCollection[Entity]``.
    return (pcoll.pipeline
            | 'UserQuery' >> Create([self._query])
            | 'SplitQuery' >> ParDo(ReadFromDatastore._SplitQueryFn(
                self._num_splits))
            | Reshuffle()
            | 'Read' >> ParDo(ReadFromDatastore._QueryFn()))

  def display_data(self):
    disp_data = {'project': self._query.project,
                 'query': str(self._query),
                 'num_splits': self._num_splits}
    if self._datastore_namespace is not None:
      disp_data['namespace'] = self._datastore_namespace
    return disp_data

  @typehints.with_input_types(types.Query)
  @typehints.with_output_types(types.Query)
  class _SplitQueryFn(DoFn):
    """A `DoFn` that splits a given query into multiple sub-queries."""

    def __init__(self, num_splits):
      super(ReadFromDatastore._SplitQueryFn, self).__init__()
      self._num_splits = num_splits

    def process(self, query, *args, **kwargs):
      client = helper.get_client(query.project, query.namespace)
      try:
        # Short circuit estimating num_splits if split is not possible.
        query_splitter.validate_split(query)

        if self._num_splits == 0:
          estimated_num_splits = self.get_estimated_num_splits(client, query)
        else:
          estimated_num_splits = self._num_splits

        logging.info("Splitting the query into %d splits",
                     estimated_num_splits)
        query_splits = query_splitter.get_splits(
            client, query, estimated_num_splits)
      except query_splitter.QuerySplitterError:
        # Fall back to a single unsplit query; the read still succeeds but
        # without parallelism.
        logging.info("Unable to parallelize the given query: %s", query,
                     exc_info=True)
        query_splits = [query]

      return query_splits

    def display_data(self):
      disp_data = {'num_splits': self._num_splits}
      return disp_data

    @staticmethod
    def query_latest_statistics_timestamp(client):
      """Fetches the latest timestamp of statistics from Cloud Datastore.

      Cloud Datastore system tables with statistics are periodically updated.
      This method fetches the latest timestamp (in microseconds) of statistics
      update using the `__Stat_Total__` table.
      """
      if client.namespace is None:
        kind = '__Stat_Total__'
      else:
        kind = '__Stat_Ns_Total__'
      query = client.query(kind=kind, order=["-timestamp", ])
      entities = list(query.fetch(limit=1))
      if not entities:
        raise RuntimeError("Datastore total statistics unavailable.")
      return entities[0]['timestamp']

    @staticmethod
    def get_estimated_size_bytes(client, query):
      """Get the estimated size of the data returned by this instance's query.

      Cloud Datastore provides no way to get a good estimate of how large the
      result of a query is going to be. Hence we use the __Stat_Kind__ system
      table to get size of the entire kind as an approximate estimate, assuming
      exactly 1 kind is specified in the query.

      See https://cloud.google.com/datastore/docs/concepts/stats.
      """
      kind_name = query.kind
      latest_timestamp = (
          ReadFromDatastore._SplitQueryFn
          .query_latest_statistics_timestamp(client))
      logging.info('Latest stats timestamp for kind %s is %s',
                   kind_name, latest_timestamp)

      if client.namespace is None:
        kind = '__Stat_Kind__'
      else:
        kind = '__Stat_Ns_Kind__'
      query = client.query(kind=kind)
      query.add_filter('kind_name', '=', kind_name)
      query.add_filter('timestamp', '=', latest_timestamp)

      entities = list(query.fetch(limit=1))
      if not entities:
        raise RuntimeError(
            'Datastore statistics for kind %s unavailable' % kind_name)
      return entities[0]['entity_bytes']

    @staticmethod
    def get_estimated_num_splits(client, query):
      """Computes the number of splits to be performed on the query."""
      try:
        estimated_size_bytes = (
            ReadFromDatastore._SplitQueryFn
            .get_estimated_size_bytes(client, query))
        logging.info('Estimated size bytes for query: %s',
                     estimated_size_bytes)
        num_splits = int(min(ReadFromDatastore._NUM_QUERY_SPLITS_MAX, round(
            (float(estimated_size_bytes) /
             ReadFromDatastore._DEFAULT_BUNDLE_SIZE_BYTES))))
      except Exception as e:
        logging.warning('Failed to fetch estimated size bytes: %s', e)
        # Fallback in case estimated size is unavailable.
        num_splits = ReadFromDatastore._NUM_QUERY_SPLITS_MIN

      return max(num_splits, ReadFromDatastore._NUM_QUERY_SPLITS_MIN)

  @typehints.with_input_types(types.Query)
  @typehints.with_output_types(types.Entity)
  class _QueryFn(DoFn):
    """A DoFn that fetches entities from Cloud Datastore, for a given query."""

    def process(self, query, *unused_args, **unused_kwargs):
      _client = helper.get_client(query.project, query.namespace)
      client_query = query._to_client_query(_client)
      for client_entity in client_query.fetch(query.limit):
        yield types.Entity.from_client_entity(client_entity)
class _Mutate(PTransform):
  """A ``PTransform`` that writes mutations to Cloud Datastore.

  Only idempotent Datastore mutation operations (upsert and delete) are
  supported, as the commits are retried when failures occur.
  """

  def __init__(self, mutate_fn):
    """Initializes a Mutate transform.

    Args:
      mutate_fn: Instance of `DatastoreMutateFn` to use.
    """
    self._mutate_fn = mutate_fn

  def expand(self, pcoll):
    # All batching and committing happens inside the DoFn, per bundle.
    return pcoll | 'Write Batch to Datastore' >> ParDo(self._mutate_fn)

  class DatastoreMutateFn(DoFn):
    """A ``DoFn`` that write mutations to Datastore.

    Mutations are written in batches, where the maximum batch size is
    `util.WRITE_BATCH_SIZE`.

    Commits are non-transactional. If a commit fails because of a conflict over
    an entity group, the commit will be retried. This means that the mutation
    should be idempotent (`upsert` and `delete` mutations) to prevent duplicate
    data or errors.
    """

    def __init__(self, project):
      """
      Args:
        project: (str) cloud project id
      """
      self._project = project
      # Client is created lazily in start_bundle, not here.
      self._client = None
      # Beam metrics counters tracking RPC outcomes for monitoring.
      self._rpc_successes = Metrics.counter(
          _Mutate.DatastoreMutateFn, "datastoreRpcSuccesses")
      self._rpc_errors = Metrics.counter(
          _Mutate.DatastoreMutateFn, "datastoreRpcErrors")
      self._throttled_secs = Metrics.counter(
          _Mutate.DatastoreMutateFn, "cumulativeThrottlingSeconds")
      # Client-side throttler that backs off when recent requests failed.
      self._throttler = AdaptiveThrottler(window_ms=120000, bucket_ms=1000,
                                          overload_ratio=1.25)

    def _update_rpc_stats(self, successes=0, errors=0, throttled_secs=0):
      # Fold one RPC's outcome into the metrics counters.
      self._rpc_successes.inc(successes)
      self._rpc_errors.inc(errors)
      self._throttled_secs.inc(throttled_secs)

    def start_bundle(self):
      # (Re)create the Datastore client and an empty batch for this bundle,
      # then seed the adaptive target batch size.
      self._client = helper.get_client(self._project, namespace=None)
      self._init_batch()

      self._batch_sizer = util.DynamicBatchSizer()
      self._target_batch_size = self._batch_sizer.get_batch_size(
          time.time() * 1000)

    def element_to_client_batch_item(self, element):
      # Subclasses convert a Beam element into a client-library object.
      raise NotImplementedError

    def add_to_batch(self, client_batch_item):
      # Subclasses add the converted item to self._batch (put or delete).
      raise NotImplementedError

    @retry.with_exponential_backoff(num_retries=5,
                                    retry_filter=helper.retry_on_rpc_error)
    def write_mutations(self, throttler, rpc_stats_callback, throttle_delay=1):
      """Writes a batch of mutations to Cloud Datastore.

      If a commit fails, it will be retried up to 5 times. All mutations in the
      batch will be committed again, even if the commit was partially
      successful. If the retry limit is exceeded, the last exception from
      Cloud Datastore will be raised.

      Assumes that the Datastore client library does not perform any retries on
      commits. It has not been determined how such retries would interact with
      the retries and throttler used here.
      See ``google.cloud.datastore_v1.gapic.datastore_client_config`` for
      retry config.

      Args:
        rpc_stats_callback: a function to call with arguments `successes` and
            `failures` and `throttled_secs`; this is called to record successful
            and failed RPCs to Datastore and time spent waiting for throttling.
        throttler: (``apache_beam.io.gcp.datastore.v1.adaptive_throttler.
          AdaptiveThrottler``)
          Throttler instance used to select requests to be throttled.
        throttle_delay: (:class:`float`) time in seconds to sleep when
            throttled.

      Returns:
        (int) The latency of the successful RPC in milliseconds.
      """
      # Client-side throttling.
      while throttler.throttle_request(time.time() * 1000):
        logging.info("Delaying request for %ds due to previous failures",
                     throttle_delay)
        time.sleep(throttle_delay)
        rpc_stats_callback(throttled_secs=throttle_delay)

      if self._batch is None:
        # this will only happen when we re-try previously failed batch:
        # rebuild the client batch from the retained raw elements.
        self._batch = self._client.batch()
        self._batch.begin()
        for element in self._batch_elements:
          self.add_to_batch(element)

      try:
        start_time = time.time()
        self._batch.commit()
        end_time = time.time()

        rpc_stats_callback(successes=1)
        throttler.successful_request(start_time * 1000)
        commit_time_ms = int((end_time-start_time) * 1000)
        return commit_time_ms
      except Exception:
        # Invalidate the batch so the retry path above rebuilds it from
        # self._batch_elements before committing again.
        self._batch = None
        rpc_stats_callback(errors=1)
        raise

    def process(self, element):
      client_element = self.element_to_client_batch_item(element)
      # Keep the raw element so write_mutations can rebuild the batch on retry.
      self._batch_elements.append(client_element)
      self.add_to_batch(client_element)
      # Flush when either the mutation count reaches the adaptive target or
      # the serialized batch size exceeds the byte cap.
      self._batch_bytes_size += self._batch.mutations[-1].ByteSize()
      if (len(self._batch.mutations) >= self._target_batch_size or
          self._batch_bytes_size > util.WRITE_BATCH_MAX_BYTES_SIZE):
        self._flush_batch()

    def finish_bundle(self):
      # Commit whatever is still pending at the end of the bundle.
      if self._batch_elements:
        self._flush_batch()

    def _init_batch(self):
      # Start a fresh, empty batch and reset the bookkeeping state.
      self._batch_bytes_size = 0
      self._batch = self._client.batch()
      self._batch.begin()
      self._batch_elements = []

    def _flush_batch(self):
      # Flush the current batch of mutations to Cloud Datastore.
      latency_ms = self.write_mutations(
          self._throttler,
          rpc_stats_callback=self._update_rpc_stats,
          throttle_delay=util.WRITE_BATCH_TARGET_LATENCY_MS // 1000)
      logging.debug("Successfully wrote %d mutations in %dms.",
                    len(self._batch.mutations), latency_ms)

      # Feed observed latency back into the sizer so the next target batch
      # size adapts to current Datastore performance.
      now = time.time() * 1000
      self._batch_sizer.report_latency(
          now, latency_ms, len(self._batch.mutations))
      self._target_batch_size = self._batch_sizer.get_batch_size(now)

      self._init_batch()
@typehints.with_input_types(types.Entity)
class WriteToDatastore(_Mutate):
  """
  Writes elements of type
  :class:`~apache_beam.io.gcp.datastore.v1new.types.Entity` to Cloud Datastore.

  Entity keys must be complete. The ``project`` field in each key must match the
  project ID passed to this transform. If ``project`` field in entity or
  property key is empty then it is filled with the project ID passed to this
  transform.
  """

  def __init__(self, project):
    """Initialize the `WriteToDatastore` transform.

    Args:
      project: (:class:`str`) The ID of the project to write entities to.
    """
    super(WriteToDatastore, self).__init__(
        WriteToDatastore._DatastoreWriteFn(project))

  class _DatastoreWriteFn(_Mutate.DatastoreMutateFn):
    """Mutation DoFn that upserts entities into Cloud Datastore."""

    def element_to_client_batch_item(self, element):
      # Only the v1new Entity wrapper type is accepted.
      if not isinstance(element, types.Entity):
        raise ValueError('apache_beam.io.gcp.datastore.v1new.datastoreio.Entity'
                         ' expected, got: %s' % type(element))
      # Fill in the transform's project when the key left it blank.
      if not element.key.project:
        element.key.project = self._project
      client_entity = element.to_client_entity()
      # Partial (incomplete) keys cannot be upserted.
      if client_entity.key.is_partial:
        raise ValueError('Entities to be written to Cloud Datastore must '
                         'have complete keys:\n%s' % client_entity)
      return client_entity

    def add_to_batch(self, client_entity):
      self._batch.put(client_entity)

    def display_data(self):
      return {
          'mutation': 'Write (upsert)',
          'project': self._project,
      }
@typehints.with_input_types(types.Key)
class DeleteFromDatastore(_Mutate):
  """
  Deletes elements matching input
  :class:`~apache_beam.io.gcp.datastore.v1new.types.Key` elements from Cloud
  Datastore.

  Keys must be complete. The ``project`` field in each key must match the
  project ID passed to this transform. If ``project`` field in key is empty then
  it is filled with the project ID passed to this transform.
  """

  def __init__(self, project):
    """Initialize the `DeleteFromDatastore` transform.

    Args:
      project: (:class:`str`) The ID of the project from which the entities will
        be deleted.
    """
    super(DeleteFromDatastore, self).__init__(
        DeleteFromDatastore._DatastoreDeleteFn(project))

  class _DatastoreDeleteFn(_Mutate.DatastoreMutateFn):
    """Mutation DoFn that deletes entities by key from Cloud Datastore."""

    def element_to_client_batch_item(self, element):
      # Only the v1new Key wrapper type is accepted.
      if not isinstance(element, types.Key):
        raise ValueError('apache_beam.io.gcp.datastore.v1new.datastoreio.Key'
                         ' expected, got: %s' % type(element))
      # Fill in the transform's project when the key left it blank.
      if not element.project:
        element.project = self._project
      client_key = element.to_client_key()
      # Partial (incomplete) keys cannot be deleted.
      if client_key.is_partial:
        raise ValueError('Keys to be deleted from Cloud Datastore must be '
                         'complete:\n%s' % client_key)
      return client_key

    def add_to_batch(self, client_key):
      self._batch.delete(client_key)

    def display_data(self):
      return {
          'mutation': 'Delete',
          'project': self._project,
      }
| 38.980469
| 80
| 0.702225
|
from __future__ import absolute_import
from __future__ import division
import logging
import time
from builtins import round
from apache_beam import typehints
from apache_beam.io.gcp.datastore.v1 import util
from apache_beam.io.gcp.datastore.v1.adaptive_throttler import AdaptiveThrottler
from apache_beam.io.gcp.datastore.v1new import helper
from apache_beam.io.gcp.datastore.v1new import query_splitter
from apache_beam.io.gcp.datastore.v1new import types
from apache_beam.metrics.metric import Metrics
from apache_beam.transforms import Create
from apache_beam.transforms import DoFn
from apache_beam.transforms import ParDo
from apache_beam.transforms import PTransform
from apache_beam.transforms import Reshuffle
from apache_beam.utils import retry
__all__ = ['ReadFromDatastore', 'WriteToDatastore', 'DeleteFromDatastore']
@typehints.with_output_types(types.Entity)
class ReadFromDatastore(PTransform):
_NUM_QUERY_SPLITS_MAX = 50000
_NUM_QUERY_SPLITS_MIN = 12
_DEFAULT_BUNDLE_SIZE_BYTES = 64 * 1024 * 1024
def __init__(self, query, num_splits=0):
super(ReadFromDatastore, self).__init__()
if not query.project:
raise ValueError("query.project cannot be empty")
if not query:
raise ValueError("query cannot be empty")
if num_splits < 0:
raise ValueError("num_splits must be greater than or equal 0")
self._project = query.project
self._datastore_namespace = query.namespace
self._query = query
self._num_splits = num_splits
def expand(self, pcoll):
return (pcoll.pipeline
| 'UserQuery' >> Create([self._query])
| 'SplitQuery' >> ParDo(ReadFromDatastore._SplitQueryFn(
self._num_splits))
| Reshuffle()
| 'Read' >> ParDo(ReadFromDatastore._QueryFn()))
def display_data(self):
disp_data = {'project': self._query.project,
'query': str(self._query),
'num_splits': self._num_splits}
if self._datastore_namespace is not None:
disp_data['namespace'] = self._datastore_namespace
return disp_data
@typehints.with_input_types(types.Query)
@typehints.with_output_types(types.Query)
class _SplitQueryFn(DoFn):
def __init__(self, num_splits):
super(ReadFromDatastore._SplitQueryFn, self).__init__()
self._num_splits = num_splits
def process(self, query, *args, **kwargs):
client = helper.get_client(query.project, query.namespace)
try:
query_splitter.validate_split(query)
if self._num_splits == 0:
estimated_num_splits = self.get_estimated_num_splits(client, query)
else:
estimated_num_splits = self._num_splits
logging.info("Splitting the query into %d splits", estimated_num_splits)
query_splits = query_splitter.get_splits(
client, query, estimated_num_splits)
except query_splitter.QuerySplitterError:
logging.info("Unable to parallelize the given query: %s", query,
exc_info=True)
query_splits = [query]
return query_splits
def display_data(self):
disp_data = {'num_splits': self._num_splits}
return disp_data
@staticmethod
def query_latest_statistics_timestamp(client):
if client.namespace is None:
kind = '__Stat_Total__'
else:
kind = '__Stat_Ns_Total__'
query = client.query(kind=kind, order=["-timestamp", ])
entities = list(query.fetch(limit=1))
if not entities:
raise RuntimeError("Datastore total statistics unavailable.")
return entities[0]['timestamp']
@staticmethod
def get_estimated_size_bytes(client, query):
kind_name = query.kind
latest_timestamp = (
ReadFromDatastore._SplitQueryFn
.query_latest_statistics_timestamp(client))
logging.info('Latest stats timestamp for kind %s is %s',
kind_name, latest_timestamp)
if client.namespace is None:
kind = '__Stat_Kind__'
else:
kind = '__Stat_Ns_Kind__'
query = client.query(kind=kind)
query.add_filter('kind_name', '=', kind_name)
query.add_filter('timestamp', '=', latest_timestamp)
entities = list(query.fetch(limit=1))
if not entities:
raise RuntimeError(
'Datastore statistics for kind %s unavailable' % kind_name)
return entities[0]['entity_bytes']
@staticmethod
def get_estimated_num_splits(client, query):
try:
estimated_size_bytes = (
ReadFromDatastore._SplitQueryFn
.get_estimated_size_bytes(client, query))
logging.info('Estimated size bytes for query: %s', estimated_size_bytes)
num_splits = int(min(ReadFromDatastore._NUM_QUERY_SPLITS_MAX, round(
(float(estimated_size_bytes) /
ReadFromDatastore._DEFAULT_BUNDLE_SIZE_BYTES))))
except Exception as e:
logging.warning('Failed to fetch estimated size bytes: %s', e)
num_splits = ReadFromDatastore._NUM_QUERY_SPLITS_MIN
return max(num_splits, ReadFromDatastore._NUM_QUERY_SPLITS_MIN)
@typehints.with_input_types(types.Query)
@typehints.with_output_types(types.Entity)
class _QueryFn(DoFn):
def process(self, query, *unused_args, **unused_kwargs):
_client = helper.get_client(query.project, query.namespace)
client_query = query._to_client_query(_client)
for client_entity in client_query.fetch(query.limit):
yield types.Entity.from_client_entity(client_entity)
class _Mutate(PTransform):
def __init__(self, mutate_fn):
self._mutate_fn = mutate_fn
def expand(self, pcoll):
return pcoll | 'Write Batch to Datastore' >> ParDo(self._mutate_fn)
class DatastoreMutateFn(DoFn):
def __init__(self, project):
self._project = project
self._client = None
self._rpc_successes = Metrics.counter(
_Mutate.DatastoreMutateFn, "datastoreRpcSuccesses")
self._rpc_errors = Metrics.counter(
_Mutate.DatastoreMutateFn, "datastoreRpcErrors")
self._throttled_secs = Metrics.counter(
_Mutate.DatastoreMutateFn, "cumulativeThrottlingSeconds")
self._throttler = AdaptiveThrottler(window_ms=120000, bucket_ms=1000,
overload_ratio=1.25)
def _update_rpc_stats(self, successes=0, errors=0, throttled_secs=0):
self._rpc_successes.inc(successes)
self._rpc_errors.inc(errors)
self._throttled_secs.inc(throttled_secs)
def start_bundle(self):
self._client = helper.get_client(self._project, namespace=None)
self._init_batch()
self._batch_sizer = util.DynamicBatchSizer()
self._target_batch_size = self._batch_sizer.get_batch_size(
time.time() * 1000)
def element_to_client_batch_item(self, element):
raise NotImplementedError
def add_to_batch(self, client_batch_item):
raise NotImplementedError
@retry.with_exponential_backoff(num_retries=5,
retry_filter=helper.retry_on_rpc_error)
def write_mutations(self, throttler, rpc_stats_callback, throttle_delay=1):
while throttler.throttle_request(time.time() * 1000):
logging.info("Delaying request for %ds due to previous failures",
throttle_delay)
time.sleep(throttle_delay)
rpc_stats_callback(throttled_secs=throttle_delay)
if self._batch is None:
self._batch = self._client.batch()
self._batch.begin()
for element in self._batch_elements:
self.add_to_batch(element)
try:
start_time = time.time()
self._batch.commit()
end_time = time.time()
rpc_stats_callback(successes=1)
throttler.successful_request(start_time * 1000)
commit_time_ms = int((end_time-start_time) * 1000)
return commit_time_ms
except Exception:
self._batch = None
rpc_stats_callback(errors=1)
raise
def process(self, element):
client_element = self.element_to_client_batch_item(element)
self._batch_elements.append(client_element)
self.add_to_batch(client_element)
self._batch_bytes_size += self._batch.mutations[-1].ByteSize()
if (len(self._batch.mutations) >= self._target_batch_size or
self._batch_bytes_size > util.WRITE_BATCH_MAX_BYTES_SIZE):
self._flush_batch()
def finish_bundle(self):
if self._batch_elements:
self._flush_batch()
def _init_batch(self):
self._batch_bytes_size = 0
self._batch = self._client.batch()
self._batch.begin()
self._batch_elements = []
def _flush_batch(self):
latency_ms = self.write_mutations(
self._throttler,
rpc_stats_callback=self._update_rpc_stats,
throttle_delay=util.WRITE_BATCH_TARGET_LATENCY_MS // 1000)
logging.debug("Successfully wrote %d mutations in %dms.",
len(self._batch.mutations), latency_ms)
now = time.time() * 1000
self._batch_sizer.report_latency(
now, latency_ms, len(self._batch.mutations))
self._target_batch_size = self._batch_sizer.get_batch_size(now)
self._init_batch()
@typehints.with_input_types(types.Entity)
class WriteToDatastore(_Mutate):
def __init__(self, project):
mutate_fn = WriteToDatastore._DatastoreWriteFn(project)
super(WriteToDatastore, self).__init__(mutate_fn)
class _DatastoreWriteFn(_Mutate.DatastoreMutateFn):
def element_to_client_batch_item(self, element):
if not isinstance(element, types.Entity):
raise ValueError('apache_beam.io.gcp.datastore.v1new.datastoreio.Entity'
' expected, got: %s' % type(element))
if not element.key.project:
element.key.project = self._project
client_entity = element.to_client_entity()
if client_entity.key.is_partial:
raise ValueError('Entities to be written to Cloud Datastore must '
'have complete keys:\n%s' % client_entity)
return client_entity
def add_to_batch(self, client_entity):
self._batch.put(client_entity)
def display_data(self):
return {
'mutation': 'Write (upsert)',
'project': self._project,
}
@typehints.with_input_types(types.Key)
class DeleteFromDatastore(_Mutate):
def __init__(self, project):
mutate_fn = DeleteFromDatastore._DatastoreDeleteFn(project)
super(DeleteFromDatastore, self).__init__(mutate_fn)
class _DatastoreDeleteFn(_Mutate.DatastoreMutateFn):
def element_to_client_batch_item(self, element):
if not isinstance(element, types.Key):
raise ValueError('apache_beam.io.gcp.datastore.v1new.datastoreio.Key'
' expected, got: %s' % type(element))
if not element.project:
element.project = self._project
client_key = element.to_client_key()
if client_key.is_partial:
raise ValueError('Keys to be deleted from Cloud Datastore must be '
'complete:\n%s' % client_key)
return client_key
def add_to_batch(self, client_key):
self._batch.delete(client_key)
def display_data(self):
return {
'mutation': 'Delete',
'project': self._project,
}
| true
| true
|
f71a808666b13ce290442e22bb59d1788d36b370
| 1,950
|
py
|
Python
|
tools/find_run_binary.py
|
pospx/external_skia
|
7a135275c9fc2a4b3cbdcf9a96e7102724752234
|
[
"BSD-3-Clause"
] | 6,304
|
2015-01-05T23:45:12.000Z
|
2022-03-31T09:48:13.000Z
|
third_party/skia/tools/find_run_binary.py
|
w4454962/miniblink49
|
b294b6eacb3333659bf7b94d670d96edeeba14c0
|
[
"Apache-2.0"
] | 459
|
2016-09-29T00:51:38.000Z
|
2022-03-07T14:37:46.000Z
|
third_party/skia/tools/find_run_binary.py
|
w4454962/miniblink49
|
b294b6eacb3333659bf7b94d670d96edeeba14c0
|
[
"Apache-2.0"
] | 1,231
|
2015-01-05T03:17:39.000Z
|
2022-03-31T22:54:58.000Z
|
#!/usr/bin/python
# Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Module that finds and runs a binary by looking in the likely locations."""
import os
import subprocess
import sys
def run_command(args):
  """Runs a program from the command line and returns stdout.

  Args:
    args: Command line to run, as a list of string parameters. args[0] is the
        binary to run.

  Returns:
    stdout from the program, as a single string.

  Raises:
    Exception: the program exited with a nonzero return code.
  """
  proc = subprocess.Popen(args,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
  (stdout, stderr) = proc.communicate()
  # Use != rather than 'is not': identity comparison with an int literal
  # relies on CPython small-int caching and raises a SyntaxWarning on
  # modern interpreters.
  if proc.returncode != 0:
    raise Exception('command "%s" failed: %s' % (args, stderr))
  return stdout
def find_path_to_program(program):
  """Returns path to an existing program binary.

  Args:
    program: Basename of the program to find (e.g., 'render_pictures').

  Returns:
    Absolute path to the program binary, as a string.

  Raises:
    Exception: unable to find the program binary.
  """
  trunk_path = os.path.abspath(
      os.path.join(os.path.dirname(__file__), os.pardir))
  # Prefer Release over Debug, and bare names over Windows '.exe' names.
  possible_paths = [
      os.path.join(trunk_path, 'out', config, program + suffix)
      for suffix in ('', '.exe')
      for config in ('Release', 'Debug')]
  for candidate in possible_paths:
    if os.path.isfile(candidate):
      return candidate
  raise Exception('cannot find %s in paths %s; maybe you need to '
                  'build %s?' % (program, possible_paths, program))
| 31.451613
| 77
| 0.615385
|
import os
import subprocess
import sys
def run_command(args):
  """Run ``args`` (argv list) and return its stdout bytes.

  Raises:
    Exception: the program exited with a nonzero return code.
  """
  proc = subprocess.Popen(args,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
  (stdout, stderr) = proc.communicate()
  # '!=' instead of 'is not': identity tests against int literals are an
  # implementation detail of small-int caching and warn on modern CPython.
  if proc.returncode != 0:
    raise Exception('command "%s" failed: %s' % (args, stderr))
  return stdout
def find_path_to_program(program):
  """Return the absolute path to ``program`` under out/{Release,Debug}.

  Raises:
    Exception: no candidate path points at an existing file.
  """
  trunk_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                            os.pardir))
  # Bare names are tried before Windows '.exe' names, Release before Debug.
  names = (program, program + '.exe')
  possible_paths = [os.path.join(trunk_path, 'out', config, name)
                    for name in names
                    for config in ('Release', 'Debug')]
  existing = (path for path in possible_paths if os.path.isfile(path))
  found = next(existing, None)
  if found is None:
    raise Exception('cannot find %s in paths %s; maybe you need to '
                    'build %s?' % (program, possible_paths, program))
  return found
| true
| true
|
f71a80a3465b00ac689f97b6980266f31c3f42cb
| 1,463
|
py
|
Python
|
medium/binary-tree-level-order-traversal.py
|
therealabdi2/LeetcodeQuestions
|
4c45ee836482a2c7b59906f7a7a99b5b3fa17317
|
[
"MIT"
] | null | null | null |
medium/binary-tree-level-order-traversal.py
|
therealabdi2/LeetcodeQuestions
|
4c45ee836482a2c7b59906f7a7a99b5b3fa17317
|
[
"MIT"
] | null | null | null |
medium/binary-tree-level-order-traversal.py
|
therealabdi2/LeetcodeQuestions
|
4c45ee836482a2c7b59906f7a7a99b5b3fa17317
|
[
"MIT"
] | null | null | null |
'''
Given the root of a binary tree, return the level order traversal of its nodes' values.
(i.e., from left to right, level by level).
Example 1:
Input: root = [3,9,20,null,null,15,7]
Output: [[3],[9,20],[15,7]]
Example 2:
Input: root = [1]
Output: [[1]]
Example 3:
Input: root = []
Output: []
'''
# Definition for a binary tree node.
import collections
from collections import deque
from typing import Optional, List
class TreeNode:
    """A binary-tree node holding a value and optional child links."""

    def __init__(self, val=0, left=None, right=None):
        # Payload plus left/right subtree references (None = absent child).
        self.val, self.left, self.right = val, left, right
class Solution:
    def levelOrder(self, root: Optional[TreeNode]) -> List[List[int]]:
        """Return node values grouped by depth, left to right per level."""
        output = []
        if root is None:
            return output
        frontier = collections.deque([root])
        while frontier:
            current_level = []
            # Drain exactly the nodes that were queued for this depth.
            for _ in range(len(frontier)):
                node = frontier.popleft()
                if node:
                    current_level.append(node.val)
                    # Children may be None; they are filtered on dequeue.
                    frontier.append(node.left)
                    frontier.append(node.right)
            # Levels consisting solely of None placeholders are dropped.
            if current_level:
                output.append(current_level)
        return output
| 22.859375
| 87
| 0.548189
|
import collections
from collections import deque
from typing import Optional, List
class TreeNode:
    """Binary-tree node: a value and two optional child pointers."""

    def __init__(self, val=0, left=None, right=None):
        self.val = val
        # Missing children are represented by None.
        self.left = left
        self.right = right
class Solution:
    def levelOrder(self, root: Optional[TreeNode]) -> List[List[int]]:
        """Breadth-first traversal returning one list of values per level."""
        if root is None:
            return []
        levels = []
        queue = collections.deque((root,))
        while queue:
            width = len(queue)  # nodes belonging to the current depth
            values = []
            for _ in range(width):
                node = queue.popleft()
                if node is None:
                    continue  # placeholder child queued by its parent
                values.append(node.val)
                queue.append(node.left)
                queue.append(node.right)
            if values:
                levels.append(values)
        return levels
| true
| true
|
f71a8102f3baad74119a666239cf334a1a7047ff
| 23,415
|
py
|
Python
|
sfa_dash/conftest.py
|
lboeman/solarforecastarbiter_dashboard
|
cd4dcffa922625b548d93f83be2b7c10457e1bbe
|
[
"MIT"
] | 4
|
2020-06-02T01:40:40.000Z
|
2021-06-01T20:15:00.000Z
|
sfa_dash/conftest.py
|
lboeman/solarforecastarbiter_dashboard
|
cd4dcffa922625b548d93f83be2b7c10457e1bbe
|
[
"MIT"
] | 181
|
2020-05-14T15:42:55.000Z
|
2021-12-02T20:27:44.000Z
|
sfa_dash/conftest.py
|
lboeman/solarforecastarbiter_dashboard
|
cd4dcffa922625b548d93f83be2b7c10457e1bbe
|
[
"MIT"
] | 2
|
2018-11-02T19:32:16.000Z
|
2018-11-06T17:06:28.000Z
|
import os
import requests
import pymysql
import pytest
from flask import url_for
from solarforecastarbiter.datamodel import QualityFlagFilter as QFF
from sfa_dash import create_app
BASE_URL = 'http://localhost'
resample_threshold = QFF.resample_threshold_percentage
@pytest.fixture(scope='session')
def auth_token():
    """Session-scoped Auth0 access token for the shared testing account.

    Skips the whole session when a valid token cannot be obtained, so
    network-dependent tests don't fail spuriously.
    """
    payload = ('{"grant_type": "password", '
               '"username": "testing@solarforecastarbiter.org",'
               '"password": "Thepassword123!", '
               '"audience": "https://api.solarforecastarbiter.org", '
               '"client_id": "c16EJo48lbTCQEhqSztGGlmxxxmZ4zX7"}')
    response = requests.post(
        'https://solarforecastarbiter.auth0.com/oauth/token',
        headers={'content-type': 'application/json'},
        data=payload)
    if response.status_code != 200:
        pytest.skip('Cannot retrieve valid Auth0 token')
    return response.json()
@pytest.fixture()
def expired_token():
stored = {'access_token': 'eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsImtpZCI6Ik5UZENSRGRFTlVNMk9FTTJNVGhCTWtRelFUSXpNRFF6TUVRd1JUZ3dNekV3T1VWR1FrRXpSUSJ9.eyJpc3MiOiJodHRwczovL3NvbGFyZm9yZWNhc3RhcmJpdGVyLmF1dGgwLmNvbS8iLCJzdWIiOiJhdXRoMHw1YmUzNDNkZjcwMjU0MDYyMzc4MjBiODUiLCJhdWQiOlsiaHR0cHM6Ly9hcGkuc29sYXJmb3JlY2FzdGFyYml0ZXIub3JnIiwiaHR0cHM6Ly9zb2xhcmZvcmVjYXN0YXJiaXRlci5hdXRoMC5jb20vdXNlcmluZm8iXSwiaWF0IjoxNTU1NDU0NzcwLCJleHAiOjE1NTU0NjU1NzAsImF6cCI6IlByRTM5QWR0R01QSTRnSzJoUnZXWjJhRFJhcmZwZzdBIiwic2NvcGUiOiJvcGVuaWQgcHJvZmlsZSBlbWFpbCBvZmZsaW5lX2FjY2VzcyJ9.lT1XPtLkYCVGUZjcAgWFCU6AJbKWtE077zw_KO4fhIaF0wo6TTpLTkZBmF9Sxmrwb5NfeR5XuJmkX3SPCjpzcZG9wdXIpPWRGhsOAAUdoSkoHKFzALoc46VPjA3A5SZxlGqNeh6RoKFlWRAp5EJN9Z-JcwT06JyJGrbx7ip4tCbAADqWuDY2tzkjKD3EHjHTO9OSJiCRxlNA22OCfMTF6B8-8RLUabZ414bypezw83S9g25mLLWtlGhQvzWGA8F7yhhVXbEsAPPC1QoyjevXzn8PBqL5dSDp6u1gL6T29PsbhZ0diZ1xt5jkm4iX-cryc7tqwq-5D5ZkC3wbhNpLuQ', 'refresh_token': 'QlLHR9wyFS5cokItX-ym7jWlCCuLO1fC3AtZLUeDVX-mI', 'id_token': 'eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsImtpZCI6Ik5UZENSRGRFTlVNMk9FTTJNVGhCTWtRelFUSXpNRFF6TUVRd1JUZ3dNekV3T1VWR1FrRXpSUSJ9.eyJuaWNrbmFtZSI6InRlc3RpbmciLCJuYW1lIjoidGVzdGluZ0Bzb2xhcmZvcmVjYXN0YXJiaXRlci5vcmciLCJwaWN0dXJlIjoiaHR0cHM6Ly9zLmdyYXZhdGFyLmNvbS9hdmF0YXIvY2MxMTNkZjY5NmY4ZTlmMjA2Nzc5OTQzMzUxNzRhYjY_cz00ODAmcj1wZyZkPWh0dHBzJTNBJTJGJTJGY2RuLmF1dGgwLmNvbSUyRmF2YXRhcnMlMkZ0ZS5wbmciLCJ1cGRhdGVkX2F0IjoiMjAxOS0wNC0xNlQyMjo0NjoxMC42NTdaIiwiZW1haWwiOiJ0ZXN0aW5nQHNvbGFyZm9yZWNhc3RhcmJpdGVyLm9yZyIsImVtYWlsX3ZlcmlmaWVkIjpmYWxzZSwiaXNzIjoiaHR0cHM6Ly9zb2xhcmZvcmVjYXN0YXJiaXRlci5hdXRoMC5jb20vIiwic3ViIjoiYXV0aDB8NWJlMzQzZGY3MDI1NDA2MjM3ODIwYjg1IiwiYXVkIjoiUHJFMzlBZHRHTVBJNGdLMmhSdldaMmFEUmFyZnBnN0EiLCJpYXQiOjE1NTU0NTQ3NzAsImV4cCI6MTU1NTQ5MDc3MH0.axw45-ms_LVIS_WsUdcCryZeOwpZVAn95zbUm9WO23bpIja7QaR1h6_Emb9nUNJIk44vp-J-zwKIZd4j7bg5_vaVcJER4_rL8vlc6f5lJdZAU20yeisTT4q1YcwlvQhg7avWMUkZaiO3SgK0eJ3371Gm2gJgK2b21bnpzmUHQ0vS906GLGngaVzb3VEE_g4CgR4u6qmBQRwq3Z6DyRBq572Qhn3TXk_0Xvj43Q9TyYjV5ioou5Xe-3T5HHsCoUWqDp0BZ3bP9FlYFws9Df
fnFzf1yVtpwfk9shmAe8V6Fn9N0OjuS4LJP0Tc-I7adspJlYfB9BeTEci6MKn58OQCrw', 'scope': ['openid', 'profile', 'email', 'offline_access'], 'expires_in': 0, 'token_type': 'Bearer', 'expires_at': 1555465570.9597309} # NOQA
return stored
@pytest.fixture()
def mocked_storage(mocker, auth_token, expired_token):
    """Factory building a fake session-storage class around a fixed token."""
    def make_storage(authenticated=False):
        # Authenticated sessions get the live token, others the canned
        # expired one.
        token = auth_token if authenticated else expired_token

        class fake_storage:
            def __init__(*args, **kwargs):
                pass

            def get(self, *args):
                return token

            def set(self, *args):
                pass

            def delete(self, *args):
                pass

        return fake_storage
    return make_storage
@pytest.fixture()
def mocked_unauth_storage(mocker, mocked_storage):
    """Patch the dashboard session storage with an expired-token store."""
    unauthenticated = mocked_storage()
    mocker.patch('sfa_dash.session_storage', new=unauthenticated)
@pytest.fixture()
def mocked_auth_storage(mocker, mocked_storage):
    """Patch the dashboard session storage with a live-token store."""
    authenticated = mocked_storage(True)
    mocker.patch('sfa_dash.session_storage', new=authenticated)
@pytest.fixture()
def app_unauth(mocked_unauth_storage):
    """Flask app wired to an unauthenticated (expired-token) session."""
    # Allow the OAuth flow over plain HTTP during tests.
    os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'
    return create_app('sfa_dash.config.TestConfig')
@pytest.fixture()
def app(mocked_auth_storage):
    """Flask app wired to an authenticated (valid-token) session."""
    # Allow the OAuth flow over plain HTTP during tests.
    os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'
    return create_app('sfa_dash.config.TestConfig')
@pytest.fixture()
def client(app):
    """Werkzeug test client bound to the authenticated app."""
    yield app.test_client()
# Dashboard routes that take no path parameters.
no_arg_routes_list = [
    '/sites/',
    '/observations/',
    '/forecasts/single/',
    '/forecasts/cdf/',
    '/reports/',
    '/aggregates/',
    '/sites/create',
    '/reports/deterministic/create',
    '/reports/event/create',
    '/aggregates/create',
]


@pytest.fixture(params=no_arg_routes_list)
def no_arg_route(request):
    """Each parameterless dashboard route in turn."""
    return request.param
# Admin routes that take no path parameters.
admin_routes_list = [
    '/admin/permissions/create/cdf_forecast_group',
    '/admin/permissions/create/observation',
    '/admin/permissions/create/forecast',
    '/admin/permissions/create/report',
    '/admin/permissions/create/site',
    '/admin/roles/create',
    '/admin/permissions/',
    '/admin/roles/',
    '/admin/users/',
]


@pytest.fixture(params=admin_routes_list)
def admin_route(request):
    """Each parameterless admin route in turn."""
    return request.param
# Admin routes whose templates require several uuids at once.
admin_multiarg_route_list = [
    '/admin/permissions/{permission_id}/remove/{object_id}',
    '/admin/roles/{role_id}/remove/{permission_id}',
    '/admin/users/{user_id}/remove/{role_id}',
]


@pytest.fixture(params=admin_multiarg_route_list)
def admin_multiarg_route(request):
    """Callable filling a multi-id admin route template with uuids."""
    def fn(object_id, permission_id, user_id, role_id):
        ids = {'object_id': object_id,
               'permission_id': permission_id,
               'user_id': user_id,
               'role_id': role_id}
        # Each template uses only the subset of keys it mentions.
        return request.param.format(**ids)
    return fn
user_id_route_list = [
    '/admin/users/{user_id}',
    '/admin/users/{user_id}/add/',
]


@pytest.fixture(params=user_id_route_list)
def user_id_route(request):
    """Callable filling each user-detail admin route with a user uuid."""
    def fn(user_id):
        return request.param.format_map({'user_id': user_id})
    return fn
role_id_route_list = [
    '/admin/roles/{role_id}',
    '/admin/roles/{role_id}/delete',
    '/admin/roles/{role_id}/add/',
    '/admin/roles/{role_id}/grant/',
]


@pytest.fixture(params=role_id_route_list)
def role_id_route(request):
    """Callable filling each role admin route with a role uuid."""
    def fn(role_id):
        return request.param.format_map({'role_id': role_id})
    return fn
permission_id_route_list = [
    '/admin/permissions/{permission_id}',
    '/admin/permissions/{permission_id}/delete',
    '/admin/permissions/{permission_id}/add',
]


@pytest.fixture(params=permission_id_route_list)
def permission_id_route(request):
    """Callable filling each permission admin route with a permission uuid."""
    def fn(permission_id):
        return request.param.format_map({'permission_id': permission_id})
    return fn
report_id_route_list = [
    '/reports/{report_id}',
    '/reports/{report_id}/delete',
]


@pytest.fixture(params=report_id_route_list)
def report_id_route(request):
    """Callable filling each report route with a report uuid."""
    def fn(report_id):
        return request.param.format_map({'report_id': report_id})
    return fn
site_id_route_list = [
    '/sites/{site_id}/',
    '/sites/{site_id}/delete',
    '/sites/{site_id}/forecasts/single/create',
    '/sites/{site_id}/forecasts/cdf/create',
    # The observations/create entry appeared twice in the original list,
    # which ran an identical parametrized case twice; deduplicated.
    '/sites/{site_id}/observations/create',
]


@pytest.fixture(params=site_id_route_list)
def site_id_route(request):
    """Callable filling each site route with a site uuid."""
    def fn(site_id):
        return request.param.format(site_id=site_id)
    return fn
observation_id_route_list = [
    '/observations/{observation_id}',
    '/observations/{observation_id}/delete',
]


@pytest.fixture(params=observation_id_route_list)
def observation_id_route(request):
    """Callable filling each observation route with an observation uuid."""
    def fn(observation_id):
        return request.param.format_map({'observation_id': observation_id})
    return fn
forecast_id_route_list = [
    '/forecasts/single/{forecast_id}',
    '/forecasts/single/{forecast_id}/delete',
]


@pytest.fixture(params=forecast_id_route_list)
def forecast_id_route(request):
    """Callable filling each deterministic forecast route with a uuid."""
    def fn(forecast_id):
        return request.param.format_map({'forecast_id': forecast_id})
    return fn
cdf_forecast_id_route_list = [
    '/forecasts/cdf/{forecast_id}',
    '/forecasts/cdf/{forecast_id}/delete',
]


@pytest.fixture(params=cdf_forecast_id_route_list)
def cdf_forecast_id_route(request):
    """Callable filling each CDF forecast group route with a uuid."""
    def fn(forecast_id):
        return request.param.format_map({'forecast_id': forecast_id})
    return fn
cdf_forecast_single_id_routes_list = [
    '/forecasts/cdf/single/{forecast_id}',
]


@pytest.fixture(params=cdf_forecast_single_id_routes_list)
def cdf_forecast_single_id_route(request):
    """Callable filling the single constant-value CDF route with a uuid."""
    def fn(forecast_id):
        return request.param.format_map({'forecast_id': forecast_id})
    return fn
aggregate_id_route_list = [
    '/aggregates/{aggregate_id}',
    '/aggregates/{aggregate_id}/delete',
    '/aggregates/{aggregate_id}/add',
    '/aggregates/{aggregate_id}/forecasts/single/create',
    '/aggregates/{aggregate_id}/forecasts/cdf/create',
]


@pytest.fixture(params=aggregate_id_route_list)
def aggregate_id_route(request):
    """Callable filling each aggregate route with an aggregate uuid."""
    def fn(aggregate_id):
        return request.param.format_map({'aggregate_id': aggregate_id})
    return fn
clone_route_list = [
    '/sites/{site_id}/clone',
    '/observations/{observation_id}/clone',
    '/forecasts/single/{forecast_id}/clone',
]


@pytest.fixture(params=clone_route_list)
def clone_route(request):
    """Callable filling each clone route from a dict of every known uuid."""
    def fn(uuids):
        # Templates use only the key they mention; extra keys are ignored.
        return request.param.format_map(uuids)
    return fn
# --- Canned uuids. Presumably these match rows pre-loaded into the
# arbiter_data test database — TODO confirm against the DB seed script. ---
@pytest.fixture()
def missing_id():
    """A syntactically valid uuid that matches no stored object."""
    return '7d2c3208-5243-11e9-8647-d663bd873d93'


@pytest.fixture()
def observation_id():
    """uuid of the canned observation."""
    return '123e4567-e89b-12d3-a456-426655440000'


@pytest.fixture()
def cdf_forecast_group_id():
    """uuid of the canned CDF forecast group."""
    return 'ef51e87c-50b9-11e9-8647-d663bd873d93'


@pytest.fixture()
def cdf_forecast_id():
    """uuid of one constant-value member of the CDF forecast group."""
    return '633f9396-50bb-11e9-8647-d663bd873d93'


@pytest.fixture()
def forecast_id():
    """uuid of the canned deterministic forecast."""
    return '11c20780-76ae-4b11-bef1-7a75bdc784e3'


@pytest.fixture()
def site_id():
    """uuid of the canned weather-station site."""
    return '123e4567-e89b-12d3-a456-426655440001'


@pytest.fixture()
def site_id_plant():
    """uuid of the canned power-plant site."""
    return '123e4567-e89b-12d3-a456-426655440002'


@pytest.fixture()
def test_orgid():
    """uuid of the test organization."""
    return 'b76ab62e-4fe1-11e9-9e44-64006a511e6f'


@pytest.fixture()
def user_id():
    """uuid of the test user."""
    return '0c90950a-7cca-11e9-a81f-54bf64606445'


@pytest.fixture()
def aggregate_id():
    """uuid of the canned aggregate."""
    return '458ffc27-df0b-11e9-b622-62adb5fd6af0'


@pytest.fixture()
def report_id():
    """uuid of the canned report."""
    return '9f290dd4-42b8-11ea-abdf-f4939feddd82'
@pytest.fixture
def all_metadata_ids(
        observation_id, forecast_id, cdf_forecast_group_id, cdf_forecast_id,
        site_id, site_id_plant, aggregate_id, report_id):
    """Every canned uuid keyed by fixture name, for templating routes."""
    return dict(
        observation_id=observation_id,
        forecast_id=forecast_id,
        cdf_forecast_group_id=cdf_forecast_group_id,
        cdf_forecast_id=cdf_forecast_id,
        site_id=site_id,
        site_id_plant=site_id_plant,
        aggregate_id=aggregate_id,
        report_id=report_id,
    )
@pytest.fixture()
def test_url(app):
    """Factory producing absolute URLs for Flask view names."""
    def fn(view):
        # A request context is required for url_for outside a request.
        with app.test_request_context():
            url = url_for(view, _external=True)
        return url
    return fn
@pytest.fixture(scope='session')
def connection():
    """Session-wide MySQL connection to the arbiter_data test database.

    Nothing ever calls connection.commit(), so no test data persists.
    """
    params = dict(
        host=os.getenv('MYSQL_HOST', '127.0.0.1'),
        port=int(os.getenv('MYSQL_PORT', '3306')),
        user='root',
        password='testpassword',
        database='arbiter_data',
        binary_prefix=True)
    return pymysql.connect(**params)
@pytest.fixture()
def cursor(connection):
    """Tuple-row cursor on a freshly rolled-back connection."""
    # Discard any uncommitted state left by a previous test.
    connection.rollback()
    cur = connection.cursor()
    return cur
@pytest.fixture()
def dictcursor(connection):
    """Dict-row cursor on a freshly rolled-back connection."""
    # Discard any uncommitted state left by a previous test.
    connection.rollback()
    return connection.cursor(cursor=pymysql.cursors.DictCursor)
@pytest.fixture()
def role_id(cursor):
    """uuid of the pre-seeded "Test user role" row."""
    cursor.execute(
        'SELECT BIN_TO_UUID(id, 1) from arbiter_data.roles '
        'WHERE name = "Test user role"')
    return cursor.fetchone()[0]
@pytest.fixture()
def permission_id(cursor, role_id):
    """uuid of an arbitrary permission granted by the test role."""
    query = (
        'SELECT BIN_TO_UUID(id, 1) FROM arbiter_data.permissions '
        'WHERE id IN (SELECT permission_id FROM '
        'arbiter_data.role_permission_mapping WHERE role_id '
        '= UUID_TO_BIN(%s, 1) ) LIMIT 1')
    cursor.execute(query, role_id)
    return cursor.fetchone()[0]
@pytest.fixture()
def permission_object_type(cursor, permission_id):
    """The object_type string of the permission_id fixture's permission."""
    cursor.execute(
        'SELECT object_type FROM arbiter_data.permissions '
        'WHERE id = UUID_TO_BIN(%s, 1)', permission_id)
    return cursor.fetchone()[0]
@pytest.fixture()
def valid_permission_object_id(
        observation_id, forecast_id, cdf_forecast_group_id, aggregate_id,
        site_id, role_id, user_id, permission_id, report_id,
        permission_object_type):
    """An object uuid matching the type targeted by permission_id.

    Returns None for an unrecognized object type, as before.
    """
    by_type = {
        'forecasts': forecast_id,
        'observations': observation_id,
        'cdf_forecasts': cdf_forecast_group_id,
        # The original branch tested the misspelled 'agggregates', which
        # could never match the API's 'aggregates' type; both spellings
        # are accepted here for backward compatibility.
        'agggregates': aggregate_id,
        'aggregates': aggregate_id,
        'sites': site_id,
        'reports': report_id,
        'users': user_id,
        'permissions': permission_id,
        'roles': role_id,
    }
    return by_type.get(permission_object_type)
@pytest.fixture()
def site():
    """API-shaped metadata dict for the canned 'Weather Station' site.

    All modeling_parameters are None: this site has no power plant.
    """
    return {
        'created_at': '2019-03-01T11:44:38+00:00',
        'elevation': 595.0,
        'extra_parameters': '{"network_api_abbreviation": "AS","network": "University of Oregon SRML","network_api_id": "94040"}',  # noqa
        'latitude': 42.19,
        'longitude': -122.7,
        'modeling_parameters': {'ac_capacity': None,
                                'ac_loss_factor': None,
                                'axis_azimuth': None,
                                'axis_tilt': None,
                                'backtrack': None,
                                'dc_capacity': None,
                                'dc_loss_factor': None,
                                'ground_coverage_ratio': None,
                                'max_rotation_angle': None,
                                'surface_azimuth': None,
                                'surface_tilt': None,
                                'temperature_coefficient': None,
                                'tracking_type': None},
        'modified_at': '2019-03-01T11:44:38+00:00',
        'name': 'Weather Station',
        'provider': 'Organization 1',
        'site_id': '123e4567-e89b-12d3-a456-426655440001',
        'timezone': 'Etc/GMT+8'}
@pytest.fixture()
def site_with_modeling_params():
return {
'created_at': '2019-03-01T11:44:46+00:00',
'elevation': 786.0,
'extra_parameters': '',
'latitude': 43.73403,
'longitude': -96.62328,
'modeling_parameters': {
'ac_capacity': 0.015,
'ac_loss_factor': 0.0,
'axis_azimuth': None,
'axis_tilt': None,
'backtrack': None,
'dc_capacity': 0.015,
'dc_loss_factor': 0.0,
'ground_coverage_ratio': None,
'max_rotation_angle': None,
'surface_azimuth': 180.0,
'surface_tilt': 45.0,
'temperature_coefficient': -0.2,
'tracking_type': 'fixed'},
'modified_at': '2019-03-01T11:44:46+00:00',
'name': 'Power Plant 1',
'provider': 'Organization 1',
'site_id': '123e4567-e89b-12d3-a456-426655440002',
'timezone': 'Etc/GMT+6'}
@pytest.fixture()
def observation():
    """API-shaped metadata dict for the canned 'GHI Instrument 1' observation."""
    return {
        '_links': {'site': 'http://localhost:5000/sites/123e4567-e89b-12d3-a456-426655440001'},  # noqa
        'created_at': '2019-03-01T12:01:39+00:00',
        'extra_parameters': '{"instrument": "Ascension Technology Rotating Shadowband Pyranometer", "network": "UO SRML"}',  # noqa
        'interval_label': 'beginning',
        'interval_length': 5,
        'interval_value_type': 'interval_mean',
        'modified_at': '2019-03-01T12:01:39+00:00',
        'name': 'GHI Instrument 1',
        'observation_id': '123e4567-e89b-12d3-a456-426655440000',
        'provider': 'Organization 1',
        'site_id': '123e4567-e89b-12d3-a456-426655440001',
        'uncertainty': 0.1,
        'variable': 'ghi'}
@pytest.fixture()
def forecast():
    """API-shaped metadata dict for the canned site-based 'DA GHI' forecast."""
    return {
        '_links': {'aggregate': None,
                   'site': 'http://localhost:5000/sites/123e4567-e89b-12d3-a456-426655440001'},  # noqa
        'aggregate_id': None,
        'created_at': '2019-03-01T11:55:37+00:00',
        'extra_parameters': '',
        'forecast_id': '11c20780-76ae-4b11-bef1-7a75bdc784e3',
        'interval_label': 'beginning',
        'interval_length': 5,
        'interval_value_type': 'interval_mean',
        'issue_time_of_day': '06:00',
        'lead_time_to_start': 60,
        'modified_at': '2019-03-01T11:55:37+00:00',
        'name': 'DA GHI',
        'provider': 'Organization 1',
        'run_length': 1440,
        'site_id': '123e4567-e89b-12d3-a456-426655440001',
        'variable': 'ghi'}
@pytest.fixture()
def cdf_forecast():
return {
'_links': {'site': 'http://localhost:5000/sites/123e4567-e89b-12d3-a456-426655440001'}, # noqa
'aggregate_id': None,
'axis': 'y',
'constant_values': [{'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9396-50bb-11e9-8647-d663bd873d93/values/timerange', # noqa
'values': 'http://localhost:5000/forecasts/cdf/single/633f9396-50bb-11e9-8647-d663bd873d93/values'}, # noqa
'constant_value': 5.0,
'forecast_id': '633f9396-50bb-11e9-8647-d663bd873d93'},
{'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9864-50bb-11e9-8647-d663bd873d93/values/timerange', # noqa
'values': 'http://localhost:5000/forecasts/cdf/single/633f9864-50bb-11e9-8647-d663bd873d93/values'}, # noqa
'constant_value': 20.0,
'forecast_id': '633f9864-50bb-11e9-8647-d663bd873d93'},
{'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9b2a-50bb-11e9-8647-d663bd873d93/values/timerange', # noqa
'values': 'http://localhost:5000/forecasts/cdf/single/633f9b2a-50bb-11e9-8647-d663bd873d93/values'}, # noqa
'constant_value': 50.0,
'forecast_id': '633f9b2a-50bb-11e9-8647-d663bd873d93'},
{'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9d96-50bb-11e9-8647-d663bd873d93/values/timerange', # noqa
'values': 'http://localhost:5000/forecasts/cdf/single/633f9d96-50bb-11e9-8647-d663bd873d93/values'}, # noqa
'constant_value': 80.0,
'forecast_id': '633f9d96-50bb-11e9-8647-d663bd873d93'},
{'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633fa548-50bb-11e9-8647-d663bd873d93/values/timerange', # noqa
'values': 'http://localhost:5000/forecasts/cdf/single/633fa548-50bb-11e9-8647-d663bd873d93/values'}, # noqa
'constant_value': 95.0,
'forecast_id': '633fa548-50bb-11e9-8647-d663bd873d93'}],
'created_at': '2019-03-02T14:55:37+00:00',
'extra_parameters': '',
'forecast_id': 'ef51e87c-50b9-11e9-8647-d663bd873d93',
'interval_label': 'beginning',
'interval_length': 5,
'interval_value_type': 'interval_mean',
'issue_time_of_day': '06:00',
'lead_time_to_start': 60,
'modified_at': '2019-03-02T14:55:37+00:00',
'name': 'DA GHI',
'provider': 'Organization 1',
'run_length': 1440,
'site_id': '123e4567-e89b-12d3-a456-426655440001',
'variable': 'ghi'}
@pytest.fixture()
def aggregate():
return {
'aggregate_id': '458ffc27-df0b-11e9-b622-62adb5fd6af0',
'aggregate_type': 'mean',
'created_at': '2019-09-24T12:00:00+00:00',
'description': 'ghi agg',
'extra_parameters': 'extra',
'interval_label': 'ending',
'interval_length': 60,
'interval_value_type': 'interval_mean',
'modified_at': '2019-09-24T12:00:00+00:00',
'name': 'Test Aggregate ghi',
'observations': [
{'_links': {'observation': 'http://localhost:5000/observations/123e4567-e89b-12d3-a456-426655440000/metadata'}, # noqa
'created_at': '2019-09-25T00:00:00+00:00',
'effective_from': '2019-01-01T00:00:00+00:00',
'effective_until': None,
'observation_deleted_at': None,
'observation_id': '123e4567-e89b-12d3-a456-426655440000'},
{'_links': {'observation': 'http://localhost:5000/observations/e0da0dea-9482-4073-84de-f1b12c304d23/metadata'}, # noqa
'created_at': '2019-09-25T00:00:00+00:00',
'effective_from': '2019-01-01T00:00:00+00:00',
'effective_until': None,
'observation_deleted_at': None,
'observation_id': 'e0da0dea-9482-4073-84de-f1b12c304d23'},
{'_links': {'observation': 'http://localhost:5000/observations/b1dfe2cb-9c8e-43cd-afcf-c5a6feaf81e2/metadata'}, # noqa
'created_at': '2019-09-25T00:00:00+00:00',
'effective_from': '2019-01-01T00:00:00+00:00',
'effective_until': None,
'observation_deleted_at': None,
'observation_id': 'b1dfe2cb-9c8e-43cd-afcf-c5a6feaf81e2'}],
'provider': 'Organization 1',
'timezone': 'America/Denver',
'variable': 'ghi'}
@pytest.fixture()
def report():
return {
'created_at': '2020-01-22T13:48:00+00:00',
'modified_at': '2020-01-22T13:50:00+00:00',
'provider': 'Organization 1',
'raw_report': {
'data_checksum': None,
'generated_at': '2019-07-01T12:00:00+00:00',
'messages': [
{'function': 'fcn',
'level': 'error',
'message': 'FAILED',
'step': 'dunno'}],
'metrics': [],
'plots': None,
'processed_forecasts_observations': [],
'timezone': 'Etc/GMT+8',
'versions': []},
'report_id': '9f290dd4-42b8-11ea-abdf-f4939feddd82',
'report_parameters': {
'categories': ['total', 'date'],
'end': '2019-06-01T06:59:00+00:00',
'filters': [{'quality_flags': ['USER FLAGGED'],
'discard_before_resample': True,
'resample_threshold_percentage': resample_threshold,
},
{'quality_flags': ['STALE VALUES'],
'discard_before_resample': True,
'resample_threshold_percentage': resample_threshold,
}],
'metrics': ['mae', 'rmse'],
'name': 'NREL MIDC OASIS GHI Forecast Analysis',
'object_pairs': [
{'forecast': '11c20780-76ae-4b11-bef1-7a75bdc784e3',
'observation': '123e4567-e89b-12d3-a456-426655440000',
'reference_forecast': None,
'uncertainty': None,
'forecast_type': 'forecast',
}],
'start': '2019-04-01T07:00:00+00:00',
'costs': [{
'name': 'example cost',
'type': 'constant',
'parameters': {
"cost": 1.1,
"aggregation": "sum",
"net": False,
},
}],
},
'status': 'failed',
'values': [
{'id': 'a2b6ed14-42d0-11ea-aa3c-f4939feddd82',
'object_id': '123e4567-e89b-12d3-a456-426655440000',
'processed_values': 'superencodedvalues'}]
}
| 33.690647
| 2,216
| 0.658808
|
import os
import requests
import pymysql
import pytest
from flask import url_for
from solarforecastarbiter.datamodel import QualityFlagFilter as QFF
from sfa_dash import create_app
BASE_URL = 'http://localhost'
resample_threshold = QFF.resample_threshold_percentage
@pytest.fixture(scope='session')
def auth_token():
token_req = requests.post(
'https://solarforecastarbiter.auth0.com/oauth/token',
headers={'content-type': 'application/json'},
data=('{"grant_type": "password", '
'"username": "testing@solarforecastarbiter.org",'
'"password": "Thepassword123!", '
'"audience": "https://api.solarforecastarbiter.org", '
'"client_id": "c16EJo48lbTCQEhqSztGGlmxxxmZ4zX7"}'))
if token_req.status_code != 200:
pytest.skip('Cannot retrieve valid Auth0 token')
else:
token = token_req.json()
return token
@pytest.fixture()
def expired_token():
stored = {'access_token': 'eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsImtpZCI6Ik5UZENSRGRFTlVNMk9FTTJNVGhCTWtRelFUSXpNRFF6TUVRd1JUZ3dNekV3T1VWR1FrRXpSUSJ9.eyJpc3MiOiJodHRwczovL3NvbGFyZm9yZWNhc3RhcmJpdGVyLmF1dGgwLmNvbS8iLCJzdWIiOiJhdXRoMHw1YmUzNDNkZjcwMjU0MDYyMzc4MjBiODUiLCJhdWQiOlsiaHR0cHM6Ly9hcGkuc29sYXJmb3JlY2FzdGFyYml0ZXIub3JnIiwiaHR0cHM6Ly9zb2xhcmZvcmVjYXN0YXJiaXRlci5hdXRoMC5jb20vdXNlcmluZm8iXSwiaWF0IjoxNTU1NDU0NzcwLCJleHAiOjE1NTU0NjU1NzAsImF6cCI6IlByRTM5QWR0R01QSTRnSzJoUnZXWjJhRFJhcmZwZzdBIiwic2NvcGUiOiJvcGVuaWQgcHJvZmlsZSBlbWFpbCBvZmZsaW5lX2FjY2VzcyJ9.lT1XPtLkYCVGUZjcAgWFCU6AJbKWtE077zw_KO4fhIaF0wo6TTpLTkZBmF9Sxmrwb5NfeR5XuJmkX3SPCjpzcZG9wdXIpPWRGhsOAAUdoSkoHKFzALoc46VPjA3A5SZxlGqNeh6RoKFlWRAp5EJN9Z-JcwT06JyJGrbx7ip4tCbAADqWuDY2tzkjKD3EHjHTO9OSJiCRxlNA22OCfMTF6B8-8RLUabZ414bypezw83S9g25mLLWtlGhQvzWGA8F7yhhVXbEsAPPC1QoyjevXzn8PBqL5dSDp6u1gL6T29PsbhZ0diZ1xt5jkm4iX-cryc7tqwq-5D5ZkC3wbhNpLuQ', 'refresh_token': 'QlLHR9wyFS5cokItX-ym7jWlCCuLO1fC3AtZLUeDVX-mI', 'id_token': 'eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsImtpZCI6Ik5UZENSRGRFTlVNMk9FTTJNVGhCTWtRelFUSXpNRFF6TUVRd1JUZ3dNekV3T1VWR1FrRXpSUSJ9.eyJuaWNrbmFtZSI6InRlc3RpbmciLCJuYW1lIjoidGVzdGluZ0Bzb2xhcmZvcmVjYXN0YXJiaXRlci5vcmciLCJwaWN0dXJlIjoiaHR0cHM6Ly9zLmdyYXZhdGFyLmNvbS9hdmF0YXIvY2MxMTNkZjY5NmY4ZTlmMjA2Nzc5OTQzMzUxNzRhYjY_cz00ODAmcj1wZyZkPWh0dHBzJTNBJTJGJTJGY2RuLmF1dGgwLmNvbSUyRmF2YXRhcnMlMkZ0ZS5wbmciLCJ1cGRhdGVkX2F0IjoiMjAxOS0wNC0xNlQyMjo0NjoxMC42NTdaIiwiZW1haWwiOiJ0ZXN0aW5nQHNvbGFyZm9yZWNhc3RhcmJpdGVyLm9yZyIsImVtYWlsX3ZlcmlmaWVkIjpmYWxzZSwiaXNzIjoiaHR0cHM6Ly9zb2xhcmZvcmVjYXN0YXJiaXRlci5hdXRoMC5jb20vIiwic3ViIjoiYXV0aDB8NWJlMzQzZGY3MDI1NDA2MjM3ODIwYjg1IiwiYXVkIjoiUHJFMzlBZHRHTVBJNGdLMmhSdldaMmFEUmFyZnBnN0EiLCJpYXQiOjE1NTU0NTQ3NzAsImV4cCI6MTU1NTQ5MDc3MH0.axw45-ms_LVIS_WsUdcCryZeOwpZVAn95zbUm9WO23bpIja7QaR1h6_Emb9nUNJIk44vp-J-zwKIZd4j7bg5_vaVcJER4_rL8vlc6f5lJdZAU20yeisTT4q1YcwlvQhg7avWMUkZaiO3SgK0eJ3371Gm2gJgK2b21bnpzmUHQ0vS906GLGngaVzb3VEE_g4CgR4u6qmBQRwq3Z6DyRBq572Qhn3TXk_0Xvj43Q9TyYjV5ioou5Xe-3T5HHsCoUWqDp0BZ3bP9FlYFws9Df
fnFzf1yVtpwfk9shmAe8V6Fn9N0OjuS4LJP0Tc-I7adspJlYfB9BeTEci6MKn58OQCrw', 'scope': ['openid', 'profile', 'email', 'offline_access'], 'expires_in': 0, 'token_type': 'Bearer', 'expires_at': 1555465570.9597309}
return stored
@pytest.fixture()
def mocked_storage(mocker, auth_token, expired_token):
def make_storage(authenticated=False):
if authenticated:
token = auth_token
else:
token = expired_token
class fake_storage:
def __init__(*args, **kwargs):
pass
def get(self, *args):
return token
def set(self, *args):
pass
def delete(self, *args):
pass
return fake_storage
return make_storage
@pytest.fixture()
def mocked_unauth_storage(mocker, mocked_storage):
mocker.patch('sfa_dash.session_storage',
new=mocked_storage())
@pytest.fixture()
def mocked_auth_storage(mocker, mocked_storage):
mocker.patch('sfa_dash.session_storage',
new=mocked_storage(True))
@pytest.fixture()
def app_unauth(mocked_unauth_storage):
os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'
return create_app('sfa_dash.config.TestConfig')
@pytest.fixture()
def app(mocked_auth_storage):
os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'
return create_app('sfa_dash.config.TestConfig')
@pytest.fixture()
def client(app):
yield app.test_client()
no_arg_routes_list = [
'/sites/',
'/observations/',
'/forecasts/single/',
'/forecasts/cdf/',
'/reports/',
'/aggregates/',
'/sites/create',
'/reports/deterministic/create',
'/reports/event/create',
'/aggregates/create',
]
@pytest.fixture(params=no_arg_routes_list)
def no_arg_route(request):
return request.param
admin_routes_list = [
'/admin/permissions/create/cdf_forecast_group',
'/admin/permissions/create/observation',
'/admin/permissions/create/forecast',
'/admin/permissions/create/report',
'/admin/permissions/create/site',
'/admin/roles/create',
'/admin/permissions/',
'/admin/roles/',
'/admin/users/',
]
@pytest.fixture(params=admin_routes_list)
def admin_route(request):
return request.param
admin_multiarg_route_list = [
'/admin/permissions/{permission_id}/remove/{object_id}',
'/admin/roles/{role_id}/remove/{permission_id}',
'/admin/users/{user_id}/remove/{role_id}',
]
@pytest.fixture(params=admin_multiarg_route_list)
def admin_multiarg_route(request):
def fn(object_id, permission_id, user_id, role_id):
return request.param.format(
object_id=object_id,
permission_id=permission_id,
user_id=user_id,
role_id=role_id)
return fn
user_id_route_list = [
'/admin/users/{user_id}',
'/admin/users/{user_id}/add/',
]
@pytest.fixture(params=user_id_route_list)
def user_id_route(request):
def fn(user_id):
return request.param.format(user_id=user_id)
return fn
role_id_route_list = [
'/admin/roles/{role_id}',
'/admin/roles/{role_id}/delete',
'/admin/roles/{role_id}/add/',
'/admin/roles/{role_id}/grant/',
]
@pytest.fixture(params=role_id_route_list)
def role_id_route(request):
def fn(role_id):
return request.param.format(role_id=role_id)
return fn
permission_id_route_list = [
'/admin/permissions/{permission_id}',
'/admin/permissions/{permission_id}/delete',
'/admin/permissions/{permission_id}/add',
]
@pytest.fixture(params=permission_id_route_list)
def permission_id_route(request):
def fn(permission_id):
return request.param.format(permission_id=permission_id)
return fn
report_id_route_list = [
'/reports/{report_id}',
'/reports/{report_id}/delete',
]
@pytest.fixture(params=report_id_route_list)
def report_id_route(request):
def fn(report_id):
return request.param.format(report_id=report_id)
return fn
site_id_route_list = [
'/sites/{site_id}/',
'/sites/{site_id}/delete',
'/sites/{site_id}/forecasts/single/create',
'/sites/{site_id}/forecasts/cdf/create',
'/sites/{site_id}/observations/create',
'/sites/{site_id}/observations/create',
]
@pytest.fixture(params=site_id_route_list)
def site_id_route(request):
def fn(site_id):
return request.param.format(site_id=site_id)
return fn
observation_id_route_list = [
'/observations/{observation_id}',
'/observations/{observation_id}/delete',
]
@pytest.fixture(params=observation_id_route_list)
def observation_id_route(request):
def fn(observation_id):
return request.param.format(observation_id=observation_id)
return fn
forecast_id_route_list = [
    '/forecasts/single/{forecast_id}',
    '/forecasts/single/{forecast_id}/delete',
]
@pytest.fixture(params=forecast_id_route_list)
def forecast_id_route(request):
    """Parametrized over single-forecast routes; returns a callable that fills in an id."""
    template = request.param
    return lambda forecast_id: template.format(forecast_id=forecast_id)
cdf_forecast_id_route_list = [
    '/forecasts/cdf/{forecast_id}',
    '/forecasts/cdf/{forecast_id}/delete',
]
@pytest.fixture(params=cdf_forecast_id_route_list)
def cdf_forecast_id_route(request):
    """Parametrized over CDF forecast group routes; returns a callable that fills in an id."""
    template = request.param
    return lambda forecast_id: template.format(forecast_id=forecast_id)
cdf_forecast_single_id_routes_list = [
    '/forecasts/cdf/single/{forecast_id}',
]
@pytest.fixture(params=cdf_forecast_single_id_routes_list)
def cdf_forecast_single_id_route(request):
    """Parametrized over single constant-value CDF forecast routes."""
    template = request.param
    return lambda forecast_id: template.format(forecast_id=forecast_id)
aggregate_id_route_list = [
    '/aggregates/{aggregate_id}',
    '/aggregates/{aggregate_id}/delete',
    '/aggregates/{aggregate_id}/add',
    '/aggregates/{aggregate_id}/forecasts/single/create',
    '/aggregates/{aggregate_id}/forecasts/cdf/create',
]
@pytest.fixture(params=aggregate_id_route_list)
def aggregate_id_route(request):
    """Parametrized over aggregate routes; returns a callable that fills in an aggregate id."""
    template = request.param
    return lambda aggregate_id: template.format(aggregate_id=aggregate_id)
clone_route_list = [
    '/sites/{site_id}/clone',
    '/observations/{observation_id}/clone',
    '/forecasts/single/{forecast_id}/clone',
]
@pytest.fixture(params=clone_route_list)
def clone_route(request):
    """Parametrized over clone routes; returns a callable that expands a dict of uuids."""
    template = request.param
    return lambda uuids: template.format(**uuids)
# --- Hard-coded object UUID fixtures -----------------------------------------
# Each fixture below returns a fixed UUID string; the values match the
# metadata fixtures later in this file (e.g. `site`, `observation`, `report`).
@pytest.fixture()
def missing_id():
    """UUID that (per the fixture name) should not match any stored object."""
    return '7d2c3208-5243-11e9-8647-d663bd873d93'
@pytest.fixture()
def observation_id():
    """UUID of the test observation (see the `observation` fixture)."""
    return '123e4567-e89b-12d3-a456-426655440000'
@pytest.fixture()
def cdf_forecast_group_id():
    """UUID of the CDF forecast group (see the `cdf_forecast` fixture)."""
    return 'ef51e87c-50b9-11e9-8647-d663bd873d93'
@pytest.fixture()
def cdf_forecast_id():
    """UUID of one constant-value member of the CDF forecast group."""
    return '633f9396-50bb-11e9-8647-d663bd873d93'
@pytest.fixture()
def forecast_id():
    """UUID of the deterministic test forecast (see the `forecast` fixture)."""
    return '11c20780-76ae-4b11-bef1-7a75bdc784e3'
@pytest.fixture()
def site_id():
    """UUID of the weather-station site (see the `site` fixture)."""
    return '123e4567-e89b-12d3-a456-426655440001'
@pytest.fixture()
def site_id_plant():
    """UUID of the power-plant site (see `site_with_modeling_params`)."""
    return '123e4567-e89b-12d3-a456-426655440002'
@pytest.fixture()
def test_orgid():
    """UUID used as the test organization id."""
    return 'b76ab62e-4fe1-11e9-9e44-64006a511e6f'
@pytest.fixture()
def user_id():
    """UUID used as the test user id."""
    return '0c90950a-7cca-11e9-a81f-54bf64606445'
@pytest.fixture()
def aggregate_id():
    """UUID of the test aggregate (see the `aggregate` fixture)."""
    return '458ffc27-df0b-11e9-b622-62adb5fd6af0'
@pytest.fixture()
def report_id():
    """UUID of the test report (see the `report` fixture)."""
    return '9f290dd4-42b8-11ea-abdf-f4939feddd82'
@pytest.fixture
def all_metadata_ids(
        observation_id, forecast_id, cdf_forecast_group_id, cdf_forecast_id,
        site_id, site_id_plant, aggregate_id, report_id):
    """Bundle every per-object UUID fixture into one name -> uuid mapping."""
    return dict(
        observation_id=observation_id,
        forecast_id=forecast_id,
        cdf_forecast_group_id=cdf_forecast_group_id,
        cdf_forecast_id=cdf_forecast_id,
        site_id=site_id,
        site_id_plant=site_id_plant,
        aggregate_id=aggregate_id,
        report_id=report_id,
    )
@pytest.fixture()
def test_url(app):
    """Return a callable mapping a view name to its external URL under the test app."""
    def build(view):
        # url_for needs an active request context to resolve _external URLs.
        with app.test_request_context():
            return url_for(view, _external=True)
    return build
@pytest.fixture(scope='session')
def connection():
    """Session-scoped raw pymysql connection to the arbiter_data test database."""
    params = dict(
        host=os.getenv('MYSQL_HOST', '127.0.0.1'),
        port=int(os.getenv('MYSQL_PORT', '3306')),
        user='root',
        password='testpassword',
        database='arbiter_data',
        binary_prefix=True,
    )
    return pymysql.connect(**params)
@pytest.fixture()
def cursor(connection):
    """Plain cursor on the shared connection, rolled back first for isolation."""
    connection.rollback()
    cur = connection.cursor()
    return cur
@pytest.fixture()
def dictcursor(connection):
    """Dict-returning cursor on the shared connection, rolled back first for isolation."""
    connection.rollback()
    cur = connection.cursor(cursor=pymysql.cursors.DictCursor)
    return cur
@pytest.fixture()
def role_id(cursor):
    """UUID string of the role named "Test user role"."""
    query = ('SELECT BIN_TO_UUID(id, 1) from arbiter_data.roles '
             'WHERE name = "Test user role"')
    cursor.execute(query)
    return cursor.fetchone()[0]
@pytest.fixture()
def permission_id(cursor, role_id):
    """UUID string of one permission mapped to the test role."""
    query = ('SELECT BIN_TO_UUID(id, 1) FROM arbiter_data.permissions '
             'WHERE id IN (SELECT permission_id FROM '
             'arbiter_data.role_permission_mapping WHERE role_id '
             '= UUID_TO_BIN(%s, 1) ) LIMIT 1')
    cursor.execute(query, role_id)
    return cursor.fetchone()[0]
@pytest.fixture()
def permission_object_type(cursor, permission_id):
    """The object_type column of the permission selected by permission_id."""
    query = ('SELECT object_type FROM arbiter_data.permissions '
             'WHERE id = UUID_TO_BIN(%s, 1)')
    cursor.execute(query, permission_id)
    return cursor.fetchone()[0]
@pytest.fixture()
def valid_permission_object_id(
        observation_id, forecast_id, cdf_forecast_group_id, aggregate_id,
        site_id, role_id, user_id, permission_id, report_id,
        permission_object_type):
    """Return an existing object id matching the permission's object_type.

    Returns None for an unrecognized object_type (same as the original
    fall-through behavior).

    Bug fix: the aggregates branch previously compared against the
    misspelled type 'agggregates', so aggregate permissions always fell
    through and yielded None.
    """
    id_by_type = {
        'forecasts': forecast_id,
        'observations': observation_id,
        'cdf_forecasts': cdf_forecast_group_id,
        'aggregates': aggregate_id,  # was misspelled 'agggregates'
        'sites': site_id,
        'reports': report_id,
        'users': user_id,
        'permissions': permission_id,
        'roles': role_id,
    }
    return id_by_type.get(permission_object_type)
@pytest.fixture()
def site():
    """Metadata dict for the 'Weather Station' site (no modeling parameters set)."""
    return {
        'created_at': '2019-03-01T11:44:38+00:00',
        'elevation': 595.0,
        'extra_parameters': '{"network_api_abbreviation": "AS","network": "University of Oregon SRML","network_api_id": "94040"}',
        'latitude': 42.19,
        'longitude': -122.7,
        'modeling_parameters': {'ac_capacity': None,
                                'ac_loss_factor': None,
                                'axis_azimuth': None,
                                'axis_tilt': None,
                                'backtrack': None,
                                'dc_capacity': None,
                                'dc_loss_factor': None,
                                'ground_coverage_ratio': None,
                                'max_rotation_angle': None,
                                'surface_azimuth': None,
                                'surface_tilt': None,
                                'temperature_coefficient': None,
                                'tracking_type': None},
        'modified_at': '2019-03-01T11:44:38+00:00',
        'name': 'Weather Station',
        'provider': 'Organization 1',
        'site_id': '123e4567-e89b-12d3-a456-426655440001',
        'timezone': 'Etc/GMT+8'}
@pytest.fixture()
def site_with_modeling_params():
    """Metadata dict for 'Power Plant 1', a fixed-tilt site with modeling parameters."""
    return {
        'created_at': '2019-03-01T11:44:46+00:00',
        'elevation': 786.0,
        'extra_parameters': '',
        'latitude': 43.73403,
        'longitude': -96.62328,
        'modeling_parameters': {
            'ac_capacity': 0.015,
            'ac_loss_factor': 0.0,
            'axis_azimuth': None,
            'axis_tilt': None,
            'backtrack': None,
            'dc_capacity': 0.015,
            'dc_loss_factor': 0.0,
            'ground_coverage_ratio': None,
            'max_rotation_angle': None,
            'surface_azimuth': 180.0,
            'surface_tilt': 45.0,
            'temperature_coefficient': -0.2,
            'tracking_type': 'fixed'},
        'modified_at': '2019-03-01T11:44:46+00:00',
        'name': 'Power Plant 1',
        'provider': 'Organization 1',
        'site_id': '123e4567-e89b-12d3-a456-426655440002',
        'timezone': 'Etc/GMT+6'}
@pytest.fixture()
def observation():
    """Metadata dict for the 'GHI Instrument 1' observation at the weather-station site."""
    return {
        '_links': {'site': 'http://localhost:5000/sites/123e4567-e89b-12d3-a456-426655440001'},
        'created_at': '2019-03-01T12:01:39+00:00',
        'extra_parameters': '{"instrument": "Ascension Technology Rotating Shadowband Pyranometer", "network": "UO SRML"}',
        'interval_label': 'beginning',
        'interval_length': 5,
        'interval_value_type': 'interval_mean',
        'modified_at': '2019-03-01T12:01:39+00:00',
        'name': 'GHI Instrument 1',
        'observation_id': '123e4567-e89b-12d3-a456-426655440000',
        'provider': 'Organization 1',
        'site_id': '123e4567-e89b-12d3-a456-426655440001',
        'uncertainty': 0.1,
        'variable': 'ghi'}
@pytest.fixture()
def forecast():
    """Metadata dict for the deterministic 'DA GHI' forecast (site-based, no aggregate)."""
    return {
        '_links': {'aggregate': None,
                   'site': 'http://localhost:5000/sites/123e4567-e89b-12d3-a456-426655440001'},
        'aggregate_id': None,
        'created_at': '2019-03-01T11:55:37+00:00',
        'extra_parameters': '',
        'forecast_id': '11c20780-76ae-4b11-bef1-7a75bdc784e3',
        'interval_label': 'beginning',
        'interval_length': 5,
        'interval_value_type': 'interval_mean',
        'issue_time_of_day': '06:00',
        'lead_time_to_start': 60,
        'modified_at': '2019-03-01T11:55:37+00:00',
        'name': 'DA GHI',
        'provider': 'Organization 1',
        'run_length': 1440,
        'site_id': '123e4567-e89b-12d3-a456-426655440001',
        'variable': 'ghi'}
@pytest.fixture()
def cdf_forecast():
    """Metadata dict for the 'DA GHI' CDF forecast group with five constant-value members."""
    return {
        '_links': {'site': 'http://localhost:5000/sites/123e4567-e89b-12d3-a456-426655440001'},
        'aggregate_id': None,
        'axis': 'y',
        'constant_values': [{'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9396-50bb-11e9-8647-d663bd873d93/values/timerange',
                                        'values': 'http://localhost:5000/forecasts/cdf/single/633f9396-50bb-11e9-8647-d663bd873d93/values'},
                             'constant_value': 5.0,
                             'forecast_id': '633f9396-50bb-11e9-8647-d663bd873d93'},
                            {'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9864-50bb-11e9-8647-d663bd873d93/values/timerange',
                                        'values': 'http://localhost:5000/forecasts/cdf/single/633f9864-50bb-11e9-8647-d663bd873d93/values'},
                             'constant_value': 20.0,
                             'forecast_id': '633f9864-50bb-11e9-8647-d663bd873d93'},
                            {'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9b2a-50bb-11e9-8647-d663bd873d93/values/timerange',
                                        'values': 'http://localhost:5000/forecasts/cdf/single/633f9b2a-50bb-11e9-8647-d663bd873d93/values'},
                             'constant_value': 50.0,
                             'forecast_id': '633f9b2a-50bb-11e9-8647-d663bd873d93'},
                            {'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9d96-50bb-11e9-8647-d663bd873d93/values/timerange',
                                        'values': 'http://localhost:5000/forecasts/cdf/single/633f9d96-50bb-11e9-8647-d663bd873d93/values'},
                             'constant_value': 80.0,
                             'forecast_id': '633f9d96-50bb-11e9-8647-d663bd873d93'},
                            {'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633fa548-50bb-11e9-8647-d663bd873d93/values/timerange',
                                        'values': 'http://localhost:5000/forecasts/cdf/single/633fa548-50bb-11e9-8647-d663bd873d93/values'},
                             'constant_value': 95.0,
                             'forecast_id': '633fa548-50bb-11e9-8647-d663bd873d93'}],
        'created_at': '2019-03-02T14:55:37+00:00',
        'extra_parameters': '',
        'forecast_id': 'ef51e87c-50b9-11e9-8647-d663bd873d93',
        'interval_label': 'beginning',
        'interval_length': 5,
        'interval_value_type': 'interval_mean',
        'issue_time_of_day': '06:00',
        'lead_time_to_start': 60,
        'modified_at': '2019-03-02T14:55:37+00:00',
        'name': 'DA GHI',
        'provider': 'Organization 1',
        'run_length': 1440,
        'site_id': '123e4567-e89b-12d3-a456-426655440001',
        'variable': 'ghi'}
@pytest.fixture()
def aggregate():
    """Metadata dict for 'Test Aggregate ghi', a mean aggregate over three observations."""
    return {
        'aggregate_id': '458ffc27-df0b-11e9-b622-62adb5fd6af0',
        'aggregate_type': 'mean',
        'created_at': '2019-09-24T12:00:00+00:00',
        'description': 'ghi agg',
        'extra_parameters': 'extra',
        'interval_label': 'ending',
        'interval_length': 60,
        'interval_value_type': 'interval_mean',
        'modified_at': '2019-09-24T12:00:00+00:00',
        'name': 'Test Aggregate ghi',
        'observations': [
            {'_links': {'observation': 'http://localhost:5000/observations/123e4567-e89b-12d3-a456-426655440000/metadata'},
             'created_at': '2019-09-25T00:00:00+00:00',
             'effective_from': '2019-01-01T00:00:00+00:00',
             'effective_until': None,
             'observation_deleted_at': None,
             'observation_id': '123e4567-e89b-12d3-a456-426655440000'},
            {'_links': {'observation': 'http://localhost:5000/observations/e0da0dea-9482-4073-84de-f1b12c304d23/metadata'},
             'created_at': '2019-09-25T00:00:00+00:00',
             'effective_from': '2019-01-01T00:00:00+00:00',
             'effective_until': None,
             'observation_deleted_at': None,
             'observation_id': 'e0da0dea-9482-4073-84de-f1b12c304d23'},
            {'_links': {'observation': 'http://localhost:5000/observations/b1dfe2cb-9c8e-43cd-afcf-c5a6feaf81e2/metadata'},
             'created_at': '2019-09-25T00:00:00+00:00',
             'effective_from': '2019-01-01T00:00:00+00:00',
             'effective_until': None,
             'observation_deleted_at': None,
             'observation_id': 'b1dfe2cb-9c8e-43cd-afcf-c5a6feaf81e2'}],
        'provider': 'Organization 1',
        'timezone': 'America/Denver',
        'variable': 'ghi'}
@pytest.fixture()
def report():
    """Metadata dict for a failed report comparing the DA GHI forecast to GHI Instrument 1."""
    # NOTE(review): `resample_threshold` is referenced but not defined in this
    # block — presumably a module-level value earlier in the file; confirm.
    return {
        'created_at': '2020-01-22T13:48:00+00:00',
        'modified_at': '2020-01-22T13:50:00+00:00',
        'provider': 'Organization 1',
        'raw_report': {
            'data_checksum': None,
            'generated_at': '2019-07-01T12:00:00+00:00',
            'messages': [
                {'function': 'fcn',
                 'level': 'error',
                 'message': 'FAILED',
                 'step': 'dunno'}],
            'metrics': [],
            'plots': None,
            'processed_forecasts_observations': [],
            'timezone': 'Etc/GMT+8',
            'versions': []},
        'report_id': '9f290dd4-42b8-11ea-abdf-f4939feddd82',
        'report_parameters': {
            'categories': ['total', 'date'],
            'end': '2019-06-01T06:59:00+00:00',
            'filters': [{'quality_flags': ['USER FLAGGED'],
                         'discard_before_resample': True,
                         'resample_threshold_percentage': resample_threshold,
                         },
                        {'quality_flags': ['STALE VALUES'],
                         'discard_before_resample': True,
                         'resample_threshold_percentage': resample_threshold,
                         }],
            'metrics': ['mae', 'rmse'],
            'name': 'NREL MIDC OASIS GHI Forecast Analysis',
            'object_pairs': [
                {'forecast': '11c20780-76ae-4b11-bef1-7a75bdc784e3',
                 'observation': '123e4567-e89b-12d3-a456-426655440000',
                 'reference_forecast': None,
                 'uncertainty': None,
                 'forecast_type': 'forecast',
                 }],
            'start': '2019-04-01T07:00:00+00:00',
            'costs': [{
                'name': 'example cost',
                'type': 'constant',
                'parameters': {
                    "cost": 1.1,
                    "aggregation": "sum",
                    "net": False,
                },
            }],
        },
        'status': 'failed',
        'values': [
            {'id': 'a2b6ed14-42d0-11ea-aa3c-f4939feddd82',
             'object_id': '123e4567-e89b-12d3-a456-426655440000',
             'processed_values': 'superencodedvalues'}]
    }
| true
| true
|
f71a810dff951c596883e731538ad5809e7693d4
| 676
|
py
|
Python
|
pyutilscr/check.py
|
GoodMusic8596/pyutilscr
|
5965d3ab66767e2ef14ab119739b673859bbab81
|
[
"MIT"
] | 1
|
2021-12-25T16:09:41.000Z
|
2021-12-25T16:09:41.000Z
|
pyutilscr/check.py
|
GoodMusic8596/pyutilscr
|
5965d3ab66767e2ef14ab119739b673859bbab81
|
[
"MIT"
] | null | null | null |
pyutilscr/check.py
|
GoodMusic8596/pyutilscr
|
5965d3ab66767e2ef14ab119739b673859bbab81
|
[
"MIT"
] | null | null | null |
import glob
def search(directory, searchElem: list, extension=".txt"):
    """Return files in ``directory`` with ``extension`` whose text contains any search element.

    Fixes over the original:
    - files are closed (``with``); previously every handle was leaked;
    - the glob pattern uses ``os.path.join`` so a directory without a trailing
      separator no longer matches sibling paths (``/home/project*.txt``);
    - a file matching several elements is reported once, not once per element.

    :param directory: folder to search, e.g. ``/home/runner/project``
    :param searchElem: list of substrings to look for
    :param extension: file extension, must include the leading period (default ".txt")
    :return: list of paths of matching files
    """
    pattern = os.path.join(directory, "*" + extension)
    files_detected = []
    for path in glob.glob(pattern):
        with open(path) as handle:
            stored = handle.read()
        if any(elem in stored for elem in searchElem):
            files_detected.append(path)
    return files_detected
| 28.166667
| 94
| 0.690828
|
import glob
def search(directory, searchElem:list, extension = ".txt"):
files = glob.glob(directory+"*"+extension)
files_detected = []
for file in files:
sf = open(file)
stored = sf.read()
for elem in searchElem:
if elem in stored:
files_detected.append(file)
return files_detected
| true
| true
|
f71a812d7ff386f03ab0d70d638640016bdd2c44
| 3,007
|
py
|
Python
|
air_pollution_death_rate_related/scripts/air_pollution/predict_aqi.py
|
nghitrampham/air_pollution_death_rate_related
|
3fd72b9684e8362de5706ba37c1d90b844d4afe0
|
[
"MIT"
] | null | null | null |
air_pollution_death_rate_related/scripts/air_pollution/predict_aqi.py
|
nghitrampham/air_pollution_death_rate_related
|
3fd72b9684e8362de5706ba37c1d90b844d4afe0
|
[
"MIT"
] | 15
|
2019-12-10T02:05:58.000Z
|
2022-03-12T00:06:38.000Z
|
air_pollution_death_rate_related/scripts/air_pollution/predict_aqi.py
|
nghitrampham/CSE583_FinalProject
|
3fd72b9684e8362de5706ba37c1d90b844d4afe0
|
[
"MIT"
] | 1
|
2020-06-04T17:48:21.000Z
|
2020-06-04T17:48:21.000Z
|
"""
This module is used to predict the Air Quality Index model for 2019 for all counties.
"""
import pickle
import warnings
import pandas as pd
import numpy as np
from keras.models import load_model
import helpers
warnings.filterwarnings("ignore")
def main():
    """Predict AQI for a sample of counties using per-county trained models.

    Reads 2019 daily AQI data, runs each county's saved model/scaler to
    predict AQI for ``predicted_date``, writes the per-county predictions
    to ``temp.csv``, then joins them with county codes and saves the merged
    result to ``predicted_AQI<date>.csv``.

    Fixes over the original:
    - CSV/model paths were built with multi-line triple-quoted strings, which
      embedded a newline and leading spaces in the middle of the filename and
      made every read fail; they are now built by adjacent-literal
      concatenation.
    - ``temp.csv`` and the scaler pickle are opened via ``with`` so handles
      are not leaked when an exception occurs.
    """
    data2019_raw = pd.read_csv(
        "air_pollution_death_rate_related/data/air_pollution/"
        "data_air_raw/daily_aqi_by_county_2019.csv")
    data2019 = helpers.data_cleaning(data2019_raw)
    predicted_date = "2019-03-12"

    with open("temp.csv", "w") as file:
        file.write("date,state_county,AQI\n")
        # Only the first five counties are processed; drop the slice to run all.
        for county in list(data2019["state_county"].unique())[:5]:
            # load the per-county model and min-max scaler
            print("---> Loading model for county {} ...".format(county))
            try:
                scaler_path = (
                    "air_pollution_death_rate_related/trained_model/"
                    "min_scaler_model/" + county + "_scaler.pickle")
                model_path = (
                    "air_pollution_death_rate_related/trained_model/"
                    "county_aqi/" + county + "_model.h5")
                model = load_model(model_path)
                with open(scaler_path, "rb") as scaler_file:
                    mm_scaler = pickle.load(scaler_file)

                # feature engineering for the prediction window
                data_feature_temp = helpers.data_feature_engineering_for_test(
                    data2019,
                    county,
                    predicted_date)
                x_test, y_test = helpers.load_test_data(
                    data_feature_temp["data"], mm_scaler)

                # predict, un-scale, and record the AQI for this county
                predictions = helpers.predict_point_by_point(model, x_test)
                print("Predicting ....")
                y_pred = np.append(
                    x_test, predictions.reshape(1, 1, 1)).reshape(1, 39)
                y_scale = mm_scaler.inverse_transform(y_pred)[-1][-1]
                file.write(predicted_date + "," + county + "," + str(y_scale) + "\n")
            except Exception as exp:
                print(exp)
                exp.args += ('Path and list_year must not be empty',
                             "check read_raw_data function")

    # join predictions with county codes for the interactive visualization map
    county_code = pd.read_csv(
        "air_pollution_death_rate_related/data/air_pollution/"
        "data_misc/county_with_code.csv")
    df_prediction = pd.read_csv("temp.csv")
    df_result = (pd.merge(county_code, df_prediction,
                          how='inner',
                          left_on=["state_county"],
                          right_on=["state_county"])
                 )
    df_result.to_csv("predicted_AQI" + predicted_date + ".csv", index=False)
if __name__ == '__main__':
    main()
| 36.228916
| 96
| 0.606252
|
import pickle
import warnings
import pandas as pd
import numpy as np
from keras.models import load_model
import helpers
warnings.filterwarnings("ignore")
def main():
data2019_raw = pd.read_csv("""air_pollution_death_rate_related/data/air_pollution/
data_air_raw/daily_aqi_by_county_2019.csv""")
data2019 = helpers.data_cleaning(data2019_raw)
predicted_date = "2019-03-12"
file = open("temp.csv", "w")
file.write("date,state_county,AQI\n")
for county in list(data2019["state_county"].unique())[:5]:
g model for county {} ...".format(county))
try:
scaler_path = ("air_pollution_death_rate_related/trained_model/min_scaler_model/" +
county + "_scaler.pickle")
model_path = ("air_pollution_death_rate_related/trained_model/county_aqi/" +
county + "_model.h5")
model = load_model(model_path)
mm_scaler = pickle.load(open(scaler_path, "rb"))
ng_for_test(
data2019,
county,
predicted_date)
x_test, y_test = helpers.load_test_data(data_feature_temp["data"], mm_scaler)
dictions = helpers.predict_point_by_point(model, x_test)
.")
y_pred = np.append(x_test, predictions.reshape(1, 1, 1)).reshape(1, 39)
y_scale = mm_scaler.inverse_transform(y_pred)[-1][-1]
file.write(predicted_date+","+county+","+str(y_scale)+"\n")
del data_feature_temp, scaler_path,\
model_path, model, mm_scaler, x_test, y_test, predictions, y_pred, y_scale
except Exception as exp:
print(exp)
exp.args += ('Path and list_year must not be empty', "check read_raw_data function")
file.close()
data_misc/county_with_code.csv""")
df_prediction = pd.read_csv("temp.csv")
df_result = (pd.merge(county_code, df_prediction,
how='inner',
left_on=["state_county"],
right_on=["state_county"])
)
df_result.to_csv("predicted_AQI" + predicted_date + ".csv", index=False)
if __name__ == '__main__':
main()
| true
| true
|
f71a8190b09e04bc1e4e8dc5cf8762becc12f3bd
| 2,108
|
py
|
Python
|
office365/directory/identities/userflows/language_configuration.py
|
juguerre/Office365-REST-Python-Client
|
dbadaddb14e7bad199499c898cdef1ada9694fc9
|
[
"MIT"
] | null | null | null |
office365/directory/identities/userflows/language_configuration.py
|
juguerre/Office365-REST-Python-Client
|
dbadaddb14e7bad199499c898cdef1ada9694fc9
|
[
"MIT"
] | null | null | null |
office365/directory/identities/userflows/language_configuration.py
|
juguerre/Office365-REST-Python-Client
|
dbadaddb14e7bad199499c898cdef1ada9694fc9
|
[
"MIT"
] | null | null | null |
from office365.directory.identities.userflows.language_page import UserFlowLanguagePage
from office365.entity import Entity
from office365.entity_collection import EntityCollection
from office365.runtime.resource_path import ResourcePath
class UserFlowLanguageConfiguration(Entity):
    """Language support for a user flow.

    Azure Active Directory user flows can only use the built-in languages
    provided by Microsoft; this entity exposes the default and overridden
    page content for one such language.
    """

    @property
    def display_name(self):
        """The language name to display.

        :rtype: str or None
        """
        return self.properties.get("displayName")

    @property
    def default_pages(self):
        """Pages with the default content shown in a user flow for this language.

        :rtype: EntityCollection
        """
        collection = EntityCollection(
            self.context, UserFlowLanguagePage,
            ResourcePath("defaultPages", self.resource_path))
        return self.get_property('defaultPages', collection)

    @property
    def overrides_pages(self):
        """Pages with the overridden content shown in a user flow for this language.

        :rtype: EntityCollection
        """
        collection = EntityCollection(
            self.context, UserFlowLanguagePage,
            ResourcePath("overridesPages", self.resource_path))
        return self.get_property('overridesPages', collection)

    def get_property(self, name, default_value=None):
        # When no default is supplied, fall back to the lazily-built page
        # collections (both properties are evaluated, matching the original).
        if default_value is None:
            fallbacks = {
                "defaultPages": self.default_pages,
                "overridesPages": self.overrides_pages,
            }
            default_value = fallbacks.get(name)
        return super(UserFlowLanguageConfiguration, self).get_property(
            name, default_value)
| 41.333333
| 113
| 0.65797
|
from office365.directory.identities.userflows.language_page import UserFlowLanguagePage
from office365.entity import Entity
from office365.entity_collection import EntityCollection
from office365.runtime.resource_path import ResourcePath
class UserFlowLanguageConfiguration(Entity):
@property
def display_name(self):
return self.properties.get("displayName", None)
@property
def default_pages(self):
return self.get_property('defaultPages',
EntityCollection(self.context, UserFlowLanguagePage,
ResourcePath("defaultPages", self.resource_path)))
@property
def overrides_pages(self):
return self.get_property('overridesPages',
EntityCollection(self.context, UserFlowLanguagePage,
ResourcePath("overridesPages", self.resource_path)))
def get_property(self, name, default_value=None):
if default_value is None:
property_mapping = {
"defaultPages": self.default_pages,
"overridesPages": self.overrides_pages
}
default_value = property_mapping.get(name, None)
return super(UserFlowLanguageConfiguration, self).get_property(name, default_value)
| true
| true
|
f71a821c13b082b4b5b3c1c3dd849583b82123e0
| 4,830
|
py
|
Python
|
detect_secrets/core/baseline.py
|
paulo-sampaio/detect-secrets
|
73ffbc35a72cb316d9e1842cc131b6098cf3c36a
|
[
"Apache-2.0"
] | 2,212
|
2018-04-03T20:58:42.000Z
|
2022-03-31T17:58:38.000Z
|
detect_secrets/core/baseline.py
|
paulo-sampaio/detect-secrets
|
73ffbc35a72cb316d9e1842cc131b6098cf3c36a
|
[
"Apache-2.0"
] | 354
|
2018-04-03T16:29:55.000Z
|
2022-03-31T18:26:26.000Z
|
detect_secrets/core/baseline.py
|
paulo-sampaio/detect-secrets
|
73ffbc35a72cb316d9e1842cc131b6098cf3c36a
|
[
"Apache-2.0"
] | 298
|
2018-04-02T19:35:15.000Z
|
2022-03-28T04:52:14.000Z
|
import json
import time
from typing import Any
from typing import Callable
from typing import cast
from typing import Dict
from typing import List
from typing import Optional
from typing import Union
from . import upgrades
from ..__version__ import VERSION
from ..exceptions import UnableToReadBaselineError
from ..settings import configure_settings_from_baseline
from ..settings import get_settings
from ..util.importlib import import_modules_from_package
from ..util.semver import Version
from .scan import get_files_to_scan
from .secrets_collection import SecretsCollection
def create(
    *paths: str,
    should_scan_all_files: bool = False,
    root: str = '',
    num_processors: Optional[int] = None,
) -> SecretsCollection:
    """Scan all files recursively under *paths* to initialize a baseline."""
    scan_kwargs: Dict[str, Any] = {}
    if num_processors:
        scan_kwargs['num_processors'] = num_processors

    targets = get_files_to_scan(
        *paths, should_scan_all_files=should_scan_all_files, root=root)

    collection = SecretsCollection(root=root)
    collection.scan_files(*targets, **scan_kwargs)
    return collection
def load(baseline: Dict[str, Any], filename: str = '') -> SecretsCollection:
    """
    With a given baseline file, load all settings and discovered secrets from it.

    :raises: KeyError
    """
    # Upgrade older baseline formats first, for backwards compatibility.
    upgraded = upgrade(baseline)
    configure_settings_from_baseline(upgraded, filename=filename)
    return SecretsCollection.load_from_baseline(upgraded)
def load_from_file(filename: str) -> Dict[str, Any]:
    """
    Read *filename* and parse it as a JSON baseline dictionary.

    :raises: UnableToReadBaselineError
    """
    try:
        with open(filename) as handle:
            contents = handle.read()
    except (FileNotFoundError, IOError) as e:
        raise UnableToReadBaselineError from e

    try:
        return cast(Dict[str, Any], json.loads(contents))
    except json.decoder.JSONDecodeError as e:
        raise UnableToReadBaselineError from e
def format_for_output(secrets: SecretsCollection, is_slim_mode: bool = False) -> Dict[str, Any]:
    """Serialize a SecretsCollection plus the current settings into baseline form."""
    output: Dict[str, Any] = {'version': VERSION}
    # Populate the configured filters and plugins.
    output.update(get_settings().json())
    output['results'] = secrets.json()

    if is_slim_mode:
        # Slim mode drops line numbers (they churn on unrelated edits); the
        # results conveniently stay ordered by line number regardless.
        results = cast(Dict[str, List[Dict[str, Any]]], output['results'])
        for secret_list in results.values():
            for secret_dict in secret_list:
                secret_dict.pop('line_number')
    else:
        output['generated_at'] = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())

    return output
def save_to_file(
    secrets: Union[SecretsCollection, Dict[str, Any]],
    filename: str,
) -> None:  # pragma: no cover
    """
    Write a baseline to *filename* as pretty-printed JSON.

    :param secrets: a SecretsCollection is serialized in the latest baseline
        format; pass a dictionary instead to control the exact format written.
        Rule of thumb: use a dict when the change does not directly affect
        the scan results themselves.
    """
    # TODO: consider adding the `detect_secrets.filters.common.is_baseline_file`
    # filter here, since the filename is known — though that would make this
    # function do more than one thing.
    if isinstance(secrets, SecretsCollection):
        payload: Union[SecretsCollection, Dict[str, Any]] = format_for_output(secrets)
    else:
        payload = secrets

    with open(filename, 'w') as handle:
        handle.write(json.dumps(payload, indent=2) + '\n')
def upgrade(baseline: Dict[str, Any]) -> Dict[str, Any]:
    """
    Upgrade an older baseline dictionary to the latest format.

    Returns the input unchanged when it is already at (or past) the
    current version; otherwise applies every relevant upgrade module in
    turn and stamps the result with the current version.
    """
    current = Version(baseline['version'])
    if current >= Version(VERSION):
        return baseline

    is_relevant = _is_relevant_upgrade_module(current)
    modules = import_modules_from_package(
        upgrades,
        filter=lambda x: not is_relevant(x),
    )

    upgraded = dict(baseline)
    for module in modules:
        module.upgrade(upgraded)  # type: ignore

    upgraded['version'] = VERSION
    return upgraded
def _is_relevant_upgrade_module(current_version: Version) -> Callable:
    """Build a predicate: does the upgrade module at a path apply after *current_version*?"""
    def wrapped(module_path: str) -> bool:
        # Module names look like `v1_0`; translate that to semver `1.0`.
        version_token = module_path.rsplit('.', 1)[-1]
        affected = version_token.lstrip('v').replace('_', '.')
        # Patch versions never require baseline bumps, so compare at `x.y.0`.
        return current_version < Version(f'{affected}.0')
    return wrapped
| 32.635135
| 97
| 0.69089
|
import json
import time
from typing import Any
from typing import Callable
from typing import cast
from typing import Dict
from typing import List
from typing import Optional
from typing import Union
from . import upgrades
from ..__version__ import VERSION
from ..exceptions import UnableToReadBaselineError
from ..settings import configure_settings_from_baseline
from ..settings import get_settings
from ..util.importlib import import_modules_from_package
from ..util.semver import Version
from .scan import get_files_to_scan
from .secrets_collection import SecretsCollection
def create(
*paths: str,
should_scan_all_files: bool = False,
root: str = '',
num_processors: Optional[int] = None,
) -> SecretsCollection:
kwargs = {}
if num_processors:
kwargs['num_processors'] = num_processors
secrets = SecretsCollection(root=root)
secrets.scan_files(
*get_files_to_scan(*paths, should_scan_all_files=should_scan_all_files, root=root),
**kwargs,
)
return secrets
def load(baseline: Dict[str, Any], filename: str = '') -> SecretsCollection:
baseline = upgrade(baseline)
configure_settings_from_baseline(baseline, filename=filename)
return SecretsCollection.load_from_baseline(baseline)
def load_from_file(filename: str) -> Dict[str, Any]:
try:
with open(filename) as f:
return cast(Dict[str, Any], json.loads(f.read()))
except (FileNotFoundError, IOError, json.decoder.JSONDecodeError) as e:
raise UnableToReadBaselineError from e
def format_for_output(secrets: SecretsCollection, is_slim_mode: bool = False) -> Dict[str, Any]:
output = {
'version': VERSION,
**get_settings().json(),
'results': secrets.json(),
}
if not is_slim_mode:
output['generated_at'] = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())
else:
for filename, secret_list in cast(
Dict[str, List[Dict[str, Any]]],
output['results'],
).items():
for secret_dict in secret_list:
secret_dict.pop('line_number')
return output
def save_to_file(
secrets: Union[SecretsCollection, Dict[str, Any]],
filename: str,
) -> None: # pragma: no cover
# TODO: I wonder whether this should add the `detect_secrets.filters.common.is_baseline_file`
# filter, since we know the filename already. However, one could argue that it would cause
# this function to "do more than one thing".
output = secrets
if isinstance(secrets, SecretsCollection):
output = format_for_output(secrets)
with open(filename, 'w') as f:
f.write(json.dumps(output, indent=2) + '\n')
def upgrade(baseline: Dict[str, Any]) -> Dict[str, Any]:
baseline_version = Version(baseline['version'])
if baseline_version >= Version(VERSION):
return baseline
modules = import_modules_from_package(
upgrades,
filter=lambda x: not _is_relevant_upgrade_module(baseline_version)(x),
)
new_baseline = {**baseline}
for module in modules:
module.upgrade(new_baseline) # type: ignore
new_baseline['version'] = VERSION
return new_baseline
def _is_relevant_upgrade_module(current_version: Version) -> Callable:
def wrapped(module_path: str) -> bool:
# This converts `v1_0` to `1.0`
affected_version_string = module_path.rsplit('.', 1)[-1].lstrip('v').replace('_', '.')
# Patch version doesn't matter, because patches should not require baseline bumps.
affected_version = Version(f'{affected_version_string}.0')
return current_version < affected_version
return wrapped
| true
| true
|
f71a825a050d1c91553d9c34bfd983037a77f740
| 1,367
|
py
|
Python
|
app/core/tests/test_admin.py
|
tarcisioLima/recipe-app-api
|
62e5dfa49c0b4bd400454e78d0c0bea673f32f58
|
[
"MIT"
] | null | null | null |
app/core/tests/test_admin.py
|
tarcisioLima/recipe-app-api
|
62e5dfa49c0b4bd400454e78d0c0bea673f32f58
|
[
"MIT"
] | null | null | null |
app/core/tests/test_admin.py
|
tarcisioLima/recipe-app-api
|
62e5dfa49c0b4bd400454e78d0c0bea673f32f58
|
[
"MIT"
] | null | null | null |
from django.test import TestCase, Client
from django.contrib.auth import get_user_model
from django.urls import reverse
class AdminSiteTests(TestCase):
    """Tests for the Django admin pages of the custom user model."""

    def setUp(self):
        """Create a logged-in superuser client and a regular user to inspect."""
        self.admin_user = get_user_model().objects.create_superuser(
            email='test2@test.com',
            password='password123'
        )
        self.user = get_user_model().objects.create_user(
            email='test3@test.com',
            password='password123',
            name='Test user full name'
        )
        self.client = Client()
        self.client.force_login(self.admin_user)

    def test_users_listed(self):
        """The changelist page shows the regular user's name and email."""
        page = self.client.get(reverse('admin:core_user_changelist'))
        self.assertContains(page, self.user.name)
        self.assertContains(page, self.user.email)

    def test_user_change_page(self):
        """The user edit page renders successfully."""
        page = self.client.get(
            reverse('admin:core_user_change', args=[self.user.id]))
        self.assertEqual(page.status_code, 200)

    def test_create_user_page(self):
        """The add-user page renders successfully."""
        page = self.client.get(reverse('admin:core_user_add'))
        self.assertEqual(page.status_code, 200)
| 31.790698
| 68
| 0.640088
|
from django.test import TestCase, Client
from django.contrib.auth import get_user_model
from django.urls import reverse
class AdminSiteTests(TestCase):
def setUp(self):
self.client = Client()
self.admin_user = get_user_model().objects.create_superuser(
email='test2@test.com',
password='password123'
)
self.client.force_login(self.admin_user)
self.user = get_user_model().objects.create_user(
email='test3@test.com',
password='password123',
name='Test user full name'
)
def test_users_listed(self):
url = reverse('admin:core_user_changelist')
res = self.client.get(url)
self.assertContains(res, self.user.name)
self.assertContains(res, self.user.email)
def test_user_change_page(self):
url = reverse('admin:core_user_change', args=[self.user.id])
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
def test_create_user_page(self):
url = reverse('admin:core_user_add')
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
| true
| true
|
f71a82a3be56b6be5477fc4c0651779ed08eab77
| 3,249
|
py
|
Python
|
src/state.py
|
Amayas29/pyautomate
|
ea966348ea9d7ec153274bd7e2266bd50131cd3d
|
[
"MIT"
] | 1
|
2021-03-24T21:12:56.000Z
|
2021-03-24T21:12:56.000Z
|
src/state.py
|
Amayas29/pyautomate
|
ea966348ea9d7ec153274bd7e2266bd50131cd3d
|
[
"MIT"
] | null | null | null |
src/state.py
|
Amayas29/pyautomate
|
ea966348ea9d7ec153274bd7e2266bd50131cd3d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
class State :
"""
Classe définissant un état caractérisée par :
- un identifiant
- un booleen pour savoir si c'est un état initial
- un booleen pour savoir si c'est un état final
- un label utilisé pour les constructions
ou il faut memoriser d'ou vient l'etat construit
"""
def __init__ (self, id, init, fin, label=None) :
""" int x bool x bool x str -> State
constructeur d'état
"""
self.id = id
self.init = init
self.fin = fin
if label == None :
self.label = str(self.id)
else :
self.label =label
def __repr__(self) :
""" -> str
renvoie une description de l'état sous la forme d'une chaîne
de caractères contenant son label puis (init) si c'est un état
initial puis (fin) si c'est un état final
elle permet d'utiliser print pour les états
"""
# ret : str
ret = str(self.label)
if self.init :
ret = ret + "(init)"
if self.fin :
ret = ret+ "(fin)"
return ret
def insertPrefix(self, prefid, preflab=None):
""" int x str ->
met à jour l'identifiant et le label de l'état en les
préfixant avec la chaîne de caractères pref
"""
if self.id < 0 :
tempid = - self.id
else :
tempid = self.id
tempid2 = 10**len(str(tempid))*prefid + tempid
if self.id < 0 :
self.id = - tempid2
else :
self.id = tempid2
if preflab == None :
self.label = str(prefid) + "_" + str(self.label)
else :
self.label = str(preflab) + "_" + str(self.label)
def __eq__(self, other) :
""" Val -> bool
rend le booléen vrai si l'état est égal à other, faux sinon
elle permet que == fonctionne pour les états
"""
return type(self) == type(other) and self.id == other.id
def __ne__(self, other) :
""" Val -> bool
rend le booléen vrai si l'état est différent de other, faux sinon
elle permet que != fonctionne pour les états
"""
return not self.__eq__(other)
def __hash__(self) :
""" -> int
rend un identifiant unique (utile pour les tables de hachage)
elle permet que les états puissent appartenir à des ensembles
"""
if type(self.id)== int :
return self.id
# s : str
s=str(self.id)
# res : str
res=''.join(str(ord(c)) for c in s)
return int(res)
@staticmethod
def isInitialIn(list) :
""" list[State] -> bool
rend vrai si l'un des états de list est un état initial, faux sinon
"""
# s : State
for s in list :
if s.init :
return True
return False
@staticmethod
def isFinalIn(list) :
""" list[State] -> bool
rend vrai si l'un des états de list est un état final, faux sinon
"""
for s in list :
if s.fin :
return True
return False
| 29.536364
| 75
| 0.515851
|
class State :
def __init__ (self, id, init, fin, label=None) :
self.id = id
self.init = init
self.fin = fin
if label == None :
self.label = str(self.id)
else :
self.label =label
def __repr__(self) :
ret = str(self.label)
if self.init :
ret = ret + "(init)"
if self.fin :
ret = ret+ "(fin)"
return ret
def insertPrefix(self, prefid, preflab=None):
if self.id < 0 :
tempid = - self.id
else :
tempid = self.id
tempid2 = 10**len(str(tempid))*prefid + tempid
if self.id < 0 :
self.id = - tempid2
else :
self.id = tempid2
if preflab == None :
self.label = str(prefid) + "_" + str(self.label)
else :
self.label = str(preflab) + "_" + str(self.label)
def __eq__(self, other) :
return type(self) == type(other) and self.id == other.id
def __ne__(self, other) :
return not self.__eq__(other)
def __hash__(self) :
if type(self.id)== int :
return self.id
s=str(self.id)
res=''.join(str(ord(c)) for c in s)
return int(res)
@staticmethod
def isInitialIn(list) :
for s in list :
if s.init :
return True
return False
@staticmethod
def isFinalIn(list) :
for s in list :
if s.fin :
return True
return False
| true
| true
|
f71a82c5e1e63b262c2a25dc6c75f427f5c4eea8
| 8,829
|
py
|
Python
|
deep-clustering-conv-autoencoder/main.py
|
positivevaib/semi-supervised-imagenet-classification
|
4fb6427f5a72951c1b866a1ddbc2599811bb5770
|
[
"MIT"
] | null | null | null |
deep-clustering-conv-autoencoder/main.py
|
positivevaib/semi-supervised-imagenet-classification
|
4fb6427f5a72951c1b866a1ddbc2599811bb5770
|
[
"MIT"
] | null | null | null |
deep-clustering-conv-autoencoder/main.py
|
positivevaib/semi-supervised-imagenet-classification
|
4fb6427f5a72951c1b866a1ddbc2599811bb5770
|
[
"MIT"
] | null | null | null |
# import
import numpy as np
import sklearn as skl
import sklearn.cluster as cluster
import sklearn.metrics as metrics
import torch
import torch.distributions.kl as kl
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.utils.data as data
import torchvision
import torchvision.datasets as datasets
import torchvision.transforms as transforms
import tqdm
# model
class CAE_ENC(nn.Module):
def __init__(self):
super().__init__()
# self.enc = nn.Sequential(*list(model.features.children())[:-5])
self.conv1 = nn.Conv2d(3, 32, kernel_size=5, padding=2, stride=2)
self.conv2 = nn.Conv2d(32, 64, kernel_size=3, padding=1, stride=2)
self.conv3 = nn.Conv2d(64, 128, kernel_size=3, padding=1, stride=2)
self.conv4 = nn.Conv2d(128, 256, kernel_size=3, padding=1, stride=2)
self.fc1 = nn.Linear(256 * 6 * 6, 1000)
def forward(self, x):
# x = self.features(x)
x = F.relu(self.conv1(x))
x = F.relu(self.conv2(x))
x = F.relu(self.conv3(x))
x = F.relu(self.conv4(x))
x = x.view(-1, 256 * 6 * 6)
x = self.fc1(x)
return x
class CAE_DEC(nn.Module):
def __init__(self):
super().__init__()
self.fc2 = nn.Linear(1000, 256 * 6 * 6)
self.deconv1 = nn.ConvTranspose2d(256, 128, 2, stride=2)
self.deconv2 = nn.ConvTranspose2d(128, 64, 2, stride=2)
self.deconv3 = nn.ConvTranspose2d(64, 32, 2, stride=2)
self.deconv4 = nn.ConvTranspose2d(32, 3, 2, stride=2)
self.conv5 = nn.Conv2d(3, 3, kernel_size=1) # might have to remove
def forward(self, x):
x = F.relu(self.fc2(x))
x = x.view(128, 256, 6, 6)
x = F.relu(self.deconv1(x))
x = F.relu(self.deconv2(x))
x = F.relu(self.deconv3(x))
x = F.relu(self.deconv4(x))
x = torch.sigmoid(self.conv5(x)) # might have to remove
return x
class ClusteringLayer(nn.Module):
def __init__(self, weights=None, alpha=1.0):
super().__init__()
if weights:
self.weights = weights
else:
self.weights = torch.empty(1000, 1000)
nn.init.xavier_uniform_(self.weights)
self.alpha = alpha
def forward(self, x):
q = 1.0 / (1.0 + (torch.sum(
(x.unsqueeze(1) - self.weights)**2, dim=2) / self.alpha))
q **= (self.alpha + 1.0) / 2.0
q = torch.transpose(
torch.transpose(q, 1, 2) / torch.sum(q, dim=1), 1, 2)
return q
def set_weights(module, weights):
if isinstance(module, ClusteringLayer):
module.weights = weights
class CAE(nn.Module):
def __init__(self):
super().__init__()
self.enc = CAE_ENC()
self.dec = CAE_DEC()
self.clus = ClusteringLayer()
def forward(self, x):
h = self.enc(x)
q = self.clus(h)
o = self.dec(h)
return (h, q, o)
def loss(q, p, o, gamma=0.1):
mse = nn.MSELoss(o)
kld = gamma * kl.kl_divergence(p, q)
l = mse + kld
return l
def target_distribution(q):
weight = q**2 / torch.sum(q, dim=0)
return torch.transpose(torch.transpose(q) / torch.sum(weight, dim=1))
# data
transformations = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225),
inplace=True)
])
dataset1 = datasets.ImageFolder('/beegfs/vag273/ssl_data_96/supervised/train/',
transform=transformations)
dataset2 = datasets.ImageFolder('/beegfs/vag273/ssl_data_96/unsupervised/',
transform=transformations)
dataset = data.ConcatDataset((dataset1, dataset2))
train_ratio = 0.9
train_set_size = int(train_ratio * len(dataset))
val_set_size = len(dataset) - train_set_size
train_data, val_data = data.random_split(dataset,
(train_set_size, val_set_size))
train_loader = data.DataLoader(train_data, batch_size=128, shuffle=True)
val_loader = data.DataLoader(val_data, batch_size=128, shuffle=False)
# training
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
model = CAE().to(device)
# criterion = nn.MSELoss()
optimizer = optim.Adam(model.parameters())
# pretrain
best_val_loss = float('inf')
tot_epochs = 200 # maybe lower it on one of the runs
print('pretrain')
for epoch in range(tot_epochs):
model.train()
print('epoch {} of {}'.format(epoch + 1, tot_epochs))
desc = "ITERATION - loss: {:.2f}"
pbar = tqdm.tqdm(desc=desc.format(0),
total=len(train_loader),
leave=False,
file=None,
initial=0)
running_loss = 0
for batch_idx, data in enumerate(train_loader):
img, _ = data
img = img.to(device)
optimizer.zero_grad()
_, _, out = model(img)
loss = nn.MSELoss(out, img)
running_loss += loss.item()
loss.backward()
optimizer.step()
pbar.desc = desc.format(loss.item())
pbar.update()
print('loss: {}'.format(running_loss / len(train_loader)))
model.eval()
with torch.no_grad():
val_running_loss = 0
for val_batch_idx, val_data in enumerate(val_loader):
val_img, _ = val_data
val_img = val_img.to(device)
_, _, val_out = model(val_img)
val_loss = nn.MSELoss(val_out, val_img)
val_running_loss += val_loss.item()
if val_running_loss / len(val_loader) < best_val_loss:
torch.save(model.state_dict(), 'weights.pth')
print('val loss: {}'.format(val_running_loss / len(val_loader)))
pbar.close()
# first cluster
features = None
for batch_idx, data in enumerate(train_loader):
img, _ = data
img = img.to(device)
if not features:
features = model(img)
else:
torch.cat((features, model(img)), 0)
kmeans = cluster.kMeans(n_clusters=1000, n_init=20)
features = features.view(-1)
pred_last = kmeans.fit_predict(features)
q = kmeans.cluster_centers_
# deep cluster
print('deep cklustering')
update_interval = 140 # maybe reduce this for sake of time
maxiter = 20000 # maybe reduce this for sake of time
for ite in range(int(maxiter)):
model.train()
if ite % update_interval == 0:
q = None
for batch_idx, data in enumerate(train_loader):
img, _ = data
img = img.to(device)
if not features:
_, q, _ = model(img)
else:
_, new_q, _ = model(img)
torch.cat((q, new_q), 0)
p = target_distribution(
q) # update the auxiliary target distribution p
# evaluate the clustering performance
pred = q.argmax(1)
# check stop criterion
delta_label = np.sum(pred != pred_last).astype(
np.float32) / pred.shape[0]
pred_last = np.copy(pred)
if ite > 0 and delta_label < 0.001: # 0.001 is the tolerance
print('delta_label ', delta_label, '< tol ', 0.001) # tol
print('Reached tolerance threshold. Stopping training.')
break
print('epoch {} of {}'.format(epoch + 1, tot_epochs))
desc = "ITERATION - loss: {:.2f}"
pbar = tqdm.tqdm(desc=desc.format(0),
total=len(train_loader),
leave=False,
file=None,
initial=0)
running_loss = 0
for batch_idx, data in enumerate(train_loader):
img, _ = data
img = img.to(device)
optimizer.zero_grad()
_, q, out = model(img)
loss = loss(q,
p[batch_idx * 128:batch_idx * 128 + 128, :],
out,
gamma=0.1)
running_loss += loss.item()
loss.backward()
optimizer.step()
pbar.desc = desc.format(loss.item())
pbar.update()
print('loss: {}'.format(running_loss / len(train_loader)))
model.eval()
with torch.no_grad():
val_running_loss = 0
for val_batch_idx, val_data in enumerate(val_loader):
val_img, _ = val_data
val_img = val_img.to(device)
_, val_q, val_out = model(val_img)
val_loss = loss(val_q,
p[val_batch_idx * 128:val_batch_idx * 128 +
128, :],
val_out,
gamma=0.1)
val_running_loss += val_loss.item()
if val_running_loss / len(val_loader) < best_val_loss:
torch.save(model.state_dict(), 'overall_weights.pth')
print('val loss: {}'.format(val_running_loss / len(val_loader)))
pbar.close()
| 29.727273
| 79
| 0.581493
|
import numpy as np
import sklearn as skl
import sklearn.cluster as cluster
import sklearn.metrics as metrics
import torch
import torch.distributions.kl as kl
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.utils.data as data
import torchvision
import torchvision.datasets as datasets
import torchvision.transforms as transforms
import tqdm
class CAE_ENC(nn.Module):
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(3, 32, kernel_size=5, padding=2, stride=2)
self.conv2 = nn.Conv2d(32, 64, kernel_size=3, padding=1, stride=2)
self.conv3 = nn.Conv2d(64, 128, kernel_size=3, padding=1, stride=2)
self.conv4 = nn.Conv2d(128, 256, kernel_size=3, padding=1, stride=2)
self.fc1 = nn.Linear(256 * 6 * 6, 1000)
def forward(self, x):
x = F.relu(self.conv1(x))
x = F.relu(self.conv2(x))
x = F.relu(self.conv3(x))
x = F.relu(self.conv4(x))
x = x.view(-1, 256 * 6 * 6)
x = self.fc1(x)
return x
class CAE_DEC(nn.Module):
def __init__(self):
super().__init__()
self.fc2 = nn.Linear(1000, 256 * 6 * 6)
self.deconv1 = nn.ConvTranspose2d(256, 128, 2, stride=2)
self.deconv2 = nn.ConvTranspose2d(128, 64, 2, stride=2)
self.deconv3 = nn.ConvTranspose2d(64, 32, 2, stride=2)
self.deconv4 = nn.ConvTranspose2d(32, 3, 2, stride=2)
self.conv5 = nn.Conv2d(3, 3, kernel_size=1)
def forward(self, x):
x = F.relu(self.fc2(x))
x = x.view(128, 256, 6, 6)
x = F.relu(self.deconv1(x))
x = F.relu(self.deconv2(x))
x = F.relu(self.deconv3(x))
x = F.relu(self.deconv4(x))
x = torch.sigmoid(self.conv5(x))
return x
class ClusteringLayer(nn.Module):
def __init__(self, weights=None, alpha=1.0):
super().__init__()
if weights:
self.weights = weights
else:
self.weights = torch.empty(1000, 1000)
nn.init.xavier_uniform_(self.weights)
self.alpha = alpha
def forward(self, x):
q = 1.0 / (1.0 + (torch.sum(
(x.unsqueeze(1) - self.weights)**2, dim=2) / self.alpha))
q **= (self.alpha + 1.0) / 2.0
q = torch.transpose(
torch.transpose(q, 1, 2) / torch.sum(q, dim=1), 1, 2)
return q
def set_weights(module, weights):
if isinstance(module, ClusteringLayer):
module.weights = weights
class CAE(nn.Module):
def __init__(self):
super().__init__()
self.enc = CAE_ENC()
self.dec = CAE_DEC()
self.clus = ClusteringLayer()
def forward(self, x):
h = self.enc(x)
q = self.clus(h)
o = self.dec(h)
return (h, q, o)
def loss(q, p, o, gamma=0.1):
mse = nn.MSELoss(o)
kld = gamma * kl.kl_divergence(p, q)
l = mse + kld
return l
def target_distribution(q):
weight = q**2 / torch.sum(q, dim=0)
return torch.transpose(torch.transpose(q) / torch.sum(weight, dim=1))
transformations = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225),
inplace=True)
])
dataset1 = datasets.ImageFolder('/beegfs/vag273/ssl_data_96/supervised/train/',
transform=transformations)
dataset2 = datasets.ImageFolder('/beegfs/vag273/ssl_data_96/unsupervised/',
transform=transformations)
dataset = data.ConcatDataset((dataset1, dataset2))
train_ratio = 0.9
train_set_size = int(train_ratio * len(dataset))
val_set_size = len(dataset) - train_set_size
train_data, val_data = data.random_split(dataset,
(train_set_size, val_set_size))
train_loader = data.DataLoader(train_data, batch_size=128, shuffle=True)
val_loader = data.DataLoader(val_data, batch_size=128, shuffle=False)
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
model = CAE().to(device)
optimizer = optim.Adam(model.parameters())
best_val_loss = float('inf')
tot_epochs = 200
print('pretrain')
for epoch in range(tot_epochs):
model.train()
print('epoch {} of {}'.format(epoch + 1, tot_epochs))
desc = "ITERATION - loss: {:.2f}"
pbar = tqdm.tqdm(desc=desc.format(0),
total=len(train_loader),
leave=False,
file=None,
initial=0)
running_loss = 0
for batch_idx, data in enumerate(train_loader):
img, _ = data
img = img.to(device)
optimizer.zero_grad()
_, _, out = model(img)
loss = nn.MSELoss(out, img)
running_loss += loss.item()
loss.backward()
optimizer.step()
pbar.desc = desc.format(loss.item())
pbar.update()
print('loss: {}'.format(running_loss / len(train_loader)))
model.eval()
with torch.no_grad():
val_running_loss = 0
for val_batch_idx, val_data in enumerate(val_loader):
val_img, _ = val_data
val_img = val_img.to(device)
_, _, val_out = model(val_img)
val_loss = nn.MSELoss(val_out, val_img)
val_running_loss += val_loss.item()
if val_running_loss / len(val_loader) < best_val_loss:
torch.save(model.state_dict(), 'weights.pth')
print('val loss: {}'.format(val_running_loss / len(val_loader)))
pbar.close()
features = None
for batch_idx, data in enumerate(train_loader):
img, _ = data
img = img.to(device)
if not features:
features = model(img)
else:
torch.cat((features, model(img)), 0)
kmeans = cluster.kMeans(n_clusters=1000, n_init=20)
features = features.view(-1)
pred_last = kmeans.fit_predict(features)
q = kmeans.cluster_centers_
print('deep cklustering')
update_interval = 140
maxiter = 20000
for ite in range(int(maxiter)):
model.train()
if ite % update_interval == 0:
q = None
for batch_idx, data in enumerate(train_loader):
img, _ = data
img = img.to(device)
if not features:
_, q, _ = model(img)
else:
_, new_q, _ = model(img)
torch.cat((q, new_q), 0)
p = target_distribution(
q)
pred = q.argmax(1)
delta_label = np.sum(pred != pred_last).astype(
np.float32) / pred.shape[0]
pred_last = np.copy(pred)
if ite > 0 and delta_label < 0.001:
print('delta_label ', delta_label, '< tol ', 0.001)
print('Reached tolerance threshold. Stopping training.')
break
print('epoch {} of {}'.format(epoch + 1, tot_epochs))
desc = "ITERATION - loss: {:.2f}"
pbar = tqdm.tqdm(desc=desc.format(0),
total=len(train_loader),
leave=False,
file=None,
initial=0)
running_loss = 0
for batch_idx, data in enumerate(train_loader):
img, _ = data
img = img.to(device)
optimizer.zero_grad()
_, q, out = model(img)
loss = loss(q,
p[batch_idx * 128:batch_idx * 128 + 128, :],
out,
gamma=0.1)
running_loss += loss.item()
loss.backward()
optimizer.step()
pbar.desc = desc.format(loss.item())
pbar.update()
print('loss: {}'.format(running_loss / len(train_loader)))
model.eval()
with torch.no_grad():
val_running_loss = 0
for val_batch_idx, val_data in enumerate(val_loader):
val_img, _ = val_data
val_img = val_img.to(device)
_, val_q, val_out = model(val_img)
val_loss = loss(val_q,
p[val_batch_idx * 128:val_batch_idx * 128 +
128, :],
val_out,
gamma=0.1)
val_running_loss += val_loss.item()
if val_running_loss / len(val_loader) < best_val_loss:
torch.save(model.state_dict(), 'overall_weights.pth')
print('val loss: {}'.format(val_running_loss / len(val_loader)))
pbar.close()
| true
| true
|
f71a8324690c575db20b997daa92e561d98a87c5
| 21,037
|
py
|
Python
|
wsgidav/samples/mysql_dav_provider.py
|
KnoooW/wsgidav
|
2cf357f72d2c835f376f2c1295897cb879ef6bc1
|
[
"MIT"
] | 1
|
2021-12-29T08:27:04.000Z
|
2021-12-29T08:27:04.000Z
|
wsgidav/samples/mysql_dav_provider.py
|
KnoooW/wsgidav
|
2cf357f72d2c835f376f2c1295897cb879ef6bc1
|
[
"MIT"
] | null | null | null |
wsgidav/samples/mysql_dav_provider.py
|
KnoooW/wsgidav
|
2cf357f72d2c835f376f2c1295897cb879ef6bc1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# (c) 2009-2021 Martin Wendt and contributors; see WsgiDAV https://github.com/mar10/wsgidav
# Original PyFileServer (c) 2005 Ho Chun Wei.
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
"""
Implementation of a WebDAV provider that provides a very basic, read-only
resource layer emulation of a MySQL database.
This module is specific to the WsgiDAV application. It provides a
classes ``MySQLBrowserProvider``.
Usage::
(see docs/sample_wsgidav.yaml)
MySQLBrowserProvider(host, user, passwd, db)
host - host of database server
user - user_name to access database
passwd - passwd to access database
db - name of database on database server
The ``MySQLBrowserProvider`` provides a very basic, read-only
resource layer emulation of a MySQL database.
It provides the following interface:
- the root collection shared consists of collections that correspond to
table names
- in each table collection, there is a resource called "_ENTIRE_CONTENTS".
This is a non-collection resource that returns a csv representation of the
entire table
- if the table has a single primary key, each table record will also appear
as a non-collection resource in the table collection using the primary key
value as its name. This resource returns a csv representation of the record
and will also include the record attributes as live properties with
attribute name as property name and table name suffixed with colon as the
property namespace
This is a very basic interface and below is a by no means thorough summary of
its limitations:
- Really only supports having numbers or strings as primary keys. The code uses
a numeric or string comparison that may not hold up if the primary key is
a date or some other datatype.
- There is no handling for cases like BLOBs as primary keys or such. Well, there is
no handling for BLOBs in general.
- When returning contents, it buffers the entire contents! A bad way to return
large tables. Ideally you would have a FileMixin that reads the database even
as the application reads the file object....
- It takes too many database queries to return information.
Ideally there should be some sort of caching for metadata at least, to avoid
unnecessary queries to the database.
"""
import csv
import hashlib
import time
from io import StringIO
import MySQLdb # @UnresolvedImport
from wsgidav import util
from wsgidav.dav_error import (
HTTP_FORBIDDEN,
DAVError,
PRECONDITION_CODE_ProtectedProperty,
)
from wsgidav.dav_provider import DAVProvider, _DAVResource
__docformat__ = "reStructuredText"
_logger = util.get_module_logger(__name__)
class MySQLBrowserResource(_DAVResource):
"""Represents a single existing DAV resource instance.
See also DAVResource and MySQLBrowserProvider.
"""
def __init__(self, provider, path, is_collection, environ):
super().__init__(path, is_collection, environ)
self._cache = None
def _init(self):
"""Read resource information into self._cache, for cached access.
See DAVResource._init()
"""
# TODO: recalc self.path from <self._file_path>, to fix correct file system case
# On windows this would lead to correct URLs
self.provider._count_get_resource_inst_init += 1
tableName, primKey = self.provider._split_path(self.path)
display_type = "Unknown"
displayTypeComment = ""
contentType = "text/html"
# _logger.debug("getInfoDict(%s), nc=%s" % (path, self.connectCount))
if tableName is None:
display_type = "Database"
elif primKey is None: # "database" and table name
display_type = "Database Table"
else:
contentType = "text/csv"
if primKey == "_ENTIRE_CONTENTS":
display_type = "Database Table Contents"
displayTypeComment = "CSV Representation of Table Contents"
else:
display_type = "Database Record"
displayTypeComment = "Attributes available as properties"
# Avoid calling is_collection, since it would call isExisting -> _init_connection
is_collection = primKey is None
self._cache = {
"content_length": None,
"contentType": contentType,
"created": time.time(),
"display_name": self.name,
"etag": hashlib.md5().update(self.path).hexdigest(),
# "etag": md5.new(self.path).hexdigest(),
"modified": None,
"support_ranges": False,
"display_info": {"type": display_type, "typeComment": displayTypeComment},
}
# Some resource-only infos:
if not is_collection:
self._cache["modified"] = time.time()
_logger.debug("---> _init, nc=%s" % self.provider._count_initConnection)
def _get_info(self, info):
if self._cache is None:
self._init()
return self._cache.get(info)
# Getter methods for standard live properties
def get_content_length(self):
return self._get_info("content_length")
def get_content_type(self):
return self._get_info("contentType")
def get_creation_date(self):
return self._get_info("created")
def get_display_name(self):
return self.name
def get_display_info(self):
return self._get_info("display_info")
def get_etag(self):
return self._get_info("etag")
def get_last_modified(self):
return self._get_info("modified")
def get_member_list(self):
"""Return list of (direct) collection member names (UTF-8 byte strings).
See DAVResource.get_member_list()
"""
members = []
conn = self.provider._init_connection()
try:
tableName, primKey = self.provider._split_path(self.path)
if tableName is None:
retlist = self.provider._list_tables(conn)
for name in retlist:
members.append(
MySQLBrowserResource(
self.provider,
util.join_uri(self.path, name),
True,
self.environ,
)
)
elif primKey is None:
pri_key = self.provider._find_primary_key(conn, tableName)
if pri_key is not None:
retlist = self.provider._list_fields(conn, tableName, pri_key)
for name in retlist:
members.append(
MySQLBrowserResource(
self.provider,
util.join_uri(self.path, name),
False,
self.environ,
)
)
members.insert(
0,
MySQLBrowserResource(
self.provider,
util.join_uri(self.path, "_ENTIRE_CONTENTS"),
False,
self.environ,
),
)
finally:
conn.close()
return members
def get_content(self):
"""Open content as a stream for reading.
See DAVResource.get_content()
"""
filestream = StringIO()
tableName, primKey = self.provider._split_path(self.path)
if primKey is not None:
conn = self.provider._init_connection()
listFields = self.provider._get_field_list(conn, tableName)
csvwriter = csv.DictWriter(filestream, listFields, extrasaction="ignore")
dictFields = {}
for field_name in listFields:
dictFields[field_name] = field_name
csvwriter.writerow(dictFields)
if primKey == "_ENTIRE_CONTENTS":
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
cursor.execute("SELECT * from " + self.provider._db + "." + tableName)
result_set = cursor.fetchall()
for row in result_set:
csvwriter.writerow(row)
cursor.close()
else:
row = self.provider._get_record_by_primary_key(conn, tableName, primKey)
if row is not None:
csvwriter.writerow(row)
conn.close()
# this suffices for small dbs, but
# for a production big database, I imagine you would have a FileMixin that
# does the retrieving and population even as the file object is being read
filestream.seek(0)
return filestream
def get_property_names(self, *, is_allprop):
"""Return list of supported property names in Clark Notation.
Return supported live and dead properties. (See also DAVProvider.get_property_names().)
In addition, all table field names are returned as properties.
"""
# Let default implementation return supported live and dead properties
propNames = super().get_property_names(is_allprop=is_allprop)
# Add fieldnames as properties
tableName, primKey = self.provider._split_path(self.path)
if primKey is not None:
conn = self.provider._init_connection()
fieldlist = self.provider._get_field_list(conn, tableName)
for fieldname in fieldlist:
propNames.append("{%s:}%s" % (tableName, fieldname))
conn.close()
return propNames
def get_property_value(self, name):
"""Return the value of a property.
The base implementation handles:
- ``{DAV:}lockdiscovery`` and ``{DAV:}supportedlock`` using the
associated lock manager.
- All other *live* properties (i.e. name starts with ``{DAV:}``) are
delegated to self.getLivePropertyValue()
- Finally, other properties are considered *dead*, and are handled using
the associated property manager, if one is present.
"""
# Return table field as property
tableName, primKey = self.provider._split_path(self.path)
if primKey is not None:
ns, localName = util.split_namespace(name)
if ns == (tableName + ":"):
conn = self.provider._init_connection()
fieldlist = self.provider._get_field_list(conn, tableName)
if localName in fieldlist:
val = self.provider._get_field_by_primary_key(
conn, tableName, primKey, localName
)
conn.close()
return val
conn.close()
# else, let default implementation return supported live and dead properties
return super().get_property_value(name)
def set_property_value(self, name, value, dry_run=False):
"""Set or remove property value.
See DAVResource.set_property_value()
"""
raise DAVError(
HTTP_FORBIDDEN, err_condition=PRECONDITION_CODE_ProtectedProperty
)
# ============================================================================
# MySQLBrowserProvider
# ============================================================================
class MySQLBrowserProvider(DAVProvider):
def __init__(self, host, user, passwd, db):
super().__init__()
self._host = host
self._user = user
self._passwd = passwd
self._db = db
self._count_initConnection = 0
def __repr__(self):
return "%s for db '%s' on '%s' (user: '%s')'" % (
self.__class__.__name__,
self._db,
self._host,
self._user,
)
def _split_path(self, path):
"""Return (tableName, primaryKey) tuple for a request path."""
if path.strip() in (None, "", "/"):
return (None, None)
tableName, primKey = util.save_split(path.strip("/"), "/", 1)
# _logger.debug("'%s' -> ('%s', '%s')" % (path, tableName, primKey))
return (tableName, primKey)
def _init_connection(self):
self._count_initConnection += 1
return MySQLdb.connect(
host=self._host, user=self._user, passwd=self._passwd, db=self._db
)
def _get_field_list(self, conn, table_name):
retlist = []
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
cursor.execute("DESCRIBE " + table_name)
result_set = cursor.fetchall()
for row in result_set:
retlist.append(row["Field"])
cursor.close()
return retlist
def _is_data_type_numeric(self, datatype):
if datatype is None:
return False
# how many MySQL datatypes does it take to change a lig... I mean, store numbers
numerictypes = [
"BIGINT",
"INTT",
"MEDIUMINT",
"SMALLINT",
"TINYINT",
"BIT",
"DEC",
"DECIMAL",
"DOUBLE",
"FLOAT",
"REAL",
"DOUBLE PRECISION",
"INTEGER",
"NUMERIC",
]
datatype = datatype.upper()
for numtype in numerictypes:
if datatype.startswith(numtype):
return True
return False
def _exists_record_by_primary_key(self, conn, table_name, pri_key_value):
pri_key = None
pri_field_type = None
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
cursor.execute("DESCRIBE " + table_name)
result_set = cursor.fetchall()
for row in result_set:
if row["Key"] == "PRI":
if pri_key is None:
pri_key = row["Field"]
pri_field_type = row["Type"]
else:
return False # more than one primary key - multipart key?
cursor.close()
isNumType = self._is_data_type_numeric(pri_field_type)
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
if isNumType:
cursor.execute(
"SELECT "
+ pri_key
+ " FROM "
+ self._db
+ "."
+ table_name
+ " WHERE "
+ pri_key
+ " = "
+ pri_key_value
)
else:
cursor.execute(
"SELECT "
+ pri_key
+ " FROM "
+ self._db
+ "."
+ table_name
+ " WHERE "
+ pri_key
+ " = '"
+ pri_key_value
+ "'"
)
row = cursor.fetchone()
if row is None:
cursor.close()
return False
cursor.close()
return True
def _get_field_by_primary_key(self, conn, table_name, pri_key_value, field_name):
pri_key = None
pri_field_type = None
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
cursor.execute("DESCRIBE " + table_name)
result_set = cursor.fetchall()
for row in result_set:
if row["Key"] == "PRI":
if pri_key is None:
pri_key = row["Field"]
pri_field_type = row["Type"]
else:
return None # more than one primary key - multipart key?
cursor.close()
isNumType = self._is_data_type_numeric(pri_field_type)
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
if isNumType:
cursor.execute(
"SELECT "
+ field_name
+ " FROM "
+ self._db
+ "."
+ table_name
+ " WHERE "
+ pri_key
+ " = "
+ pri_key_value
)
else:
cursor.execute(
"SELECT "
+ field_name
+ " FROM "
+ self._db
+ "."
+ table_name
+ " WHERE "
+ pri_key
+ " = '"
+ pri_key_value
+ "'"
)
row = cursor.fetchone()
if row is None:
cursor.close()
return None
val = util.to_str(row[field_name])
cursor.close()
return val
def _get_record_by_primary_key(self, conn, table_name, pri_key_value):
dictRet = {}
pri_key = None
pri_field_type = None
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
cursor.execute("DESCRIBE " + table_name)
result_set = cursor.fetchall()
for row in result_set:
if row["Key"] == "PRI":
if pri_key is None:
pri_key = row["Field"]
pri_field_type = row["Type"]
else:
return None # more than one primary key - multipart key?
cursor.close()
isNumType = self._is_data_type_numeric(pri_field_type)
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
if isNumType:
cursor.execute(
"SELECT * FROM "
+ self._db
+ "."
+ table_name
+ " WHERE "
+ pri_key
+ " = "
+ pri_key_value
)
else:
cursor.execute(
"SELECT * FROM "
+ self._db
+ "."
+ table_name
+ " WHERE "
+ pri_key
+ " = '"
+ pri_key_value
+ "'"
)
row = cursor.fetchone()
if row is None:
cursor.close()
return None
for fname in row.keys():
dictRet[fname] = util.to_str(row[fname])
cursor.close()
return dictRet
def _find_primary_key(self, conn, table_name):
pri_key = None
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
cursor.execute("DESCRIBE " + table_name)
result_set = cursor.fetchall()
for row in result_set:
fieldname = row["Field"]
keyvalue = row["Key"]
if keyvalue == "PRI":
if pri_key is None:
pri_key = fieldname
else:
return None # more than one primary key - multipart key?
cursor.close()
return pri_key
def _list_fields(self, conn, table_name, field_name):
retlist = []
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
cursor.execute("SELECT " + field_name + " FROM " + self._db + "." + table_name)
result_set = cursor.fetchall()
for row in result_set:
retlist.append(util.to_str(row[field_name]))
cursor.close()
return retlist
def _list_tables(self, conn):
retlist = []
cursor = conn.cursor()
cursor.execute("SHOW TABLES")
result_set = cursor.fetchall()
for row in result_set:
retlist.append("%s" % (row[0]))
cursor.close()
return retlist
def get_resource_inst(self, path, environ):
"""Return info dictionary for path.
See get_resource_inst()
"""
# TODO: calling exists() makes directory browsing VERY slow.
# At least compared to PyFileServer, which simply used string
# functions to get display_type and displayTypeComment
self._count_get_resource_inst += 1
if not self.exists(path, environ):
return None
_tableName, primKey = self._split_path(path)
is_collection = primKey is None
return MySQLBrowserResource(self, path, is_collection, environ)
def exists(self, path, environ):
tableName, primKey = self._split_path(path)
if tableName is None:
return True
try:
conn = None
conn = self._init_connection()
# Check table existence:
tbllist = self._list_tables(conn)
if tableName not in tbllist:
return False
# Check table key existence:
if primKey and primKey != "_ENTIRE_CONTENTS":
return self._exists_record_by_primary_key(conn, tableName, primKey)
return True
finally:
if conn:
conn.close()
def is_collection(self, path, environ):
_tableName, primKey = self._split_path(path)
return self.exists(path, environ) and primKey is None
| 34.657331
| 95
| 0.553216
|
import csv
import hashlib
import time
from io import StringIO
import MySQLdb
from wsgidav import util
from wsgidav.dav_error import (
HTTP_FORBIDDEN,
DAVError,
PRECONDITION_CODE_ProtectedProperty,
)
from wsgidav.dav_provider import DAVProvider, _DAVResource
__docformat__ = "reStructuredText"
_logger = util.get_module_logger(__name__)
class MySQLBrowserResource(_DAVResource):
    """A single existing DAV resource backed by MySQL.

    A resource is one of: the database root, a table, a record, or the
    pseudo-record ``_ENTIRE_CONTENTS`` (a CSV dump of a whole table).
    """

    def __init__(self, provider, path, is_collection, environ):
        super().__init__(path, is_collection, environ)
        self._cache = None  # lazily filled by _init()

    def _init(self):
        """Fill self._cache with static resource attributes (lazy)."""
        self.provider._count_get_resource_inst_init += 1
        tableName, primKey = self.provider._split_path(self.path)
        display_type = "Unknown"
        displayTypeComment = ""
        contentType = "text/html"
        if tableName is None:
            display_type = "Database"
        elif primKey is None:
            display_type = "Database Table"
        else:
            contentType = "text/csv"
            if primKey == "_ENTIRE_CONTENTS":
                display_type = "Database Table Contents"
                displayTypeComment = "CSV Representation of Table Contents"
            else:
                display_type = "Database Record"
                displayTypeComment = "Attributes available as properties"
        is_collection = primKey is None
        self._cache = {
            "content_length": None,
            "contentType": contentType,
            "created": time.time(),
            "display_name": self.name,
            # Bug fix: md5().update() returns None, so the original
            # `hashlib.md5().update(self.path).hexdigest()` raised
            # AttributeError (and update() needs bytes, not str).
            "etag": hashlib.md5(self.path.encode()).hexdigest(),
            "modified": None,
            "support_ranges": False,
            "display_info": {"type": display_type, "typeComment": displayTypeComment},
        }
        # Non-collections (records / CSV dumps) report a modification time.
        if not is_collection:
            self._cache["modified"] = time.time()
        # Lazy %-args: only formatted when DEBUG logging is enabled.
        _logger.debug("---> _init, nc=%s", self.provider._count_initConnection)

    def _get_info(self, info):
        """Return one cached attribute, building the cache on first use."""
        if self._cache is None:
            self._init()
        return self._cache.get(info)

    def get_content_length(self):
        return self._get_info("content_length")

    def get_content_type(self):
        return self._get_info("contentType")

    def get_creation_date(self):
        return self._get_info("created")

    def get_display_name(self):
        return self.name

    def get_display_info(self):
        return self._get_info("display_info")

    def get_etag(self):
        return self._get_info("etag")

    def get_last_modified(self):
        return self._get_info("modified")

    def get_member_list(self):
        """Return direct members: tables for the root, records (plus the
        _ENTIRE_CONTENTS pseudo-record) for a table."""
        members = []
        conn = self.provider._init_connection()
        try:
            tableName, primKey = self.provider._split_path(self.path)
            if tableName is None:
                # Root: one collection member per table.
                retlist = self.provider._list_tables(conn)
                for name in retlist:
                    members.append(
                        MySQLBrowserResource(
                            self.provider,
                            util.join_uri(self.path, name),
                            True,
                            self.environ,
                        )
                    )
            elif primKey is None:
                # Table: one non-collection member per primary-key value.
                pri_key = self.provider._find_primary_key(conn, tableName)
                if pri_key is not None:
                    retlist = self.provider._list_fields(conn, tableName, pri_key)
                    for name in retlist:
                        members.append(
                            MySQLBrowserResource(
                                self.provider,
                                util.join_uri(self.path, name),
                                False,
                                self.environ,
                            )
                        )
                # The CSV dump of the whole table always comes first.
                members.insert(
                    0,
                    MySQLBrowserResource(
                        self.provider,
                        util.join_uri(self.path, "_ENTIRE_CONTENTS"),
                        False,
                        self.environ,
                    ),
                )
        finally:
            conn.close()
        return members

    def get_content(self):
        """Return the resource content as a seekable stream (CSV for a
        record or a whole-table dump; empty for collections)."""
        filestream = StringIO()
        tableName, primKey = self.provider._split_path(self.path)
        if primKey is not None:
            conn = self.provider._init_connection()
            try:  # robustness: close the connection even if a query fails
                listFields = self.provider._get_field_list(conn, tableName)
                csvwriter = csv.DictWriter(
                    filestream, listFields, extrasaction="ignore"
                )
                # Header row maps each field name to itself.
                csvwriter.writerow({name: name for name in listFields})
                if primKey == "_ENTIRE_CONTENTS":
                    cursor = conn.cursor(MySQLdb.cursors.DictCursor)
                    try:
                        cursor.execute(
                            "SELECT * from " + self.provider._db + "." + tableName
                        )
                        for row in cursor.fetchall():
                            csvwriter.writerow(row)
                    finally:
                        cursor.close()
                else:
                    row = self.provider._get_record_by_primary_key(
                        conn, tableName, primKey
                    )
                    if row is not None:
                        csvwriter.writerow(row)
            finally:
                conn.close()
        filestream.seek(0)
        return filestream

    def get_property_names(self, *, is_allprop):
        """Standard DAV property names plus one live property per table
        column (namespace '{<table>:}')."""
        propNames = super().get_property_names(is_allprop=is_allprop)
        tableName, primKey = self.provider._split_path(self.path)
        if primKey is not None:
            conn = self.provider._init_connection()
            try:  # robustness: close the connection even if a query fails
                for fieldname in self.provider._get_field_list(conn, tableName):
                    propNames.append("{%s:}%s" % (tableName, fieldname))
            finally:
                conn.close()
        return propNames

    def get_property_value(self, name):
        """Return the value of a table-column property; defer everything
        else to the base class."""
        tableName, primKey = self.provider._split_path(self.path)
        if primKey is not None:
            ns, localName = util.split_namespace(name)
            if ns == (tableName + ":"):
                conn = self.provider._init_connection()
                try:  # robustness: close the connection on any exit path
                    fieldlist = self.provider._get_field_list(conn, tableName)
                    if localName in fieldlist:
                        return self.provider._get_field_by_primary_key(
                            conn, tableName, primKey, localName
                        )
                finally:
                    conn.close()
        return super().get_property_value(name)

    def set_property_value(self, name, value, dry_run=False):
        """Properties mirror table data and are read-only: always refuse."""
        raise DAVError(
            HTTP_FORBIDDEN, err_condition=PRECONDITION_CODE_ProtectedProperty
        )
class MySQLBrowserProvider(DAVProvider):
    """WsgiDAV provider that publishes a MySQL database read-only.

    Layout: ``/`` -> database root, ``/<table>`` -> table collection,
    ``/<table>/<key>`` -> one record, ``/<table>/_ENTIRE_CONTENTS`` -> CSV
    dump of the whole table.
    """

    def __init__(self, host, user, passwd, db):
        super().__init__()
        self._host = host
        self._user = user
        self._passwd = passwd
        self._db = db
        self._count_initConnection = 0

    def __repr__(self):
        return "%s for db '%s' on '%s' (user: '%s')'" % (
            self.__class__.__name__,
            self._db,
            self._host,
            self._user,
        )

    def _split_path(self, path):
        """Split a DAV path into (table_name, primary_key); either part may
        be None ('/' -> (None, None), '/t' -> ('t', None))."""
        if path.strip() in (None, "", "/"):
            return (None, None)
        tableName, primKey = util.save_split(path.strip("/"), "/", 1)
        return (tableName, primKey)

    def _init_connection(self):
        """Open a fresh connection to the configured database."""
        self._count_initConnection += 1
        return MySQLdb.connect(
            host=self._host, user=self._user, passwd=self._passwd, db=self._db
        )

    @staticmethod
    def _quote_identifier(name):
        """Backtick-quote a MySQL identifier (table/column name).

        Identifiers cannot be bound as query parameters, so they are
        escaped here; table names originate from the untrusted DAV path.
        """
        return "`%s`" % name.replace("`", "``")

    def _get_field_list(self, conn, table_name):
        """Return the column names of *table_name* in definition order."""
        cursor = conn.cursor(MySQLdb.cursors.DictCursor)
        try:
            cursor.execute("DESCRIBE " + self._quote_identifier(table_name))
            return [row["Field"] for row in cursor.fetchall()]
        finally:
            cursor.close()

    def _is_data_type_numeric(self, datatype):
        """True if a DESCRIBE type string denotes a numeric column.

        Kept for compatibility; the queries below no longer need the
        distinction because key values are bound as parameters.
        """
        if datatype is None:
            return False
        # Bug fix: "INT" was misspelled "INTT", so plain INT columns were
        # classified as non-numeric.
        numerictypes = [
            "BIGINT",
            "INT",
            "MEDIUMINT",
            "SMALLINT",
            "TINYINT",
            "BIT",
            "DEC",
            "DECIMAL",
            "DOUBLE",
            "FLOAT",
            "REAL",
            "DOUBLE PRECISION",
            "INTEGER",
            "NUMERIC",
        ]
        datatype = datatype.upper()
        return any(datatype.startswith(t) for t in numerictypes)

    def _find_primary_key(self, conn, table_name):
        """Name of the single-column primary key, or None if the table has
        no primary key or a multipart one."""
        pri_key = None
        cursor = conn.cursor(MySQLdb.cursors.DictCursor)
        try:
            cursor.execute("DESCRIBE " + self._quote_identifier(table_name))
            for row in cursor.fetchall():
                if row["Key"] != "PRI":
                    continue
                if pri_key is not None:
                    return None  # multipart primary key is not supported
                pri_key = row["Field"]
            return pri_key
        finally:
            # close even on the multipart early return (original leaked it)
            cursor.close()

    def _select_record(self, conn, table_name, pri_key_value, columns):
        """Run ``SELECT <columns> ... WHERE <pk> = %s`` and return the row
        dict, or None (no usable primary key / no matching row).

        The key value is bound as a parameter: no SQL injection via the DAV
        path, and MySQL coerces it for numeric and string columns alike
        (which also removes the old numeric/string query branch).
        """
        pri_key = self._find_primary_key(conn, table_name)
        if pri_key is None:
            return None
        cursor = conn.cursor(MySQLdb.cursors.DictCursor)
        try:
            cursor.execute(
                "SELECT "
                + columns
                + " FROM "
                + self._quote_identifier(self._db)
                + "."
                + self._quote_identifier(table_name)
                + " WHERE "
                + self._quote_identifier(pri_key)
                + " = %s",
                (pri_key_value,),
            )
            return cursor.fetchone()
        finally:
            cursor.close()

    def _exists_record_by_primary_key(self, conn, table_name, pri_key_value):
        """True if a record with the given primary-key value exists."""
        return self._select_record(conn, table_name, pri_key_value, "*") is not None

    def _get_field_by_primary_key(self, conn, table_name, pri_key_value, field_name):
        """Value of *field_name* (as str) for the addressed record, or None."""
        row = self._select_record(
            conn, table_name, pri_key_value, self._quote_identifier(field_name)
        )
        if row is None:
            return None
        return util.to_str(row[field_name])

    def _get_record_by_primary_key(self, conn, table_name, pri_key_value):
        """Whole addressed record as {column: str-value}, or None."""
        row = self._select_record(conn, table_name, pri_key_value, "*")
        if row is None:
            return None
        return {fname: util.to_str(row[fname]) for fname in row.keys()}

    def _list_fields(self, conn, table_name, field_name):
        """All values of column *field_name* across *table_name*, as str."""
        cursor = conn.cursor(MySQLdb.cursors.DictCursor)
        try:
            cursor.execute(
                "SELECT "
                + self._quote_identifier(field_name)
                + " FROM "
                + self._quote_identifier(self._db)
                + "."
                + self._quote_identifier(table_name)
            )
            return [util.to_str(row[field_name]) for row in cursor.fetchall()]
        finally:
            cursor.close()

    def _list_tables(self, conn):
        """Names of all tables in the connected database."""
        cursor = conn.cursor()
        try:
            cursor.execute("SHOW TABLES")
            return ["%s" % (row[0]) for row in cursor.fetchall()]
        finally:
            cursor.close()

    def get_resource_inst(self, path, environ):
        """Return a MySQLBrowserResource for *path*, or None if it does not
        exist."""
        # TODO: calling exists() makes directory browsing VERY slow
        # (one extra DB round-trip per directory entry).
        self._count_get_resource_inst += 1
        if not self.exists(path, environ):
            return None
        _tableName, primKey = self._split_path(path)
        is_collection = primKey is None
        return MySQLBrowserResource(self, path, is_collection, environ)

    def exists(self, path, environ):
        """True if *path* names the root, an existing table, or an existing
        record (_ENTIRE_CONTENTS always exists for an existing table)."""
        tableName, primKey = self._split_path(path)
        if tableName is None:
            return True
        conn = None
        try:
            conn = self._init_connection()
            # Check table existence:
            if tableName not in self._list_tables(conn):
                return False
            # Check table key existence:
            if primKey and primKey != "_ENTIRE_CONTENTS":
                return self._exists_record_by_primary_key(conn, tableName, primKey)
            return True
        finally:
            if conn:
                conn.close()

    def is_collection(self, path, environ):
        """Collections are the database root and tables (no record key)."""
        _tableName, primKey = self._split_path(path)
        return self.exists(path, environ) and primKey is None
| true
| true
|
f71a833507215096556a8d151f20ec58347e380e
| 1,804
|
py
|
Python
|
meraki_sdk/models/switch_profile_port_model.py
|
meraki/meraki-python-sdk
|
9894089eb013318243ae48869cc5130eb37f80c0
|
[
"MIT"
] | 37
|
2019-04-24T14:01:33.000Z
|
2022-01-28T01:37:21.000Z
|
meraki_sdk/models/switch_profile_port_model.py
|
ankita66666666/meraki-python-sdk
|
9894089eb013318243ae48869cc5130eb37f80c0
|
[
"MIT"
] | 10
|
2019-07-09T16:35:11.000Z
|
2021-12-07T03:47:53.000Z
|
meraki_sdk/models/switch_profile_port_model.py
|
ankita66666666/meraki-python-sdk
|
9894089eb013318243ae48869cc5130eb37f80c0
|
[
"MIT"
] | 17
|
2019-04-30T23:53:21.000Z
|
2022-02-07T22:57:44.000Z
|
# -*- coding: utf-8 -*-
"""
meraki_sdk
This file was automatically generated for meraki by APIMATIC v2.0 ( https://apimatic.io ).
"""
class SwitchProfilePortModel(object):
    """Implementation of the 'SwitchProfilePort' model.

    Attributes:
        profile (string): Profile identifier.
        port_id (string): Port identifier of switch port. For modules, the
            identifier is "SlotNumber_ModuleType_PortNumber" (Ex:
            “1_8X10G_1”), otherwise it is just the port number (Ex: "8").
    """

    # Maps model attribute names to the property names used in API JSON.
    _names = {
        "profile": 'profile',
        "port_id": 'portId',
    }

    def __init__(self, profile=None, port_id=None):
        """Constructor for the SwitchProfilePortModel class"""
        self.profile = profile
        self.port_id = port_id

    @classmethod
    def from_dictionary(cls, dictionary):
        """Build an instance from a deserialized API response.

        Args:
            dictionary (dictionary): A dictionary representation of the
                object, keyed by the API property names.

        Returns:
            object: A populated instance, or None if *dictionary* is None.
        """
        if dictionary is None:
            return None
        return cls(dictionary.get('profile'), dictionary.get('portId'))
| 27.333333
| 95
| 0.583703
|
class SwitchProfilePortModel(object):
    """Model for one port of a switch profile (API type 'SwitchProfilePort')."""

    # Maps model attribute names to the property names used in API JSON.
    _names = {
        "profile":'profile',
        "port_id":'portId'
    }
    def __init__(self,
                 profile=None,
                 port_id=None):
        """Initialize with a profile identifier and a port identifier."""
        self.profile = profile
        self.port_id = port_id
    @classmethod
    def from_dictionary(cls,
                        dictionary):
        """Build an instance from a deserialized API dict; None passes through."""
        if dictionary is None:
            return None
        profile = dictionary.get('profile')
        port_id = dictionary.get('portId')
        return cls(profile,
                   port_id)
| true
| true
|
f71a8426ac96ef7e52fb2b1c74212768cb00e3c5
| 497
|
py
|
Python
|
sprites/blocker.py
|
ErezOr18/pygame-space-invaders
|
f2e129bb2e2e18470599573910e6cad34f501df8
|
[
"MIT"
] | null | null | null |
sprites/blocker.py
|
ErezOr18/pygame-space-invaders
|
f2e129bb2e2e18470599573910e6cad34f501df8
|
[
"MIT"
] | null | null | null |
sprites/blocker.py
|
ErezOr18/pygame-space-invaders
|
f2e129bb2e2e18470599573910e6cad34f501df8
|
[
"MIT"
] | null | null | null |
from pygame import *
class Blocker(sprite.Sprite):
    """A solid square sprite used as one cell of a destructible barrier."""

    def __init__(self, size, color, row, column):
        """Create a *size* x *size* square of *color* at grid (row, column).

        NOTE(review): the pixel position (self.rect) is left at (0, 0);
        presumably the caller positions the rect after construction —
        confirm against the spawning code.
        """
        sprite.Sprite.__init__(self)
        self.height = size
        self.width = size
        self.color = color
        self.image = Surface((self.width, self.height))
        self.image.fill(self.color)
        self.rect = self.image.get_rect()
        self.row = row
        self.column = column

    def update(self, game, keys, *args):
        """Redraw this blocker onto the game screen each frame."""
        game.screen .blit(self.image, self.rect)
| 27.611111
| 55
| 0.607646
|
from pygame import *
class Blocker(sprite.Sprite):
    """A solid square sprite used as one cell of a destructible barrier."""

    def __init__(self, size, color, row, column):
        """Create a *size* x *size* square of *color* at grid (row, column).

        NOTE(review): the pixel position (self.rect) is left at (0, 0);
        presumably the caller positions the rect after construction —
        confirm against the spawning code.
        """
        sprite.Sprite.__init__(self)
        self.height = size
        self.width = size
        self.color = color
        self.image = Surface((self.width, self.height))
        self.image.fill(self.color)
        self.rect = self.image.get_rect()
        self.row = row
        self.column = column

    def update(self, game, keys, *args):
        """Redraw this blocker onto the game screen each frame."""
        game.screen .blit(self.image, self.rect)
| true
| true
|
f71a84f7b27fecc7c26682e691d6999bc0138353
| 245
|
py
|
Python
|
chaptertwo/famousquote2.py
|
cmotek/python_crashcourse
|
29cbdd6699cd17192bb599d235852d547630d110
|
[
"Apache-2.0"
] | null | null | null |
chaptertwo/famousquote2.py
|
cmotek/python_crashcourse
|
29cbdd6699cd17192bb599d235852d547630d110
|
[
"Apache-2.0"
] | null | null | null |
chaptertwo/famousquote2.py
|
cmotek/python_crashcourse
|
29cbdd6699cd17192bb599d235852d547630d110
|
[
"Apache-2.0"
] | null | null | null |
# Print an attributed quotation: the author's name followed by a passage
# from Moby-Dick, concatenated into one sentence.
famousauthor = "Herman Melville"
print(famousauthor + ' wrote in Moby Dick, "Now then, thought I, unconsciously rolling up the sleeves of my frock, here goes a cool, collected dive at death and destruction, and the devil fetch the hindmost."')
| 61.25
| 210
| 0.767347
|
# Print an attributed quotation: the author's name followed by a passage
# from Moby-Dick, concatenated into one sentence.
famousauthor = "Herman Melville"
print(famousauthor + ' wrote in Moby Dick, "Now then, thought I, unconsciously rolling up the sleeves of my frock, here goes a cool, collected dive at death and destruction, and the devil fetch the hindmost."')
| true
| true
|
f71a8535015df8c0b0c3d55332640c315d8527a4
| 162,189
|
py
|
Python
|
jp.atcoder/abc081/arc086_b/17664033.py
|
kagemeka/atcoder-submissions
|
91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e
|
[
"MIT"
] | 1
|
2022-02-09T03:06:25.000Z
|
2022-02-09T03:06:25.000Z
|
jp.atcoder/abc081/arc086_b/17664033.py
|
kagemeka/atcoder-submissions
|
91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e
|
[
"MIT"
] | 1
|
2022-02-05T22:53:18.000Z
|
2022-02-09T01:29:30.000Z
|
jp.atcoder/abc081/arc086_b/17664033.py
|
kagemeka/atcoder-submissions
|
91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e
|
[
"MIT"
] | null | null | null |
import itertools
import math
import string
import sys
from bisect import bisect_left as bi_l
from bisect import bisect_right as bi_r
from collections import Counter, defaultdict, deque
from functools import lru_cache, reduce
from heapq import heapify, heappop, heappush
from operator import or_, xor
sys.setrecursionlimit(10**7)
inf = float("inf")
MOD = 10**9 + 7
# MOD = 998244353
using_numpy = 1
import networkx as nx
import numpy as np
from numba import i8, njit
from scipy import optimize
from scipy.ndimage import distance_transform_cdt
from scipy.sparse import csr_matrix
from scipy.sparse.csgraph import (
connected_components,
csgraph_to_dense,
maximum_flow,
minimum_spanning_tree,
shortest_path,
)
from scipy.spatial import ConvexHull
from scipy.special import comb
class Algebra:
class Modular(int):
def __init__(self, n, mod=MOD):
self.value = n
self.mod = mod
def __str__(self):
return f"{self.value}"
def __add__(self, other):
return self.__class__((self.value + other.value) % self.mod)
def __sub__(self, x):
return self.__class__((self.value - x.value) % self.mod)
def __mul__(self, x):
return self.__class__((self.value * x.value) % self.mod)
def __pow__(self, x):
return self.__class__(pow(self.value, x.value, self.mod))
def __lt__(self, x):
return self.value < x.value
def __le__(self, x):
return self.value <= x.value
def __eq__(self, x):
return self.value == x.value
def __ne__(self, x):
return self.value != x.value
def __gt__(self, x):
return self.value > x.value
def __ge__(self, x):
return self.value >= x.value
class SemiGroup:
pass
class Monoid:
pass
class Group:
pass
class SemiRing:
pass
class Ring:
pass
@staticmethod
def identity(n):
if using_numpy:
return np.identity(n, dtype=np.int64)
else:
a = [[0] * n for _ in range(n)]
for i in range(n):
a[i][i] = 1
return a
@staticmethod
def dot(a, b):
if using_numpy:
return np.dot(a, b)
else:
h, w, l = len(a), len(b[0]), len(b)
assert len(a[0]) == l
c = [[0] * w for _ in range(h)]
for i in range(h):
for j in range(w):
for k in range(l):
c[i][j] += a[i][k] * b[k][j]
return c
@classmethod
def matrix_pow(cls, a, n, mod=10**9 + 7):
m = len(a)
b = cls.identity(m)
while n:
if n & 1:
b = cls.dot(b, a)
n >>= 1
a = cls.dot(a, a)
if using_numpy:
a %= mod
b %= mod
else:
for i in range(m):
for j in range(m):
a[i][j] %= mod
b[i][j] %= mod
return b
@staticmethod
def bitwise_dot(a, b):
if using_numpy:
return np.bitwise_xor.reduce(
a[:, None, :] & b.T[None, :, :], axis=-1
)
else:
h, w, l = len(a), len(b[0]), len(b)
assert len(a[0]) == l
c = [[0] * w for _ in range(h)]
for i in range(h):
for j in range(w):
for k in range(l):
c[i][j] ^= a[i][k] & b[k][j]
return c
@classmethod
def bitwise_mat_pow(cls, a, n):
if n == 0:
return np.eye(len(a), dtype=np.uint32) * ((1 << 32) - 1)
res = cls.bitwise_mat_pow(a, n // 2)
res = cls.bitwise_dot(res, res)
return cls.bitwise_dot(res, a) if n & 1 else res
@staticmethod
def cumprod(a, mod):
l = len(a)
sql = int(np.sqrt(l) + 1)
a = np.resize(a, sql**2).reshape(sql, sql)
for i in range(sql - 1):
a[:, i + 1] *= a[:, i]
a[:, i + 1] %= mod
for i in range(sql - 1):
a[i + 1] *= a[i, -1]
a[i + 1] %= mod
return np.ravel(a)[:l]
@classmethod
def generate_fac_ifac(cls, n, p=MOD):
if using_numpy:
fac = np.arange(n + 1)
fac[0] = 1
fac = cls.cumprod(fac, p)
ifac = np.arange(n + 1, 0, -1)
ifac[0] = pow(int(fac[-1]), p - 2, p)
ifac = cls.cumprod(ifac, p)[n::-1]
else:
fac = [None] * (n + 1)
fac[0] = 1
for i in range(n):
fac[i + 1] = fac[i] * (i + 1) % p
ifac = [None] * (n + 1)
ifac[n] = pow(fac[n], p - 2, p)
for i in range(n, 0, -1):
ifac[i - 1] = ifac[i] * i % p
return fac, ifac
class Kitamasa:
pass
mint = Algebra.Modular
class NumberTheory:
    """Number-theory helpers: prime sieve, factorization, gcd/lcm,
    divisor enumeration and base conversion."""

    class PrimeNumbers:  # pn
        """Sieve of Eratosthenes up to *n* with memoized factorization."""

        def __init__(self, n=2 * 10**6):
            self.is_prime, self.prime_nums = self.find(n)

        def __call__(self, n):
            """True if *n* is prime (table lookup)."""
            return self.is_prime[n]

        def __iter__(self):
            return iter(self.prime_nums)

        def __getitem__(self, key):
            return self.prime_nums[key]

        @staticmethod
        def find(n):  # Sieve of eratosthenes
            """Return (is_prime table, ascending primes) for 0..n."""
            if using_numpy:
                # Bug fix: the original used dtype=np.bool, an alias that
                # was removed in NumPy 1.24 and now raises AttributeError;
                # the builtin bool is the supported spelling.
                is_prime = np.ones(n + 1, dtype=bool)
                is_prime[:2] = 0
                for i in range(2, int(n**0.5) + 1):
                    if is_prime[i]:
                        is_prime[i * 2 :: i] = 0
                prime_nums = np.flatnonzero(is_prime)
            else:
                is_prime = [True] * (n + 1)
                is_prime[0] = is_prime[1] = 0
                for i in range(2, int(n**0.5) + 1):
                    if not is_prime[i]:
                        continue
                    for j in range(i * 2, n + 1, i):
                        is_prime[j] = 0
                prime_nums = [i for i in range(2, n + 1) if is_prime[i]]
            return is_prime, prime_nums

        @lru_cache(maxsize=None)
        def factorize(self, n):
            """Prime factorization of *n* as {prime: exponent}.

            NOTE(review): lru_cache on an instance method pins the instance
            for the cache's lifetime (flake8 B019); tolerable because the
            sieve object is normally a long-lived singleton.
            """
            res = defaultdict(int)
            if n < 2:
                return res
            for p in self:
                if p * p > n:
                    break
                while n % p == 0:
                    res[p] += 1
                    n //= p
                if n == 1:
                    return res
            # Whatever remains is a single prime factor > sqrt(original n).
            res[n] = 1
            return res

        def factorize_factorial(self, n):
            """Prime factorization of n! as {prime: exponent}."""
            res = defaultdict(int)
            for i in range(2, n + 1):
                for p, c in self.factorize(i).items():
                    res[p] += c
            return res

    @classmethod
    @lru_cache(maxsize=None)
    def gcd(cls, a, b):
        """Greatest common divisor (Euclid's algorithm, memoized)."""
        return cls.gcd(b, a % b) if b else abs(a)

    @classmethod
    def lcm(cls, a, b):
        """Least common multiple, via gcd."""
        return abs(a // cls.gcd(a, b) * b)

    @staticmethod
    def find_divisors(n):
        """Sorted list of all positive divisors of *n*, in O(sqrt n)."""
        divisors = []
        for i in range(1, int(n**0.5) + 1):
            if n % i:
                continue
            divisors.append(i)
            j = n // i
            if j != i:
                divisors.append(j)
        return sorted(divisors)

    @staticmethod
    def base_convert(n, b):
        """Digits of *n* in base *b*, least-significant digit first
        (also handles negative bases via the remainder adjustment)."""
        if not n:
            return [0]
        res = []
        while n:
            n, r = divmod(n, b)
            if r < 0:
                n += 1
                r -= b
            res.append(r)
        return res
class Combinatorics:
    """Binomial coefficients (plain and mod-p), permutations, combinations."""

    @classmethod
    @lru_cache(maxsize=None)
    def choose(cls, n, r, mod=None):
        """n choose r via Pascal's rule (memoized); optional modulus."""
        if r > n or r < 0:
            return 0
        if r == 0:
            return 1
        res = cls.choose(n - 1, r, mod) + cls.choose(n - 1, r - 1, mod)
        if mod:
            res %= mod
        return res

    class CombinationsMod:
        """O(1) nCr modulo a prime after O(n) factorial precomputation."""

        def __init__(self, n=2 * 10**6, mod=MOD):
            self.__mod = mod
            self.fac, self.ifac = Algebra.generate_fac_ifac(n, mod)

        def __call__(self, n, r):
            return self.__choose(n, r)

        def __choose(self, n, r):
            # bl is 0/1 for out-of-range r (also works element-wise with
            # numpy array arguments).
            bl = (0 <= r) & (r <= n)
            p = self.__mod
            return bl * self.fac[n] * self.ifac[r] % p * self.ifac[n - r] % p

        def make_nchoose_table(self, n):
            """Table of C(n, i) for i = 0..len(fac)-1.

            Bug fix: the original read ``self.__fac``, but the factorials
            are stored as ``self.fac``; name mangling turned every call
            into an AttributeError.
            """
            p = self.__mod
            r = len(self.fac) - 1
            if using_numpy:
                n_choose = np.arange(n + 1, n - r, -1)
                n_choose[0] = 1
                n_choose = Algebra.cumprod(n_choose, p) * self.ifac % p
            else:
                n_choose = [None] * (r + 1)
                n_choose[0] = 1
                for i in range(r):
                    n_choose[i + 1] = n_choose[i] * (n - i) % p
                for i in range(1, r + 1):
                    n_choose[i] = n_choose[i] * self.ifac[i] % p
            return n_choose

    @classmethod
    def permutations(cls, a, r=None, i=0):
        """All r-permutations of *a*, generated by recursive prefix swaps."""
        a = list(a)
        n = len(a)
        if r is None:
            r = n
        res = []
        if r > n or i > r:
            return res
        if i == r:
            return [tuple(a[:r])]
        for j in range(i, n):
            a[i], a[j] = a[j], a[i]
            res += cls.permutations(a, r, i + 1)
        return res

    @staticmethod
    def combinations(a, r):
        """Yield the r-combinations of *a* in lexicographic index order."""
        a = tuple(a)
        n = len(a)
        if r > n:
            return
        indices = list(range(r))
        yield a[:r]
        while True:
            # Find the rightmost index that can still be advanced.
            for i in range(r - 1, -1, -1):
                if indices[i] != i + n - r:
                    break
            else:
                return
            indices[i] += 1
            for j in range(i + 1, r):
                indices[j] = indices[j - 1] + 1
            yield tuple(a[i] for i in indices)
class DP:
    """Dynamic-programming helpers."""

    @staticmethod
    def LIS(a):
        """Patience-style O(n log n) LIS tail table.

        Returns a list as long as *a* whose first k entries are the
        smallest possible tails of increasing subsequences of lengths
        1..k; unused slots stay at infinity.
        """
        tails = [float("inf")] * len(a)
        for value in a:
            tails[bi_l(tails, value)] = value
        return tails
class String:
    """String algorithms."""

    @staticmethod
    def z_algorithm(s):
        """Z-array of *s*: a[i] is the length of the longest common prefix
        of s and s[i:], with a[0] defined as len(s). Runs in O(n) using the
        standard two-pointer (l, r) window.

        Bug fix: the original assigned a[0] = n unconditionally and raised
        IndexError for the empty string; it now returns [].
        """
        n = len(s)
        a = [0] * n
        if n == 0:
            return a
        a[0] = n
        l = r = -1
        for i in range(1, n):
            if r >= i:
                # Reuse the match already known inside the [l, r) window.
                a[i] = min(a[i - l], r - i)
            while i + a[i] < n and s[i + a[i]] == s[a[i]]:
                a[i] += 1
            if i + a[i] >= r:
                l, r = i, i + a[i]
        return a
class GeometryTopology:
class Graph:
class __Edge:
def __init__(self, weight=1, capacity=1, **args):
self.weight = weight
self.capacity = capacity
def __str__(self):
return f"weight: {self.weight}, cap: {self.capacity}"
class __Node:
def __init__(self, **args):
pass
def __init__(self, n=0):
self.__N = n
self.nodes = [None] * n
self.edges = [{} for _ in range(n)]
def add_node_info(self, v, **args):
self.nodes[v] = self.__Node(**args)
def add_edge(self, u, v, update=False, **args):
if not update and v in self.edges[u]:
return
self.edges[u][v] = self.__Edge(**args)
def get_size(self):
return self.__N
def bfs(self, src=0):
n = self.__N
self.depth = self.lv = lv = [None] * n
lv[src] = 0 # depth in tree, or level in general graph.
self.dist = dist = [inf] * n
dist[src] = 0 # dist for only tree.
self.parent = par = [None] * n
par[src] = src
q = deque([src])
while q:
u = q.popleft()
for v, e in self.edges[u].items():
if e.capacity == 0 or lv[v] is not None:
continue
lv[v], dist[v], par[v] = lv[u] + 1, dist[u] + e.weight, u
q.append(v)
return dist
def dinic(self, src, sink):
def flow_to_sink(u, flow_in):
if u == sink:
return flow_in
flow = 0
for v, e in self.edges[u].items():
if e.capacity == 0 or self.lv[v] <= self.lv[u]:
continue
f = flow_to_sink(v, min(flow_in, e.capacity))
if not f:
continue
self.edges[u][v].capacity -= f
if u in self.edges[v]:
self.edges[v][u].capacity += f
else:
self.add_edge(v, u, capacity=f)
flow_in -= f
flow += f
return flow
flow = 0
while True:
self.bfs(src)
if self.lv[sink] is None:
return flow
flow += flow_to_sink(src, inf)
def ford_fulkerson(self):
pass
def push_relabel(self):
pass
def floyd_warshall(self):
n = self.__N
d = [[inf] * n for _ in range(n)]
for u in range(n):
d[u][u] = 0
for v, e in self.edges[u].items():
d[u][v] = e.weight
for w in range(n):
for u in range(n):
for v in range(n):
d[u][v] = min(d[u][v], d[u][w] + d[w][v])
return d
def dijkstra(self, src, paths_cnt=False, mod=None):
dist = [inf] * self.__N
dist[src] = 0
visited = [False] * self.__N
paths = [0] * self.__N
paths[src] = 1
q = [(0, src)]
while q:
d, u = heappop(q)
if visited[u]:
continue
visited[u] = True
for v, e in self.edges[u].items():
dv = d + e.weight
if dv > dist[v]:
continue
elif dv == dist[v]:
paths[v] += paths[u]
if mod:
paths[v] %= mod
continue
paths[v], dist[v] = paths[u], dv
heappush(q, (dv, v))
if paths_cnt:
return dist, paths
else:
return dist
def astar(self, src, tgt, heuristic_func):
cost = [inf] * self.__N
q = [(heuristic_func(src, tgt), 0, src)]
while q:
_, c, u = heappop(q)
if u == tgt:
return c
if cost[u] != inf:
continue
cost[u] = c
for v, e in self.edges[u].items():
if cost[v] != inf:
continue
h = heuristic_func(v, tgt)
nc = c + e.weight
heappush(q, (h + nc, nc, v))
return inf
def bellman_ford(self, src):
n = self.__N
d = [inf] * n
d[src] = 0
for _ in range(n - 1):
for u in range(n):
for v, e in self.edges[u].items():
d[v] = min(d[v], d[u] + e.weight)
for u in range(n):
for v, e in self.edges[u].items():
if d[u] + e.weight < d[v]:
raise Exception("found negative cycle.")
return d
def bfs01(self, src=0):
d = [inf] * self.__N
d[src] = 0
q = deque([src])
while q:
u = q.popleft()
for v, e in self.edges[u].items():
dv = d[u] + e.weight
if d[v] <= dv:
continue
d[v] = dv
if e.weight:
q.append(v)
else:
q.appendleft(v)
return d
def find_ancestors(self): # tree doubling.
self.__ancestors = ancestors = [self.parent]
for _ in range(max(self.depth).bit_length()):
ancestors.append([ancestors[-1][u] for u in ancestors[-1]])
def find_dist(self, u, v):
return (
self.dist[u]
+ self.dist[v]
- 2 * self.dist[self.__find_lca(u, v)]
)
def __find_lca(self, u, v):
du, dv = self.depth[u], self.depth[v]
if du > dv:
u, v = v, u
du, dv = dv, du
d = dv - du
for i in range(d.bit_length()): # up-stream
if d >> i & 1:
v = self.__ancestors[i][v]
if v == u:
return v
for i in range(
du.bit_length() - 1, -1, -1
): # find direct child of LCA.
nu, nv = self.__ancestors[i][u], self.__ancestors[i][v]
if nu == nv:
continue
u, v = nu, nv
return self.__ancestors[0][u]
def init_dsu(self): # disjoint set union (union-find)
n = self.__N
self.parent = list(range(n))
self.rank = [0] * n
self.size = [1] * n
def find(self, u):
if self.parent[u] == u:
return u
self.parent[u] = self.find(self.parent[u])
return self.parent[u]
def unite(self, u, v):
u, v = self.find(u), self.find(v)
if u == v:
return
if self.rank[u] < self.rank[v]:
u, v = v, u
self.parent[v] = u
self.size[u] += self.size[v]
self.rank[u] = max(self.rank[u], self.rank[v] + 1)
def same(self, u, v):
return self.find(u) == self.find(v)
def scc(self): # strongly connected components
n = self.__N
visited, q, root, r = [False] * n, [], [None] * n, 0
gg = self.__class__(n)
for u in range(n):
for v in self.edges[u]:
gg.add_edge(v, u)
def dfs(u):
if visited[u]:
return
visited[u] = True
for v in self.edges[u]:
dfs(v)
q.append(u)
def rev_dfs(u, r):
if root[u] is not None:
return
root[u] = r
for v in gg.edges[u]:
rev_dfs(v, r)
for u in range(n):
dfs(u)
for u in q[::-1]:
rev_dfs(u, r)
r += 1
return root
def kruskal(self): # minimum spanning tree
n = self.__N
uf = self.__class__(n)
uf.init_dsu()
edges = sorted(
[
(u, v, e.weight)
for u in range(n)
for v, e in self.edges[u].items()
],
key=lambda x: x[2],
)
g = self.__class__(n)
d = 0
for u, v, w in edges:
if uf.same(u, v):
continue
uf.unite(u, v)
g.add_edge(u, v, weight=w)
d += w
return g, d
    def prim(self, src=0, return_parent=False):  # minimum spanning tree
        # Grow the tree from src, always popping the lightest edge that
        # reaches an unvisited vertex (lazy-deletion heap).  Returns
        # (mst_graph, total_weight) or, with return_parent=True,
        # (mst_graph, total_weight, parent) where parent[u] is the vertex
        # through which u joined the tree (parent[src] == src).
        n = self.__N
        g = self.__class__(n)
        parent, visited, dist = [None] * n, [False] * n, 0
        q = [(0, (src, src))]
        while q:
            d, (w, u) = heappop(q)
            if visited[u]:
                continue  # stale heap entry for an already-included vertex
            visited[u], parent[u] = True, w
            dist += d
            g.add_edge(w, u, weight=d)
            for v, e in self.edges[u].items():
                if not visited[v]:
                    heappush(q, (e.weight, (u, v)))
        if return_parent:
            return g, dist, parent
        return g, dist
    def boruvka(self):  # minimum spanning tree
        # Boruvka's algorithm: each round, find the cheapest edge leaving
        # every current component and add them all at once; the component
        # count at least halves per round.  Returns (mst_graph, total_weight).
        n = self.__N
        uf = self.__class__(n)
        uf.init_dsu()
        g = self.__class__(n)
        d = 0
        def dfs(u):
            # Cheapest edge leaving u's component: walk the component
            # (same-component edges recurse, cross edges are candidates).
            if visited[u]:
                return (inf, (None, None))
            visited[u] = True
            cand = []
            for v, e in self.edges[u].items():
                if uf.same(u, v):
                    cand.append(dfs(v))
                    continue
                cand.append((e.weight, (u, v)))
            return sorted(cand)[0]
        while len(set(uf.parent)) != 1:
            edges, visited = [], [False] * n
            for u in range(n):
                if visited[u]:
                    continue
                edges.append(dfs(u))
            for w, (u, v) in edges:
                if uf.same(u, v):
                    continue  # both endpoints were merged earlier this round
                g.add_edge(u, v, weight=w)
                uf.unite(u, v)
                d += w
            for u in range(n):
                uf.find(u)  # full compression so set(uf.parent) counts components
        return g, d
    def tsp(self):  # traveling salesperson problem
        # TODO: not implemented; placeholder kept for API completeness.
        pass
@staticmethod
def triangle_area(p0, p1, p2, signed=False):
x1, y1, x2, y2 = (
p1[0] - p0[0],
p1[1] - p0[1],
p2[0] - p0[0],
p2[1] - p0[1],
)
return (
(x1 * y2 - x2 * y1) / 2 if signed else abs(x1 * y2 - x2 * y1) / 2
)
@classmethod
def intersect(cls, seg1, seg2):
(p1, p2), (p3, p4) = seg1, seg2
t1 = cls.triangle_area(p1, p2, p3, signed=True)
t2 = cls.triangle_area(p1, p2, p4, signed=True)
t3 = cls.triangle_area(p3, p4, p1, signed=True)
t4 = cls.triangle_area(p3, p4, p2, signed=True)
return (t1 * t2 < 0) & (t3 * t4 < 0)
def cumxor(a):
    """XOR-fold an iterable of ints, starting from 0 (0 for empty input)."""
    acc = 0
    for x in a:
        acc ^= x
    return acc
def cumor(a):
    """Bitwise-OR-fold an iterable of ints, starting from 0."""
    acc = 0
    for x in a:
        acc |= x
    return acc
def bit_count(n):
    """Return the number of set bits (popcount) of non-negative integer n.

    Uses Kernighan's trick: ``n & (n - 1)`` clears the lowest set bit, so
    the loop runs once per set bit instead of once per bit position.
    """
    cnt = 0
    while n:
        n &= n - 1  # drop the lowest set bit
        cnt += 1
    return cnt
class AtCoder:
    class ABC001:
        """Solutions for AtCoder Beginner Contest 001."""
        @staticmethod
        def a():
            # A: drop in altitude between the two readings.
            h1, h2 = map(int, sys.stdin.read().split())
            print(h1 - h2)
        @staticmethod
        def d():
            # D: merge rainfall intervals given as "HHMM-HHMM", rounding
            # starts down and ends up to 5-minute marks, via a difference
            # array over minutes 0..2000 (24h = 1440 min, padded).
            def to_minuites(x):
                # "HHMM"-style integer -> minutes since 00:00 (name sic).
                q, r = divmod(x, 100)
                return 60 * q + r
            def to_hmform(x):
                # minutes since 00:00 -> "HHMM"-style integer.
                q, r = divmod(x, 60)
                return 100 * q + r
            n = int(sys.stdin.readline().rstrip())
            term = [0] * 2001
            for _ in range(n):
                s, e = map(
                    to_minuites,
                    map(int, sys.stdin.readline().rstrip().split("-")),
                )
                s = s // 5 * 5  # round start down to a multiple of 5
                e = (e + 4) // 5 * 5  # round end up to a multiple of 5
                term[s] += 1
                term[e + 1] -= 1
            # prefix-sum the difference array: term[i] > 0 <=> raining at minute i
            for i in range(2000):
                term[i + 1] += term[i]
            res = []
            raining = False
            for i in range(2001):
                if term[i]:
                    if not raining:
                        s = i
                        raining = True
                elif raining:
                    res.append((s, i - 1))
                    raining = False
            for s, e in res:
                print(f"{to_hmform(s):04}-{to_hmform(e):04}")
class ABC002:
@staticmethod
def a():
print(max(map(int, sys.stdin.readline().split())))
@staticmethod
def b():
vowels = set("aeiou")
print(
"".join(
[
c
for c in sys.stdin.readline().rstrip()
if c not in vowels
]
)
)
@staticmethod
def c():
print(
GeometryTopology.triangle_area(
*map(int, sys.stdin.readline().split())
)
)
@staticmethod
def d():
n, m = map(int, sys.stdin.readline().split())
edges = set(
(x - 1, y - 1)
for x, y in zip(*[map(int, sys.stdin.read().split())] * 2)
)
print(
max(
len(s)
for i in range(1, 1 << n)
for s in [[j for j in range(n) if i >> j & 1]]
if all(
(x, y) in edges
for x, y in itertools.combinations(s, 2)
)
)
)
@staticmethod
def d_2():
n, m = map(int, sys.stdin.readline().split())
relations = [1 << i for i in range(n)]
for x, y in zip(*[map(int, sys.stdin.read().split())] * 2):
relations[x] |= 1 << (y - 1)
relations[y] |= 1 << (x - 1)
res = 0
for i in range(1 << n):
s, cnt = (1 << n) - 1, 0
for j in range(n):
if i >> j & 1:
t &= relations[j] | 1 << j
cnt += 1
if s & i == i:
res = max(res, cnt)
print(res)
class ABC003:
@staticmethod
def a():
print((int(sys.stdin.readline().rstrip()) + 1) * 5000)
@staticmethod
def b():
atcoder = set("atcoder")
s, t = sys.stdin.read().split()
print(
all(
s[i] == t[i]
or s[i] == "@"
and t[i] in atcoder
or t[i] == "@"
and s[i] in atcoder
for i in range(len(s))
)
and "You can win"
or "You will lose"
)
@staticmethod
def c():
n, k, *r = map(int, sys.stdin.read().split())
print(reduce(lambda x, y: (x + y) / 2, sorted(r)[-k:], 0))
class ABC004:
@staticmethod
def a():
print(int(sys.stdin.readline().rstrip()) * 2)
@staticmethod
def b():
for l in [sys.stdin.readline().rstrip() for _ in range(4)][::-1]:
print(l[::-1])
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip()) % 30
res = list(range(1, 7))
for i in range(n):
i %= 5
res[i], res[i + 1] = res[i + 1], res[i]
print(*res, sep="")
class ABC005:
@staticmethod
def a():
x, y = map(int, sys.stdin.readline().split())
print(y // x)
@staticmethod
def b():
n, *t = map(int, sys.stdin.read().split())
print(min(t))
@staticmethod
def c():
t = int(sys.stdin.readline().rstrip())
n = int(sys.stdin.readline().rstrip())
a = [int(x) for x in sys.stdin.readline().split()]
m = int(sys.stdin.readline().rstrip())
b = [int(x) for x in sys.stdin.readline().split()]
i = 0
for p in b:
if i == n:
print("no")
return
while p - a[i] > t:
i += 1
if i == n:
print("no")
return
if a[i] > p:
print("no")
return
i += 1
print("yes")
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
d = np.array(
[sys.stdin.readline().split() for _ in range(n)], np.int64
)
s = d.cumsum(axis=0).cumsum(axis=1)
s = np.pad(s, 1)
max_del = np.zeros((n + 1, n + 1), dtype=np.int64)
for y in range(1, n + 1):
for x in range(1, n + 1):
max_del[y, x] = np.amax(
s[y : n + 1, x : n + 1]
- s[0 : n - y + 1, x : n + 1]
- s[y : n + 1, 0 : n - x + 1]
+ s[0 : n - y + 1, 0 : n - x + 1]
)
res = np.arange(n**2 + 1)[:, None]
i = np.arange(1, n + 1)
res = max_del[i, np.minimum(res // i, n)].max(axis=1)
q = int(sys.stdin.readline().rstrip())
p = np.array(sys.stdin.read().split(), dtype=np.int64)
print(*res[p], sep="\n")
class ABC006:
@staticmethod
def a():
n = sys.stdin.readline().rstrip()
if "3" in n:
print("YES")
elif int(n) % 3 == 0:
print("YES")
else:
print("NO")
@staticmethod
def b():
mod = 10007
a = np.eye(N=3, k=-1, dtype=np.int64)
a[0] = 1
n = int(sys.stdin.readline().rstrip())
a = Algebra.matrix_pow(a, n - 1, mod)
print(a[2][0])
@staticmethod
def c():
n, m = map(int, sys.stdin.readline().split())
cnt = [0, 0, 0]
if m == 1:
cnt = [-1, -1, -1]
else:
if m & 1:
m -= 3
cnt[1] += 1
n -= 1
cnt[2] = m // 2 - n
cnt[0] = n - cnt[2]
if cnt[0] < 0 or cnt[1] < 0 or cnt[2] < 0:
print(-1, -1, -1)
else:
print(*cnt, sep=" ")
@staticmethod
def d():
n, *c = map(int, sys.stdin.read().split())
lis = [inf] * n
for x in c:
lis[bi_l(lis, x)] = x
print(n - bi_l(lis, inf))
class ABC007:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
print(n - 1)
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
if s == "a":
print(-1)
else:
print("a")
@staticmethod
def c():
r, c = map(int, sys.stdin.readline().split())
sy, sx = map(int, sys.stdin.readline().split())
gy, gx = map(int, sys.stdin.readline().split())
sy -= 1
sx -= 1
gy -= 1
gx -= 1
maze = [sys.stdin.readline().rstrip() for _ in range(r)]
queue = deque([(sy, sx)])
dist = np.full((r, c), np.inf)
dist[sy, sx] = 0
while queue:
y, x = queue.popleft()
for i, j in [(-1, 0), (1, 0), (0, -1), (0, 1)]:
i += y
j += x
if maze[i][j] == "#" or dist[i, j] != np.inf:
continue
dist[i, j] = dist[y, x] + 1
queue.append((i, j))
print(int(dist[gy, gx]))
@staticmethod
def d():
ng = set([4, 9])
def count(d):
return d if d <= 4 else d - 1
def f(n):
x = [int(d) for d in str(n)]
flg = True
dp = 0
for d in x:
dp = dp * 8 + flg * count(d)
if d in ng:
flg = False
return n - (dp + flg)
a, b = map(int, sys.stdin.readline().split())
print(f(b) - f(a - 1))
class ABC008:
@staticmethod
def a():
s, t = map(int, sys.stdin.readline().split())
print(t - s + 1)
@staticmethod
def b():
n, *s = sys.stdin.read().split()
res = defaultdict(int)
for name in s:
res[name] += 1
print(sorted(res.items(), key=lambda x: x[1])[-1][0])
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
c = n - np.count_nonzero(a[:, None] % a, axis=1)
print(np.sum((c + 1) // 2 / c))
@staticmethod
def d():
w, h, n, *xy = map(int, sys.stdin.read().split())
(*xy,) = zip(*([iter(xy)] * 2))
@lru_cache(maxsize=None)
def count(x1, y1, x2, y2):
res = 0
for x, y in xy:
if not (x1 <= x <= x2 and y1 <= y <= y2):
continue
cnt = (x2 - x1) + (y2 - y1) + 1
cnt += count(x1, y1, x - 1, y - 1)
cnt += count(x1, y + 1, x - 1, y2)
cnt += count(x + 1, y1, x2, y - 1)
cnt += count(x + 1, y + 1, x2, y2)
res = max(res, cnt)
return res
print(count(1, 1, w, h))
class ABC009:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
print((n + 1) // 2)
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
print(sorted(set(a))[-2])
@staticmethod
def c():
n, k = map(int, sys.stdin.readline().split())
s = list(sys.stdin.readline().rstrip())
cost = [1] * n
r = k
for i in range(n - 1):
q = []
for j in range(i + 1, n):
if s[j] < s[i] and cost[i] + cost[j] <= r:
heappush(q, (s[j], cost[i] + cost[j], -j))
if not q:
continue
_, c, j = heappop(q)
j = -j
s[i], s[j] = s[j], s[i]
r -= c
cost[i] = cost[j] = 0
print("".join(s))
@staticmethod
def d():
k, m = map(int, sys.stdin.readline().split())
a = np.array([int(x) for x in sys.stdin.readline().split()])
c = np.array([int(x) for x in sys.stdin.readline().split()])
mask = (1 << 32) - 1
d = np.eye(k, k, -1, dtype=np.uint32) * mask
d[0] = c
if m <= k:
print(a[m - 1])
return
# print(Algebra.bitwise_mat_pow(d, m-k))
# print(Algebra.bitwise_dot(Algebra.bitwise_mat_pow(d, m-k), a[::-1].reshape(-1, 1))[0].item())
print(
Algebra.bitwise_dot(
Algebra.bitwise_mat_pow(d, m - k), a[::-1].reshape(-1, 1)
)[0][0]
)
class ABC010:
@staticmethod
def a():
print(sys.stdin.readline().rstrip() + "pp")
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
tot = 0
for x in a:
c = 0
while x % 2 == 0 or x % 3 == 2:
x -= 1
c += 1
tot += c
print(tot)
@staticmethod
def c():
sx, sy, gx, gy, t, v, n, *xy = map(int, sys.stdin.read().split())
x, y = np.array(xy).reshape(-1, 2).T
def dist(x1, y1, x2, y2):
return np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2)
ans = (
"YES"
if (dist(sx, sy, x, y) + dist(x, y, gx, gy) <= v * t).any()
else "NO"
)
print(ans)
@staticmethod
def d():
n, g, e = map(int, sys.stdin.readline().split())
p = [int(x) for x in sys.stdin.readline().split()]
x, y = [], []
for _ in range(e):
a, b = map(int, sys.stdin.readline().split())
x.append(a)
y.append(b)
x.append(b)
y.append(a)
for a in p:
x.append(a)
y.append(n)
if not x:
print(0)
return
c = [1] * len(x)
min_cut = maximum_flow(
csr_matrix((c, (x, y)), (n + 1, n + 1)), source=0, sink=n
).flow_value
print(min_cut)
@staticmethod
def d_2():
n, g, e = map(int, sys.stdin.readline().split())
graph = nx.DiGraph()
graph.add_nodes_from(range(n + 1))
for p in [int(x) for x in sys.stdin.readline().split()]:
graph.add_edge(p, n, capacity=1)
for _ in range(e):
a, b = map(int, sys.stdin.readline().split())
graph.add_edge(a, b, capacity=1)
graph.add_edge(b, a, capacity=1)
print(nx.minimum_cut_value(graph, 0, n))
@staticmethod
def d_3():
n, q, m = map(int, sys.stdin.readline().split())
g = GeometryTopology.Graph(n + 1)
# for i in range(n+1): g.add_node(i)
for p in [int(x) for x in sys.stdin.readline().split()]:
g.add_edge(p, n, capacity=1)
for a, b in zip(*[map(int, sys.stdin.read().split())] * 2):
g.add_edge(a, b, capacity=1)
g.add_edge(b, a, capacity=1)
print(g.dinic(0, n))
class ABC011:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
print(n % 12 + 1)
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
print(s[0].upper() + s[1:].lower())
@staticmethod
def c():
n, *ng = map(int, sys.stdin.read().split())
ng = set(ng)
if n in ng:
print("NO")
else:
r = 100
while n > 0:
if r == 0:
print("NO")
return
for i in range(3, 0, -1):
if (n - i) in ng:
continue
n -= i
r -= 1
break
else:
print("NO")
return
print("YES")
@staticmethod
def d():
n, d, x, y = map(int, sys.stdin.read().split())
x, y = abs(x), abs(y)
if x % d or y % d:
print(0)
return
x, y = x // d, y // d
r = n - (x + y)
if r < 0 or r & 1:
print(0)
return
res = 0
half_p = pow(1 / 2, n)
for d in range(r // 2 + 1): # 0 <= d <= r//2, south
south, north = d, y + d
west = (r - 2 * d) // 2
res += (
half_p
* comb(n, south, exact=True)
* comb(n - south, north, exact=True)
* comb(n - south - north, west, exact=True)
* half_p
)
print(res)
class ABC012:
@staticmethod
def a():
a, b = map(int, sys.stdin.readline().split())
print(b, a)
@staticmethod
def b():
n = int(sys.stdin.readline().rstrip())
h, n = divmod(n, 3600)
m, s = divmod(n, 60)
print(f"{h:02}:{m:02}:{s:02}")
@staticmethod
def c():
n = 2025 - int(sys.stdin.readline().rstrip())
res = []
for i in range(1, 10):
if n % i != 0 or n // i > 9:
continue
res.append(f"{i} x {n//i}")
print(*sorted(res), sep="\n")
@staticmethod
def d():
n, m, *abt = map(int, sys.stdin.read().split())
a, b, t = np.array(abt).reshape(m, 3).T
res = shortest_path(
csr_matrix((t, (a - 1, b - 1)), (n, n)),
method="FW",
directed=False,
)
print(res.max(axis=-1).min().astype(np.int64))
@staticmethod
def d_2():
n, m, *abt = map(int, sys.stdin.read().split())
g = GeometryTopology.Graph(n)
for a, b, t in zip(*[iter(abt)] * 3):
a -= 1
b -= 1
g.add_edge(a, b, weight=t)
g.add_edge(b, a, weight=t)
print(min(max(d) for d in g.floyd_warshall()))
class ABC013:
@staticmethod
def a():
print(ord(sys.stdin.readline().rstrip()) - ord("A") + 1)
@staticmethod
def b():
a, b = map(int, sys.stdin.read().split())
d = abs(a - b)
print(min(d, 10 - d))
@staticmethod
def c():
n, h, a, b, c, d, e = map(int, sys.stdin.read().split())
y = np.arange(n + 1)
x = (n * e - h - (d + e) * y) // (b + e) + 1
np.maximum(x, 0, out=x)
np.minimum(x, n - y, out=x)
print(np.amin(a * x + c * y))
@staticmethod
def d():
n, m, d, *a = map(int, sys.stdin.read().split())
res = list(range(n))
def swap(i, j):
res[i], res[j] = res[j], res[i]
for i in a[::-1]:
swap(i - 1, i)
res = np.array(res)
def binary_method(a, p):
b = np.arange(n)
while p:
if p & 1:
b = a[b]
p >>= 1
a = a[a]
return b
print(*(binary_method(res, d) + 1), sep="\n")
class ABC014:
@staticmethod
def a():
a, b = map(int, sys.stdin.read().split())
print((a + b - 1) // b * b - a)
@staticmethod
def b():
n, x, *a = map(int, sys.stdin.read().split())
print(sum(a[i] for i in range(n) if x >> i & 1))
@staticmethod
def c():
n, *ab = map(int, sys.stdin.read().split())
a, b = np.array(ab).reshape(n, 2).T
res = np.zeros(10**6 + 2, dtype=np.int64)
np.add.at(res, a, 1)
np.subtract.at(res, b + 1, 1)
np.cumsum(res, out=res)
print(res.max())
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
g = GeometryTopology.Graph(n)
for _ in range(n - 1):
x, y = map(int, sys.stdin.readline().split())
x -= 1
y -= 1
g.add_edge(x, y, weight=1)
g.add_edge(y, x, weight=1)
g.bfs(0)
g.find_ancestors()
q, *ab = map(int, sys.stdin.read().split())
for a, b in zip(*[iter(ab)] * 2):
a -= 1
b -= 1
print(g.find_dist(a, b) + 1)
class ABC015:
@staticmethod
def a():
a, b = sys.stdin.read().split()
print(a if len(a) > len(b) else b)
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
print(
np.ceil(
a[np.nonzero(a)[0]].sum() / np.count_nonzero(a)
).astype(np.int8)
)
@staticmethod
def c():
n, k, *t = map(int, sys.stdin.read().split())
t = np.array(t).reshape(n, k)
x = np.zeros((1, 1), dtype=np.int8)
for i in range(n):
x = x.reshape(-1, 1) ^ t[i]
print("Found" if np.count_nonzero(x == 0) > 0 else "Nothing")
@staticmethod
def d():
w, n, k, *ab = map(int, sys.stdin.read().split())
dp = np.zeros((k + 1, w + 1), dtype=np.int32)
for a, b in zip(*[iter(ab)] * 2):
np.maximum(dp[1:, a:], dp[:-1, :-a] + b, out=dp[1:, a:])
print(dp[k][w])
class ABC016:
@staticmethod
def a():
m, d = map(int, sys.stdin.readline().split())
print("YES" if m % d == 0 else "NO")
@staticmethod
def b():
a, b, c = map(int, sys.stdin.readline().split())
f1, f2 = a + b == c, a - b == c
if f1 & f2:
print("?")
elif f1 & (~f2):
print("+")
elif (~f1) & f2:
print("-")
else:
print("!")
@staticmethod
def c():
n, _, *ab = map(int, sys.stdin.read().split())
f = [0] * n
for a, b in zip(*[iter(ab)] * 2):
a -= 1
b -= 1
f[a] |= 1 << b
f[b] |= 1 << a
res = [
bit_count(
cumor(f[j] for j in range(n) if f[i] >> j & 1)
& ~(f[i] | 1 << i)
)
for i in range(n)
]
print(*res, sep="\n")
@staticmethod
def d():
sx, sy, gx, gy = map(int, sys.stdin.readline().split())
seg1 = ((sx, sy), (gx, gy))
n = int(sys.stdin.readline().rstrip())
p1 = (
np.array(sys.stdin.read().split(), dtype=np.int64)
.reshape(n, 2)
.T
)
p2 = np.hstack((p1[:, 1:], p1[:, :1]))
seg2 = (p1, p2)
print(
np.count_nonzero(GeometryTopology.intersect(seg1, seg2)) // 2
+ 1
)
class ABC017:
@staticmethod
def a():
s, e = (
np.array(sys.stdin.read().split(), dtype=np.int16)
.reshape(3, 2)
.T
)
print((s // 10 * e).sum())
@staticmethod
def b():
choku_tail = set("ch, o, k, u".split(", "))
def is_choku(s):
if s == "":
return True
if len(s) >= 1 and (s[-1] in choku_tail) and is_choku(s[:-1]):
return True
if len(s) >= 2 and (s[-2:] in choku_tail) and is_choku(s[:-2]):
return True
return False
print("YES" if is_choku(sys.stdin.readline().rstrip()) else "NO")
@staticmethod
def c():
n, m, *lrs = map(int, sys.stdin.read().split())
l, r, s = np.array(lrs).reshape(n, 3).T
score = np.zeros((m + 1,), dtype=np.int32)
np.add.at(score, l - 1, s)
np.subtract.at(score, r, s)
np.cumsum(score, out=score)
print(s.sum() - score[:m].min())
@staticmethod
def d():
n, m, *f = map(int, sys.stdin.read().split())
prev = [0] * (n + 1)
tmp = defaultdict(int)
for i in range(n):
prev[i + 1] = tmp[f[i]]
tmp[f[i]] = i + 1
dp = [0] * (n + 1)
dp[0] = 1
l, s = 0, dp[0]
for i in range(1, n + 1):
while l < prev[i]:
s = (s - dp[l]) % MOD
l += 1
dp[i] = s
s = (s + dp[i]) % MOD
print(dp[n])
class ABC018:
@staticmethod
def a():
(*a,) = map(int, sys.stdin.read().split())
a = sorted(enumerate(a), key=lambda x: -x[1])
res = [None] * 3
for i in range(3):
res[a[i][0]] = i + 1
print(*res, sep="\n")
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
n, *lr = map(int, sys.stdin.read().split())
for l, r in zip(*[iter(lr)] * 2):
l -= 1
r -= 1
s = s[:l] + s[l : r + 1][::-1] + s[r + 1 :]
print(s)
@staticmethod
def c():
r, c, k = map(int, sys.stdin.readline().split())
s = np.array([list(s) for s in sys.stdin.read().split()])
s = np.pad(s, 1, constant_values="x")
a = np.zeros_like(s, dtype=np.float64)
a[s == "o"] = np.inf
for i in range(1, r + 1):
np.minimum(a[i - 1, :] + 1, a[i, :], out=a[i, :])
for i in range(r, 0, -1):
np.minimum(a[i + 1, :] + 1, a[i, :], out=a[i, :])
for j in range(1, c + 1):
np.minimum(a[:, j - 1] + 1, a[:, j], out=a[:, j])
for j in range(c, 0, -1):
np.minimum(a[:, j + 1] + 1, a[:, j], out=a[:, j])
print(np.count_nonzero(a >= k))
@staticmethod
def c_2():
r, c, k = map(int, sys.stdin.readline().split())
s = np.array([list(s) for s in sys.stdin.read().split()])
s = np.pad(s, 1, constant_values="x")
a = (s == "o").astype(np.int16)
a = distance_transform_cdt(a, metric="taxicab")
print(np.count_nonzero(a >= k))
@staticmethod
def d():
n, m, p, q, r, *xyz = map(int, sys.stdin.read().split())
x, y, z = np.array(xyz).reshape(r, 3).T
h = np.zeros((n, m), dtype=np.int32)
h[x - 1, y - 1] = z
g = np.array([*itertools.combinations(range(n), p)])
print(np.sort(h[g].sum(axis=1), axis=1)[:, -q:].sum(axis=1).max())
class ABC019:
@staticmethod
def a():
(*a,) = map(int, sys.stdin.readline().split())
print(sorted(a)[1])
@staticmethod
def b():
s = sys.stdin.readline().rstrip() + "$"
cnt = 0
prev = "$"
t = ""
for c in s:
if c == prev:
cnt += 1
continue
t += prev + str(cnt)
prev = c
cnt = 1
print(t[2:])
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
res = set()
for x in a:
while not x & 1:
x >>= 1
res.add(x)
print(len(res))
@staticmethod
def d():
def inquire(u, v):
print(f"? {u} {v}".format(u, v), flush=True)
return int(sys.stdin.readline().rstrip())
n = int(sys.stdin.readline().rstrip())
u = sorted([(inquire(1, v), v) for v in range(2, n + 1)])[-1][1]
d = max((inquire(u, v)) for v in range(1, n + 1) if u != v)
print(f"! {d}")
class ABC020:
@staticmethod
def a():
print(
"ABC"
if int(sys.stdin.readline().rstrip()) == 1
else "chokudai"
)
@staticmethod
def b():
a, b = sys.stdin.readline().split()
print(int(a + b) * 2)
@staticmethod
def c():
h, w, t = map(int, sys.stdin.readline().split())
s = [list(s) for s in sys.stdin.read().split()]
for i in range(h):
for j in range(w):
if s[i][j] == "S":
sy, sx = i, j
if s[i][j] == "G":
gy, gx = i, j
s[sy][sx] = s[gy][gx] = "."
source, target = sy * w + sx, gy * w + gx
def heuristic_function(u, v=target):
uy, ux = divmod(u, w)
vy, vx = divmod(v, w)
return abs(vy - uy) + abs(ux - vx)
def min_time(x):
g = GeometryTopology.Graph(h * w)
# g = nx.DiGraph()
for i in range(h):
for j in range(w):
u = i * w + j
if i > 0:
g.add_edge(
u,
(i - 1) * w + j,
weight=(1 if s[i - 1][j] == "." else x),
)
if i < h - 1:
g.add_edge(
u,
(i + 1) * w + j,
weight=(1 if s[i + 1][j] == "." else x),
)
if j > 0:
g.add_edge(
u,
i * w + j - 1,
weight=(1 if s[i][j - 1] == "." else x),
)
if j < w - 1:
g.add_edge(
u,
i * w + j + 1,
weight=(1 if s[i][j + 1] == "." else x),
)
return g.dijkstra(source)[target]
return g.astar(source, target, heuristic_function)
# return nx.dijkstra_path_length(g, source, target)
# return nx.astar_path_length(g, source, target, heuristic_function)
def binary_search():
lo, hi = 1, t + 1
while lo + 1 < hi:
x = (lo + hi) // 2
if min_time(x) > t:
hi = x
else:
lo = x
return lo
print(binary_search())
@staticmethod
def d():
n, k = map(int, sys.stdin.readline().split())
div = sorted(NumberTheory.find_divisors(k))
l = len(div)
s = [0] * l
for i, d in enumerate(div):
s[i] = (1 + n // d) * (n // d) // 2 * d % MOD
for i in range(l - 1, -1, -1):
for j in range(i + 1, l):
if div[j] % div[i]:
continue
s[i] = (s[i] - s[j]) % MOD
print(
sum(s[i] * k // div[i] % MOD for i in range(l)) % MOD
) # ans is LCM.
class ABC021:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
s = [1 << i for i in range(5) if n >> i & 1]
print(len(s), *s, sep="\n")
@staticmethod
def b():
n, a, b, k, *p = map(int, sys.stdin.read().split())
print("YES" if len(set(p) | set([a, b])) == k + 2 else "NO")
@staticmethod
def c():
n, a, b, m, *xy = map(int, sys.stdin.read().split())
x, y = np.array(xy).reshape(m, 2).T - 1
a -= 1
b -= 1
g = csgraph_to_dense(
csr_matrix((np.ones(m), (x, y)), (n, n), dtype=np.int8)
)
g = np.logical_or(g, g.T)
paths = np.zeros(n, dtype=np.int64).reshape(-1, 1)
paths[a, 0] = 1
while not paths[b, 0]:
paths = np.dot(g, paths) % MOD
print(paths[b, 0])
@staticmethod
def c_2():
n, a, b, m, *xy = map(int, sys.stdin.read().split())
a -= 1
b -= 1
g = GeometryTopology.Graph()
for x, y in zip(*[iter(xy)] * 2):
x -= 1
y -= 1
g.add_edge(x, y, weight=1)
g.add_edge(y, x, weight=1)
dist, paths = g.dijkstra(a, paths_cnt=True, mod=MOD)
print(paths[b])
@staticmethod
def d():
n, k = map(int, sys.stdin.read().split())
cn = Combinatorics.CombinationsMod()
print(cn(n + k - 1, k))
class ABC022:
@staticmethod
def a():
n, s, t, *a = map(int, sys.stdin.read().split())
a = np.array(a)
np.cumsum(a, out=a)
print(((s <= a) & (a <= t)).sum())
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
c = Counter(a)
print(sum(c.values()) - len(c))
@staticmethod
def c():
n, m, *uvl = map(int, sys.stdin.read().split())
u, v, l = np.array(uvl).reshape(m, 3).T
u -= 1
v -= 1
g = csgraph_to_dense(csr_matrix((l, (u, v)), (n, n)))
g += g.T
g[g == 0] = np.inf
dist0 = g[0].copy()
g[0] = 0
g[:, 0] = 0
dist = shortest_path(g, method="FW", directed=False)
u, v = np.array([*itertools.combinations(range(1, n), 2)]).T
res = (dist0[u] + dist[u, v] + dist0[v]).min()
print(-1 if res == np.inf else int(res))
@staticmethod
def d():
n, *ab = map(int, sys.stdin.read().split())
c = np.array(ab).reshape(2, n, 2)
g = c.mean(axis=1)
d = np.sqrt(((c - g[:, None, :]) ** 2).sum(axis=-1)).sum(axis=1)
print(d[1] / d[0])
class ABC023:
@staticmethod
def a():
print(sum(divmod(int(sys.stdin.readline().rstrip()), 10)))
@staticmethod
def b():
n, s = sys.stdin.read().split()
n = int(n)
t = "b"
for i in range(n // 2):
if i % 3 == 0:
t = "a" + t + "c"
elif i % 3 == 1:
t = "c" + t + "a"
else:
t = "b" + t + "b"
print(n // 2 if t == s else -1)
@staticmethod
def b_2():
n, s = sys.stdin.read().split()
n = int(n)
if n & 1 ^ 1:
print(-1)
return
a = list("abc")
i = (1 - n // 2) % 3
for c in s:
if c != a[i]:
print(-1)
return
i = (i + 1) % 3
print(n // 2)
@staticmethod
def c():
h, w, k, n, *rc = map(int, sys.stdin.read().split())
r, c = np.array(rc).reshape(n, 2).T - 1
rb = np.bincount(r, minlength=h)
cb = np.bincount(c, minlength=w)
rbb = np.bincount(rb, minlength=k + 1)
cbb = np.bincount(cb, minlength=k + 1)
tot = (rbb[: k + 1] * cbb[k::-1]).sum()
real = np.bincount(rb[r] + cb[c] - 1, minlength=k + 1)
print(tot - real[k - 1] + real[k])
@staticmethod
def d():
n, *hs = map(int, sys.stdin.read().split())
h, s = np.array(hs).reshape(n, 2).T
t = np.arange(n)
def is_ok(x):
return np.all(np.sort((x - h) // s) >= t)
def binary_search():
lo, hi = 0, 10**14
while lo + 1 < hi:
x = (lo + hi) // 2
if is_ok(x):
hi = x
else:
lo = x
return hi
print(binary_search())
class ABC024:
@staticmethod
def a():
a, b, c, k, s, t = map(int, sys.stdin.read().split())
print(a * s + b * t - c * (s + t) * (s + t >= k))
@staticmethod
def b():
n, t, *a = map(int, sys.stdin.read().split())
a = np.array(a)
print(np.minimum(a[1:] - a[:-1], t).sum() + t)
@staticmethod
def c():
n, d, k, *lrst = map(int, sys.stdin.read().split())
lrst = np.array(lrst)
lr = lrst[: 2 * d].reshape(d, 2)
s, t = lrst[2 * d :].reshape(k, 2).T
day = np.zeros((k,), dtype=np.int32)
for i in range(d):
l, r = lr[i]
move = (l <= s) & (s <= r) & (s != t)
reach = move & (l <= t) & (t <= r)
s[move & (s < t)] = r
s[move & (s > t)] = l
s[reach] = t[reach]
day[reach] = i + 1
print(*day, sep="\n")
@staticmethod
def d():
a, b, c = map(int, sys.stdin.read().split())
p = MOD
denom = pow(a * b % p - b * c % p + c * a % p, p - 2, p)
w = (b * c - a * b) % p * denom % p
h = (b * c - a * c) % p * denom % p
print(h, w)
class ABC025:
@staticmethod
def a():
s, n = sys.stdin.read().split()
n = int(n)
i, j = divmod(n - 1, 5)
print(s[i] + s[j])
@staticmethod
def b():
n, a, b = map(int, sys.stdin.readline().split())
res = defaultdict(int)
for _ in range(n):
s, d = sys.stdin.readline().split()
d = int(d)
res[s] += min(max(d, a), b)
res = res["East"] - res["West"]
if res == 0:
ans = 0
elif res > 0:
ans = f"East {res}"
else:
ans = f"West {-res}"
print(ans)
@staticmethod
def c():
b = [0] * 6
for i in range(2):
(*row,) = map(int, sys.stdin.readline().split())
for j in range(3):
b[i * 3 + j] = row[j]
c = [0] * 8
for i in range(3):
(*row,) = map(int, sys.stdin.readline().split())
for j in range(2):
c[i * 3 + j] = row[j]
tot = sum(b) + sum(c)
@lru_cache(maxsize=None)
def f(s=tuple(0 for _ in range(9))):
if all(s):
res = 0
for i in range(6):
res += (s[i] == s[i + 3]) * b[i]
for i in range(8):
res += (s[i] == s[i + 1]) * c[i]
return res
cand = [i for i in range(9) if not s[i]]
flg = len(cand) & 1
s = list(s)
res = []
for i in cand:
s[i] = (flg ^ 1) + 1
res.append(f(tuple(s)))
s[i] = 0
return sorted(res, reverse=flg)[0]
a = f()
b = tot - a
print(a)
print(b)
class ABC026:
@staticmethod
def a():
a = int(sys.stdin.readline().rstrip())
print(a // 2 * (a - a // 2))
@staticmethod
def b():
n, *r = map(int, sys.stdin.read().split())
s = np.pi * np.array([0] + r) ** 2
s.sort()
res = s[n::-2].sum() - s[n - 1 :: -2].sum()
print(res)
@staticmethod
def c():
n, *b = map(int, sys.stdin.read().split())
g = GeometryTopology.Graph()
for i in range(1, n):
g.add_edge(b[i - 1] - 1, i, weight=1)
def f(u=0):
if not g.edges[u]:
return 1
s = [f(v) for v in g.edges[u]]
return max(s) + min(s) + 1
print(f())
@staticmethod
def d():
a, b, c = map(int, sys.stdin.readline().split())
def f(t):
return a * t + b * np.sin(c * t * np.pi) - 100
print(optimize.brenth(f, 0, 200))
class ABC027:
@staticmethod
def a():
l = [int(l) for l in sys.stdin.readline().split()]
l.sort()
print(l[2] if l[0] == l[1] else l[0])
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
m, r = divmod(sum(a), n)
if r:
print(-1)
return
population = 0
towns = 0
cnt = 0
for x in a:
population += x
towns += 1
if population / towns != m:
cnt += 1
continue
population, towns = 0, 0
print(cnt)
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
flg = n.bit_length() & 1 ^ 1
t = 0
x = 1
while x <= n:
t += 1
x = 2 * x + 1 if t & 1 ^ flg else 2 * x
print("Aoki" if t & 1 else "Takahashi")
class ABC028:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
print(
"Bad"
if n < 60
else "Good"
if n < 90
else "Great"
if n < 100
else "Perfect"
)
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
cnt = Counter(s)
print(*[cnt.get(c, 0) for c in "ABCDEF"])
@staticmethod
def c():
a, b, c, d, e = map(int, sys.stdin.readline().split())
print(max(b + c + e, a + d + e))
@staticmethod
def d():
n, k = map(int, sys.stdin.readline().split())
c = 3 * 2 * (n - k) * (k - 1) + 3 * (n - 1) + 1
print(c / n**3)
class ABC029:
@staticmethod
def a():
print(sys.stdin.readline().rstrip() + "s")
@staticmethod
def b():
print(sum("r" in s for s in sys.stdin.read().split()))
@staticmethod
def c():
print(
*[
"".join(s)
for s in itertools.product(
"abc", repeat=int(sys.stdin.readline().rstrip())
)
],
sep="\n",
)
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
print(
sum(
n // 10 ** (i + 1) * 10**i
+ min(max((n % 10 ** (i + 1) - 10**i + 1), 0), 10**i)
for i in range(9)
)
)
class ABC030:
@staticmethod
def a():
a, b, c, d = map(int, sys.stdin.readline().split())
e, f = b * c, d * a
print("TAKAHASHI" if e > f else "AOKI" if f > e else "DRAW")
@staticmethod
def b():
n, m = map(int, sys.stdin.readline().split())
n = (n % 12 + m / 60) * 30
m *= 6
d = abs(n - m)
print(min(d, 360 - d))
@staticmethod
def c():
n, m = map(int, sys.stdin.readline().split())
x, y = map(int, sys.stdin.readline().split())
a = [int(x) for x in sys.stdin.readline().split()]
b = [int(x) for x in sys.stdin.readline().split()]
t = 0
p = 1
cnt = 0
while True:
if p:
i = bi_l(a, t)
if i == n:
break
t = a[i] + x
else:
i = bi_l(b, t)
if i == m:
break
t = b[i] + y
cnt += 1
p ^= 1
print(cnt)
@staticmethod
def d():
n, a = map(int, sys.stdin.readline().split())
a -= 1
k = sys.stdin.readline().rstrip()
b = [int(x) - 1 for x in sys.stdin.readline().split()]
c = [None] * n
for i in range(n + 1):
if str(i) == k:
print(a + 1)
return
if c[a] is not None:
l, d = i - c[a], c[a]
break
c[a] = i
a = b[a]
r = [None] * len(k)
r[0] = 1
for i in range(len(k) - 1):
r[i + 1] = r[i] * 10 % l
k = [int(c) for c in k][::-1]
d = (sum(r[i] * k[i] for i in range(len(k))) - d) % l
for _ in range(d):
a = b[a]
print(a + 1)
@staticmethod
def d_2():
n, a, k, *b = map(int, sys.stdin.read().split())
a -= 1
b = [x - 1 for x in b]
c = [None] * n
for i in range(n + 1):
if i == k:
print(a + 1)
return
if c[a] is not None:
for _ in range((k - c[a]) % (i - c[a])):
a = b[a]
print(a + 1)
return
c[a] = i
a = b[a]
class ABC031:
@staticmethod
def a():
a, d = map(int, sys.stdin.readline().split())
if a > d:
a, d = d, a
print((a + 1) * d)
@staticmethod
def b():
l, h, n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
res = np.maximum(l - a, 0)
res[a > h] = -1
print(*res, sep="\n")
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
np.cumsum(a[::2], out=a[::2])
np.cumsum(a[1::2], out=a[1::2])
a = list(a) + [0] * 2
def score(i, j):
if i > j:
i, j = j, i
if (j - i) & 1:
x, y = a[j - 1] - a[i - 2], a[j] - a[i - 1]
else:
x, y = a[j] - a[i - 2], a[j - 1] - a[i - 1]
return x, y
res = -inf
for i in range(n):
s = -inf
for j in range(n):
if i == j:
continue
x, y = score(i, j)
if y > s:
s, t = y, x
res = max(res, t)
print(res)
@staticmethod
def d():
k, m = map(int, sys.stdin.readline().split())
(*vw,) = zip(*[iter(sys.stdin.read().split())] * 2)
for l in itertools.product((1, 2, 3), repeat=k):
s = dict()
for v, w in vw:
i = 0
for d in v:
d = int(d) - 1
j = i + l[d]
if j > len(w):
break
t = w[i:j]
if d in s and s[d] != t:
break
s[d] = t
i = j
else:
if i == len(w):
continue
break
else:
for i in range(k):
print(s[i])
return
class ABC032:
    """Solutions for AtCoder Beginner Contest 032 (stdin -> stdout)."""

    @staticmethod
    def a():
        """Smallest multiple of lcm(a, b) that is at least n."""
        a, b, n = map(int, sys.stdin.read().split())
        step = NumberTheory.lcm(a, b)
        print(-(-n // step) * step)
    @staticmethod
    def b():
        """Count distinct substrings of length k."""
        s, k = sys.stdin.read().split()
        k = int(k)
        print(len({s[i : i + k] for i in range(len(s) - k + 1)}))
    @staticmethod
    def c():
        """Longest contiguous window whose product stays <= k (two pointers)."""
        n, k, *s = map(int, sys.stdin.read().split())
        if 0 in s:
            # A zero makes every window's product 0 <= k.
            print(n)
            return
        if k == 0:
            # All values are positive here, so no window qualifies.
            print(0)
            return
        best = 0
        prod = 1
        left = 0
        for right, value in enumerate(s):
            prod *= value
            while prod > k:
                prod //= s[left]
                left += 1
            best = max(best, right - left + 1)
        print(best)
class ABC033:
    """Solutions for AtCoder Beginner Contest 033 (stdin -> stdout)."""

    @staticmethod
    def a():
        """Print SAME when every character of the ID is identical."""
        digits = sys.stdin.readline().rstrip()
        print("SAME" if len(set(digits)) == 1 else "DIFFERENT")
    @staticmethod
    def b():
        """Print the town with a strict population majority, else 'atcoder'."""
        n = int(sys.stdin.readline().rstrip())
        population = {}
        for _ in range(n):
            name, p = sys.stdin.readline().split()
            population[name] = int(p)
        total = sum(population.values())
        winner = next(
            (name for name, p in population.items() if 2 * p > total),
            "atcoder",
        )
        print(winner)
    @staticmethod
    def c():
        """Count '+'-separated terms that contain no zero factor."""
        s = sys.stdin.readline().rstrip()
        print(sum("0" not in term for term in s.split("+")))
class ABC034:
@staticmethod
def a():
x, y = map(int, sys.stdin.readline().split())
print("Better" if y > x else "Worse")
@staticmethod
def b():
n = int(sys.stdin.readline().rstrip())
print(n + 1 if n & 1 else n - 1)
@staticmethod
def c():
h, w = map(int, sys.stdin.read().split())
choose = Combinatorics.CombinationsMod()
print(choose(h + w - 2, h - 1))
@staticmethod
def d():
n, k, *wp = map(int, sys.stdin.read().split())
w, p = np.array(wp).reshape(-1, 2).T
def f(x):
return np.sort(w * (p - x))[-k:].sum()
print(optimize.bisect(f, 0, 100))
class ABC035:
@staticmethod
def a():
w, h = map(int, sys.stdin.readline().split())
print("4:3" if 4 * h == 3 * w else "16:9")
@staticmethod
def b():
s, t = sys.stdin.read().split()
y = x = z = 0
for c in s:
if c == "?":
z += 1
elif c == "L":
x -= 1
elif c == "R":
x += 1
elif c == "D":
y -= 1
elif c == "U":
y += 1
d = abs(y) + abs(x)
print(d + z if t == "1" else max(d - z, (d - z) & 1))
@staticmethod
def c():
n, q, *lr = map(int, sys.stdin.read().split())
l, r = np.array(lr).reshape(q, 2).T
res = np.zeros(n + 1, dtype=int)
np.add.at(res, l - 1, 1)
np.subtract.at(res, r, 1)
np.cumsum(res, out=res)
res = res & 1
print("".join(map(str, res[:-1])))
@staticmethod
def d():
n, m, t = map(int, sys.stdin.readline().split())
point = np.array(sys.stdin.readline().split(), dtype=int)
a, b, c = (
np.array(sys.stdin.read().split(), dtype=np.int64)
.reshape(m, 3)
.T
)
a -= 1
b -= 1
d_1 = shortest_path(
csr_matrix((c, (a, b)), (n, n)),
method="D",
directed=True,
indices=0,
)
d_2 = shortest_path(
csr_matrix((c, (b, a)), (n, n)),
method="D",
directed=True,
indices=0,
)
print(int(np.amax((t - (d_1 + d_2)) * point)))
class ABC036:
@staticmethod
def a():
a, b = map(int, sys.stdin.readline().split())
print((b + a - 1) // a)
@staticmethod
def b():
n, *s = sys.stdin.read().split()
n = int(n)
for j in range(n):
row = ""
for i in range(n - 1, -1, -1):
row += s[i][j]
print(row)
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
b = [None] * n
prev = None
j = -1
for i, x in sorted(enumerate(a), key=lambda x: x[1]):
if x != prev:
j += 1
b[i] = j
prev = x
print(*b, sep="\n")
@staticmethod
def d():
n, *ab = map(int, sys.stdin.read().split())
edges = [[] for _ in range(n)]
for a, b in zip(*[iter(ab)] * 2):
a -= 1
b -= 1
edges[a].append(b)
edges[b].append(a)
parent = [None] * n
def count(u):
black, white = 1, 1
for v in edges[u]:
if v == parent[u]:
continue
parent[v] = u
b, w = count(v)
black *= w
black %= MOD
white *= (b + w) % MOD
white %= MOD
return black, white
print(sum(count(0)) % MOD)
class ABC037:
@staticmethod
def a():
a, b, c = map(int, sys.stdin.readline().split())
print(c // min(a, b))
@staticmethod
def b():
n, q, *lrt = map(int, sys.stdin.read().split())
a = np.zeros(n, dtype=int)
for l, r, t in zip(*[iter(lrt)] * 3):
a[l - 1 : r] = t
print(*a, sep="\n")
@staticmethod
def c():
n, k, *a = map(int, sys.stdin.read().split())
a = np.array([0] + a)
np.cumsum(a, out=a)
s = (a[k:] - a[:-k]).sum()
print(s)
@staticmethod
def d():
h, w, *a = map(int, sys.stdin.read().split())
p = [None] * (h * w)
def paths(k):
if p[k]:
return p[k]
p[k] = 1
i, j = divmod(k, w)
if j > 0 and a[k] > a[k - 1]:
p[k] += paths(k - 1)
if j < w - 1 and a[k] > a[k + 1]:
p[k] += paths(k + 1)
if i > 0 and a[k] > a[k - w]:
p[k] += paths(k - w)
if i < h - 1 and a[k] > a[k + w]:
p[k] += paths(k + w)
p[k] %= MOD
return p[k]
print(sum(paths(i) for i in range(h * w)) % MOD)
class ABC038:
    """Solutions for AtCoder Beginner Contest 038 (stdin -> stdout)."""

    @staticmethod
    def a():
        """Tea can be poured iff the string ends with 'T'."""
        arm = sys.stdin.readline().rstrip()
        print("YES" if arm[-1] == "T" else "NO")
    @staticmethod
    def b():
        """Two screens can share an edge iff {a, b} intersects {c, d}."""
        a, b, c, d = map(int, sys.stdin.read().split())
        print("YES" if {a, b} & {c, d} else "NO")
    @staticmethod
    def c():
        """Count strictly increasing contiguous subarrays."""
        n, *a = map(int, sys.stdin.read().split())
        a.append(-1)  # sentinel terminates the final run
        total = n  # every single element counts on its own
        run = 1
        for i in range(n):
            if a[i + 1] > a[i]:
                run += 1
            else:
                # A maximal run of length r adds C(r, 2) longer subarrays.
                total += run * (run - 1) // 2
                run = 1
        print(total)
    @staticmethod
    def d():
        """Nested boxes: sort by (w asc, h desc), then LIS over heights."""
        n, *wh = map(int, sys.stdin.read().split())
        boxes = sorted(zip(*[iter(wh)] * 2), key=lambda box: (box[0], -box[1]))
        heights = [h for _, h in boxes]
        print(bi_l(DP.LIS(heights), inf))
class ABC039:
@staticmethod
def a():
a, b, c = map(int, sys.stdin.readline().split())
print((a * b + b * c + c * a) * 2)
@staticmethod
def b():
x = int(sys.stdin.readline().rstrip())
for n in range(1, int(x**0.5) + 1):
if pow(n, 4) == x:
print(n)
return
@staticmethod
def c():
board = "WBWBWWBWBWBW" * 3
convert = "Do, *, Re, *, Mi, Fa, *, So, *, La, *, Si".split(", ")
s = sys.stdin.readline().rstrip()
print(convert[board.index(s)])
@staticmethod
def d():
h, w = map(int, sys.stdin.readline().split())
s = "".join(sys.stdin.read().split())
white = set()
for i in range(h * w):
if s[i] == "#":
continue
l = 0 if i % w == 0 else -1
r = 0 if (i + 1) % w == 0 else 1
white |= {
i + dy + dx
for dy in range(-w, w + 1, w)
for dx in range(l, r + 1)
}
black_before = set(range(h * w)) - white
black_after = set()
for i in black_before:
l = 0 if i % w == 0 else -1
r = 0 if (i + 1) % w == 0 else 1
black_after |= {
i + dy + dx
for dy in range(-w, w + 1, w)
for dx in range(l, r + 1)
}
black_after &= set(range(h * w))
for i in range(h * w):
if s[i] == "#" and not i in black_after:
print("impossible")
return
print("possible")
for i in range(h):
print(
"".join(
[
"#" if i * w + j in black_before else "."
for j in range(w)
]
)
)
class ABC040:
@staticmethod
def a():
n, x = map(int, sys.stdin.readline().split())
print(min(x - 1, n - x))
@staticmethod
def b():
n = int(sys.stdin.readline().rstrip())
res = inf
for i in range(1, int(n**0.5) + 1):
res = min(res, n // i - i + n % i)
print(res)
@staticmethod
def c():
n, *h = map(int, sys.stdin.read().split())
h = [h[0]] + h
cost = [None] * (n + 1)
cost[0] = cost[1] = 0
for i in range(2, n + 1):
cost[i] = min(
cost[i - 2] + abs(h[i] - h[i - 2]),
cost[i - 1] + abs(h[i] - h[i - 1]),
)
print(cost[n])
@staticmethod
def d():
n, m = map(int, sys.stdin.readline().split())
uf = GeometryTopology.Graph(n)
uf.init_dsu()
queue = []
for _ in range(m):
a, b, y = map(int, sys.stdin.readline().split())
heappush(queue, (-(2 * y), a - 1, b - 1))
q = int(sys.stdin.readline().rstrip())
for i in range(q):
v, y = map(int, sys.stdin.readline().split())
heappush(queue, (-(2 * y + 1), v - 1, i))
res = [None] * q
while queue:
y, i, j = heappop(queue)
if y & 1:
res[j] = uf.size[uf.find(i)]
else:
uf.unite(i, j)
print(*res, sep="\n")
class ABC041:
    """Solutions for AtCoder Beginner Contest 041 (stdin -> stdout)."""

    @staticmethod
    def a():
        """Print the i-th (1-based) character of s."""
        s, i = sys.stdin.read().split()
        print(s[int(i) - 1])
    @staticmethod
    def b():
        """Box volume a*b*c modulo MOD."""
        a, b, c = map(int, sys.stdin.readline().split())
        print(a * b % MOD * c % MOD)
    @staticmethod
    def c():
        """Print 1-based indices ordered by height, tallest first."""
        n, *heights = map(int, sys.stdin.read().split())
        order = sorted(range(n), key=lambda i: heights[i], reverse=True)
        for i in order:
            print(i + 1)
    @staticmethod
    def d():
        """Count orderings respecting 'x before y' constraints (bitmask DP)."""
        n, _, *xy = map(int, sys.stdin.read().split())
        after = [0] * n  # after[v]: bitmask of vertices that must follow v
        for x, y in zip(*[iter(xy)] * 2):
            after[x - 1] |= 1 << (y - 1)
        dp = [0] * (1 << n)  # dp[mask]: valid orderings of the set `mask`
        dp[0] = 1
        for mask in range(1 << n):
            for v in range(n):
                bit = 1 << v
                if not mask & bit:
                    continue
                # v may be placed last only if none of its successors
                # are already inside the set.
                if after[v] & mask:
                    continue
                dp[mask] += dp[mask ^ bit]
        print(dp[-1])
class ABC042:
@staticmethod
def a():
a = [int(x) for x in sys.stdin.readline().split()]
c = Counter(a)
print("YES" if c[5] == 2 and c[7] == 1 else "NO")
@staticmethod
def b():
n, l, *s = sys.stdin.read().split()
print("".join(sorted(s)))
@staticmethod
def c():
n, k, *d = sys.stdin.read().split()
l = len(n)
ok = sorted(set(string.digits) - set(d))
cand = [
int("".join(p)) for p in itertools.product(ok, repeat=l)
] + [int(min(x for x in ok if x > "0") + min(ok) * l)]
print(cand[bi_l(cand, int(n))])
@staticmethod
def d():
h, w, a, b = map(int, sys.stdin.read().split())
combinations = Combinatorics.CombinationsMod(
n=2 * 10**5, mod=MOD
)
i = np.arange(h - a, h)
ng = np.sum(
combinations(i + b - 1, i)
* combinations(h - i + w - b - 2, h - 1 - i)
% MOD
)
print((combinations(h + w - 2, h - 1) - ng) % MOD)
class ABC043:
    """Solutions for AtCoder Beginner Contest 043 (stdin -> stdout)."""

    @staticmethod
    def a():
        """Total candies 1 + 2 + ... + n."""
        n = int(sys.stdin.readline().rstrip())
        print(n * (n + 1) // 2)
    @staticmethod
    def b():
        """Simulate typing where 'B' is a backspace key."""
        buf = []
        for ch in sys.stdin.readline().rstrip():
            if ch == "B":
                if buf:
                    buf.pop()
            else:
                buf.append(ch)
        print("".join(buf))
    @staticmethod
    def c():
        """Minimum total cost to equalize: target is the rounded mean."""
        n, *a = map(int, sys.stdin.read().split())
        arr = np.array(a)
        target = np.around(arr.sum() / n).astype(int)
        print(np.sum((arr - target) ** 2))
    @staticmethod
    def d():
        """Shortest 'unbalanced' substring is a repeated letter at distance
        1 or 2; print its 1-based span, or '-1 -1' if none exists."""
        s = sys.stdin.readline().rstrip()
        n = len(s)
        for gap in (1, 2):
            for i in range(n - gap):
                if s[i] == s[i + gap]:
                    print(i + 1, i + gap + 1)
                    return
        print(-1, -1)
class ABC044:
@staticmethod
def a():
n, k, x, y = map(int, sys.stdin.read().split())
print(min(n, k) * x + max(0, n - k) * y)
@staticmethod
def b():
res = set(
c & 1 for c in Counter(sys.stdin.readline().rstrip()).values()
)
print("Yes" if len(res) == 1 and res.pop() == 0 else "No")
@staticmethod
def c():
n, a, *x = map(int, sys.stdin.read().split())
dp = np.zeros((n + 1, 2501), dtype=np.int64)
dp[0, 0] = 1
for v in x:
dp[1:, v:] += dp[:-1, :-v]
i = np.arange(1, n + 1)
print(dp[i, i * a].sum())
@staticmethod
def c_2():
n, a, *x = map(int, sys.stdin.read().split())
for i in range(n):
x[i] -= a
s = defaultdict(int)
s[0] = 1
for i in range(n):
ns = s.copy()
for k, v in s.items():
ns[k + x[i]] += v
s = ns
print(s[0] - 1)
@staticmethod
def d():
pass
class ABC045:
@staticmethod
def a():
a, b, h = map(int, sys.stdin.read().split())
print((a + b) * h // 2)
@staticmethod
def b():
a, b, c = sys.stdin.read().split()
d = {"a": a[::-1], "b": b[::-1], "c": c[::-1]}
nx = "a"
while 1:
if not d[nx]:
print(nx.upper())
return
d[nx], nx = d[nx][:-1], d[nx][-1]
@staticmethod
def c():
def c(l):
return pow(2, max(0, l - 1))
s = sys.stdin.readline().rstrip()
n = len(s)
print(
sum(
int(s[i : j + 1]) * c(i) * c(n - 1 - j)
for i in range(n)
for j in range(i, n)
)
)
@staticmethod
def d():
h, w, n, *ab = map(int, sys.stdin.read().split())
c = defaultdict(int)
for y, x in zip(*[iter(ab)] * 2):
y -= 1
x -= 1
for dy, dx in itertools.product(range(-1, 2), repeat=2):
i, j = y + dy, x + dx
if not (0 < i < h - 1 and 0 < j < w - 1):
continue
c[(i, j)] += 1
c = Counter(c.values())
c[0] = (h - 2) * (w - 2) - sum(c.values())
for i in range(10):
print(c[i])
class ABC046:
    """Solutions for AtCoder Beginner Contest 046 (stdin -> stdout)."""

    @staticmethod
    def a():
        """Number of distinct colours among the three cards."""
        colours = sys.stdin.readline().split()
        print(len(set(colours)))
    @staticmethod
    def b():
        """k choices for the first ball, k - 1 for each remaining one."""
        n, k = map(int, sys.stdin.readline().split())
        print(k * (k - 1) ** (n - 1))
    @staticmethod
    def c():
        """Smallest vote totals consistent with every observed ratio x:y."""
        n, *xy = map(int, sys.stdin.read().split())
        t, a = 1, 1
        for x, y in zip(*[iter(xy)] * 2):
            # Scale (x, y) by the smallest multiplier keeping both counts
            # non-decreasing (ceiling division).
            mult = max(-(-t // x), -(-a // y))
            t, a = mult * x, mult * y
        print(t + a)
    @staticmethod
    def d():
        """Best score: paper covers half of the g-over-p surplus."""
        counts = Counter(sys.stdin.readline().rstrip())
        print((counts["g"] - counts["p"]) // 2)
class ABC047:
    """Solutions for AtCoder Beginner Contest 047 (stdin -> stdout)."""

    @staticmethod
    def a():
        """The two smaller candy packs must sum to the largest one."""
        x, y, z = sorted(map(int, sys.stdin.readline().split()))
        print("Yes" if x + y == z else "No")
    @staticmethod
    def b():
        """Intersect the painting half-planes; print the surviving white area."""
        w, h, n, *xyf = map(int, sys.stdin.read().split())
        left, right, bottom, top = 0, w, 0, h
        for x, y, f in zip(*[iter(xyf)] * 3):
            if f == 1:
                left = max(left, x)
            elif f == 2:
                right = min(right, x)
            elif f == 3:
                bottom = max(bottom, y)
            elif f == 4:
                top = min(top, y)
        print(max(0, right - left) * max(0, top - bottom))
    @staticmethod
    def c():
        """Minimum repaints equal the number of adjacent unequal pairs."""
        s = sys.stdin.readline().rstrip()
        print(sum(x != y for x, y in zip(s, s[1:])))
    @staticmethod
    def d():
        """Count buy/sell town pairs achieving the maximum possible profit."""
        mn, mx, c = inf, -1, 0
        n, t, *a = map(int, sys.stdin.read().split())
        for price in a:
            gain = price - mn
            if gain == mx:
                c += 1
            elif gain > mx:
                mx, c = gain, 1
            mn = min(mn, price)
        print(c)
class ABC048:
    """Solutions for AtCoder Beginner Contest 048 (stdin -> stdout)."""

    @staticmethod
    def a():
        """Acronym built from the three words' initials."""
        words = sys.stdin.readline().split()
        print("".join(w[0].upper() for w in words))
    @staticmethod
    def b():
        """Count multiples of x in [a, b].

        Python's floor division keeps (a - 1) // x correct even at a == 0,
        where it rounds down to -1.
        """
        a, b, x = map(int, sys.stdin.readline().split())
        print(b // x - (a - 1) // x)
    @staticmethod
    def c():
        """Greedily eat candies from the right box of each adjacent pair so
        no pair sums above x; print the total eaten."""
        n, x, *boxes = map(int, sys.stdin.read().split())
        eaten = 0
        prev = 0
        for cur in boxes:
            excess = prev + cur - x
            if excess > 0:
                eaten += excess
                cur -= excess
            prev = cur
        print(eaten)
    @staticmethod
    def d():
        """Winner decided by string parity XOR matching end characters."""
        s = sys.stdin.readline().rstrip()
        first_wins = (len(s) & 1) ^ (s[0] == s[-1])
        print("First" if first_wins else "Second")
class ABC049:
@staticmethod
def a():
vowels = set("aeiou")
print(
"vowel"
if sys.stdin.readline().rstrip() in vowels
else "consonant"
)
@staticmethod
def b():
h, w, *s = sys.stdin.read().split()
for l in s:
for _ in range(2):
print(l)
@staticmethod
def c():
t = set("dream, dreamer, erase, eraser".split(", "))
def obtainable(s):
while True:
for i in range(5, 8):
if s[-i:] in t:
s = s[:-i]
if not s:
return True
break
else:
return False
s = sys.stdin.readline().rstrip()
print("YES" if obtainable(s) else "NO")
@staticmethod
def d():
n, k, l = map(int, sys.stdin.readline().split())
uf1 = GeometryTopology.Graph(n)
uf1.init_dsu()
uf2 = GeometryTopology.Graph(n)
uf2.init_dsu()
def add_edges(uf, m):
for _ in range(m):
x, y = map(int, sys.stdin.readline().split())
x -= 1
y -= 1
uf.unite(x, y)
add_edges(uf1, k)
add_edges(uf2, l)
g = defaultdict(list)
for i in range(n):
g[(uf1.find(i), uf2.find(i))].append(i)
res = [None] * n
for a in g:
for i in g[a]:
res[i] = len(g[a])
print(*res, sep=" ")
class ABC050:
    """Solutions for AtCoder Beginner Contest 050 (stdin -> stdout)."""
    @staticmethod
    def a():
        # SECURITY NOTE: eval() on raw stdin executes arbitrary Python.
        # Tolerable only because judge input is trusted (a "A op B" formula);
        # never use this pattern on untrusted input.
        print(eval(sys.stdin.readline().rstrip()))
    @staticmethod
    def b():
        # Base total is sum(t); query (p, x) replaces leg p's time with x.
        n = int(sys.stdin.readline().rstrip())
        t = np.array(sys.stdin.readline().split(), dtype=np.int64)
        m, *px = map(int, sys.stdin.read().split())
        p, x = np.array(px).reshape(m, 2).T
        p -= 1  # to 0-based leg index
        print(*(t.sum() + x - t[p]), sep="\n")
    @staticmethod
    def c():
        # Checks the multiset of reported values against the pattern forced
        # by parity (presumably the ABC050-C permutation-count problem:
        # odd n needs one 0 and two of each even value, even n needs two of
        # each odd value -- TODO confirm against the statement), then each
        # of the n // 2 pairs can be arranged two ways.
        n, *a = map(int, sys.stdin.read().split())
        a = Counter(a)
        if n & 1 and not (
            a[0] == 1 and all(a[i] == 2 for i in range(2, n, 2))
        ):
            print(0)
            return
        if ~n & 1 and any(a[i] != 2 for i in range(1, n, 2)):
            print(0)
            return
        print(pow(2, n // 2, MOD))
    @staticmethod
    def d():
        # Not implemented.
        pass
class ABC051:
@staticmethod
def a():
print(" ".join(sys.stdin.readline().rstrip().split(",")))
@staticmethod
def b():
k, s = map(int, sys.stdin.readline().split())
tot = 0
for x in range(k + 1):
if s - x < 0:
break
if s - x > 2 * k:
continue
tot += s - x + 1 if s - x <= k else 2 * k - (s - x) + 1
print(tot)
@staticmethod
def c():
x1, y1, x2, y2 = map(int, sys.stdin.readline().split())
dx, dy = x2 - x1, y2 - y1
print(
"U" * dy
+ "R" * (dx + 1)
+ "D" * (dy + 1)
+ "L" * (dx + 1)
+ "U"
+ "L"
+ "U" * (dy + 1)
+ "R" * (dx + 1)
+ "D" * (dy + 1)
+ "L" * dx
)
@staticmethod
def d():
n, m, *abc = map(int, sys.stdin.read().split())
x = np.arange(n)
a, b, c = np.array(abc).reshape(m, 3).T
a -= 1
b -= 1
d = shortest_path(
csr_matrix((c, (a, b)), shape=(n, n)),
method="FW",
directed=False,
).astype(np.int64)
print(
m
- np.any(
d[x, a[:, None]] + c[:, None] == d[x, b[:, None]], axis=1
).sum()
)
class ABC052:
@staticmethod
def a():
a, b, c, d = map(int, sys.stdin.readline().split())
print(max(a * b, c * d))
@staticmethod
def b():
n, s = sys.stdin.read().split()
n = int(n)
a = [0] * (n + 1)
for i in range(n):
a[i + 1] = a[i] + (1 if s[i] == "I" else -1)
print(max(a))
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
pn = NumberTheory.PrimeNumbers(n)
s = 1
for c in pn.factorize_factorial(n).values():
s = s * (c + 1) % MOD
print(s)
@staticmethod
def d():
n, a, b, *x = map(int, sys.stdin.read().split())
x = np.array(x)
print(np.minimum((x[1:] - x[:-1]) * a, b).sum())
class ABC053:
    """Solutions for AtCoder Beginner Contest 053 (stdin -> stdout)."""

    @staticmethod
    def a():
        """Rating below 1200 competes in ABC, otherwise ARC."""
        rating = int(sys.stdin.readline().rstrip())
        print("ABC" if rating < 1200 else "ARC")
    @staticmethod
    def b():
        """Length of the span from the first 'A' to the last 'Z'."""
        s = sys.stdin.readline().rstrip()
        print(s.rfind("Z") - s.find("A") + 1)
    @staticmethod
    def c():
        """Each full 11 points costs two rolls; the remainder needs one roll
        per started 6 points."""
        x = int(sys.stdin.readline().rstrip())
        full, rest = divmod(x, 11)
        print(2 * full + (rest + 5) // 6)
    @staticmethod
    def d():
        """Max deck size after pairing off duplicates (keep parity valid)."""
        n, *a = map(int, sys.stdin.read().split())
        dupes = n - len(set(a))
        print(n - (dupes + 1) // 2 * 2)
class ABC054:
@staticmethod
def a():
def f(x):
return (x + 11) % 13
a, b = map(int, sys.stdin.readline().split())
print("Alice" if f(a) > f(b) else "Bob" if f(a) < f(b) else "Draw")
@staticmethod
def b():
n, m = map(int, sys.stdin.readline().split())
a = [sys.stdin.readline().rstrip() for _ in range(n)]
b = [sys.stdin.readline().rstrip() for _ in range(m)]
for i in range(n - m + 1):
for j in range(n - m + 1):
for y in range(m):
for x in range(m):
if a[i + y][j + x] == b[y][x]:
continue
break
else:
continue
break
else:
print("Yes")
return
print("No")
@staticmethod
def c():
n, m, *ab = map(int, sys.stdin.read().split())
g = GeometryTopology.Graph(n)
for a, b in zip(*[iter(ab)] * 2):
a -= 1
b -= 1
g.add_edge(a, b)
g.add_edge(b, a)
cnt = 0
stack = [(0, 1)]
while stack:
u, s = stack.pop()
if s == (1 << n) - 1:
cnt += 1
continue
for v in g.edges[u]:
if s >> v & 1:
continue
stack.append((v, s | 1 << v))
print(cnt)
@staticmethod
def d():
n, ma, mb, *abc = map(int, sys.stdin.read().split())
dp = np.full((401, 401), np.inf)
dp[0, 0] = 0
for a, b, c in zip(*[iter(abc)] * 3):
np.minimum(dp[a:, b:], dp[:-a, :-b] + c, out=dp[a:, b:])
i = np.arange(1, 400 // max(ma, mb) + 1)
res = dp[i * ma, i * mb].min()
print(int(res) if res != np.inf else -1)
class ABC055:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
print(800 * n - 200 * (n // 15))
@staticmethod
def b():
n = int(sys.stdin.readline().rstrip())
fac, _ = Algebra.generate_fac_ifac(n, MOD)
print(fac[-1])
@staticmethod
def c():
n, m = map(int, sys.stdin.readline().split())
print(m // 2 if m <= 2 * n else n + (m - 2 * n) // 4)
@staticmethod
def d():
n, s = sys.stdin.read().split()
n = int(n)
s = [1 if c == "o" else 0 for c in s]
def possible(t):
for i in range(1, n - 1):
t[i + 1] = t[i - 1] ^ t[i] ^ s[i]
return (
(t[0] ^ s[0] ^ t[1] ^ t[-1])
| (t[-1] ^ s[-1] ^ t[-2] ^ t[0])
) ^ 1
for fst in [(1, 0), (0, 1), (1, 1), (0, 0)]:
t = [None] * n
t[0], t[1] = fst[0], fst[1]
if possible(t):
print("".join("S" if x == 1 else "W" for x in t))
return
print(-1)
class ABC056:
    """Solutions for AtCoder Beginner Contest 056 (stdin -> stdout)."""

    @staticmethod
    def a():
        """'H' is honest, 'D' lies: the answer is the XOR of the speakers."""
        def to_i(c):
            return 1 if c == "H" else 0
        a, b = map(to_i, sys.stdin.readline().split())
        print("D" if a ^ b else "H")
    @staticmethod
    def b():
        """Distance still to travel once the window of width w is passed."""
        w, a, b = map(int, sys.stdin.readline().split())
        if a > b:
            a, b = b, a
        print(max(b - (a + w), 0))
    @staticmethod
    def c():
        """Smallest t with t * (t + 1) / 2 >= x, via the quadratic formula."""
        x = int(sys.stdin.readline().rstrip())
        print(int(math.ceil(math.sqrt(2 * x + 1 / 4) - 0.5)))
    @staticmethod
    def d():
        """Count 'unnecessary' cards via subset-sum DP plus binary search.

        Card i is necessary iff some subset of the other cards sums into
        [k - a[i], k - 1].  After sorting, necessity is monotone in the
        card value, so the index of the first necessary card equals the
        number of unnecessary ones.
        """
        n, k, *a = map(int, sys.stdin.read().split())
        a = sorted(min(x, k) for x in a)
        def necessary(i):
            # dp[s] is True iff sum s (< k) is reachable without card i.
            # FIX: np.bool was removed in NumPy 1.24 -- use builtin bool.
            dp = np.zeros(k, dtype=bool)
            dp[0] = True
            for j in range(n):
                if j == i:
                    continue
                dp[a[j] :] += dp[: -a[j]]
            return np.any(dp[k - a[i] :])
        def binary_search():
            # Invariant: cards <= lo are unnecessary, cards >= hi necessary.
            lo, hi = -1, n
            while hi - lo > 1:
                i = (lo + hi) // 2
                if necessary(i):
                    hi = i
                else:
                    lo = i
            return hi
        print(binary_search())
class ABC057:
@staticmethod
def a():
a, b = map(int, sys.stdin.readline().split())
print((a + b) % 24)
@staticmethod
def b():
n, m, *I = map(int, sys.stdin.read().split())
I = np.array(I).reshape(-1, 2)
ab, cd = I[:n], I[n:]
print(
*(
np.argmin(
np.absolute(ab[:, None] - cd).sum(axis=-1), axis=-1
)
+ 1
),
sep="\n",
)
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
divs = NumberTheory.find_divisors(n)
print(len(str(divs[bi_l(divs, math.sqrt(n))])))
@staticmethod
def d():
c = Combinatorics.choose
n, a, b, *v = map(int, sys.stdin.read().split())
v.sort()
print(sum(v[-a:]) / a)
l, r = bi_l(v, v[-a]), bi_r(v, v[-a])
print(
sum(
c(r - l, i)
for i in range(r - n + a, r - max(l, n - b) + 1)
)
if r == n
else c(r - l, r - n + a)
)
class ABC058:
@staticmethod
def a():
a, b, c = map(int, sys.stdin.readline().split())
print("YES" if c - b == b - a else "NO")
@staticmethod
def b():
s, t = sys.stdin.read().split()
a = ""
for i in range(len(t)):
a += s[i] + t[i]
if len(s) > len(t):
a += s[-1]
print(a)
@staticmethod
def c():
n, *s = sys.stdin.read().split()
res = {c: 100 for c in string.ascii_lowercase}
for counter in map(Counter, s):
for (
c,
x,
) in res.items():
res[c] = min(x, counter[c])
t = ""
for c, x in sorted(res.items()):
t += c * x
print(t)
@staticmethod
def d():
n, m, *xy = map(int, sys.stdin.read().split())
x, y = np.array(xy[:n]), np.array(xy[n:])
print(
(x * (np.arange(n) + 1) - np.cumsum(x)).sum()
% MOD
* ((y * (np.arange(m) + 1) - np.cumsum(y)).sum() % MOD)
% MOD
)
class ABC059:
@staticmethod
def a():
def initial(s):
return s[0].upper()
print("".join(map(initial, sys.stdin.readline().split())))
@staticmethod
def b():
a, b = sys.stdin.read().split()
la, lb = len(a), len(b)
print(
"GREATER"
if la > lb
else "LESS"
if la < lb
else "GREATER"
if a > b
else "LESS"
if a < b
else "EQUAL"
)
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
c = s = 0
for i in range(n):
s += a[i]
if i & 1 and s >= 0:
c += s + 1
s = -1
elif i & 1 ^ 1 and s <= 0:
c += 1 - s
s = 1
c1 = c
c = s = 0
for i in range(n):
s += a[i]
if i & 1 and s <= 0:
c += 1 - s
s = 1
elif i & 1 ^ 1 and s >= 0:
c += s + 1
s = -1
c2 = c
print(min(c1, c2))
@staticmethod
def d():
x, y = map(int, sys.stdin.readline().split())
print("Brown" if abs(x - y) <= 1 else "Alice")
class ABC060:
    """Solutions for AtCoder Beginner Contest 060 (stdin -> stdout)."""

    @staticmethod
    def a():
        """Word chain: each word starts with the previous word's last letter."""
        a, b, c = sys.stdin.readline().split()
        chained = a[-1] == b[0] and b[-1] == c[0]
        print("YES" if chained else "NO")
    @staticmethod
    def b():
        """a * i mod b can equal c iff gcd(a, b) divides c."""
        a, b, c = map(int, sys.stdin.readline().split())
        print("YES" if c % NumberTheory.gcd(a, b) == 0 else "NO")
    @staticmethod
    def c():
        """Total shower time: each press extends the flow by at most t."""
        n, t, *presses = map(int, sys.stdin.read().split())
        total = t  # the final press always runs its full duration
        for prev, cur in zip(presses, presses[1:]):
            total += min(cur - prev, t)
        print(total)
    @staticmethod
    def d():
        pass
class ABC061:
    """Solutions for AtCoder Beginner Contest 061 (stdin -> stdout)."""

    @staticmethod
    def a():
        """Is c within the inclusive range [a, b]?"""
        a, b, c = map(int, sys.stdin.readline().split())
        print("Yes" if a <= c <= b else "No")
    @staticmethod
    def b():
        """Degree of each vertex of an undirected graph."""
        n, m, *ab = map(int, sys.stdin.read().split())
        ab = np.array(ab) - 1
        deg = np.zeros(n, dtype=np.int32)
        np.add.at(deg, ab, 1)
        print(*deg, sep="\n")
    @staticmethod
    def c():
        """k-th smallest value where value a[i] occurs b[i] times."""
        n, k, *ab = map(int, sys.stdin.read().split())
        ab = np.transpose(np.array(ab).reshape(n, 2))
        a, b = ab[:, np.argsort(ab[0])]
        print(a[np.cumsum(b) >= k][0])
    @staticmethod
    def d():
        """Longest-score path 1 -> n: negate weights and run Bellman-Ford;
        print 'inf' when a reachable score-gain cycle exists."""
        n, m, *abc = map(int, sys.stdin.read().split())
        a, b, c = np.array(abc).reshape(m, 3).T
        a -= 1
        b -= 1
        c *= -1  # maximize score == minimize negated cost
        # Add an edge n-1 -> 0 and keep only edges inside vertex 0's strong
        # component, i.e. edges lying on some 1 -> n path.
        g = csr_matrix(
            ([1] * (m + 1), (np.append(a, n - 1), np.append(b, 0))), (n, n)
        )
        _, labels = connected_components(g, connection="strong")
        bl = (labels[a] == labels[0]) & (labels[b] == labels[0])
        g = csr_matrix((c[bl], (a[bl], b[bl])), (n, n))
        try:
            print(
                -shortest_path(g, method="BF", directed=True, indices=0)[
                    -1
                ].astype(int)
            )
        # FIX: narrowed from a bare except; scipy signals a relevant
        # negative cycle with NegativeCycleError (an Exception subclass).
        except Exception:
            print("inf")
    @staticmethod
    def d_2():
        """Hand-rolled Bellman-Ford variant of d()."""
        n, m, *abc = map(int, sys.stdin.read().split())
        a, b, c = np.array(abc).reshape(m, 3).T
        a -= 1
        b -= 1
        c *= -1
        d = np.full(n, np.inf)
        d[0] = 0
        for _ in range(n - 1):
            np.minimum.at(d, b, d[a] + c)
        # n extra relaxation rounds mark vertices still improvable, i.e.
        # influenced by a negative (score-gain) cycle.
        # FIX: np.bool was removed in NumPy 1.24 -- use builtin bool.
        neg_cycle = np.zeros(n, dtype=bool)
        for _ in range(n):
            np.logical_or.at(neg_cycle, b, d[a] + c < d[b])
            np.minimum.at(d, b, d[a] + c)
        print(inf if neg_cycle[-1] else -d[-1].astype(int))
class ABC062:
@staticmethod
def a():
g = [0, 2, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0]
x, y = map(int, sys.stdin.readline().split())
print("Yes" if g[x - 1] == g[y - 1] else "No")
@staticmethod
def b():
h, w = map(int, sys.stdin.readline().split())
a = np.array(
[list(s) for s in sys.stdin.read().split()], dtype="U1"
)
a = np.pad(a, pad_width=1, constant_values="#")
for s in a:
print("".join(s))
@staticmethod
def c():
h, w = map(int, sys.stdin.readline().split())
if h * w % 3 == 0:
print(0)
return
def minimize(h, w):
return min(
h,
*(
s[-1] - s[0]
for x in range(w // 3, w // 3 + 2)
for s in (
sorted(
[
h * x,
h // 2 * (w - x),
(h + 1) // 2 * (w - x),
]
),
)
),
)
print(min(minimize(h, w), minimize(w, h)))
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
def optimize(a):
a = list(a)
l, r = a[:n], a[n:]
heapify(l)
s = [None] * (n + 1)
s[0] = sum(l)
for i in range(n):
x = heappop(l)
heappush(l, max(x, r[i]))
s[i + 1] = s[i] + max(0, r[i] - x)
return np.array(s)
print(
(
optimize(a[: 2 * n]) + optimize(-a[-1 : n - 1 : -1])[::-1]
).max()
)
class ABC063:
@staticmethod
def a():
a = sum(map(int, sys.stdin.readline().split()))
print("error" if a >= 10 else a)
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
print("yes" if len(set(s)) == len(s) else "no")
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
s = a.sum()
if s % 10:
print(s)
elif not np.count_nonzero(a % 10):
print(0)
else:
print(s - a[a % 10 != 0].min())
@staticmethod
def d():
n, a, b, *h = map(int, sys.stdin.read().split())
h = np.array(h)
d = a - b
def possible(c):
hh = h.copy()
np.maximum(hh - b * c, 0, out=hh)
return ((hh + d - 1) // d).sum() <= c
def binary_search():
lo, hi = 0, 10**9
while hi - lo > 1:
c = (lo + hi) // 2
if possible(c):
hi = c
else:
lo = c
return hi
print(binary_search())
class ABC064:
@staticmethod
def a():
r, g, b = map(int, sys.stdin.readline().split())
print("NO" if (10 * g + b) % 4 else "YES")
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
a.sort()
print(a[-1] - a[0])
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
a = np.bincount(np.minimum(np.array(a) // 400, 8), minlength=9)
mx = np.count_nonzero(a[:-1]) + a[-1]
mn = max(mx - a[-1], 1)
print(mn, mx)
@staticmethod
def d():
n, s = sys.stdin.read().split()
l = r = 0
for c in s:
if c == "(":
r += 1
else:
if r == 0:
l += 1
else:
r -= 1
print("(" * l + s + ")" * r)
class ABC065:
@staticmethod
def a():
x, a, b = map(int, sys.stdin.readline().split())
y = -a + b
print("delicious" if y <= 0 else "safe" if y <= x else "dangerous")
@staticmethod
def b():
n, *a = [int(x) - 1 for x in sys.stdin.read().split()]
i = 0
for c in range(n):
i = a[i]
if i == 1:
print(c + 1)
return
print(-1)
@staticmethod
def c():
n, m = map(int, sys.stdin.readline().split())
d = abs(n - m)
if d >= 2:
print(0)
return
fac, _ = Algebra.generate_fac_ifac(10**5)
print(fac[n] * fac[m] * (1 if d else 2) % MOD)
@staticmethod
def d():
n, *xy = map(int, sys.stdin.read().split())
x, y = np.array(xy).reshape(n, 2).T
i = np.argsort(x)
ax, bx, cx = (
i[:-1],
i[1:],
x[
i[1:],
]
- x[i[:-1]],
)
i = np.argsort(y)
ay, by, cy = (
i[:-1],
i[1:],
y[
i[1:],
]
- y[i[:-1]],
)
e = np.vstack(
[np.hstack([ax, ay]), np.hstack([bx, by]), np.hstack([cx, cy])]
)
e = e[:, np.argsort(e[-1])]
_, i = np.unique(e[:-1], return_index=True, axis=1)
a, b, c = e[:, i]
print(
minimum_spanning_tree(csr_matrix((c, (a, b)), (n, n)))
.astype(np.int64)
.sum()
)
@staticmethod
def d_2():
n, *xy = map(int, sys.stdin.read().split())
x, y = xy[::2], xy[1::2]
g = GeometryTopology.Graph(n)
def make(a):
b = sorted(enumerate(a), key=lambda x: x[1])
for i in range(n - 1):
u, v, w = b[i][0], b[i + 1][0], b[i + 1][1] - b[i][1]
for u, v in [(v, u), (u, v)]:
if not v in g.edges[u]:
g.add_edge(u, v, weight=w)
else:
g.edges[u][v].weight = min(g.edges[u][v].weight, w)
make(x)
make(y)
_, d = g.kruskal()
# _, d = g.prim()
# _, d = g.boruvka()
print(d)
class ABC066:
@staticmethod
def a():
print(sum(sorted(map(int, sys.stdin.readline().split()))[:-1]))
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
def f(s):
n = len(s) // 2
return s[:n] == s[n:]
for i in range(len(s) - 2, 0, -2):
if f(s[:i]):
print(i)
return
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
b = deque()
for i in range(n):
if i & 1:
b.appendleft(a[i])
else:
b.append(a[i])
if n & 1:
b.reverse()
print(*b)
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
tmp = [None] * (n + 1)
for i in range(n + 1):
if tmp[a[i]] is not None:
d = tmp[a[i]] + n - i
break
tmp[a[i]] = i
k = np.arange(1, n + 2)
c = Combinatorics.CombinationsMod(n + 1, MOD)
print(*((c(n + 1, k) - c(d, k - 1)) % MOD), sep="\n")
class ABC067:
@staticmethod
def a():
pass
@staticmethod
def b():
pass
@staticmethod
def c():
pass
@staticmethod
def d():
n, *ab = map(int, sys.stdin.read().split())
g = GeometryTopology.Graph(n)
for a, b in zip(*[iter(ab)] * 2):
a -= 1
b -= 1
g.add_edge(a, b)
g.add_edge(b, a)
d1, d2 = g.bfs(0), g.bfs(n - 1)
print(
"Fennec"
if sum(d1[i] <= d2[i] for i in range(n)) > n // 2
else "Snuke"
)
class ABC068:
@staticmethod
def a():
pass
@staticmethod
def b():
pass
@staticmethod
def c():
pass
@staticmethod
def d():
k = int(sys.stdin.readline().rstrip())
n = 50
print(n)
q, r = divmod(k, n)
a = np.arange(n - 1, -1, -1) + q
a[:r] += 1
print(*a)
class ABC069:
    """Solutions for AtCoder Beginner Contest 069 (a-c not implemented)."""

    @staticmethod
    def a():
        pass
    @staticmethod
    def b():
        pass
    @staticmethod
    def c():
        pass
    @staticmethod
    def d():
        """Fill the h x w grid colour by colour in boustrophedon order so
        every colour's cells stay connected."""
        h, w, n, *counts = map(int, sys.stdin.read().split())
        cells = []
        for colour, cnt in enumerate(counts, start=1):
            cells.extend([colour] * cnt)
        for r in range(h):
            row = cells[r * w : (r + 1) * w]
            # Reverse every other row so consecutive rows stay adjacent.
            print(*(reversed(row) if r & 1 else row))
class ABC070:
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
g = GeometryTopology.Graph(n)
for _ in range(n - 1):
a, b, c = map(int, sys.stdin.readline().split())
a -= 1
b -= 1
g.add_edge(a, b, weight=c)
g.add_edge(b, a, weight=c)
q, k = map(int, sys.stdin.readline().split())
d = g.bfs(k - 1)
for _ in range(q):
x, y = map(int, sys.stdin.readline().split())
x -= 1
y -= 1
print(d[x] + d[y])
class ABC071:
@staticmethod
def d():
n, *s = sys.stdin.read().split()
n = int(n)
s = list(zip(*s))
dp = [0] * n
dp[0] = 3 if s[0][0] == s[0][1] else 6
for i in range(1, n):
dp[i] = dp[i - 1]
if s[i][0] == s[i - 1][0]:
continue
dp[i] *= (
2
if s[i - 1][0] == s[i - 1][1]
else 3
if s[i][0] != s[i][1]
else 1
)
dp[i] %= MOD
print(dp[-1])
class ABC072:
    """Solutions for AtCoder Beginner Contest 072 (only d implemented)."""

    @staticmethod
    def d():
        """Minimum adjacent swaps so that no p[i] == i + 1 remains.

        Greedy scan: one swap clears an adjacent pair of fixed points at
        once; an isolated fixed point costs one swap of its own.
        """
        n, *p = map(int, sys.stdin.read().split())
        p.append(-1)  # sentinel keeps p[i + 1] valid at the end
        swaps = 0
        i = 0
        while i < n:
            if p[i] == i + 1:
                swaps += 1
                if p[i + 1] == i + 2:  # the pair shares a single swap
                    i += 1
            i += 1
        print(swaps)
class ABC073:
@staticmethod
def a():
pass
@staticmethod
def b():
pass
@staticmethod
def c():
pass
@staticmethod
def d():
n, m, r, *I = map(int, sys.stdin.read().split())
I = np.array(I)
a, b, c = I[r:].reshape(m, 3).T
d = shortest_path(
csr_matrix((c, (a - 1, b - 1)), (n, n)),
method="FW",
directed=False,
).astype(np.int32)
r = np.array([*itertools.permutations(I[:r] - 1)])
print((d[r[:, :-1], r[:, 1:]].sum(axis=1)).min())
class ABC074:
@staticmethod
def a():
pass
@staticmethod
def b():
pass
@staticmethod
def c():
pass
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a, dtype=np.int32).reshape(n, n)
b = shortest_path(a, method="FW").astype(np.int32)
if (b < a).any():
print(-1)
return
np.fill_diagonal(b, 10**9)
a[np.any(b[:, None] + b <= a[:, :, None], axis=2)] = 0
print(a.sum() // 2)
class ABC075:
@staticmethod
def a():
pass
@staticmethod
def b():
pass
@staticmethod
def c():
pass
@staticmethod
def d():
n, k, *xy = map(int, sys.stdin.read().split())
xy = np.array(xy).reshape(n, 2)
x_y = xy.copy()[np.argsort(xy[:, 0])]
y_x = xy.copy()[np.argsort(xy[:, 1])]
comb = np.array([*itertools.combinations(range(n), 2)])
i1, i2 = comb.T
j1, j2 = comb[None, :].T
s = (y_x[:, 1][i2] - y_x[:, 1][i1]) * (
x_y[:, 0][j2] - x_y[:, 0][j1]
)
c = np.zeros((n + 1, n + 1), dtype=np.int64)
for i in range(n):
c[i + 1, 1:] += c[i, 1:] + (y_x[i, 0] <= x_y[:, 0])
a = c[i2 + 1, j2 + 1] - c[i2 + 1, j1] - c[i1, j2 + 1] + c[i1, j1]
print(s[a >= k].min())
class ABC076:
@staticmethod
def d():
n, *tv = map(int, sys.stdin.read().split())
t, v = np.array(tv).reshape(2, n)
t = np.pad(t, pad_width=[2, 1], constant_values=0)
np.cumsum(t, out=t)
l, r = t[:-1], t[1:]
v = np.pad(v, pad_width=[1, 1], constant_values=0)
x = np.arange(0, r[-1] + 0.1, 0.5, dtype=np.float32)[:, None]
# y = np.stack([v-(x-l), np.zeros(r[-1]*2+1, dtype=np.float32)[:,None]+v, v+(x-r)]).max(axis=0).min(axis=1)
mx = v - (x - l)
np.maximum(mx, v, out=mx)
np.maximum(mx, v + (x - r), out=mx)
y = mx.min(axis=1)
print(((y[:-1] + y[1:]) / 4).sum())
class ABC077:
@staticmethod
def d():
k = int(sys.stdin.readline().rstrip())
g = GeometryTopology.Graph(k)
for i in range(k):
g.add_edge(i, i * 10 % k, weight=0)
g.add_edge(i, (i + 1) % k, update=False, weight=1)
print(1 + g.bfs01(1)[0])
class ABC078:
    """Solutions for AtCoder Beginner Contest 078 (only d implemented)."""

    @staticmethod
    def d():
        """Card game 'ABS': optimal play depends only on the last one or
        two cards of the deck."""
        n, z, w, *a = map(int, sys.stdin.read().split())
        if n == 1:
            score = abs(a[0] - w)
        else:
            score = max(abs(a[-1] - w), abs(a[-1] - a[-2]))
        print(score)
class ABC079:
    """Solutions for AtCoder Beginner Contest 079 (only D is implemented)."""

    @staticmethod
    def d():
        """Problem D: minimum total cost to turn every digit on an
        h x w wall into 1.

        The first 100 inputs are the 10x10 digit-to-digit conversion
        cost matrix; Dijkstra on its transpose from node 1 gives, for
        each digit d, the cheapest chain d -> ... -> 1.
        """
        h, w, *I = map(int, sys.stdin.read().split())
        I = np.array(I)
        c = I[:100].reshape(10, 10)
        a = I[100:].reshape(h, w)
        # Distances *to* digit 1 equal distances *from* 1 in the
        # reversed (transposed) cost graph.
        c = shortest_path(c.T, method="D", indices=1).astype(np.int32)
        # Cells holding -1 carry no digit and are skipped by the mask.
        print(c[a[a != -1]].sum())
class ABC080:
    """Solutions for AtCoder Beginner Contest 080 (only D is implemented)."""

    @staticmethod
    def d():
        """Problem D: minimum number of recorders for n programs on c
        channels.

        Difference array per channel over the timeline: +1 at s, -1 at
        t + 1, then a prefix sum marks each channel busy on [s, t].
        The answer is the peak count of simultaneously busy channels.
        """
        n, c, *stc = map(int, sys.stdin.read().split())
        # NOTE(review): int8 assumes per-channel overlap counts stay
        # below 128 -- count_nonzero below only needs busy/not-busy.
        using = np.zeros((c, 10**5 + 2), dtype=np.int8)
        # c is rebound here from channel count to the channel column.
        s, t, c = np.array(stc).reshape(n, 3).T
        np.add.at(using, (c - 1, s), 1)
        np.subtract.at(using, (c - 1, t + 1), 1)
        np.cumsum(using, axis=1, out=using)
        print(np.count_nonzero(using, axis=0).max())
class ABC081:
    """Solutions for AtCoder Beginner Contest 081 (only D is implemented)."""

    @staticmethod
    def d():
        """Problem D: print a sequence of exactly 2n "add a[x] onto a[y]"
        operations that makes the array nondecreasing.

        First the element of largest magnitude is added onto every
        position, making all entries share its sign; then one prefix
        pass (left-to-right for nonnegative, right-to-left for
        nonpositive) sorts the array.
        """
        n, *a = map(int, sys.stdin.read().split())
        # Index of the entry with the largest absolute value; ties take
        # the first occurrence, matching np.argmax semantics.
        dominant = max(range(n), key=lambda j: abs(a[j]))
        print(2 * n)
        # Spread the dominant element onto every position.
        for j in range(n):
            print(dominant + 1, j + 1)
        if a[dominant] >= 0:
            # All entries now nonnegative: accumulate left to right.
            for j in range(1, n):
                print(j, j + 1)
        else:
            # All entries now nonpositive: accumulate right to left.
            for j in range(n - 1, 0, -1):
                print(j + 1, j)
class ABC082:
pass
class ABC083:
pass
class ABC084:
pass
class ABC085:
pass
class ABC086:
pass
class ABC087:
pass
class ABC088:
pass
class ABC089:
pass
class ABC090:
pass
class ABC091:
pass
class ABC092:
pass
class ABC093:
pass
class ABC094:
pass
class ABC095:
pass
class ABC096:
pass
class ABC097:
pass
class ABC098:
pass
class ABC099:
pass
class ABC100:
pass
class ABC101:
pass
class ABC102:
pass
class ABC103:
pass
class ABC104:
pass
class ABC105:
pass
class ABC106:
pass
class ABC107:
pass
class ABC108:
pass
class ABC109:
pass
class ABC110:
pass
class ABC111:
pass
class ABC112:
pass
class ABC113:
pass
class ABC114:
pass
class ABC115:
pass
class ABC116:
pass
class ABC117:
pass
class ABC118:
pass
class ABC119:
pass
class ABC120:
pass
class ABC121:
pass
class ABC122:
pass
class ABC123:
pass
class ABC124:
pass
class ABC125:
pass
class ABC126:
pass
class ABC127:
pass
class ABC128:
pass
class ABC129:
pass
class ABC130:
pass
class ABC131:
pass
class ABC132:
pass
class ABC133:
pass
class ABC134:
pass
class ABC135:
pass
class ABC136:
pass
class ABC137:
pass
class ABC138:
pass
class ABC139:
pass
class ABC140:
pass
class ABC141:
pass
class ABC142:
pass
class ABC143:
pass
class ABC144:
pass
class ABC145:
pass
class ABC146:
pass
class ABC147:
pass
class ABC148:
pass
class ABC149:
pass
class ABC150:
pass
class ABC151:
pass
class ABC152:
pass
class ABC153:
pass
class ABC154:
pass
class ABC155:
pass
class ABC156:
pass
class ABC157:
pass
class ABC158:
pass
class ABC159:
pass
class ABC160:
pass
class ABC161:
pass
class ABC162:
pass
class ABC163:
pass
class ABC164:
pass
class ABC165:
pass
class ABC166:
pass
class ABC167:
pass
class ABC168:
pass
class ABC169:
pass
class ABC170:
@staticmethod
def a():
x = [int(x) for x in sys.stdin.readline().split()]
for i in range(5):
if x[i] != i + 1:
print(i + 1)
break
@staticmethod
def b():
x, y = map(int, sys.stdin.readline().split())
print("Yes" if 2 * x <= y <= 4 * x and y % 2 == 0 else "No")
@staticmethod
def c():
x, n, *p = map(int, sys.stdin.read().split())
a = list(set(range(102)) - set(p))
a = [(abs(y - x), y) for y in a]
print(sorted(a)[0][1])
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
cand = set(a)
cnt = 0
for x, c in sorted(Counter(a).items()):
cnt += c == 1 and x in cand
cand -= set(range(x * 2, 10**6 + 1, x))
print(cnt)
@staticmethod
def e():
n, q = map(int, sys.stdin.readline().split())
queue = []
m = 2 * 10**5
infants = [[] for _ in range(m)]
highest_rate = [None] * m
where = [None] * n
rate = [None] * n
def entry(i, k):
where[i] = k
while infants[k]:
r, j = heappop(infants[k])
if where[j] != k or j == i:
continue
if rate[i] >= -r:
highest_rate[k] = rate[i]
heappush(queue, (rate[i], k, i))
heappush(infants[k], (r, j))
break
else:
highest_rate[k] = rate[i]
heappush(queue, (rate[i], k, i))
heappush(infants[k], (-rate[i], i))
def transfer(i, k):
now = where[i]
while infants[now]:
r, j = heappop(infants[now])
if where[j] != now or j == i:
continue
if highest_rate[now] != -r:
highest_rate[now] = -r
heappush(queue, (-r, now, j))
heappush(infants[now], (r, j))
break
else:
highest_rate[now] = None
entry(i, k)
def inquire():
while True:
r, k, i = heappop(queue)
if where[i] != k or r != highest_rate[k]:
continue
heappush(queue, (r, k, i))
return r
for i in range(n):
a, b = map(int, sys.stdin.readline().split())
rate[i] = a
entry(i, b - 1)
for _ in range(q):
c, d = map(int, sys.stdin.readline().split())
transfer(c - 1, d - 1)
print(inquire())
class ABC171:
@staticmethod
def a():
c = sys.stdin.readline().rstrip()
print("A" if c < "a" else "a")
@staticmethod
def b():
n, k, *p = map(int, sys.stdin.read().split())
print(sum(sorted(p)[:k]))
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
n -= 1
l = 1
while True:
if n < pow(26, l):
break
n -= pow(26, l)
l += 1
res = "".join(
[chr(ord("a") + d) for d in NumberTheory.base_convert(n, 26)][
::-1
]
)
res = "a" * (l - len(res)) + res
print(res)
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
a = [int(x) for x in sys.stdin.readline().split()]
s = sum(a)
cnt = Counter(a)
q = int(sys.stdin.readline().rstrip())
for _ in range(q):
b, c = map(int, sys.stdin.readline().split())
s += (c - b) * cnt[b]
print(s)
cnt[c] += cnt[b]
cnt[b] = 0
@staticmethod
def e():
n, *a = map(int, sys.stdin.read().split())
s = 0
for x in a:
s ^= x
b = map(lambda x: x ^ s, a)
print(*b, sep=" ")
class ABC172:
@staticmethod
def a():
a = int(sys.stdin.readline().rstrip())
print(a * (1 + a + a**2))
@staticmethod
def b():
s, t = sys.stdin.read().split()
print(sum(s[i] != t[i] for i in range(len(s))))
@staticmethod
def c():
n, m, k = map(int, sys.stdin.readline().split())
a = [0] + [int(x) for x in sys.stdin.readline().split()]
b = [int(x) for x in sys.stdin.readline().split()]
(*sa,) = itertools.accumulate(a)
(*sb,) = itertools.accumulate(b)
res = 0
for i in range(n + 1):
r = k - sa[i]
if r < 0:
break
res = max(res, i + bi_r(sb, r))
print(res)
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
f = np.zeros(n + 1, dtype=np.int64)
for i in range(1, n + 1):
f[i::i] += 1
print((np.arange(1, n + 1) * f[1:]).sum())
class ABC173:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
charge = (n + 999) // 1000 * 1000 - n
print(charge)
@staticmethod
def b():
n, *s = sys.stdin.read().split()
c = Counter(s)
for v in "AC, WA, TLE, RE".split(", "):
print(f"{v} x {c[v]}")
@staticmethod
def c():
h, w, k = map(int, sys.stdin.readline().split())
c = [sys.stdin.readline().rstrip() for _ in range(h)]
tot = 0
for i in range(1 << h):
for j in range(1 << w):
cnt = 0
for y in range(h):
for x in range(w):
if i >> y & 1 or j >> x & 1:
continue
cnt += c[y][x] == "#"
tot += cnt == k
print(tot)
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
a.sort(reverse=True)
res = (
a[0]
+ sum(a[1 : 1 + (n - 2) // 2]) * 2
+ a[1 + (n - 2) // 2] * (n & 1)
)
print(res)
@staticmethod
def e():
MOD = 10**9 + 7
n, k, *a = map(int, sys.stdin.read().split())
minus = [x for x in a if x < 0]
plus = [x for x in a if x > 0]
if len(plus) + len(minus) // 2 * 2 >= k: # plus
(*minus,) = map(abs, minus)
minus.sort(reverse=True)
plus.sort(reverse=True)
cand = []
if len(minus) & 1:
minus = minus[:-1]
for i in range(0, len(minus) - 1, 2):
cand.append(minus[i] * minus[i + 1] % MOD)
if k & 1:
res = plus[0]
plus = plus[1:]
else:
res = 1
if len(plus) & 1:
plus = plus[:-1]
for i in range(0, len(plus) - 1, 2):
cand.append(plus[i] * plus[i + 1] % MOD)
cand.sort(reverse=True)
for x in cand[: k // 2]:
res *= x
res %= MOD
print(res)
elif 0 in a:
print(0)
else:
cand = sorted(map(abs, a))
res = 1
for i in range(k):
res *= cand[i]
res %= MOD
res = MOD - res
print(res)
pass
class ABC174:
    """Solutions for AtCoder Beginner Contest 174 (only A is implemented)."""

    @staticmethod
    def a():
        """Problem A: print "Yes" if the input value is at least 30,
        otherwise "No"."""
        value = int(sys.stdin.readline().rstrip())
        if value >= 30:
            print("Yes")
        else:
            print("No")
class ABC178:
@staticmethod
def a():
pass
@staticmethod
def b():
pass
@staticmethod
def c():
pass
@staticmethod
def d():
s = int(sys.stdin.readline().rstrip())
if s == 0:
print(1)
return
elif s == 1:
print(0)
return
c = np.eye(3, k=-1, dtype=np.int64)
c[0, 0] = c[0, 2] = 1
a = np.array([0, 0, 1])
print(Algebra.dot(Algebra.matrix_pow(c, s - 2), a)[0])
class ABC179:
@staticmethod
def a():
s = sys.stdin.readline().rstrip()
print(s + "s" if s[-1] != "s" else s + "es")
@staticmethod
def b():
n, *d = map(int, sys.stdin.read().split())
d = np.array(d).reshape(n, 2).T
d = np.equal(d[0], d[1]).astype(int)
dd = d.copy()
dd[1:] += d[:-1]
dd[:-1] += d[1:]
print("Yes" if (dd >= 3).any() else "No")
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
res = (n // np.arange(1, n + 1)).sum() - len(
NumberTheory.find_divisors(n)
)
print(res)
@staticmethod
def d():
mod = 998244353
n, k, *lr = map(int, sys.stdin.read().split())
l, r = np.array(lr).reshape(k, -1).T
@njit((i8, i8[:], i8[:]), cache=True)
def solve(n, l, r):
res = np.zeros(n * 2, dtype=np.int64)
res[0], res[1] = 1, -1
for i in range(n - 1):
res[i + 1] = (res[i + 1] + res[i]) % mod
res[i + l] = (res[i + l] + res[i]) % mod
res[i + r + 1] = (res[i + r + 1] - res[i]) % mod
print(res[n - 1])
solve(n, l, r)
@staticmethod
def e():
n, x, m = map(int, sys.stdin.readline().split())
res = [-1 for _ in range(m)]
s = 0
loop = np.zeros(m, dtype=np.int64)
for i in range(m + 1):
if i == n:
print(s)
return
if res[x] != -1:
l, loop = i - res[x], loop[res[x] : i]
q, r = divmod(n - i, l)
print(s + q * loop.sum() + loop[:r].sum())
return
res[x], loop[i] = i, x
s += x
x = x**2 % m
class ABC180:
@staticmethod
def a():
n, a, b = map(int, sys.stdin.readline().split())
print(n - a + b)
@staticmethod
def b():
n, *x = map(int, sys.stdin.read().split())
x = np.absolute(np.array(x))
print(x.sum())
print(np.sqrt((x**2).sum()))
print(x.max())
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
div = NumberTheory.find_divisors(n)
print(*div, sep="\n")
@staticmethod
def d():
x, y, a, b = map(int, sys.stdin.readline().split())
cnt = 0
while x * a <= x + b:
x *= a
if x >= y:
print(cnt)
return
cnt += 1
cnt += (y - x - 1) // b
print(cnt)
@staticmethod
def e():
n, *xyz = map(int, sys.stdin.read().split())
xyz = list(zip(*[iter(xyz)] * 3))
dist = [[0] * n for _ in range(n)]
for i in range(n):
a, b, c = xyz[i]
for j in range(n):
p, q, r = xyz[j]
dist[i][j] = abs(p - a) + abs(q - b) + max(0, r - c)
dp = [[inf] * n for _ in range(1 << n)]
dp[0][0] = 0
for s in range(1 << n):
for i in range(n):
t = s | (1 << i)
for j in range(n):
dp[t][i] = min(dp[t][i], dp[s][j] + dist[j][i])
print(dp[-1][0])
@staticmethod
def f(): # rewrite with jit compiling later.
n, m, l = map(int, sys.stdin.readline().split())
c = Combinatorics.CombinationsMod(n, MOD)
path = np.zeros(n + 1, dtype=np.int64)
path[1] = path[2] = 1
for i in range(3, n + 1):
path[i] = path[i - 1] * i % MOD
cycle = np.zeros(n + 1, dtype=np.int64)
cycle[1:] = path[:-1]
dp = np.zeros((n + 1, m + 1), dtype=np.int64)
def f(l):
dp[:, :] = 0
dp[0, 0] = 1
for i in range(n):
for j in range(m + 1):
k = np.arange(1, min(l, n - i, m - j + 1) + 1)
dp[i + k, j + k - 1] += (
dp[i, j]
* c(n - i - 1, k - 1)
% MOD
* path[k]
% MOD
)
dp[i + k, j + k - 1] %= MOD
k = np.arange(2, min(l, n - i, m - j) + 1)
dp[i + k, j + k] += (
dp[i, j]
* c(n - i - 1, k - 1)
% MOD
* cycle[k]
% MOD
)
dp[i + k, j + k] %= MOD
return dp[n, m]
print((f(l) - f(l - 1)) % MOD)
@staticmethod
def f_2(): # PyPy
n, m, l = map(int, sys.stdin.readline().split())
c = Combinatorics.CombinationsMod(n, MOD)
path = [0] * (n + 1)
path[1] = path[2] = 1
for i in range(3, n + 1):
path[i] = path[i - 1] * i % MOD
cycle = [0] + path[:-1]
def f(l):
dp = [[0] * (m + 1) for _ in range(n + 1)]
dp[0][0] = 1
for i in range(n):
for j in range(m + 1):
for k in range(1, min(l, n - i, m - j + 1) + 1):
dp[i + k][j + k - 1] += (
dp[i][j]
* c(n - i - 1, k - 1)
% MOD
* path[k]
% MOD
)
dp[i + k][j + k - 1] %= MOD
for k in range(1, min(l, n - i, m - j) + 1):
dp[i + k][j + k] += (
dp[i][j]
* c(n - i - 1, k - 1)
% MOD
* cycle[k]
% MOD
)
dp[i + k][j + k] %= MOD
return dp[n][m]
print((f(l) - f(l - 1)) % MOD)
class ARC106:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
a = 1
while pow(3, a) <= n:
m = n - pow(3, a)
b = 1
while pow(5, b) <= m:
if pow(5, b) == m:
print(a, b)
return
b += 1
a += 1
print(-1)
@staticmethod
def b():
n, m = map(int, sys.stdin.readline().split())
a = [int(x) for x in sys.stdin.readline().split()]
b = [int(x) for x in sys.stdin.readline().split()]
uf = GeometryTopology.Graph(n)
uf.init_dsu()
for _ in range(m):
c, d = map(int, sys.stdin.readline().split())
c -= 1
d -= 1
uf.unite(c, d)
visited = [False] * n
ga = [[] for _ in range(n)]
gb = [[] for _ in range(n)]
for i in range(n):
r = uf.find(i)
ga[r].append(a[i])
gb[r].append(b[i])
print(
"Yes"
if all(sum(ga[i]) == sum(gb[i]) for i in range(n))
else "No"
)
@staticmethod
def c():
n, m = map(int, sys.stdin.readline().split())
if m < 0:
print(-1)
return
if n == 1:
if m != 0:
print(-1)
return
print(1, 2)
return
if m >= n - 1:
print(-1)
return
l, r = 1, 10**9
print(l, r)
for _ in range(n - 2 - m):
l += 1
r -= 1
print(l, r)
r = l
for _ in range(m + 1):
l, r = r + 1, r + 2
print(l, r)
@staticmethod
def d():
mod = 998244353
n, k, *a = map(int, sys.stdin.read().split())
a = np.array(a)
b = np.zeros((k + 1, n), dtype=np.int64)
b[0] = 1
for i in range(k):
b[i + 1] = b[i] * a % mod
s = b.sum(axis=1) % mod
inv_2 = pow(2, mod - 2, mod)
c = Combinatorics.CombinationsMod(mod=mod)
for x in range(1, k + 1):
l = np.arange(x + 1)
print(
(
(c(x, l) * s[l] % mod * s[l][::-1] % mod).sum() % mod
- pow(2, x, mod) * s[x]
)
% mod
* inv_2
% mod
)
@staticmethod
def e():
pass
@staticmethod
def f():
pass
class ACL001:
@staticmethod
def a():
n, *xy = map(int, sys.stdin.read().split())
(*xy,) = zip(*[iter(xy)] * 2)
print(xy)
pass
class TDPC:
@staticmethod
def t():
pass
class MSolutions2020:
@staticmethod
def a():
x = int(sys.stdin.readline().rstrip())
x -= 400
print(8 - x // 200)
@staticmethod
def b():
r, g, b, k = map(int, sys.stdin.read().split())
while k and g <= r:
g *= 2
k -= 1
while k and b <= g:
b *= 2
k -= 1
print("Yes" if r < g < b else "No")
@staticmethod
def c():
n, k, *a = map(int, sys.stdin.read().split())
for i in range(k, n):
print("Yes" if a[i] > a[i - k] else "No")
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
a += [-1]
m = 1000
s = 0
for i in range(n):
if a[i + 1] == a[i]:
continue
elif a[i + 1] > a[i]:
cnt = m // a[i]
m -= a[i] * cnt
s += cnt
else:
m += a[i] * s
s = 0
print(m)
class Codeforces:
class CR676div2:
@staticmethod
def a():
t = int(sys.stdin.readline().rstrip())
for _ in range(t):
a, b = map(int, sys.stdin.readline().split())
print(a ^ b)
@staticmethod
def b():
t = int(sys.stdin.readline().rstrip())
for _ in range(t):
n = int(sys.stdin.readline().rstrip())
s = [list(sys.stdin.readline().rstrip()) for _ in range(n)]
s[0][0] = s[-1][-1] = "0"
for i in range(n):
for j in range(n):
s[i][j] = int(s[i][j])
def can_goal(g, c=0):
visited = [0] * n
stack = [(0, 0)]
visited[0] |= 1 << 0
while stack:
y, x = stack.pop()
for dy, dx in [(-1, 0), (0, -1), (1, 0), (0, 1)]:
i, j = y + dy, x + dx
if i < 0 or i >= n or j < 0 or j >= n:
continue
if i == j == n - 1:
return True
if visited[i] >> j & 1:
continue
visited[i] |= 1 << j
if g[i][j] != c:
continue
stack.append((i, j))
return False
if not (can_goal(s, 0) or can_goal(s, 1)):
print(0)
continue
flg = 0
for i in range(n):
for j in range(n):
if i == j == 0 or i == j == n - 1:
continue
s[i][j] ^= 1
if not (can_goal(s, 0) or can_goal(s, 1)):
print(1)
print(i + 1, j + 1)
flg = 1
break
s[i][j] ^= 1
if flg:
break
if flg:
continue
print(2)
if s[0][1] == s[1][0]:
print(n, n - 1)
print(n - 1, n)
continue
if s[0][1] == s[-1][-2]:
print(1, 2)
print(n - 1, n)
else:
print(1, 2)
print(n, n - 1)
@staticmethod
def c():
pass
class ProjectEuler:
@staticmethod
def p1():
def f(n, x):
return (x + n // x * x) * (n // x) // 2
n = 1000
ans = f(n - 1, 3) + f(n - 1, 5) - f(n - 1, 15)
print(ans)
@staticmethod
def p2():
fib = [1, 2]
while fib[-1] < 4 * 10**6:
fib.append(fib[-1] + fib[-2])
print(sum(fib[1:-1:3]))
@staticmethod
def p3():
pn = NumberTheory.PrimeNumbers()
res = pn.factorize(600851475143)
print(max(res.keys()))
@staticmethod
def p4():
def is_palindrome(n):
n = str(n)
return n == n[::-1]
cand = []
for a in range(100, 1000):
for b in range(a, 1000):
n = a * b
if is_palindrome(n):
cand.append(n)
print(max(cand))
@staticmethod
def p5():
pn = NumberTheory.PrimeNumbers()
res = defaultdict(int)
for i in range(1, 21):
for p, c in pn.factorize(i).items():
res[p] = max(res[p], c)
ans = 1
for p, c in res.items():
ans *= pow(p, c)
print(ans)
@staticmethod
def p6():
a = np.arange(101)
b = np.cumsum(a**2)
a = a.cumsum()
print(a[100] ** 2 - b[100])
@staticmethod
def p7():
nt = NumberTheory.PrimeNumbers()
print(sorted(nt)[10000])
@staticmethod
def p8():
n = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"
n = [int(d) for d in list(n)]
res = 0
for i in range(988):
x = 1
for j in range(13):
x *= n[i + j]
res = max(res, x)
print(res)
@staticmethod
def p9():
for a in range(1, 997):
for b in range(a, 998 - a):
c = 1000 - a - b
if a**2 + b**2 == c**2:
print(a * b * c)
return
@staticmethod
def p10():
pn = NumberTheory.PrimeNumbers(2 * 10**6 + 1)
print(sum(pn))
@staticmethod
def p11():
grid = "08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08 49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00 81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65 52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91 22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80 24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50 32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70 67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21 24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72 21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95 78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92 16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57 86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58 19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40 04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66 88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69 04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36 20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16 20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54 01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48"
print(grid)
pass
class Yukicoder:
    """Placeholder namespace for Yukicoder solutions."""

    def __init__(self):
        # No per-instance state yet.
        pass

    def __call__(self):
        # Stub solver: always prints the constant answer 1.
        print(1)
class AOJ:
@staticmethod
def ALDS1_12_A():
n, *a = map(int, sys.stdin.read().split())
g = GeometryTopology.Graph(n)
for i in range(n - 1):
for j in range(i + 1, n):
if a[i * n + j] == -1:
continue
g.add_edge(i, j, weight=a[i * n + j])
g.add_edge(j, i, weight=a[i * n + j])
_, d = g.kruskal()
# _, d = g.prim()
# _, d = g.boruvka()
print(d)
@staticmethod
def GRL_3_C(): # strongly connected components
n, m = map(int, sys.stdin.readline().split())
g = GeometryTopology.Graph(n)
for _ in range(m):
g.add_edge(*map(int, sys.stdin.readline().split()))
r = g.scc()
q, *uv = map(int, sys.stdin.read().split())
for u, v in zip(*[iter(uv)] * 2):
print(int(r[u] == r[v]))
class YosupoJudge:
@staticmethod
def Directed_MST():
n, m, s, *abc = map(int, sys.stdin.read().split())
g = GeometryTopology.Graph(n)
for a, b, c in zip(*[iter(abc)] * 3):
g.add_edge(a, b, weight=c)
_, d, p = g.prim(src=s, return_parent=True)
print(d)
print(*p)
@staticmethod
def Manhattan_MST():
n, *xy = map(int, sys.stdin.read().split())
g = GeometryTopology.Graph(n)
if __name__ == "__main__":
    # Entry point: uncomment exactly one solver and feed the problem's
    # input on stdin.  The commented lines are recently-used solvers.
    # AtCoder.ABC179.f()
    # AtCoder.ABC060.d()
    AtCoder.ABC081.d()
    # AtCoder.ARC106.d()
    # YosupoJudge.Directed_MST()
    pass
| 30.567094
| 1,217
| 0.35524
|
import itertools
import math
import string
import sys
from bisect import bisect_left as bi_l
from bisect import bisect_right as bi_r
from collections import Counter, defaultdict, deque
from functools import lru_cache, reduce
from heapq import heapify, heappop, heappush
from operator import or_, xor
sys.setrecursionlimit(10**7)
inf = float("inf")
MOD = 10**9 + 7
using_numpy = 1
import networkx as nx
import numpy as np
from numba import i8, njit
from scipy import optimize
from scipy.ndimage import distance_transform_cdt
from scipy.sparse import csr_matrix
from scipy.sparse.csgraph import (
connected_components,
csgraph_to_dense,
maximum_flow,
minimum_spanning_tree,
shortest_path,
)
from scipy.spatial import ConvexHull
from scipy.special import comb
class Algebra:
    """Algebraic helpers: a modular-int wrapper, (bitwise) matrix
    products and powers, and modular factorial tables.

    Routines with a pure-Python fallback honor the module-level
    ``using_numpy`` flag.
    """

    class Modular(int):
        """int subclass that reduces arithmetic modulo ``mod``.

        The right-hand operand of every binary operator must itself be
        a Modular (it is accessed through ``.value``).
        """

        def __init__(self, n, mod=MOD):
            self.value = n
            self.mod = mod

        def __str__(self):
            return f"{self.value}"

        def __add__(self, other):
            return self.__class__((self.value + other.value) % self.mod)

        def __sub__(self, x):
            return self.__class__((self.value - x.value) % self.mod)

        def __mul__(self, x):
            return self.__class__((self.value * x.value) % self.mod)

        def __pow__(self, x):
            return self.__class__(pow(self.value, x.value, self.mod))

        def __lt__(self, x):
            return self.value < x.value

        def __le__(self, x):
            return self.value <= x.value

        def __eq__(self, x):
            return self.value == x.value

        def __ne__(self, x):
            return self.value != x.value

        def __gt__(self, x):
            return self.value > x.value

        def __ge__(self, x):
            return self.value >= x.value

        # Fix: defining __eq__ implicitly sets __hash__ = None, which
        # made Modular unhashable (unusable in sets / as dict keys).
        # Restore the inherited int hash.
        __hash__ = int.__hash__

    class SemiGroup:
        pass

    class Monoid:
        pass

    class Group:
        pass

    class SemiRing:
        pass

    class Ring:
        pass

    @staticmethod
    def identity(n):
        """Return the n x n identity matrix (ndarray or nested lists)."""
        if using_numpy:
            return np.identity(n, dtype=np.int64)
        else:
            a = [[0] * n for _ in range(n)]
            for i in range(n):
                a[i][i] = 1
            return a

    @staticmethod
    def dot(a, b):
        """Return the matrix product a @ b."""
        if using_numpy:
            return np.dot(a, b)
        else:
            h, w, l = len(a), len(b[0]), len(b)
            assert len(a[0]) == l
            c = [[0] * w for _ in range(h)]
            for i in range(h):
                for j in range(w):
                    for k in range(l):
                        c[i][j] += a[i][k] * b[k][j]
            return c

    @classmethod
    def matrix_pow(cls, a, n, mod=10**9 + 7):
        """Return a ** n modulo ``mod`` by binary exponentiation."""
        m = len(a)
        b = cls.identity(m)
        while n:
            if n & 1:
                b = cls.dot(b, a)
            n >>= 1
            a = cls.dot(a, a)
            # Reduce after every step so entries stay bounded.
            if using_numpy:
                a %= mod
                b %= mod
            else:
                for i in range(m):
                    for j in range(m):
                        a[i][j] %= mod
                        b[i][j] %= mod
        return b

    @staticmethod
    def bitwise_dot(a, b):
        """Matrix product over the (xor, and) semiring."""
        if using_numpy:
            return np.bitwise_xor.reduce(
                a[:, None, :] & b.T[None, :, :], axis=-1
            )
        else:
            h, w, l = len(a), len(b[0]), len(b)
            assert len(a[0]) == l
            c = [[0] * w for _ in range(h)]
            for i in range(h):
                for j in range(w):
                    for k in range(l):
                        c[i][j] ^= a[i][k] & b[k][j]
            return c

    @classmethod
    def bitwise_mat_pow(cls, a, n):
        """a ** n over the (xor, and) semiring; n == 0 yields the
        all-ones-diagonal (bitwise identity) matrix."""
        if n == 0:
            return np.eye(len(a), dtype=np.uint32) * ((1 << 32) - 1)
        res = cls.bitwise_mat_pow(a, n // 2)
        res = cls.bitwise_dot(res, res)
        return cls.bitwise_dot(res, a) if n & 1 else res

    @staticmethod
    def cumprod(a, mod):
        """Cumulative product of ``a`` modulo ``mod`` (numpy, blockwise).

        Works in sqrt(l)-sized blocks, reducing after each step so
        intermediate products stay within int64.
        """
        l = len(a)
        sql = int(np.sqrt(l) + 1)
        a = np.resize(a, sql**2).reshape(sql, sql)
        for i in range(sql - 1):
            a[:, i + 1] *= a[:, i]
            a[:, i + 1] %= mod
        for i in range(sql - 1):
            a[i + 1] *= a[i, -1]
            a[i + 1] %= mod
        return np.ravel(a)[:l]

    @classmethod
    def generate_fac_ifac(cls, n, p=MOD):
        """Return (factorial, inverse-factorial) tables 0..n modulo the
        prime ``p``."""
        if using_numpy:
            fac = np.arange(n + 1)
            fac[0] = 1
            fac = cls.cumprod(fac, p)
            ifac = np.arange(n + 1, 0, -1)
            # Fermat inverse of n!, then cumulative products downward.
            ifac[0] = pow(int(fac[-1]), p - 2, p)
            ifac = cls.cumprod(ifac, p)[n::-1]
        else:
            fac = [None] * (n + 1)
            fac[0] = 1
            for i in range(n):
                fac[i + 1] = fac[i] * (i + 1) % p
            ifac = [None] * (n + 1)
            ifac[n] = pow(fac[n], p - 2, p)
            for i in range(n, 0, -1):
                ifac[i - 1] = ifac[i] * i % p
        return fac, ifac
class Kitamasa:
pass
mint = Algebra.Modular
class NumberTheory:
    """Number-theoretic helpers: prime sieve and factorization, gcd/lcm,
    divisor enumeration, and base conversion."""

    class PrimeNumbers:
        """Sieve of Eratosthenes up to ``n`` with O(1) primality lookup,
        iteration over the primes, and cached factorization."""

        def __init__(self, n=2 * 10**6):
            self.is_prime, self.prime_nums = self.find(n)

        def __call__(self, n):
            return self.is_prime[n]

        def __iter__(self):
            return iter(self.prime_nums)

        def __getitem__(self, key):
            return self.prime_nums[key]

        @staticmethod
        def find(n):
            """Return (is_prime table, ascending primes <= n)."""
            if using_numpy:
                # Fix: the np.bool alias was removed in NumPy 1.24;
                # use the canonical np.bool_ dtype instead.
                is_prime = np.ones(n + 1, dtype=np.bool_)
                is_prime[:2] = 0
                for i in range(2, int(n**0.5) + 1):
                    if is_prime[i]:
                        is_prime[i * 2 :: i] = 0
                prime_nums = np.flatnonzero(is_prime)
            else:
                is_prime = [True] * (n + 1)
                is_prime[0] = is_prime[1] = 0
                for i in range(2, int(n**0.5) + 1):
                    if not is_prime[i]:
                        continue
                    for j in range(i * 2, n + 1, i):
                        is_prime[j] = 0
                prime_nums = [i for i in range(2, n + 1) if is_prime[i]]
            return is_prime, prime_nums

        @lru_cache(maxsize=None)
        def factorize(self, n):
            """Return the prime factorization of n as {prime: exponent}.

            NOTE(review): lru_cache on an instance method keeps the
            sieve instance alive for the cache's lifetime -- acceptable
            for the single-instance use in this template.
            """
            res = defaultdict(int)
            if n < 2:
                return res
            for p in self:
                if p * p > n:
                    break
                while n % p == 0:
                    res[p] += 1
                    n //= p
                if n == 1:
                    return res
            # Any leftover n > 1 exceeds sqrt(original n), so it is prime.
            res[n] = 1
            return res

        def factorize_factorial(self, n):
            """Return the prime factorization of n! as {prime: exponent}."""
            res = defaultdict(int)
            for i in range(2, n + 1):
                for p, c in self.factorize(i).items():
                    res[p] += c
            return res

    @classmethod
    @lru_cache(maxsize=None)
    def gcd(cls, a, b):
        """Greatest common divisor (Euclid); gcd(a, 0) == |a|."""
        return cls.gcd(b, a % b) if b else abs(a)

    @classmethod
    def lcm(cls, a, b):
        """Least common multiple; divides before multiplying to keep
        intermediates small."""
        return abs(a // cls.gcd(a, b) * b)

    @staticmethod
    def find_divisors(n):
        """Return the sorted positive divisors of n in O(sqrt n)."""
        divisors = []
        for i in range(1, int(n**0.5) + 1):
            if n % i:
                continue
            divisors.append(i)
            j = n // i
            if j != i:
                divisors.append(j)
        return sorted(divisors)

    @staticmethod
    def base_convert(n, b):
        """Return the digits of n in base b, least-significant first.

        The r < 0 adjustment keeps every digit nonnegative, which also
        makes negative bases work.
        """
        if not n:
            return [0]
        res = []
        while n:
            n, r = divmod(n, b)
            if r < 0:
                n += 1
                r -= b
            res.append(r)
        return res
class Combinatorics:
    """Binomial coefficients (memoized Pascal and factorial-table mod-p
    versions) plus pure-Python permutation / combination generators."""

    @classmethod
    @lru_cache(maxsize=None)
    def choose(cls, n, r, mod=None):
        """n choose r via Pascal's rule (memoized); 0 outside 0 <= r <= n."""
        if r > n or r < 0:
            return 0
        if r == 0:
            return 1
        res = cls.choose(n - 1, r, mod) + cls.choose(n - 1, r - 1, mod)
        if mod:
            res %= mod
        return res

    class CombinationsMod:
        """nCr modulo a prime: O(1) per query after precomputing
        factorial / inverse-factorial tables up to n."""

        def __init__(self, n=2 * 10**6, mod=None):
            # Resolve the module-level default lazily so the class can
            # be defined before/without MOD; None is never a valid
            # modulus, so this is backward compatible.
            self.__mod = MOD if mod is None else mod
            self.fac, self.ifac = Algebra.generate_fac_ifac(n, self.__mod)

        def __call__(self, n, r):
            return self.__choose(n, r)

        def __choose(self, n, r):
            # bl is 0/1 (or a boolean array) masking out-of-range r.
            bl = (0 <= r) & (r <= n)
            p = self.__mod
            return bl * self.fac[n] * self.ifac[r] % p * self.ifac[n - r] % p

        def make_nchoose_table(self, n):
            """Return [C(n, 0), ..., C(n, r)] for r = len(fac) - 1, via
            falling factorials so n may exceed the precomputed range."""
            p = self.__mod
            # Fix: this read self.__fac (name-mangled, never set) --
            # __init__ stores the table as self.fac, so the method
            # always raised AttributeError.
            r = len(self.fac) - 1
            if using_numpy:
                n_choose = np.arange(n + 1, n - r, -1)
                n_choose[0] = 1
                n_choose = Algebra.cumprod(n_choose, p) * self.ifac % p
            else:
                n_choose = [None] * (r + 1)
                n_choose[0] = 1
                for i in range(r):
                    n_choose[i + 1] = n_choose[i] * (n - i) % p
                for i in range(1, r + 1):
                    n_choose[i] = n_choose[i] * self.ifac[i] % p
            return n_choose

    @classmethod
    def permutations(cls, a, r=None, i=0):
        """Return all length-r permutations of ``a`` as tuples
        (recursive swap-based enumeration; ordering differs from
        itertools.permutations)."""
        a = list(a)
        n = len(a)
        if r is None:
            r = n
        res = []
        if r > n or i > r:
            return res
        if i == r:
            return [tuple(a[:r])]
        for j in range(i, n):
            a[i], a[j] = a[j], a[i]
            res += cls.permutations(a, r, i + 1)
        return res

    @staticmethod
    def combinations(a, r):
        """Yield the length-r combinations of ``a`` in lexicographic
        index order (the itertools.combinations recipe)."""
        a = tuple(a)
        n = len(a)
        if r > n:
            return
        indices = list(range(r))
        yield a[:r]
        while True:
            for i in range(r - 1, -1, -1):
                if indices[i] != i + n - r:
                    break
            else:
                return
            indices[i] += 1
            for j in range(i + 1, r):
                indices[j] = indices[j - 1] + 1
            yield tuple(a[i] for i in indices)
class DP:
    """Dynamic-programming helpers."""

    @staticmethod
    def LIS(a):
        """Return the patience-sorting tail array for sequence ``a``.

        tails[k] ends up holding the smallest possible tail value of a
        strictly increasing subsequence of length k + 1; untouched slots
        stay ``inf``, so the LIS length is the count of finite entries.
        """
        tails = [inf for _ in range(len(a))]
        for value in a:
            slot = bi_l(tails, value)
            tails[slot] = value
        return tails
class String:
    """String-processing helpers."""

    @staticmethod
    def z_algorithm(s):
        """Return the Z-array of ``s``: z[i] is the length of the
        longest common prefix of ``s`` and ``s[i:]`` (z[0] = len(s)).

        Classic O(n) algorithm maintaining the rightmost match window
        [left, right].
        """
        n = len(s)
        z = [0] * n
        z[0] = n
        left = right = -1
        for i in range(1, n):
            # Seed from the window when i lies inside it.
            if i <= right:
                z[i] = min(z[i - left], right - i)
            # Extend the match naively past the known window.
            while i + z[i] < n and s[z[i]] == s[i + z[i]]:
                z[i] += 1
            # Advance the window if this match reaches further right.
            if i + z[i] >= right:
                left, right = i, i + z[i]
        return z
class GeometryTopology:
class Graph:
class __Edge:
def __init__(self, weight=1, capacity=1, **args):
self.weight = weight
self.capacity = capacity
def __str__(self):
return f"weight: {self.weight}, cap: {self.capacity}"
class __Node:
def __init__(self, **args):
pass
def __init__(self, n=0):
self.__N = n
self.nodes = [None] * n
self.edges = [{} for _ in range(n)]
def add_node_info(self, v, **args):
self.nodes[v] = self.__Node(**args)
def add_edge(self, u, v, update=False, **args):
if not update and v in self.edges[u]:
return
self.edges[u][v] = self.__Edge(**args)
def get_size(self):
return self.__N
def bfs(self, src=0):
n = self.__N
self.depth = self.lv = lv = [None] * n
lv[src] = 0
self.dist = dist = [inf] * n
dist[src] = 0
self.parent = par = [None] * n
par[src] = src
q = deque([src])
while q:
u = q.popleft()
for v, e in self.edges[u].items():
if e.capacity == 0 or lv[v] is not None:
continue
lv[v], dist[v], par[v] = lv[u] + 1, dist[u] + e.weight, u
q.append(v)
return dist
def dinic(self, src, sink):
def flow_to_sink(u, flow_in):
if u == sink:
return flow_in
flow = 0
for v, e in self.edges[u].items():
if e.capacity == 0 or self.lv[v] <= self.lv[u]:
continue
f = flow_to_sink(v, min(flow_in, e.capacity))
if not f:
continue
self.edges[u][v].capacity -= f
if u in self.edges[v]:
self.edges[v][u].capacity += f
else:
self.add_edge(v, u, capacity=f)
flow_in -= f
flow += f
return flow
flow = 0
while True:
self.bfs(src)
if self.lv[sink] is None:
return flow
flow += flow_to_sink(src, inf)
def ford_fulkerson(self):
pass
def push_relabel(self):
pass
def floyd_warshall(self):
n = self.__N
d = [[inf] * n for _ in range(n)]
for u in range(n):
d[u][u] = 0
for v, e in self.edges[u].items():
d[u][v] = e.weight
for w in range(n):
for u in range(n):
for v in range(n):
d[u][v] = min(d[u][v], d[u][w] + d[w][v])
return d
def dijkstra(self, src, paths_cnt=False, mod=None):
dist = [inf] * self.__N
dist[src] = 0
visited = [False] * self.__N
paths = [0] * self.__N
paths[src] = 1
q = [(0, src)]
while q:
d, u = heappop(q)
if visited[u]:
continue
visited[u] = True
for v, e in self.edges[u].items():
dv = d + e.weight
if dv > dist[v]:
continue
elif dv == dist[v]:
paths[v] += paths[u]
if mod:
paths[v] %= mod
continue
paths[v], dist[v] = paths[u], dv
heappush(q, (dv, v))
if paths_cnt:
return dist, paths
else:
return dist
def astar(self, src, tgt, heuristic_func):
cost = [inf] * self.__N
q = [(heuristic_func(src, tgt), 0, src)]
while q:
_, c, u = heappop(q)
if u == tgt:
return c
if cost[u] != inf:
continue
cost[u] = c
for v, e in self.edges[u].items():
if cost[v] != inf:
continue
h = heuristic_func(v, tgt)
nc = c + e.weight
heappush(q, (h + nc, nc, v))
return inf
def bellman_ford(self, src):
n = self.__N
d = [inf] * n
d[src] = 0
for _ in range(n - 1):
for u in range(n):
for v, e in self.edges[u].items():
d[v] = min(d[v], d[u] + e.weight)
for u in range(n):
for v, e in self.edges[u].items():
if d[u] + e.weight < d[v]:
raise Exception("found negative cycle.")
return d
def bfs01(self, src=0):
d = [inf] * self.__N
d[src] = 0
q = deque([src])
while q:
u = q.popleft()
for v, e in self.edges[u].items():
dv = d[u] + e.weight
if d[v] <= dv:
continue
d[v] = dv
if e.weight:
q.append(v)
else:
q.appendleft(v)
return d
def find_ancestors(self):
self.__ancestors = ancestors = [self.parent]
for _ in range(max(self.depth).bit_length()):
ancestors.append([ancestors[-1][u] for u in ancestors[-1]])
def find_dist(self, u, v):
return (
self.dist[u]
+ self.dist[v]
- 2 * self.dist[self.__find_lca(u, v)]
)
def __find_lca(self, u, v):
du, dv = self.depth[u], self.depth[v]
if du > dv:
u, v = v, u
du, dv = dv, du
d = dv - du
for i in range(d.bit_length()):
if d >> i & 1:
v = self.__ancestors[i][v]
if v == u:
return v
for i in range(
du.bit_length() - 1, -1, -1
):
nu, nv = self.__ancestors[i][u], self.__ancestors[i][v]
if nu == nv:
continue
u, v = nu, nv
return self.__ancestors[0][u]
def init_dsu(self):
n = self.__N
self.parent = list(range(n))
self.rank = [0] * n
self.size = [1] * n
def find(self, u):
if self.parent[u] == u:
return u
self.parent[u] = self.find(self.parent[u])
return self.parent[u]
def unite(self, u, v):
u, v = self.find(u), self.find(v)
if u == v:
return
if self.rank[u] < self.rank[v]:
u, v = v, u
self.parent[v] = u
self.size[u] += self.size[v]
self.rank[u] = max(self.rank[u], self.rank[v] + 1)
def same(self, u, v):
return self.find(u) == self.find(v)
    def scc(self):
        """Kosaraju's algorithm: label each node with its SCC id.

        Returns a list mapping node -> component id.  Ids are positions in
        the reversed finish order (``r`` advances on every iteration, so the
        ids are increasing but not necessarily consecutive).  Uses recursion,
        so very deep graphs may hit the interpreter recursion limit.
        """
        n = self.__N
        visited, q, root, r = [False] * n, [], [None] * n, 0
        # gg is the transpose graph (all edges reversed).
        gg = self.__class__(n)
        for u in range(n):
            for v in self.edges[u]:
                gg.add_edge(v, u)
        def dfs(u):
            # First pass on the original graph: record finish order in q.
            if visited[u]:
                return
            visited[u] = True
            for v in self.edges[u]:
                dfs(v)
            q.append(u)
        def rev_dfs(u, r):
            # Second pass on the transpose: everything reachable from u that
            # is still unlabelled belongs to component r.
            if root[u] is not None:
                return
            root[u] = r
            for v in gg.edges[u]:
                rev_dfs(v, r)
        for u in range(n):
            dfs(u)
        for u in q[::-1]:
            rev_dfs(u, r)
            r += 1
        return root
def kruskal(self):
n = self.__N
uf = self.__class__(n)
uf.init_dsu()
edges = sorted(
[
(u, v, e.weight)
for u in range(n)
for v, e in self.edges[u].items()
],
key=lambda x: x[2],
)
g = self.__class__(n)
d = 0
for u, v, w in edges:
if uf.same(u, v):
continue
uf.unite(u, v)
g.add_edge(u, v, weight=w)
d += w
return g, d
    def prim(self, src=0, return_parent=False):
        """Prim's MST from ``src``: returns ``(tree, total[, parent])``.

        Heap entries are ``(edge_weight, (tree_side_node, candidate_node))``.
        Note the first pop adds a zero-weight self-loop at ``src`` to the
        result graph, and ``parent[src] == src``.
        """
        n = self.__N
        g = self.__class__(n)
        parent, visited, dist = [None] * n, [False] * n, 0
        q = [(0, (src, src))]
        while q:
            d, (w, u) = heappop(q)
            if visited[u]:
                continue
            visited[u], parent[u] = True, w
            dist += d
            g.add_edge(w, u, weight=d)
            # Offer every edge leaving the newly added node.
            for v, e in self.edges[u].items():
                if not visited[v]:
                    heappush(q, (e.weight, (u, v)))
        if return_parent:
            return g, dist, parent
        return g, dist
    def boruvka(self):
        """Borůvka's MST: returns ``(tree_graph, total_weight)``.

        Each round finds, for every current component, its cheapest outgoing
        edge, then merges along those edges.  Assumes a connected graph with
        edges stored in both directions; otherwise the outer loop may not
        terminate.
        """
        n = self.__N
        uf = self.__class__(n)
        uf.init_dsu()
        g = self.__class__(n)
        d = 0
        def dfs(u):
            # Walk u's component via already-merged (same-set) edges and
            # return the cheapest edge leaving the component.
            if visited[u]:
                return (inf, (None, None))
            visited[u] = True
            cand = []
            for v, e in self.edges[u].items():
                if uf.same(u, v):
                    cand.append(dfs(v))
                    continue
                cand.append((e.weight, (u, v)))
            return sorted(cand)[0]
        while len(set(uf.parent)) != 1:
            edges, visited = [], [False] * n
            for u in range(n):
                if visited[u]:
                    continue
                edges.append(dfs(u))
            for w, (u, v) in edges:
                if uf.same(u, v):
                    continue
                g.add_edge(u, v, weight=w)
                uf.unite(u, v)
                d += w
            # Fully compress so set(uf.parent) counts components correctly.
            for u in range(n):
                uf.find(u)
        return g, d
    def tsp(self):
        """Travelling-salesman solver: placeholder, not implemented yet."""
        pass
@staticmethod
def triangle_area(p0, p1, p2, signed=False):
x1, y1, x2, y2 = (
p1[0] - p0[0],
p1[1] - p0[1],
p2[0] - p0[0],
p2[1] - p0[1],
)
return (
(x1 * y2 - x2 * y1) / 2 if signed else abs(x1 * y2 - x2 * y1) / 2
)
@classmethod
def intersect(cls, seg1, seg2):
(p1, p2), (p3, p4) = seg1, seg2
t1 = cls.triangle_area(p1, p2, p3, signed=True)
t2 = cls.triangle_area(p1, p2, p4, signed=True)
t3 = cls.triangle_area(p3, p4, p1, signed=True)
t4 = cls.triangle_area(p3, p4, p2, signed=True)
return (t1 * t2 < 0) & (t3 * t4 < 0)
def cumxor(a):
    """Return the XOR of all elements of iterable ``a`` (0 when empty)."""
    acc = 0
    for x in a:
        acc ^= x
    return acc
def cumor(a):
    """Return the bitwise OR of all elements of iterable ``a`` (0 when empty)."""
    acc = 0
    for x in a:
        acc |= x
    return acc
def bit_count(n):
    """Return the number of set bits (popcount) of non-negative integer ``n``.

    Replaces a hand-rolled shift loop with ``bin(n).count("1")``, which runs
    at C speed.  The old loop also never terminated for negative ``n``
    (arithmetic right shift never reaches 0); this version terminates for
    any int, counting the bits of ``abs(n)`` for negatives.
    """
    return bin(n).count("1")
class AtCoder:
class ABC001:
@staticmethod
def a():
h1, h2 = map(int, sys.stdin.read().split())
print(h1 - h2)
@staticmethod
def d():
def to_minuites(x):
q, r = divmod(x, 100)
return 60 * q + r
def to_hmform(x):
q, r = divmod(x, 60)
return 100 * q + r
n = int(sys.stdin.readline().rstrip())
term = [0] * 2001
for _ in range(n):
s, e = map(
to_minuites,
map(int, sys.stdin.readline().rstrip().split("-")),
)
s = s // 5 * 5
e = (e + 4) // 5 * 5
term[s] += 1
term[e + 1] -= 1
for i in range(2000):
term[i + 1] += term[i]
res = []
raining = False
for i in range(2001):
if term[i]:
if not raining:
s = i
raining = True
elif raining:
res.append((s, i - 1))
raining = False
for s, e in res:
print(f"{to_hmform(s):04}-{to_hmform(e):04}")
class ABC002:
@staticmethod
def a():
print(max(map(int, sys.stdin.readline().split())))
@staticmethod
def b():
vowels = set("aeiou")
print(
"".join(
[
c
for c in sys.stdin.readline().rstrip()
if c not in vowels
]
)
)
@staticmethod
def c():
print(
GeometryTopology.triangle_area(
*map(int, sys.stdin.readline().split())
)
)
@staticmethod
def d():
n, m = map(int, sys.stdin.readline().split())
edges = set(
(x - 1, y - 1)
for x, y in zip(*[map(int, sys.stdin.read().split())] * 2)
)
print(
max(
len(s)
for i in range(1, 1 << n)
for s in [[j for j in range(n) if i >> j & 1]]
if all(
(x, y) in edges
for x, y in itertools.combinations(s, 2)
)
)
)
@staticmethod
def d_2():
n, m = map(int, sys.stdin.readline().split())
relations = [1 << i for i in range(n)]
for x, y in zip(*[map(int, sys.stdin.read().split())] * 2):
relations[x] |= 1 << (y - 1)
relations[y] |= 1 << (x - 1)
res = 0
for i in range(1 << n):
s, cnt = (1 << n) - 1, 0
for j in range(n):
if i >> j & 1:
t &= relations[j] | 1 << j
cnt += 1
if s & i == i:
res = max(res, cnt)
print(res)
class ABC003:
@staticmethod
def a():
print((int(sys.stdin.readline().rstrip()) + 1) * 5000)
@staticmethod
def b():
atcoder = set("atcoder")
s, t = sys.stdin.read().split()
print(
all(
s[i] == t[i]
or s[i] == "@"
and t[i] in atcoder
or t[i] == "@"
and s[i] in atcoder
for i in range(len(s))
)
and "You can win"
or "You will lose"
)
@staticmethod
def c():
n, k, *r = map(int, sys.stdin.read().split())
print(reduce(lambda x, y: (x + y) / 2, sorted(r)[-k:], 0))
class ABC004:
@staticmethod
def a():
print(int(sys.stdin.readline().rstrip()) * 2)
@staticmethod
def b():
for l in [sys.stdin.readline().rstrip() for _ in range(4)][::-1]:
print(l[::-1])
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip()) % 30
res = list(range(1, 7))
for i in range(n):
i %= 5
res[i], res[i + 1] = res[i + 1], res[i]
print(*res, sep="")
class ABC005:
@staticmethod
def a():
x, y = map(int, sys.stdin.readline().split())
print(y // x)
@staticmethod
def b():
n, *t = map(int, sys.stdin.read().split())
print(min(t))
@staticmethod
def c():
t = int(sys.stdin.readline().rstrip())
n = int(sys.stdin.readline().rstrip())
a = [int(x) for x in sys.stdin.readline().split()]
m = int(sys.stdin.readline().rstrip())
b = [int(x) for x in sys.stdin.readline().split()]
i = 0
for p in b:
if i == n:
print("no")
return
while p - a[i] > t:
i += 1
if i == n:
print("no")
return
if a[i] > p:
print("no")
return
i += 1
print("yes")
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
d = np.array(
[sys.stdin.readline().split() for _ in range(n)], np.int64
)
s = d.cumsum(axis=0).cumsum(axis=1)
s = np.pad(s, 1)
max_del = np.zeros((n + 1, n + 1), dtype=np.int64)
for y in range(1, n + 1):
for x in range(1, n + 1):
max_del[y, x] = np.amax(
s[y : n + 1, x : n + 1]
- s[0 : n - y + 1, x : n + 1]
- s[y : n + 1, 0 : n - x + 1]
+ s[0 : n - y + 1, 0 : n - x + 1]
)
res = np.arange(n**2 + 1)[:, None]
i = np.arange(1, n + 1)
res = max_del[i, np.minimum(res // i, n)].max(axis=1)
q = int(sys.stdin.readline().rstrip())
p = np.array(sys.stdin.read().split(), dtype=np.int64)
print(*res[p], sep="\n")
class ABC006:
@staticmethod
def a():
n = sys.stdin.readline().rstrip()
if "3" in n:
print("YES")
elif int(n) % 3 == 0:
print("YES")
else:
print("NO")
@staticmethod
def b():
mod = 10007
a = np.eye(N=3, k=-1, dtype=np.int64)
a[0] = 1
n = int(sys.stdin.readline().rstrip())
a = Algebra.matrix_pow(a, n - 1, mod)
print(a[2][0])
@staticmethod
def c():
n, m = map(int, sys.stdin.readline().split())
cnt = [0, 0, 0]
if m == 1:
cnt = [-1, -1, -1]
else:
if m & 1:
m -= 3
cnt[1] += 1
n -= 1
cnt[2] = m // 2 - n
cnt[0] = n - cnt[2]
if cnt[0] < 0 or cnt[1] < 0 or cnt[2] < 0:
print(-1, -1, -1)
else:
print(*cnt, sep=" ")
@staticmethod
def d():
n, *c = map(int, sys.stdin.read().split())
lis = [inf] * n
for x in c:
lis[bi_l(lis, x)] = x
print(n - bi_l(lis, inf))
class ABC007:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
print(n - 1)
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
if s == "a":
print(-1)
else:
print("a")
@staticmethod
def c():
r, c = map(int, sys.stdin.readline().split())
sy, sx = map(int, sys.stdin.readline().split())
gy, gx = map(int, sys.stdin.readline().split())
sy -= 1
sx -= 1
gy -= 1
gx -= 1
maze = [sys.stdin.readline().rstrip() for _ in range(r)]
queue = deque([(sy, sx)])
dist = np.full((r, c), np.inf)
dist[sy, sx] = 0
while queue:
y, x = queue.popleft()
for i, j in [(-1, 0), (1, 0), (0, -1), (0, 1)]:
i += y
j += x
if maze[i][j] == "#" or dist[i, j] != np.inf:
continue
dist[i, j] = dist[y, x] + 1
queue.append((i, j))
print(int(dist[gy, gx]))
@staticmethod
def d():
ng = set([4, 9])
def count(d):
return d if d <= 4 else d - 1
def f(n):
x = [int(d) for d in str(n)]
flg = True
dp = 0
for d in x:
dp = dp * 8 + flg * count(d)
if d in ng:
flg = False
return n - (dp + flg)
a, b = map(int, sys.stdin.readline().split())
print(f(b) - f(a - 1))
class ABC008:
@staticmethod
def a():
s, t = map(int, sys.stdin.readline().split())
print(t - s + 1)
@staticmethod
def b():
n, *s = sys.stdin.read().split()
res = defaultdict(int)
for name in s:
res[name] += 1
print(sorted(res.items(), key=lambda x: x[1])[-1][0])
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
c = n - np.count_nonzero(a[:, None] % a, axis=1)
print(np.sum((c + 1) // 2 / c))
@staticmethod
def d():
w, h, n, *xy = map(int, sys.stdin.read().split())
(*xy,) = zip(*([iter(xy)] * 2))
@lru_cache(maxsize=None)
def count(x1, y1, x2, y2):
res = 0
for x, y in xy:
if not (x1 <= x <= x2 and y1 <= y <= y2):
continue
cnt = (x2 - x1) + (y2 - y1) + 1
cnt += count(x1, y1, x - 1, y - 1)
cnt += count(x1, y + 1, x - 1, y2)
cnt += count(x + 1, y1, x2, y - 1)
cnt += count(x + 1, y + 1, x2, y2)
res = max(res, cnt)
return res
print(count(1, 1, w, h))
class ABC009:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
print((n + 1) // 2)
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
print(sorted(set(a))[-2])
@staticmethod
def c():
n, k = map(int, sys.stdin.readline().split())
s = list(sys.stdin.readline().rstrip())
cost = [1] * n
r = k
for i in range(n - 1):
q = []
for j in range(i + 1, n):
if s[j] < s[i] and cost[i] + cost[j] <= r:
heappush(q, (s[j], cost[i] + cost[j], -j))
if not q:
continue
_, c, j = heappop(q)
j = -j
s[i], s[j] = s[j], s[i]
r -= c
cost[i] = cost[j] = 0
print("".join(s))
@staticmethod
def d():
k, m = map(int, sys.stdin.readline().split())
a = np.array([int(x) for x in sys.stdin.readline().split()])
c = np.array([int(x) for x in sys.stdin.readline().split()])
mask = (1 << 32) - 1
d = np.eye(k, k, -1, dtype=np.uint32) * mask
d[0] = c
if m <= k:
print(a[m - 1])
return
print(
Algebra.bitwise_dot(
Algebra.bitwise_mat_pow(d, m - k), a[::-1].reshape(-1, 1)
)[0][0]
)
class ABC010:
@staticmethod
def a():
print(sys.stdin.readline().rstrip() + "pp")
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
tot = 0
for x in a:
c = 0
while x % 2 == 0 or x % 3 == 2:
x -= 1
c += 1
tot += c
print(tot)
@staticmethod
def c():
sx, sy, gx, gy, t, v, n, *xy = map(int, sys.stdin.read().split())
x, y = np.array(xy).reshape(-1, 2).T
def dist(x1, y1, x2, y2):
return np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2)
ans = (
"YES"
if (dist(sx, sy, x, y) + dist(x, y, gx, gy) <= v * t).any()
else "NO"
)
print(ans)
@staticmethod
def d():
n, g, e = map(int, sys.stdin.readline().split())
p = [int(x) for x in sys.stdin.readline().split()]
x, y = [], []
for _ in range(e):
a, b = map(int, sys.stdin.readline().split())
x.append(a)
y.append(b)
x.append(b)
y.append(a)
for a in p:
x.append(a)
y.append(n)
if not x:
print(0)
return
c = [1] * len(x)
min_cut = maximum_flow(
csr_matrix((c, (x, y)), (n + 1, n + 1)), source=0, sink=n
).flow_value
print(min_cut)
@staticmethod
def d_2():
n, g, e = map(int, sys.stdin.readline().split())
graph = nx.DiGraph()
graph.add_nodes_from(range(n + 1))
for p in [int(x) for x in sys.stdin.readline().split()]:
graph.add_edge(p, n, capacity=1)
for _ in range(e):
a, b = map(int, sys.stdin.readline().split())
graph.add_edge(a, b, capacity=1)
graph.add_edge(b, a, capacity=1)
print(nx.minimum_cut_value(graph, 0, n))
@staticmethod
def d_3():
n, q, m = map(int, sys.stdin.readline().split())
g = GeometryTopology.Graph(n + 1)
for p in [int(x) for x in sys.stdin.readline().split()]:
g.add_edge(p, n, capacity=1)
for a, b in zip(*[map(int, sys.stdin.read().split())] * 2):
g.add_edge(a, b, capacity=1)
g.add_edge(b, a, capacity=1)
print(g.dinic(0, n))
class ABC011:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
print(n % 12 + 1)
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
print(s[0].upper() + s[1:].lower())
@staticmethod
def c():
n, *ng = map(int, sys.stdin.read().split())
ng = set(ng)
if n in ng:
print("NO")
else:
r = 100
while n > 0:
if r == 0:
print("NO")
return
for i in range(3, 0, -1):
if (n - i) in ng:
continue
n -= i
r -= 1
break
else:
print("NO")
return
print("YES")
@staticmethod
def d():
n, d, x, y = map(int, sys.stdin.read().split())
x, y = abs(x), abs(y)
if x % d or y % d:
print(0)
return
x, y = x // d, y // d
r = n - (x + y)
if r < 0 or r & 1:
print(0)
return
res = 0
half_p = pow(1 / 2, n)
for d in range(r // 2 + 1):
south, north = d, y + d
west = (r - 2 * d) // 2
res += (
half_p
* comb(n, south, exact=True)
* comb(n - south, north, exact=True)
* comb(n - south - north, west, exact=True)
* half_p
)
print(res)
class ABC012:
@staticmethod
def a():
a, b = map(int, sys.stdin.readline().split())
print(b, a)
@staticmethod
def b():
n = int(sys.stdin.readline().rstrip())
h, n = divmod(n, 3600)
m, s = divmod(n, 60)
print(f"{h:02}:{m:02}:{s:02}")
@staticmethod
def c():
n = 2025 - int(sys.stdin.readline().rstrip())
res = []
for i in range(1, 10):
if n % i != 0 or n // i > 9:
continue
res.append(f"{i} x {n//i}")
print(*sorted(res), sep="\n")
@staticmethod
def d():
n, m, *abt = map(int, sys.stdin.read().split())
a, b, t = np.array(abt).reshape(m, 3).T
res = shortest_path(
csr_matrix((t, (a - 1, b - 1)), (n, n)),
method="FW",
directed=False,
)
print(res.max(axis=-1).min().astype(np.int64))
@staticmethod
def d_2():
n, m, *abt = map(int, sys.stdin.read().split())
g = GeometryTopology.Graph(n)
for a, b, t in zip(*[iter(abt)] * 3):
a -= 1
b -= 1
g.add_edge(a, b, weight=t)
g.add_edge(b, a, weight=t)
print(min(max(d) for d in g.floyd_warshall()))
class ABC013:
@staticmethod
def a():
print(ord(sys.stdin.readline().rstrip()) - ord("A") + 1)
@staticmethod
def b():
a, b = map(int, sys.stdin.read().split())
d = abs(a - b)
print(min(d, 10 - d))
@staticmethod
def c():
n, h, a, b, c, d, e = map(int, sys.stdin.read().split())
y = np.arange(n + 1)
x = (n * e - h - (d + e) * y) // (b + e) + 1
np.maximum(x, 0, out=x)
np.minimum(x, n - y, out=x)
print(np.amin(a * x + c * y))
@staticmethod
def d():
n, m, d, *a = map(int, sys.stdin.read().split())
res = list(range(n))
def swap(i, j):
res[i], res[j] = res[j], res[i]
for i in a[::-1]:
swap(i - 1, i)
res = np.array(res)
def binary_method(a, p):
b = np.arange(n)
while p:
if p & 1:
b = a[b]
p >>= 1
a = a[a]
return b
print(*(binary_method(res, d) + 1), sep="\n")
class ABC014:
@staticmethod
def a():
a, b = map(int, sys.stdin.read().split())
print((a + b - 1) // b * b - a)
@staticmethod
def b():
n, x, *a = map(int, sys.stdin.read().split())
print(sum(a[i] for i in range(n) if x >> i & 1))
@staticmethod
def c():
n, *ab = map(int, sys.stdin.read().split())
a, b = np.array(ab).reshape(n, 2).T
res = np.zeros(10**6 + 2, dtype=np.int64)
np.add.at(res, a, 1)
np.subtract.at(res, b + 1, 1)
np.cumsum(res, out=res)
print(res.max())
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
g = GeometryTopology.Graph(n)
for _ in range(n - 1):
x, y = map(int, sys.stdin.readline().split())
x -= 1
y -= 1
g.add_edge(x, y, weight=1)
g.add_edge(y, x, weight=1)
g.bfs(0)
g.find_ancestors()
q, *ab = map(int, sys.stdin.read().split())
for a, b in zip(*[iter(ab)] * 2):
a -= 1
b -= 1
print(g.find_dist(a, b) + 1)
class ABC015:
@staticmethod
def a():
a, b = sys.stdin.read().split()
print(a if len(a) > len(b) else b)
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
print(
np.ceil(
a[np.nonzero(a)[0]].sum() / np.count_nonzero(a)
).astype(np.int8)
)
@staticmethod
def c():
n, k, *t = map(int, sys.stdin.read().split())
t = np.array(t).reshape(n, k)
x = np.zeros((1, 1), dtype=np.int8)
for i in range(n):
x = x.reshape(-1, 1) ^ t[i]
print("Found" if np.count_nonzero(x == 0) > 0 else "Nothing")
@staticmethod
def d():
w, n, k, *ab = map(int, sys.stdin.read().split())
dp = np.zeros((k + 1, w + 1), dtype=np.int32)
for a, b in zip(*[iter(ab)] * 2):
np.maximum(dp[1:, a:], dp[:-1, :-a] + b, out=dp[1:, a:])
print(dp[k][w])
class ABC016:
@staticmethod
def a():
m, d = map(int, sys.stdin.readline().split())
print("YES" if m % d == 0 else "NO")
@staticmethod
def b():
a, b, c = map(int, sys.stdin.readline().split())
f1, f2 = a + b == c, a - b == c
if f1 & f2:
print("?")
elif f1 & (~f2):
print("+")
elif (~f1) & f2:
print("-")
else:
print("!")
@staticmethod
def c():
n, _, *ab = map(int, sys.stdin.read().split())
f = [0] * n
for a, b in zip(*[iter(ab)] * 2):
a -= 1
b -= 1
f[a] |= 1 << b
f[b] |= 1 << a
res = [
bit_count(
cumor(f[j] for j in range(n) if f[i] >> j & 1)
& ~(f[i] | 1 << i)
)
for i in range(n)
]
print(*res, sep="\n")
@staticmethod
def d():
sx, sy, gx, gy = map(int, sys.stdin.readline().split())
seg1 = ((sx, sy), (gx, gy))
n = int(sys.stdin.readline().rstrip())
p1 = (
np.array(sys.stdin.read().split(), dtype=np.int64)
.reshape(n, 2)
.T
)
p2 = np.hstack((p1[:, 1:], p1[:, :1]))
seg2 = (p1, p2)
print(
np.count_nonzero(GeometryTopology.intersect(seg1, seg2)) // 2
+ 1
)
class ABC017:
@staticmethod
def a():
s, e = (
np.array(sys.stdin.read().split(), dtype=np.int16)
.reshape(3, 2)
.T
)
print((s // 10 * e).sum())
@staticmethod
def b():
choku_tail = set("ch, o, k, u".split(", "))
def is_choku(s):
if s == "":
return True
if len(s) >= 1 and (s[-1] in choku_tail) and is_choku(s[:-1]):
return True
if len(s) >= 2 and (s[-2:] in choku_tail) and is_choku(s[:-2]):
return True
return False
print("YES" if is_choku(sys.stdin.readline().rstrip()) else "NO")
@staticmethod
def c():
n, m, *lrs = map(int, sys.stdin.read().split())
l, r, s = np.array(lrs).reshape(n, 3).T
score = np.zeros((m + 1,), dtype=np.int32)
np.add.at(score, l - 1, s)
np.subtract.at(score, r, s)
np.cumsum(score, out=score)
print(s.sum() - score[:m].min())
@staticmethod
def d():
n, m, *f = map(int, sys.stdin.read().split())
prev = [0] * (n + 1)
tmp = defaultdict(int)
for i in range(n):
prev[i + 1] = tmp[f[i]]
tmp[f[i]] = i + 1
dp = [0] * (n + 1)
dp[0] = 1
l, s = 0, dp[0]
for i in range(1, n + 1):
while l < prev[i]:
s = (s - dp[l]) % MOD
l += 1
dp[i] = s
s = (s + dp[i]) % MOD
print(dp[n])
class ABC018:
@staticmethod
def a():
(*a,) = map(int, sys.stdin.read().split())
a = sorted(enumerate(a), key=lambda x: -x[1])
res = [None] * 3
for i in range(3):
res[a[i][0]] = i + 1
print(*res, sep="\n")
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
n, *lr = map(int, sys.stdin.read().split())
for l, r in zip(*[iter(lr)] * 2):
l -= 1
r -= 1
s = s[:l] + s[l : r + 1][::-1] + s[r + 1 :]
print(s)
@staticmethod
def c():
r, c, k = map(int, sys.stdin.readline().split())
s = np.array([list(s) for s in sys.stdin.read().split()])
s = np.pad(s, 1, constant_values="x")
a = np.zeros_like(s, dtype=np.float64)
a[s == "o"] = np.inf
for i in range(1, r + 1):
np.minimum(a[i - 1, :] + 1, a[i, :], out=a[i, :])
for i in range(r, 0, -1):
np.minimum(a[i + 1, :] + 1, a[i, :], out=a[i, :])
for j in range(1, c + 1):
np.minimum(a[:, j - 1] + 1, a[:, j], out=a[:, j])
for j in range(c, 0, -1):
np.minimum(a[:, j + 1] + 1, a[:, j], out=a[:, j])
print(np.count_nonzero(a >= k))
@staticmethod
def c_2():
r, c, k = map(int, sys.stdin.readline().split())
s = np.array([list(s) for s in sys.stdin.read().split()])
s = np.pad(s, 1, constant_values="x")
a = (s == "o").astype(np.int16)
a = distance_transform_cdt(a, metric="taxicab")
print(np.count_nonzero(a >= k))
@staticmethod
def d():
n, m, p, q, r, *xyz = map(int, sys.stdin.read().split())
x, y, z = np.array(xyz).reshape(r, 3).T
h = np.zeros((n, m), dtype=np.int32)
h[x - 1, y - 1] = z
g = np.array([*itertools.combinations(range(n), p)])
print(np.sort(h[g].sum(axis=1), axis=1)[:, -q:].sum(axis=1).max())
class ABC019:
@staticmethod
def a():
(*a,) = map(int, sys.stdin.readline().split())
print(sorted(a)[1])
@staticmethod
def b():
s = sys.stdin.readline().rstrip() + "$"
cnt = 0
prev = "$"
t = ""
for c in s:
if c == prev:
cnt += 1
continue
t += prev + str(cnt)
prev = c
cnt = 1
print(t[2:])
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
res = set()
for x in a:
while not x & 1:
x >>= 1
res.add(x)
print(len(res))
@staticmethod
def d():
def inquire(u, v):
print(f"? {u} {v}".format(u, v), flush=True)
return int(sys.stdin.readline().rstrip())
n = int(sys.stdin.readline().rstrip())
u = sorted([(inquire(1, v), v) for v in range(2, n + 1)])[-1][1]
d = max((inquire(u, v)) for v in range(1, n + 1) if u != v)
print(f"! {d}")
class ABC020:
@staticmethod
def a():
print(
"ABC"
if int(sys.stdin.readline().rstrip()) == 1
else "chokudai"
)
@staticmethod
def b():
a, b = sys.stdin.readline().split()
print(int(a + b) * 2)
@staticmethod
def c():
h, w, t = map(int, sys.stdin.readline().split())
s = [list(s) for s in sys.stdin.read().split()]
for i in range(h):
for j in range(w):
if s[i][j] == "S":
sy, sx = i, j
if s[i][j] == "G":
gy, gx = i, j
s[sy][sx] = s[gy][gx] = "."
source, target = sy * w + sx, gy * w + gx
def heuristic_function(u, v=target):
uy, ux = divmod(u, w)
vy, vx = divmod(v, w)
return abs(vy - uy) + abs(ux - vx)
def min_time(x):
g = GeometryTopology.Graph(h * w)
for i in range(h):
for j in range(w):
u = i * w + j
if i > 0:
g.add_edge(
u,
(i - 1) * w + j,
weight=(1 if s[i - 1][j] == "." else x),
)
if i < h - 1:
g.add_edge(
u,
(i + 1) * w + j,
weight=(1 if s[i + 1][j] == "." else x),
)
if j > 0:
g.add_edge(
u,
i * w + j - 1,
weight=(1 if s[i][j - 1] == "." else x),
)
if j < w - 1:
g.add_edge(
u,
i * w + j + 1,
weight=(1 if s[i][j + 1] == "." else x),
)
return g.dijkstra(source)[target]
return g.astar(source, target, heuristic_function)
def binary_search():
lo, hi = 1, t + 1
while lo + 1 < hi:
x = (lo + hi) // 2
if min_time(x) > t:
hi = x
else:
lo = x
return lo
print(binary_search())
@staticmethod
def d():
n, k = map(int, sys.stdin.readline().split())
div = sorted(NumberTheory.find_divisors(k))
l = len(div)
s = [0] * l
for i, d in enumerate(div):
s[i] = (1 + n // d) * (n // d) // 2 * d % MOD
for i in range(l - 1, -1, -1):
for j in range(i + 1, l):
if div[j] % div[i]:
continue
s[i] = (s[i] - s[j]) % MOD
print(
sum(s[i] * k // div[i] % MOD for i in range(l)) % MOD
)
class ABC021:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
s = [1 << i for i in range(5) if n >> i & 1]
print(len(s), *s, sep="\n")
@staticmethod
def b():
n, a, b, k, *p = map(int, sys.stdin.read().split())
print("YES" if len(set(p) | set([a, b])) == k + 2 else "NO")
@staticmethod
def c():
n, a, b, m, *xy = map(int, sys.stdin.read().split())
x, y = np.array(xy).reshape(m, 2).T - 1
a -= 1
b -= 1
g = csgraph_to_dense(
csr_matrix((np.ones(m), (x, y)), (n, n), dtype=np.int8)
)
g = np.logical_or(g, g.T)
paths = np.zeros(n, dtype=np.int64).reshape(-1, 1)
paths[a, 0] = 1
while not paths[b, 0]:
paths = np.dot(g, paths) % MOD
print(paths[b, 0])
@staticmethod
def c_2():
n, a, b, m, *xy = map(int, sys.stdin.read().split())
a -= 1
b -= 1
g = GeometryTopology.Graph()
for x, y in zip(*[iter(xy)] * 2):
x -= 1
y -= 1
g.add_edge(x, y, weight=1)
g.add_edge(y, x, weight=1)
dist, paths = g.dijkstra(a, paths_cnt=True, mod=MOD)
print(paths[b])
@staticmethod
def d():
n, k = map(int, sys.stdin.read().split())
cn = Combinatorics.CombinationsMod()
print(cn(n + k - 1, k))
class ABC022:
@staticmethod
def a():
n, s, t, *a = map(int, sys.stdin.read().split())
a = np.array(a)
np.cumsum(a, out=a)
print(((s <= a) & (a <= t)).sum())
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
c = Counter(a)
print(sum(c.values()) - len(c))
@staticmethod
def c():
n, m, *uvl = map(int, sys.stdin.read().split())
u, v, l = np.array(uvl).reshape(m, 3).T
u -= 1
v -= 1
g = csgraph_to_dense(csr_matrix((l, (u, v)), (n, n)))
g += g.T
g[g == 0] = np.inf
dist0 = g[0].copy()
g[0] = 0
g[:, 0] = 0
dist = shortest_path(g, method="FW", directed=False)
u, v = np.array([*itertools.combinations(range(1, n), 2)]).T
res = (dist0[u] + dist[u, v] + dist0[v]).min()
print(-1 if res == np.inf else int(res))
@staticmethod
def d():
n, *ab = map(int, sys.stdin.read().split())
c = np.array(ab).reshape(2, n, 2)
g = c.mean(axis=1)
d = np.sqrt(((c - g[:, None, :]) ** 2).sum(axis=-1)).sum(axis=1)
print(d[1] / d[0])
class ABC023:
@staticmethod
def a():
print(sum(divmod(int(sys.stdin.readline().rstrip()), 10)))
@staticmethod
def b():
n, s = sys.stdin.read().split()
n = int(n)
t = "b"
for i in range(n // 2):
if i % 3 == 0:
t = "a" + t + "c"
elif i % 3 == 1:
t = "c" + t + "a"
else:
t = "b" + t + "b"
print(n // 2 if t == s else -1)
@staticmethod
def b_2():
n, s = sys.stdin.read().split()
n = int(n)
if n & 1 ^ 1:
print(-1)
return
a = list("abc")
i = (1 - n // 2) % 3
for c in s:
if c != a[i]:
print(-1)
return
i = (i + 1) % 3
print(n // 2)
@staticmethod
def c():
h, w, k, n, *rc = map(int, sys.stdin.read().split())
r, c = np.array(rc).reshape(n, 2).T - 1
rb = np.bincount(r, minlength=h)
cb = np.bincount(c, minlength=w)
rbb = np.bincount(rb, minlength=k + 1)
cbb = np.bincount(cb, minlength=k + 1)
tot = (rbb[: k + 1] * cbb[k::-1]).sum()
real = np.bincount(rb[r] + cb[c] - 1, minlength=k + 1)
print(tot - real[k - 1] + real[k])
@staticmethod
def d():
n, *hs = map(int, sys.stdin.read().split())
h, s = np.array(hs).reshape(n, 2).T
t = np.arange(n)
def is_ok(x):
return np.all(np.sort((x - h) // s) >= t)
def binary_search():
lo, hi = 0, 10**14
while lo + 1 < hi:
x = (lo + hi) // 2
if is_ok(x):
hi = x
else:
lo = x
return hi
print(binary_search())
class ABC024:
@staticmethod
def a():
a, b, c, k, s, t = map(int, sys.stdin.read().split())
print(a * s + b * t - c * (s + t) * (s + t >= k))
@staticmethod
def b():
n, t, *a = map(int, sys.stdin.read().split())
a = np.array(a)
print(np.minimum(a[1:] - a[:-1], t).sum() + t)
@staticmethod
def c():
n, d, k, *lrst = map(int, sys.stdin.read().split())
lrst = np.array(lrst)
lr = lrst[: 2 * d].reshape(d, 2)
s, t = lrst[2 * d :].reshape(k, 2).T
day = np.zeros((k,), dtype=np.int32)
for i in range(d):
l, r = lr[i]
move = (l <= s) & (s <= r) & (s != t)
reach = move & (l <= t) & (t <= r)
s[move & (s < t)] = r
s[move & (s > t)] = l
s[reach] = t[reach]
day[reach] = i + 1
print(*day, sep="\n")
@staticmethod
def d():
a, b, c = map(int, sys.stdin.read().split())
p = MOD
denom = pow(a * b % p - b * c % p + c * a % p, p - 2, p)
w = (b * c - a * b) % p * denom % p
h = (b * c - a * c) % p * denom % p
print(h, w)
class ABC025:
@staticmethod
def a():
s, n = sys.stdin.read().split()
n = int(n)
i, j = divmod(n - 1, 5)
print(s[i] + s[j])
@staticmethod
def b():
n, a, b = map(int, sys.stdin.readline().split())
res = defaultdict(int)
for _ in range(n):
s, d = sys.stdin.readline().split()
d = int(d)
res[s] += min(max(d, a), b)
res = res["East"] - res["West"]
if res == 0:
ans = 0
elif res > 0:
ans = f"East {res}"
else:
ans = f"West {-res}"
print(ans)
@staticmethod
def c():
b = [0] * 6
for i in range(2):
(*row,) = map(int, sys.stdin.readline().split())
for j in range(3):
b[i * 3 + j] = row[j]
c = [0] * 8
for i in range(3):
(*row,) = map(int, sys.stdin.readline().split())
for j in range(2):
c[i * 3 + j] = row[j]
tot = sum(b) + sum(c)
@lru_cache(maxsize=None)
def f(s=tuple(0 for _ in range(9))):
if all(s):
res = 0
for i in range(6):
res += (s[i] == s[i + 3]) * b[i]
for i in range(8):
res += (s[i] == s[i + 1]) * c[i]
return res
cand = [i for i in range(9) if not s[i]]
flg = len(cand) & 1
s = list(s)
res = []
for i in cand:
s[i] = (flg ^ 1) + 1
res.append(f(tuple(s)))
s[i] = 0
return sorted(res, reverse=flg)[0]
a = f()
b = tot - a
print(a)
print(b)
class ABC026:
@staticmethod
def a():
a = int(sys.stdin.readline().rstrip())
print(a // 2 * (a - a // 2))
@staticmethod
def b():
n, *r = map(int, sys.stdin.read().split())
s = np.pi * np.array([0] + r) ** 2
s.sort()
res = s[n::-2].sum() - s[n - 1 :: -2].sum()
print(res)
@staticmethod
def c():
n, *b = map(int, sys.stdin.read().split())
g = GeometryTopology.Graph()
for i in range(1, n):
g.add_edge(b[i - 1] - 1, i, weight=1)
def f(u=0):
if not g.edges[u]:
return 1
s = [f(v) for v in g.edges[u]]
return max(s) + min(s) + 1
print(f())
@staticmethod
def d():
a, b, c = map(int, sys.stdin.readline().split())
def f(t):
return a * t + b * np.sin(c * t * np.pi) - 100
print(optimize.brenth(f, 0, 200))
class ABC027:
@staticmethod
def a():
l = [int(l) for l in sys.stdin.readline().split()]
l.sort()
print(l[2] if l[0] == l[1] else l[0])
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
m, r = divmod(sum(a), n)
if r:
print(-1)
return
population = 0
towns = 0
cnt = 0
for x in a:
population += x
towns += 1
if population / towns != m:
cnt += 1
continue
population, towns = 0, 0
print(cnt)
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
flg = n.bit_length() & 1 ^ 1
t = 0
x = 1
while x <= n:
t += 1
x = 2 * x + 1 if t & 1 ^ flg else 2 * x
print("Aoki" if t & 1 else "Takahashi")
class ABC028:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
print(
"Bad"
if n < 60
else "Good"
if n < 90
else "Great"
if n < 100
else "Perfect"
)
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
cnt = Counter(s)
print(*[cnt.get(c, 0) for c in "ABCDEF"])
@staticmethod
def c():
a, b, c, d, e = map(int, sys.stdin.readline().split())
print(max(b + c + e, a + d + e))
@staticmethod
def d():
n, k = map(int, sys.stdin.readline().split())
c = 3 * 2 * (n - k) * (k - 1) + 3 * (n - 1) + 1
print(c / n**3)
class ABC029:
@staticmethod
def a():
print(sys.stdin.readline().rstrip() + "s")
@staticmethod
def b():
print(sum("r" in s for s in sys.stdin.read().split()))
@staticmethod
def c():
print(
*[
"".join(s)
for s in itertools.product(
"abc", repeat=int(sys.stdin.readline().rstrip())
)
],
sep="\n",
)
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
print(
sum(
n // 10 ** (i + 1) * 10**i
+ min(max((n % 10 ** (i + 1) - 10**i + 1), 0), 10**i)
for i in range(9)
)
)
class ABC030:
@staticmethod
def a():
a, b, c, d = map(int, sys.stdin.readline().split())
e, f = b * c, d * a
print("TAKAHASHI" if e > f else "AOKI" if f > e else "DRAW")
@staticmethod
def b():
n, m = map(int, sys.stdin.readline().split())
n = (n % 12 + m / 60) * 30
m *= 6
d = abs(n - m)
print(min(d, 360 - d))
@staticmethod
def c():
n, m = map(int, sys.stdin.readline().split())
x, y = map(int, sys.stdin.readline().split())
a = [int(x) for x in sys.stdin.readline().split()]
b = [int(x) for x in sys.stdin.readline().split()]
t = 0
p = 1
cnt = 0
while True:
if p:
i = bi_l(a, t)
if i == n:
break
t = a[i] + x
else:
i = bi_l(b, t)
if i == m:
break
t = b[i] + y
cnt += 1
p ^= 1
print(cnt)
@staticmethod
def d():
n, a = map(int, sys.stdin.readline().split())
a -= 1
k = sys.stdin.readline().rstrip()
b = [int(x) - 1 for x in sys.stdin.readline().split()]
c = [None] * n
for i in range(n + 1):
if str(i) == k:
print(a + 1)
return
if c[a] is not None:
l, d = i - c[a], c[a]
break
c[a] = i
a = b[a]
r = [None] * len(k)
r[0] = 1
for i in range(len(k) - 1):
r[i + 1] = r[i] * 10 % l
k = [int(c) for c in k][::-1]
d = (sum(r[i] * k[i] for i in range(len(k))) - d) % l
for _ in range(d):
a = b[a]
print(a + 1)
@staticmethod
def d_2():
n, a, k, *b = map(int, sys.stdin.read().split())
a -= 1
b = [x - 1 for x in b]
c = [None] * n
for i in range(n + 1):
if i == k:
print(a + 1)
return
if c[a] is not None:
for _ in range((k - c[a]) % (i - c[a])):
a = b[a]
print(a + 1)
return
c[a] = i
a = b[a]
class ABC031:
@staticmethod
def a():
a, d = map(int, sys.stdin.readline().split())
if a > d:
a, d = d, a
print((a + 1) * d)
@staticmethod
def b():
l, h, n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
res = np.maximum(l - a, 0)
res[a > h] = -1
print(*res, sep="\n")
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
np.cumsum(a[::2], out=a[::2])
np.cumsum(a[1::2], out=a[1::2])
a = list(a) + [0] * 2
def score(i, j):
if i > j:
i, j = j, i
if (j - i) & 1:
x, y = a[j - 1] - a[i - 2], a[j] - a[i - 1]
else:
x, y = a[j] - a[i - 2], a[j - 1] - a[i - 1]
return x, y
res = -inf
for i in range(n):
s = -inf
for j in range(n):
if i == j:
continue
x, y = score(i, j)
if y > s:
s, t = y, x
res = max(res, t)
print(res)
@staticmethod
def d():
k, m = map(int, sys.stdin.readline().split())
(*vw,) = zip(*[iter(sys.stdin.read().split())] * 2)
for l in itertools.product((1, 2, 3), repeat=k):
s = dict()
for v, w in vw:
i = 0
for d in v:
d = int(d) - 1
j = i + l[d]
if j > len(w):
break
t = w[i:j]
if d in s and s[d] != t:
break
s[d] = t
i = j
else:
if i == len(w):
continue
break
else:
for i in range(k):
print(s[i])
return
class ABC032:
@staticmethod
def a():
a, b, n = map(int, sys.stdin.read().split())
l = NumberTheory.lcm(a, b)
print((n + l - 1) // l * l)
@staticmethod
def b():
s, k = sys.stdin.read().split()
k = int(k)
res = set()
for i in range(len(s) - k + 1):
res.add(s[i : i + k])
print(len(res))
@staticmethod
def c():
n, k, *s = map(int, sys.stdin.read().split())
if 0 in s:
print(n)
return
if k == 0:
print(0)
return
res, tmp, l = 0, 1, 0
for r in range(n):
tmp *= s[r]
while tmp > k:
tmp //= s[l]
l += 1
res = max(res, r - l + 1)
print(res)
class ABC033:
@staticmethod
def a():
print(
"SAME"
if len(set(sys.stdin.readline().rstrip())) == 1
else "DIFFERENT"
)
@staticmethod
def b():
n = int(sys.stdin.readline().rstrip())
res = dict()
for _ in range(n):
s, p = sys.stdin.readline().split()
res[s] = int(p)
tot = sum(res.values())
for s, p in res.items():
if p > tot / 2:
print(s)
return
print("atcoder")
@staticmethod
def c():
s = sys.stdin.readline().rstrip()
print(sum(not "0" in f for f in s.split("+")))
class ABC034:
@staticmethod
def a():
x, y = map(int, sys.stdin.readline().split())
print("Better" if y > x else "Worse")
@staticmethod
def b():
n = int(sys.stdin.readline().rstrip())
print(n + 1 if n & 1 else n - 1)
@staticmethod
def c():
h, w = map(int, sys.stdin.read().split())
choose = Combinatorics.CombinationsMod()
print(choose(h + w - 2, h - 1))
@staticmethod
def d():
n, k, *wp = map(int, sys.stdin.read().split())
w, p = np.array(wp).reshape(-1, 2).T
def f(x):
return np.sort(w * (p - x))[-k:].sum()
print(optimize.bisect(f, 0, 100))
class ABC035:
@staticmethod
def a():
w, h = map(int, sys.stdin.readline().split())
print("4:3" if 4 * h == 3 * w else "16:9")
@staticmethod
def b():
s, t = sys.stdin.read().split()
y = x = z = 0
for c in s:
if c == "?":
z += 1
elif c == "L":
x -= 1
elif c == "R":
x += 1
elif c == "D":
y -= 1
elif c == "U":
y += 1
d = abs(y) + abs(x)
print(d + z if t == "1" else max(d - z, (d - z) & 1))
@staticmethod
def c():
n, q, *lr = map(int, sys.stdin.read().split())
l, r = np.array(lr).reshape(q, 2).T
res = np.zeros(n + 1, dtype=int)
np.add.at(res, l - 1, 1)
np.subtract.at(res, r, 1)
np.cumsum(res, out=res)
res = res & 1
print("".join(map(str, res[:-1])))
@staticmethod
def d():
n, m, t = map(int, sys.stdin.readline().split())
point = np.array(sys.stdin.readline().split(), dtype=int)
a, b, c = (
np.array(sys.stdin.read().split(), dtype=np.int64)
.reshape(m, 3)
.T
)
a -= 1
b -= 1
d_1 = shortest_path(
csr_matrix((c, (a, b)), (n, n)),
method="D",
directed=True,
indices=0,
)
d_2 = shortest_path(
csr_matrix((c, (b, a)), (n, n)),
method="D",
directed=True,
indices=0,
)
print(int(np.amax((t - (d_1 + d_2)) * point)))
class ABC036:
@staticmethod
def a():
a, b = map(int, sys.stdin.readline().split())
print((b + a - 1) // a)
@staticmethod
def b():
n, *s = sys.stdin.read().split()
n = int(n)
for j in range(n):
row = ""
for i in range(n - 1, -1, -1):
row += s[i][j]
print(row)
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
b = [None] * n
prev = None
j = -1
for i, x in sorted(enumerate(a), key=lambda x: x[1]):
if x != prev:
j += 1
b[i] = j
prev = x
print(*b, sep="\n")
@staticmethod
def d():
n, *ab = map(int, sys.stdin.read().split())
edges = [[] for _ in range(n)]
for a, b in zip(*[iter(ab)] * 2):
a -= 1
b -= 1
edges[a].append(b)
edges[b].append(a)
parent = [None] * n
def count(u):
black, white = 1, 1
for v in edges[u]:
if v == parent[u]:
continue
parent[v] = u
b, w = count(v)
black *= w
black %= MOD
white *= (b + w) % MOD
white %= MOD
return black, white
print(sum(count(0)) % MOD)
class ABC037:
@staticmethod
def a():
a, b, c = map(int, sys.stdin.readline().split())
print(c // min(a, b))
@staticmethod
def b():
n, q, *lrt = map(int, sys.stdin.read().split())
a = np.zeros(n, dtype=int)
for l, r, t in zip(*[iter(lrt)] * 3):
a[l - 1 : r] = t
print(*a, sep="\n")
@staticmethod
def c():
n, k, *a = map(int, sys.stdin.read().split())
a = np.array([0] + a)
np.cumsum(a, out=a)
s = (a[k:] - a[:-k]).sum()
print(s)
@staticmethod
def d():
h, w, *a = map(int, sys.stdin.read().split())
p = [None] * (h * w)
def paths(k):
if p[k]:
return p[k]
p[k] = 1
i, j = divmod(k, w)
if j > 0 and a[k] > a[k - 1]:
p[k] += paths(k - 1)
if j < w - 1 and a[k] > a[k + 1]:
p[k] += paths(k + 1)
if i > 0 and a[k] > a[k - w]:
p[k] += paths(k - w)
if i < h - 1 and a[k] > a[k + w]:
p[k] += paths(k + w)
p[k] %= MOD
return p[k]
print(sum(paths(i) for i in range(h * w)) % MOD)
class ABC038:
@staticmethod
def a():
s = sys.stdin.readline().rstrip()
print("YES" if s[-1] == "T" else "NO")
@staticmethod
def b():
a, b, c, d = map(int, sys.stdin.read().split())
print("YES" if a == c or b == c or a == d or b == d else "NO")
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
a += [-1]
cnt = n
tmp = 1
for i in range(n):
if a[i + 1] > a[i]:
tmp += 1
else:
cnt += tmp * (tmp - 1) // 2
tmp = 1
print(cnt)
@staticmethod
def d():
n, *wh = map(int, sys.stdin.read().split())
a = [
x[1]
for x in sorted(
zip(*[iter(wh)] * 2), key=lambda x: (x[0], -x[1])
)
]
print(bi_l(DP.LIS(a), inf))
class ABC039:
@staticmethod
def a():
a, b, c = map(int, sys.stdin.readline().split())
print((a * b + b * c + c * a) * 2)
@staticmethod
def b():
x = int(sys.stdin.readline().rstrip())
for n in range(1, int(x**0.5) + 1):
if pow(n, 4) == x:
print(n)
return
@staticmethod
def c():
board = "WBWBWWBWBWBW" * 3
convert = "Do, *, Re, *, Mi, Fa, *, So, *, La, *, Si".split(", ")
s = sys.stdin.readline().rstrip()
print(convert[board.index(s)])
@staticmethod
def d():
h, w = map(int, sys.stdin.readline().split())
s = "".join(sys.stdin.read().split())
white = set()
for i in range(h * w):
if s[i] == "#":
continue
l = 0 if i % w == 0 else -1
r = 0 if (i + 1) % w == 0 else 1
white |= {
i + dy + dx
for dy in range(-w, w + 1, w)
for dx in range(l, r + 1)
}
black_before = set(range(h * w)) - white
black_after = set()
for i in black_before:
l = 0 if i % w == 0 else -1
r = 0 if (i + 1) % w == 0 else 1
black_after |= {
i + dy + dx
for dy in range(-w, w + 1, w)
for dx in range(l, r + 1)
}
black_after &= set(range(h * w))
for i in range(h * w):
if s[i] == "#" and not i in black_after:
print("impossible")
return
print("possible")
for i in range(h):
print(
"".join(
[
"#" if i * w + j in black_before else "."
for j in range(w)
]
)
)
class ABC040:
@staticmethod
def a():
n, x = map(int, sys.stdin.readline().split())
print(min(x - 1, n - x))
@staticmethod
def b():
n = int(sys.stdin.readline().rstrip())
res = inf
for i in range(1, int(n**0.5) + 1):
res = min(res, n // i - i + n % i)
print(res)
@staticmethod
def c():
n, *h = map(int, sys.stdin.read().split())
h = [h[0]] + h
cost = [None] * (n + 1)
cost[0] = cost[1] = 0
for i in range(2, n + 1):
cost[i] = min(
cost[i - 2] + abs(h[i] - h[i - 2]),
cost[i - 1] + abs(h[i] - h[i - 1]),
)
print(cost[n])
@staticmethod
def d():
n, m = map(int, sys.stdin.readline().split())
uf = GeometryTopology.Graph(n)
uf.init_dsu()
queue = []
for _ in range(m):
a, b, y = map(int, sys.stdin.readline().split())
heappush(queue, (-(2 * y), a - 1, b - 1))
q = int(sys.stdin.readline().rstrip())
for i in range(q):
v, y = map(int, sys.stdin.readline().split())
heappush(queue, (-(2 * y + 1), v - 1, i))
res = [None] * q
while queue:
y, i, j = heappop(queue)
if y & 1:
res[j] = uf.size[uf.find(i)]
else:
uf.unite(i, j)
print(*res, sep="\n")
class ABC041:
@staticmethod
def a():
s, i = sys.stdin.read().split()
i = int(i)
print(s[i - 1])
@staticmethod
def b():
a, b, c = map(int, sys.stdin.readline().split())
ans = a * b % MOD * c % MOD
print(ans)
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
for i, h in sorted(enumerate(a), key=lambda x: -x[1]):
print(i + 1)
@staticmethod
def d():
n, _, *xy = map(int, sys.stdin.read().split())
g = [0] * n
for x, y in zip(*[iter(xy)] * 2):
g[x - 1] |= 1 << (y - 1)
res = [0] * (1 << n)
res[0] = 1
for i in range(1 << n):
for j in range(n):
if i >> j & 1 ^ 1:
continue
if not (g[j] & i):
res[i] += res[i & ~(1 << j)]
print(res[-1])
class ABC042:
@staticmethod
def a():
a = [int(x) for x in sys.stdin.readline().split()]
c = Counter(a)
print("YES" if c[5] == 2 and c[7] == 1 else "NO")
@staticmethod
def b():
n, l, *s = sys.stdin.read().split()
print("".join(sorted(s)))
@staticmethod
def c():
n, k, *d = sys.stdin.read().split()
l = len(n)
ok = sorted(set(string.digits) - set(d))
cand = [
int("".join(p)) for p in itertools.product(ok, repeat=l)
] + [int(min(x for x in ok if x > "0") + min(ok) * l)]
print(cand[bi_l(cand, int(n))])
@staticmethod
def d():
h, w, a, b = map(int, sys.stdin.read().split())
combinations = Combinatorics.CombinationsMod(
n=2 * 10**5, mod=MOD
)
i = np.arange(h - a, h)
ng = np.sum(
combinations(i + b - 1, i)
* combinations(h - i + w - b - 2, h - 1 - i)
% MOD
)
print((combinations(h + w - 2, h - 1) - ng) % MOD)
class ABC043:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
print((1 + n) * n // 2)
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
t = ""
for c in s:
if c == "B":
t = t[:-1]
else:
t += c
print(t)
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
x = np.around(a.sum() / n).astype(int)
print(np.sum((a - x) ** 2))
@staticmethod
def d():
s = sys.stdin.readline().rstrip()
n = len(s)
for i in range(n - 1):
if s[i] == s[i + 1]:
print(i + 1, i + 2)
return
for i in range(n - 2):
if s[i] == s[i + 2]:
print(i + 1, i + 3)
return
print(-1, -1)
class ABC044:
@staticmethod
def a():
n, k, x, y = map(int, sys.stdin.read().split())
print(min(n, k) * x + max(0, n - k) * y)
@staticmethod
def b():
res = set(
c & 1 for c in Counter(sys.stdin.readline().rstrip()).values()
)
print("Yes" if len(res) == 1 and res.pop() == 0 else "No")
@staticmethod
def c():
n, a, *x = map(int, sys.stdin.read().split())
dp = np.zeros((n + 1, 2501), dtype=np.int64)
dp[0, 0] = 1
for v in x:
dp[1:, v:] += dp[:-1, :-v]
i = np.arange(1, n + 1)
print(dp[i, i * a].sum())
@staticmethod
def c_2():
n, a, *x = map(int, sys.stdin.read().split())
for i in range(n):
x[i] -= a
s = defaultdict(int)
s[0] = 1
for i in range(n):
ns = s.copy()
for k, v in s.items():
ns[k + x[i]] += v
s = ns
print(s[0] - 1)
@staticmethod
def d():
pass
class ABC045:
@staticmethod
def a():
a, b, h = map(int, sys.stdin.read().split())
print((a + b) * h // 2)
@staticmethod
def b():
a, b, c = sys.stdin.read().split()
d = {"a": a[::-1], "b": b[::-1], "c": c[::-1]}
nx = "a"
while 1:
if not d[nx]:
print(nx.upper())
return
d[nx], nx = d[nx][:-1], d[nx][-1]
@staticmethod
def c():
def c(l):
return pow(2, max(0, l - 1))
s = sys.stdin.readline().rstrip()
n = len(s)
print(
sum(
int(s[i : j + 1]) * c(i) * c(n - 1 - j)
for i in range(n)
for j in range(i, n)
)
)
@staticmethod
def d():
h, w, n, *ab = map(int, sys.stdin.read().split())
c = defaultdict(int)
for y, x in zip(*[iter(ab)] * 2):
y -= 1
x -= 1
for dy, dx in itertools.product(range(-1, 2), repeat=2):
i, j = y + dy, x + dx
if not (0 < i < h - 1 and 0 < j < w - 1):
continue
c[(i, j)] += 1
c = Counter(c.values())
c[0] = (h - 2) * (w - 2) - sum(c.values())
for i in range(10):
print(c[i])
class ABC046:
@staticmethod
def a():
print(len(set(sys.stdin.readline().split())))
@staticmethod
def b():
n, k = map(int, sys.stdin.readline().split())
print(k * pow(k - 1, n - 1))
@staticmethod
def c():
n, *xy = map(int, sys.stdin.read().split())
a, b = 1, 1
for x, y in zip(*[iter(xy)] * 2):
n = max((a + x - 1) // x, (b + y - 1) // y)
a, b = n * x, n * y
print(a + b)
@staticmethod
def d():
c = Counter(sys.stdin.readline().rstrip())
print((c["g"] - c["p"]) // 2)
class ABC047:
@staticmethod
def a():
c = sorted(map(int, sys.stdin.readline().split()))
print("Yes" if c[0] + c[1] == c[2] else "No")
@staticmethod
def b():
w, h, n, *xyf = map(int, sys.stdin.read().split())
l, r, d, u = 0, w, 0, h
for x, y, f in zip(*[iter(xyf)] * 3):
if f == 1:
l = max(l, x)
if f == 2:
r = min(r, x)
if f == 3:
d = max(d, y)
if f == 4:
u = min(u, y)
print(max(0, r - l) * max(0, u - d))
@staticmethod
def c():
s = sys.stdin.readline().rstrip()
print(sum(s[i] != s[i + 1] for i in range(len(s) - 1)))
@staticmethod
def d():
mn, mx, c = inf, -1, 0
n, t, *a = map(int, sys.stdin.read().split())
for p in a:
if p - mn == mx:
c += 1
elif p - mn > mx:
mx, c = p - mn, 1
mn = min(mn, p)
print(c)
class ABC048:
@staticmethod
def a():
def initial(s):
return s[0].upper()
print("".join(map(initial, sys.stdin.readline().split())))
@staticmethod
def b():
a, b, x = map(int, sys.stdin.readline().split())
print(
b // x - (a - 1) // x
)
@staticmethod
def c():
n, x, *a = map(int, sys.stdin.read().split())
cnt = prev = 0
for i in range(n):
d = prev + a[i] - x
prev = a[i]
if d <= 0:
continue
cnt += d
prev -= d
print(cnt)
@staticmethod
def d():
s = sys.stdin.readline().rstrip()
print("First" if len(s) & 1 ^ (s[0] == s[-1]) else "Second")
class ABC049:
@staticmethod
def a():
vowels = set("aeiou")
print(
"vowel"
if sys.stdin.readline().rstrip() in vowels
else "consonant"
)
@staticmethod
def b():
h, w, *s = sys.stdin.read().split()
for l in s:
for _ in range(2):
print(l)
@staticmethod
def c():
t = set("dream, dreamer, erase, eraser".split(", "))
def obtainable(s):
while True:
for i in range(5, 8):
if s[-i:] in t:
s = s[:-i]
if not s:
return True
break
else:
return False
s = sys.stdin.readline().rstrip()
print("YES" if obtainable(s) else "NO")
@staticmethod
def d():
n, k, l = map(int, sys.stdin.readline().split())
uf1 = GeometryTopology.Graph(n)
uf1.init_dsu()
uf2 = GeometryTopology.Graph(n)
uf2.init_dsu()
def add_edges(uf, m):
for _ in range(m):
x, y = map(int, sys.stdin.readline().split())
x -= 1
y -= 1
uf.unite(x, y)
add_edges(uf1, k)
add_edges(uf2, l)
g = defaultdict(list)
for i in range(n):
g[(uf1.find(i), uf2.find(i))].append(i)
res = [None] * n
for a in g:
for i in g[a]:
res[i] = len(g[a])
print(*res, sep=" ")
class ABC050:
@staticmethod
def a():
print(eval(sys.stdin.readline().rstrip()))
@staticmethod
def b():
n = int(sys.stdin.readline().rstrip())
t = np.array(sys.stdin.readline().split(), dtype=np.int64)
m, *px = map(int, sys.stdin.read().split())
p, x = np.array(px).reshape(m, 2).T
p -= 1
print(*(t.sum() + x - t[p]), sep="\n")
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
a = Counter(a)
if n & 1 and not (
a[0] == 1 and all(a[i] == 2 for i in range(2, n, 2))
):
print(0)
return
if ~n & 1 and any(a[i] != 2 for i in range(1, n, 2)):
print(0)
return
print(pow(2, n // 2, MOD))
@staticmethod
def d():
pass
class ABC051:
@staticmethod
def a():
print(" ".join(sys.stdin.readline().rstrip().split(",")))
@staticmethod
def b():
k, s = map(int, sys.stdin.readline().split())
tot = 0
for x in range(k + 1):
if s - x < 0:
break
if s - x > 2 * k:
continue
tot += s - x + 1 if s - x <= k else 2 * k - (s - x) + 1
print(tot)
@staticmethod
def c():
x1, y1, x2, y2 = map(int, sys.stdin.readline().split())
dx, dy = x2 - x1, y2 - y1
print(
"U" * dy
+ "R" * (dx + 1)
+ "D" * (dy + 1)
+ "L" * (dx + 1)
+ "U"
+ "L"
+ "U" * (dy + 1)
+ "R" * (dx + 1)
+ "D" * (dy + 1)
+ "L" * dx
)
@staticmethod
def d():
n, m, *abc = map(int, sys.stdin.read().split())
x = np.arange(n)
a, b, c = np.array(abc).reshape(m, 3).T
a -= 1
b -= 1
d = shortest_path(
csr_matrix((c, (a, b)), shape=(n, n)),
method="FW",
directed=False,
).astype(np.int64)
print(
m
- np.any(
d[x, a[:, None]] + c[:, None] == d[x, b[:, None]], axis=1
).sum()
)
class ABC052:
@staticmethod
def a():
a, b, c, d = map(int, sys.stdin.readline().split())
print(max(a * b, c * d))
@staticmethod
def b():
n, s = sys.stdin.read().split()
n = int(n)
a = [0] * (n + 1)
for i in range(n):
a[i + 1] = a[i] + (1 if s[i] == "I" else -1)
print(max(a))
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
pn = NumberTheory.PrimeNumbers(n)
s = 1
for c in pn.factorize_factorial(n).values():
s = s * (c + 1) % MOD
print(s)
@staticmethod
def d():
n, a, b, *x = map(int, sys.stdin.read().split())
x = np.array(x)
print(np.minimum((x[1:] - x[:-1]) * a, b).sum())
class ABC053:
@staticmethod
def a():
print(
"ABC" if int(sys.stdin.readline().rstrip()) < 1200 else "ARC"
)
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
print(len(s) - s.find("A") - s[::-1].find("Z"))
@staticmethod
def c():
x = int(sys.stdin.readline().rstrip())
q, r = divmod(x, 11)
print(2 * q + (r + 5) // 6)
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
print(n - ((n - len(set(a)) + 1) // 2 * 2))
class ABC054:
@staticmethod
def a():
def f(x):
return (x + 11) % 13
a, b = map(int, sys.stdin.readline().split())
print("Alice" if f(a) > f(b) else "Bob" if f(a) < f(b) else "Draw")
@staticmethod
def b():
n, m = map(int, sys.stdin.readline().split())
a = [sys.stdin.readline().rstrip() for _ in range(n)]
b = [sys.stdin.readline().rstrip() for _ in range(m)]
for i in range(n - m + 1):
for j in range(n - m + 1):
for y in range(m):
for x in range(m):
if a[i + y][j + x] == b[y][x]:
continue
break
else:
continue
break
else:
print("Yes")
return
print("No")
@staticmethod
def c():
n, m, *ab = map(int, sys.stdin.read().split())
g = GeometryTopology.Graph(n)
for a, b in zip(*[iter(ab)] * 2):
a -= 1
b -= 1
g.add_edge(a, b)
g.add_edge(b, a)
cnt = 0
stack = [(0, 1)]
while stack:
u, s = stack.pop()
if s == (1 << n) - 1:
cnt += 1
continue
for v in g.edges[u]:
if s >> v & 1:
continue
stack.append((v, s | 1 << v))
print(cnt)
@staticmethod
def d():
n, ma, mb, *abc = map(int, sys.stdin.read().split())
dp = np.full((401, 401), np.inf)
dp[0, 0] = 0
for a, b, c in zip(*[iter(abc)] * 3):
np.minimum(dp[a:, b:], dp[:-a, :-b] + c, out=dp[a:, b:])
i = np.arange(1, 400 // max(ma, mb) + 1)
res = dp[i * ma, i * mb].min()
print(int(res) if res != np.inf else -1)
class ABC055:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
print(800 * n - 200 * (n // 15))
@staticmethod
def b():
n = int(sys.stdin.readline().rstrip())
fac, _ = Algebra.generate_fac_ifac(n, MOD)
print(fac[-1])
@staticmethod
def c():
n, m = map(int, sys.stdin.readline().split())
print(m // 2 if m <= 2 * n else n + (m - 2 * n) // 4)
@staticmethod
def d():
n, s = sys.stdin.read().split()
n = int(n)
s = [1 if c == "o" else 0 for c in s]
def possible(t):
for i in range(1, n - 1):
t[i + 1] = t[i - 1] ^ t[i] ^ s[i]
return (
(t[0] ^ s[0] ^ t[1] ^ t[-1])
| (t[-1] ^ s[-1] ^ t[-2] ^ t[0])
) ^ 1
for fst in [(1, 0), (0, 1), (1, 1), (0, 0)]:
t = [None] * n
t[0], t[1] = fst[0], fst[1]
if possible(t):
print("".join("S" if x == 1 else "W" for x in t))
return
print(-1)
class ABC056:
@staticmethod
def a():
def to_i(c):
return 1 if c == "H" else 0
a, b = map(to_i, sys.stdin.readline().split())
print("D" if a ^ b else "H")
@staticmethod
def b():
w, a, b = map(int, sys.stdin.readline().split())
if a > b:
a, b = b, a
print(max(b - (a + w), 0))
@staticmethod
def c():
x = int(sys.stdin.readline().rstrip())
print(int(math.ceil(math.sqrt(2 * x + 1 / 4) - 0.5)))
@staticmethod
def d():
n, k, *a = map(int, sys.stdin.read().split())
a = sorted(min(x, k) for x in a)
def necessary(i):
dp = np.zeros(k, dtype=np.bool)
dp[0] = True
for j in range(n):
if j == i:
continue
dp[a[j] :] += dp[: -a[j]]
return np.any(dp[k - a[i] :])
def binary_search():
lo, hi = -1, n
while hi - lo > 1:
i = (lo + hi) // 2
if necessary(i):
hi = i
else:
lo = i
return hi
print(binary_search())
class ABC057:
@staticmethod
def a():
a, b = map(int, sys.stdin.readline().split())
print((a + b) % 24)
@staticmethod
def b():
n, m, *I = map(int, sys.stdin.read().split())
I = np.array(I).reshape(-1, 2)
ab, cd = I[:n], I[n:]
print(
*(
np.argmin(
np.absolute(ab[:, None] - cd).sum(axis=-1), axis=-1
)
+ 1
),
sep="\n",
)
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
divs = NumberTheory.find_divisors(n)
print(len(str(divs[bi_l(divs, math.sqrt(n))])))
@staticmethod
def d():
c = Combinatorics.choose
n, a, b, *v = map(int, sys.stdin.read().split())
v.sort()
print(sum(v[-a:]) / a)
l, r = bi_l(v, v[-a]), bi_r(v, v[-a])
print(
sum(
c(r - l, i)
for i in range(r - n + a, r - max(l, n - b) + 1)
)
if r == n
else c(r - l, r - n + a)
)
class ABC058:
@staticmethod
def a():
a, b, c = map(int, sys.stdin.readline().split())
print("YES" if c - b == b - a else "NO")
@staticmethod
def b():
s, t = sys.stdin.read().split()
a = ""
for i in range(len(t)):
a += s[i] + t[i]
if len(s) > len(t):
a += s[-1]
print(a)
@staticmethod
def c():
n, *s = sys.stdin.read().split()
res = {c: 100 for c in string.ascii_lowercase}
for counter in map(Counter, s):
for (
c,
x,
) in res.items():
res[c] = min(x, counter[c])
t = ""
for c, x in sorted(res.items()):
t += c * x
print(t)
@staticmethod
def d():
n, m, *xy = map(int, sys.stdin.read().split())
x, y = np.array(xy[:n]), np.array(xy[n:])
print(
(x * (np.arange(n) + 1) - np.cumsum(x)).sum()
% MOD
* ((y * (np.arange(m) + 1) - np.cumsum(y)).sum() % MOD)
% MOD
)
class ABC059:
@staticmethod
def a():
def initial(s):
return s[0].upper()
print("".join(map(initial, sys.stdin.readline().split())))
@staticmethod
def b():
a, b = sys.stdin.read().split()
la, lb = len(a), len(b)
print(
"GREATER"
if la > lb
else "LESS"
if la < lb
else "GREATER"
if a > b
else "LESS"
if a < b
else "EQUAL"
)
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
c = s = 0
for i in range(n):
s += a[i]
if i & 1 and s >= 0:
c += s + 1
s = -1
elif i & 1 ^ 1 and s <= 0:
c += 1 - s
s = 1
c1 = c
c = s = 0
for i in range(n):
s += a[i]
if i & 1 and s <= 0:
c += 1 - s
s = 1
elif i & 1 ^ 1 and s >= 0:
c += s + 1
s = -1
c2 = c
print(min(c1, c2))
@staticmethod
def d():
x, y = map(int, sys.stdin.readline().split())
print("Brown" if abs(x - y) <= 1 else "Alice")
class ABC060:
@staticmethod
def a():
a, b, c = sys.stdin.readline().split()
print("YES" if a[-1] == b[0] and b[-1] == c[0] else "NO")
@staticmethod
def b():
a, b, c = map(int, sys.stdin.readline().split())
print("NO" if c % NumberTheory.gcd(a, b) else "YES")
@staticmethod
def c():
n, t, *a = map(int, sys.stdin.read().split())
print(sum(min(a[i + 1] - a[i], t) for i in range(n - 1)) + t)
@staticmethod
def d():
pass
class ABC061:
@staticmethod
def a():
a, b, c = map(int, sys.stdin.readline().split())
print("Yes" if a <= c <= b else "No")
@staticmethod
def b():
n, m, *ab = map(int, sys.stdin.read().split())
ab = np.array(ab) - 1
g = np.zeros(n, dtype=np.int32)
np.add.at(g, ab, 1)
print(*g, sep="\n")
@staticmethod
def c():
n, k, *ab = map(int, sys.stdin.read().split())
ab = np.transpose(np.array(ab).reshape(n, 2))
a, b = ab[:, np.argsort(ab[0])]
print(a[np.cumsum(b) >= k][0])
@staticmethod
def d():
n, m, *abc = map(int, sys.stdin.read().split())
a, b, c = np.array(abc).reshape(m, 3).T
a -= 1
b -= 1
c *= -1
g = csr_matrix(
([1] * (m + 1), (np.append(a, n - 1), np.append(b, 0))), (n, n)
)
_, labels = connected_components(g, connection="strong")
bl = (labels[a] == labels[0]) & (labels[b] == labels[0])
g = csr_matrix((c[bl], (a[bl], b[bl])), (n, n))
try:
print(
-shortest_path(g, method="BF", directed=True, indices=0)[
-1
].astype(int)
)
except:
print("inf")
@staticmethod
def d_2():
n, m, *abc = map(int, sys.stdin.read().split())
a, b, c = np.array(abc).reshape(m, 3).T
a -= 1
b -= 1
c *= -1
d = np.full(n, np.inf)
d[0] = 0
for _ in range(n - 1):
np.minimum.at(d, b, d[a] + c)
neg_cycle = np.zeros(n, dtype=np.bool)
for _ in range(n):
np.logical_or.at(neg_cycle, b, d[a] + c < d[b])
np.minimum.at(d, b, d[a] + c)
print(inf if neg_cycle[-1] else -d[-1].astype(int))
class ABC062:
@staticmethod
def a():
g = [0, 2, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0]
x, y = map(int, sys.stdin.readline().split())
print("Yes" if g[x - 1] == g[y - 1] else "No")
@staticmethod
def b():
h, w = map(int, sys.stdin.readline().split())
a = np.array(
[list(s) for s in sys.stdin.read().split()], dtype="U1"
)
a = np.pad(a, pad_width=1, constant_values="#")
for s in a:
print("".join(s))
@staticmethod
def c():
h, w = map(int, sys.stdin.readline().split())
if h * w % 3 == 0:
print(0)
return
def minimize(h, w):
return min(
h,
*(
s[-1] - s[0]
for x in range(w // 3, w // 3 + 2)
for s in (
sorted(
[
h * x,
h // 2 * (w - x),
(h + 1) // 2 * (w - x),
]
),
)
),
)
print(min(minimize(h, w), minimize(w, h)))
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
def optimize(a):
a = list(a)
l, r = a[:n], a[n:]
heapify(l)
s = [None] * (n + 1)
s[0] = sum(l)
for i in range(n):
x = heappop(l)
heappush(l, max(x, r[i]))
s[i + 1] = s[i] + max(0, r[i] - x)
return np.array(s)
print(
(
optimize(a[: 2 * n]) + optimize(-a[-1 : n - 1 : -1])[::-1]
).max()
)
class ABC063:
@staticmethod
def a():
a = sum(map(int, sys.stdin.readline().split()))
print("error" if a >= 10 else a)
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
print("yes" if len(set(s)) == len(s) else "no")
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
s = a.sum()
if s % 10:
print(s)
elif not np.count_nonzero(a % 10):
print(0)
else:
print(s - a[a % 10 != 0].min())
@staticmethod
def d():
n, a, b, *h = map(int, sys.stdin.read().split())
h = np.array(h)
d = a - b
def possible(c):
hh = h.copy()
np.maximum(hh - b * c, 0, out=hh)
return ((hh + d - 1) // d).sum() <= c
def binary_search():
lo, hi = 0, 10**9
while hi - lo > 1:
c = (lo + hi) // 2
if possible(c):
hi = c
else:
lo = c
return hi
print(binary_search())
class ABC064:
@staticmethod
def a():
r, g, b = map(int, sys.stdin.readline().split())
print("NO" if (10 * g + b) % 4 else "YES")
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
a.sort()
print(a[-1] - a[0])
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
a = np.bincount(np.minimum(np.array(a) // 400, 8), minlength=9)
mx = np.count_nonzero(a[:-1]) + a[-1]
mn = max(mx - a[-1], 1)
print(mn, mx)
@staticmethod
def d():
n, s = sys.stdin.read().split()
l = r = 0
for c in s:
if c == "(":
r += 1
else:
if r == 0:
l += 1
else:
r -= 1
print("(" * l + s + ")" * r)
class ABC065:
@staticmethod
def a():
x, a, b = map(int, sys.stdin.readline().split())
y = -a + b
print("delicious" if y <= 0 else "safe" if y <= x else "dangerous")
@staticmethod
def b():
n, *a = [int(x) - 1 for x in sys.stdin.read().split()]
i = 0
for c in range(n):
i = a[i]
if i == 1:
print(c + 1)
return
print(-1)
@staticmethod
def c():
n, m = map(int, sys.stdin.readline().split())
d = abs(n - m)
if d >= 2:
print(0)
return
fac, _ = Algebra.generate_fac_ifac(10**5)
print(fac[n] * fac[m] * (1 if d else 2) % MOD)
@staticmethod
def d():
n, *xy = map(int, sys.stdin.read().split())
x, y = np.array(xy).reshape(n, 2).T
i = np.argsort(x)
ax, bx, cx = (
i[:-1],
i[1:],
x[
i[1:],
]
- x[i[:-1]],
)
i = np.argsort(y)
ay, by, cy = (
i[:-1],
i[1:],
y[
i[1:],
]
- y[i[:-1]],
)
e = np.vstack(
[np.hstack([ax, ay]), np.hstack([bx, by]), np.hstack([cx, cy])]
)
e = e[:, np.argsort(e[-1])]
_, i = np.unique(e[:-1], return_index=True, axis=1)
a, b, c = e[:, i]
print(
minimum_spanning_tree(csr_matrix((c, (a, b)), (n, n)))
.astype(np.int64)
.sum()
)
@staticmethod
def d_2():
n, *xy = map(int, sys.stdin.read().split())
x, y = xy[::2], xy[1::2]
g = GeometryTopology.Graph(n)
def make(a):
b = sorted(enumerate(a), key=lambda x: x[1])
for i in range(n - 1):
u, v, w = b[i][0], b[i + 1][0], b[i + 1][1] - b[i][1]
for u, v in [(v, u), (u, v)]:
if not v in g.edges[u]:
g.add_edge(u, v, weight=w)
else:
g.edges[u][v].weight = min(g.edges[u][v].weight, w)
make(x)
make(y)
_, d = g.kruskal()
print(d)
class ABC066:
@staticmethod
def a():
print(sum(sorted(map(int, sys.stdin.readline().split()))[:-1]))
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
def f(s):
n = len(s) // 2
return s[:n] == s[n:]
for i in range(len(s) - 2, 0, -2):
if f(s[:i]):
print(i)
return
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
b = deque()
for i in range(n):
if i & 1:
b.appendleft(a[i])
else:
b.append(a[i])
if n & 1:
b.reverse()
print(*b)
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
tmp = [None] * (n + 1)
for i in range(n + 1):
if tmp[a[i]] is not None:
d = tmp[a[i]] + n - i
break
tmp[a[i]] = i
k = np.arange(1, n + 2)
c = Combinatorics.CombinationsMod(n + 1, MOD)
print(*((c(n + 1, k) - c(d, k - 1)) % MOD), sep="\n")
class ABC067:
@staticmethod
def a():
pass
@staticmethod
def b():
pass
@staticmethod
def c():
pass
@staticmethod
def d():
n, *ab = map(int, sys.stdin.read().split())
g = GeometryTopology.Graph(n)
for a, b in zip(*[iter(ab)] * 2):
a -= 1
b -= 1
g.add_edge(a, b)
g.add_edge(b, a)
d1, d2 = g.bfs(0), g.bfs(n - 1)
print(
"Fennec"
if sum(d1[i] <= d2[i] for i in range(n)) > n // 2
else "Snuke"
)
class ABC068:
@staticmethod
def a():
pass
@staticmethod
def b():
pass
@staticmethod
def c():
pass
@staticmethod
def d():
k = int(sys.stdin.readline().rstrip())
n = 50
print(n)
q, r = divmod(k, n)
a = np.arange(n - 1, -1, -1) + q
a[:r] += 1
print(*a)
class ABC069:
@staticmethod
def a():
pass
@staticmethod
def b():
pass
@staticmethod
def c():
pass
@staticmethod
def d():
h, w, n, *a = map(int, sys.stdin.read().split())
c = [i + 1 for i in range(n) for j in range(a[i])]
for i in range(h):
row = c[i * w : (i + 1) * w]
if i & 1:
row = row[::-1]
print(*row)
class ABC070:
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
g = GeometryTopology.Graph(n)
for _ in range(n - 1):
a, b, c = map(int, sys.stdin.readline().split())
a -= 1
b -= 1
g.add_edge(a, b, weight=c)
g.add_edge(b, a, weight=c)
q, k = map(int, sys.stdin.readline().split())
d = g.bfs(k - 1)
for _ in range(q):
x, y = map(int, sys.stdin.readline().split())
x -= 1
y -= 1
print(d[x] + d[y])
class ABC071:
@staticmethod
def d():
n, *s = sys.stdin.read().split()
n = int(n)
s = list(zip(*s))
dp = [0] * n
dp[0] = 3 if s[0][0] == s[0][1] else 6
for i in range(1, n):
dp[i] = dp[i - 1]
if s[i][0] == s[i - 1][0]:
continue
dp[i] *= (
2
if s[i - 1][0] == s[i - 1][1]
else 3
if s[i][0] != s[i][1]
else 1
)
dp[i] %= MOD
print(dp[-1])
class ABC072:
@staticmethod
def d():
n, *p = map(int, sys.stdin.read().split())
p += [-1]
cnt, i = 0, 0
while i < n:
if p[i] == i + 1:
cnt += p[i] == i + 1
if p[i + 1] == i + 2:
i += 1
i += 1
print(cnt)
class ABC073:
@staticmethod
def a():
pass
@staticmethod
def b():
pass
@staticmethod
def c():
pass
@staticmethod
def d():
n, m, r, *I = map(int, sys.stdin.read().split())
I = np.array(I)
a, b, c = I[r:].reshape(m, 3).T
d = shortest_path(
csr_matrix((c, (a - 1, b - 1)), (n, n)),
method="FW",
directed=False,
).astype(np.int32)
r = np.array([*itertools.permutations(I[:r] - 1)])
print((d[r[:, :-1], r[:, 1:]].sum(axis=1)).min())
class ABC074:
@staticmethod
def a():
pass
@staticmethod
def b():
pass
@staticmethod
def c():
pass
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a, dtype=np.int32).reshape(n, n)
b = shortest_path(a, method="FW").astype(np.int32)
if (b < a).any():
print(-1)
return
np.fill_diagonal(b, 10**9)
a[np.any(b[:, None] + b <= a[:, :, None], axis=2)] = 0
print(a.sum() // 2)
class ABC075:
@staticmethod
def a():
pass
@staticmethod
def b():
pass
@staticmethod
def c():
pass
@staticmethod
def d():
n, k, *xy = map(int, sys.stdin.read().split())
xy = np.array(xy).reshape(n, 2)
x_y = xy.copy()[np.argsort(xy[:, 0])]
y_x = xy.copy()[np.argsort(xy[:, 1])]
comb = np.array([*itertools.combinations(range(n), 2)])
i1, i2 = comb.T
j1, j2 = comb[None, :].T
s = (y_x[:, 1][i2] - y_x[:, 1][i1]) * (
x_y[:, 0][j2] - x_y[:, 0][j1]
)
c = np.zeros((n + 1, n + 1), dtype=np.int64)
for i in range(n):
c[i + 1, 1:] += c[i, 1:] + (y_x[i, 0] <= x_y[:, 0])
a = c[i2 + 1, j2 + 1] - c[i2 + 1, j1] - c[i1, j2 + 1] + c[i1, j1]
print(s[a >= k].min())
class ABC076:
@staticmethod
def d():
n, *tv = map(int, sys.stdin.read().split())
t, v = np.array(tv).reshape(2, n)
t = np.pad(t, pad_width=[2, 1], constant_values=0)
np.cumsum(t, out=t)
l, r = t[:-1], t[1:]
v = np.pad(v, pad_width=[1, 1], constant_values=0)
x = np.arange(0, r[-1] + 0.1, 0.5, dtype=np.float32)[:, None]
mx = v - (x - l)
np.maximum(mx, v, out=mx)
np.maximum(mx, v + (x - r), out=mx)
y = mx.min(axis=1)
print(((y[:-1] + y[1:]) / 4).sum())
class ABC077:
@staticmethod
def d():
k = int(sys.stdin.readline().rstrip())
g = GeometryTopology.Graph(k)
for i in range(k):
g.add_edge(i, i * 10 % k, weight=0)
g.add_edge(i, (i + 1) % k, update=False, weight=1)
print(1 + g.bfs01(1)[0])
class ABC078:
@staticmethod
def d():
n, z, w, *a = map(int, sys.stdin.read().split())
print(
abs(a[0] - w)
if n == 1
else max(abs(a[-1] - w), abs(a[-1] - a[-2]))
)
class ABC079:
@staticmethod
def d():
h, w, *I = map(int, sys.stdin.read().split())
I = np.array(I)
c = I[:100].reshape(10, 10)
a = I[100:].reshape(h, w)
c = shortest_path(c.T, method="D", indices=1).astype(np.int32)
print(c[a[a != -1]].sum())
    class ABC080:
        # Solutions for AtCoder Beginner Contest 080 (only D implemented).
        @staticmethod
        def d():
            # D: minimum number of recorders = maximum number of channels
            # simultaneously "busy".  Uses the imos (difference-array) trick
            # per channel over the time axis, then a cumulative sum.
            n, c, *stc = map(int, sys.stdin.read().split())
            # one row per channel, one column per time unit (time <= 1e5)
            using = np.zeros((c, 10**5 + 2), dtype=np.int8)
            # NOTE: `c` is rebound from "number of channels" to the per-program
            # channel column here.
            s, t, c = np.array(stc).reshape(n, 3).T
            np.add.at(using, (c - 1, s), 1)
            np.subtract.at(using, (c - 1, t + 1), 1)
            np.cumsum(using, axis=1, out=using)
            # A recorder is needed per channel active at a given instant.
            print(np.count_nonzero(using, axis=0).max())
class ABC081:
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
i = np.argmax(np.absolute(a))
print(2 * n)
for j in range(n):
print(i + 1, j + 1)
if a[i] >= 0:
for j in range(n - 1):
print(j + 1, j + 2)
else:
for j in range(n - 1, 0, -1):
print(j + 1, j)
class ABC082:
pass
class ABC083:
pass
class ABC084:
pass
class ABC085:
pass
class ABC086:
pass
class ABC087:
pass
class ABC088:
pass
class ABC089:
pass
class ABC090:
pass
class ABC091:
pass
class ABC092:
pass
class ABC093:
pass
class ABC094:
pass
class ABC095:
pass
class ABC096:
pass
class ABC097:
pass
class ABC098:
pass
class ABC099:
pass
class ABC100:
pass
class ABC101:
pass
class ABC102:
pass
class ABC103:
pass
class ABC104:
pass
class ABC105:
pass
class ABC106:
pass
class ABC107:
pass
class ABC108:
pass
class ABC109:
pass
class ABC110:
pass
class ABC111:
pass
class ABC112:
pass
class ABC113:
pass
class ABC114:
pass
class ABC115:
pass
class ABC116:
pass
class ABC117:
pass
class ABC118:
pass
class ABC119:
pass
class ABC120:
pass
class ABC121:
pass
class ABC122:
pass
class ABC123:
pass
class ABC124:
pass
class ABC125:
pass
class ABC126:
pass
class ABC127:
pass
class ABC128:
pass
class ABC129:
pass
class ABC130:
pass
class ABC131:
pass
class ABC132:
pass
class ABC133:
pass
class ABC134:
pass
class ABC135:
pass
class ABC136:
pass
class ABC137:
pass
class ABC138:
pass
class ABC139:
pass
class ABC140:
pass
class ABC141:
pass
class ABC142:
pass
class ABC143:
pass
class ABC144:
pass
class ABC145:
pass
class ABC146:
pass
class ABC147:
pass
class ABC148:
pass
class ABC149:
pass
class ABC150:
pass
class ABC151:
pass
class ABC152:
pass
class ABC153:
pass
class ABC154:
pass
class ABC155:
pass
class ABC156:
pass
class ABC157:
pass
class ABC158:
pass
class ABC159:
pass
class ABC160:
pass
class ABC161:
pass
class ABC162:
pass
class ABC163:
pass
class ABC164:
pass
class ABC165:
pass
class ABC166:
pass
class ABC167:
pass
class ABC168:
pass
class ABC169:
pass
class ABC170:
@staticmethod
def a():
x = [int(x) for x in sys.stdin.readline().split()]
for i in range(5):
if x[i] != i + 1:
print(i + 1)
break
@staticmethod
def b():
x, y = map(int, sys.stdin.readline().split())
print("Yes" if 2 * x <= y <= 4 * x and y % 2 == 0 else "No")
@staticmethod
def c():
x, n, *p = map(int, sys.stdin.read().split())
a = list(set(range(102)) - set(p))
a = [(abs(y - x), y) for y in a]
print(sorted(a)[0][1])
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
cand = set(a)
cnt = 0
for x, c in sorted(Counter(a).items()):
cnt += c == 1 and x in cand
cand -= set(range(x * 2, 10**6 + 1, x))
print(cnt)
@staticmethod
def e():
n, q = map(int, sys.stdin.readline().split())
queue = []
m = 2 * 10**5
infants = [[] for _ in range(m)]
highest_rate = [None] * m
where = [None] * n
rate = [None] * n
def entry(i, k):
where[i] = k
while infants[k]:
r, j = heappop(infants[k])
if where[j] != k or j == i:
continue
if rate[i] >= -r:
highest_rate[k] = rate[i]
heappush(queue, (rate[i], k, i))
heappush(infants[k], (r, j))
break
else:
highest_rate[k] = rate[i]
heappush(queue, (rate[i], k, i))
heappush(infants[k], (-rate[i], i))
def transfer(i, k):
now = where[i]
while infants[now]:
r, j = heappop(infants[now])
if where[j] != now or j == i:
continue
if highest_rate[now] != -r:
highest_rate[now] = -r
heappush(queue, (-r, now, j))
heappush(infants[now], (r, j))
break
else:
highest_rate[now] = None
entry(i, k)
def inquire():
while True:
r, k, i = heappop(queue)
if where[i] != k or r != highest_rate[k]:
continue
heappush(queue, (r, k, i))
return r
for i in range(n):
a, b = map(int, sys.stdin.readline().split())
rate[i] = a
entry(i, b - 1)
for _ in range(q):
c, d = map(int, sys.stdin.readline().split())
transfer(c - 1, d - 1)
print(inquire())
class ABC171:
@staticmethod
def a():
c = sys.stdin.readline().rstrip()
print("A" if c < "a" else "a")
@staticmethod
def b():
n, k, *p = map(int, sys.stdin.read().split())
print(sum(sorted(p)[:k]))
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
n -= 1
l = 1
while True:
if n < pow(26, l):
break
n -= pow(26, l)
l += 1
res = "".join(
[chr(ord("a") + d) for d in NumberTheory.base_convert(n, 26)][
::-1
]
)
res = "a" * (l - len(res)) + res
print(res)
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
a = [int(x) for x in sys.stdin.readline().split()]
s = sum(a)
cnt = Counter(a)
q = int(sys.stdin.readline().rstrip())
for _ in range(q):
b, c = map(int, sys.stdin.readline().split())
s += (c - b) * cnt[b]
print(s)
cnt[c] += cnt[b]
cnt[b] = 0
@staticmethod
def e():
n, *a = map(int, sys.stdin.read().split())
s = 0
for x in a:
s ^= x
b = map(lambda x: x ^ s, a)
print(*b, sep=" ")
    class ABC172:
        # Solutions for AtCoder Beginner Contest 172.
        @staticmethod
        def a():
            # A: a + a^2 + a^3.
            a = int(sys.stdin.readline().rstrip())
            print(a * (1 + a + a**2))
        @staticmethod
        def b():
            # B: Hamming distance between equal-length strings s and t.
            s, t = sys.stdin.read().split()
            print(sum(s[i] != t[i] for i in range(len(s))))
        @staticmethod
        def c():
            # C: read as many books as possible within time k, taking a
            # prefix of pile A plus a prefix of pile B.  Enumerate the A
            # prefix; binary-search the best B prefix on its prefix sums.
            n, m, k = map(int, sys.stdin.readline().split())
            a = [0] + [int(x) for x in sys.stdin.readline().split()]
            b = [int(x) for x in sys.stdin.readline().split()]
            (*sa,) = itertools.accumulate(a)
            (*sb,) = itertools.accumulate(b)
            res = 0
            for i in range(n + 1):
                # time remaining after i books from A (sa is shifted by the
                # leading 0, so sa[i] is the cost of the first i books)
                r = k - sa[i]
                if r < 0:
                    break
                # bi_r: presumably bisect.bisect_right aliased at file top —
                # TODO confirm the alias.
                res = max(res, i + bi_r(sb, r))
            print(res)
        @staticmethod
        def d():
            # D: sum over i of i * d(i), where d(i) is the divisor count,
            # via a sieve that increments every multiple of each i.
            n = int(sys.stdin.readline().rstrip())
            f = np.zeros(n + 1, dtype=np.int64)
            for i in range(1, n + 1):
                f[i::i] += 1
            print((np.arange(1, n + 1) * f[1:]).sum())
class ABC173:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
charge = (n + 999) // 1000 * 1000 - n
print(charge)
@staticmethod
def b():
n, *s = sys.stdin.read().split()
c = Counter(s)
for v in "AC, WA, TLE, RE".split(", "):
print(f"{v} x {c[v]}")
@staticmethod
def c():
h, w, k = map(int, sys.stdin.readline().split())
c = [sys.stdin.readline().rstrip() for _ in range(h)]
tot = 0
for i in range(1 << h):
for j in range(1 << w):
cnt = 0
for y in range(h):
for x in range(w):
if i >> y & 1 or j >> x & 1:
continue
cnt += c[y][x] == "#"
tot += cnt == k
print(tot)
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
a.sort(reverse=True)
res = (
a[0]
+ sum(a[1 : 1 + (n - 2) // 2]) * 2
+ a[1 + (n - 2) // 2] * (n & 1)
)
print(res)
@staticmethod
def e():
MOD = 10**9 + 7
n, k, *a = map(int, sys.stdin.read().split())
minus = [x for x in a if x < 0]
plus = [x for x in a if x > 0]
if len(plus) + len(minus) // 2 * 2 >= k:
(*minus,) = map(abs, minus)
minus.sort(reverse=True)
plus.sort(reverse=True)
cand = []
if len(minus) & 1:
minus = minus[:-1]
for i in range(0, len(minus) - 1, 2):
cand.append(minus[i] * minus[i + 1] % MOD)
if k & 1:
res = plus[0]
plus = plus[1:]
else:
res = 1
if len(plus) & 1:
plus = plus[:-1]
for i in range(0, len(plus) - 1, 2):
cand.append(plus[i] * plus[i + 1] % MOD)
cand.sort(reverse=True)
for x in cand[: k // 2]:
res *= x
res %= MOD
print(res)
elif 0 in a:
print(0)
else:
cand = sorted(map(abs, a))
res = 1
for i in range(k):
res *= cand[i]
res %= MOD
res = MOD - res
print(res)
pass
class ABC174:
@staticmethod
def a():
print("Yes" if int(sys.stdin.readline().rstrip()) >= 30 else "No")
class ABC178:
@staticmethod
def a():
pass
@staticmethod
def b():
pass
@staticmethod
def c():
pass
@staticmethod
def d():
s = int(sys.stdin.readline().rstrip())
if s == 0:
print(1)
return
elif s == 1:
print(0)
return
c = np.eye(3, k=-1, dtype=np.int64)
c[0, 0] = c[0, 2] = 1
a = np.array([0, 0, 1])
print(Algebra.dot(Algebra.matrix_pow(c, s - 2), a)[0])
class ABC179:
@staticmethod
def a():
s = sys.stdin.readline().rstrip()
print(s + "s" if s[-1] != "s" else s + "es")
@staticmethod
def b():
n, *d = map(int, sys.stdin.read().split())
d = np.array(d).reshape(n, 2).T
d = np.equal(d[0], d[1]).astype(int)
dd = d.copy()
dd[1:] += d[:-1]
dd[:-1] += d[1:]
print("Yes" if (dd >= 3).any() else "No")
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
res = (n // np.arange(1, n + 1)).sum() - len(
NumberTheory.find_divisors(n)
)
print(res)
@staticmethod
def d():
mod = 998244353
n, k, *lr = map(int, sys.stdin.read().split())
l, r = np.array(lr).reshape(k, -1).T
@njit((i8, i8[:], i8[:]), cache=True)
def solve(n, l, r):
res = np.zeros(n * 2, dtype=np.int64)
res[0], res[1] = 1, -1
for i in range(n - 1):
res[i + 1] = (res[i + 1] + res[i]) % mod
res[i + l] = (res[i + l] + res[i]) % mod
res[i + r + 1] = (res[i + r + 1] - res[i]) % mod
print(res[n - 1])
solve(n, l, r)
@staticmethod
def e():
n, x, m = map(int, sys.stdin.readline().split())
res = [-1 for _ in range(m)]
s = 0
loop = np.zeros(m, dtype=np.int64)
for i in range(m + 1):
if i == n:
print(s)
return
if res[x] != -1:
l, loop = i - res[x], loop[res[x] : i]
q, r = divmod(n - i, l)
print(s + q * loop.sum() + loop[:r].sum())
return
res[x], loop[i] = i, x
s += x
x = x**2 % m
class ABC180:
@staticmethod
def a():
n, a, b = map(int, sys.stdin.readline().split())
print(n - a + b)
@staticmethod
def b():
n, *x = map(int, sys.stdin.read().split())
x = np.absolute(np.array(x))
print(x.sum())
print(np.sqrt((x**2).sum()))
print(x.max())
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
div = NumberTheory.find_divisors(n)
print(*div, sep="\n")
@staticmethod
def d():
x, y, a, b = map(int, sys.stdin.readline().split())
cnt = 0
while x * a <= x + b:
x *= a
if x >= y:
print(cnt)
return
cnt += 1
cnt += (y - x - 1) // b
print(cnt)
@staticmethod
def e():
n, *xyz = map(int, sys.stdin.read().split())
xyz = list(zip(*[iter(xyz)] * 3))
dist = [[0] * n for _ in range(n)]
for i in range(n):
a, b, c = xyz[i]
for j in range(n):
p, q, r = xyz[j]
dist[i][j] = abs(p - a) + abs(q - b) + max(0, r - c)
dp = [[inf] * n for _ in range(1 << n)]
dp[0][0] = 0
for s in range(1 << n):
for i in range(n):
t = s | (1 << i)
for j in range(n):
dp[t][i] = min(dp[t][i], dp[s][j] + dist[j][i])
print(dp[-1][0])
@staticmethod
def f():
n, m, l = map(int, sys.stdin.readline().split())
c = Combinatorics.CombinationsMod(n, MOD)
path = np.zeros(n + 1, dtype=np.int64)
path[1] = path[2] = 1
for i in range(3, n + 1):
path[i] = path[i - 1] * i % MOD
cycle = np.zeros(n + 1, dtype=np.int64)
cycle[1:] = path[:-1]
dp = np.zeros((n + 1, m + 1), dtype=np.int64)
def f(l):
dp[:, :] = 0
dp[0, 0] = 1
for i in range(n):
for j in range(m + 1):
k = np.arange(1, min(l, n - i, m - j + 1) + 1)
dp[i + k, j + k - 1] += (
dp[i, j]
* c(n - i - 1, k - 1)
% MOD
* path[k]
% MOD
)
dp[i + k, j + k - 1] %= MOD
k = np.arange(2, min(l, n - i, m - j) + 1)
dp[i + k, j + k] += (
dp[i, j]
* c(n - i - 1, k - 1)
% MOD
* cycle[k]
% MOD
)
dp[i + k, j + k] %= MOD
return dp[n, m]
print((f(l) - f(l - 1)) % MOD)
@staticmethod
def f_2():
n, m, l = map(int, sys.stdin.readline().split())
c = Combinatorics.CombinationsMod(n, MOD)
path = [0] * (n + 1)
path[1] = path[2] = 1
for i in range(3, n + 1):
path[i] = path[i - 1] * i % MOD
cycle = [0] + path[:-1]
def f(l):
dp = [[0] * (m + 1) for _ in range(n + 1)]
dp[0][0] = 1
for i in range(n):
for j in range(m + 1):
for k in range(1, min(l, n - i, m - j + 1) + 1):
dp[i + k][j + k - 1] += (
dp[i][j]
* c(n - i - 1, k - 1)
% MOD
* path[k]
% MOD
)
dp[i + k][j + k - 1] %= MOD
for k in range(1, min(l, n - i, m - j) + 1):
dp[i + k][j + k] += (
dp[i][j]
* c(n - i - 1, k - 1)
% MOD
* cycle[k]
% MOD
)
dp[i + k][j + k] %= MOD
return dp[n][m]
print((f(l) - f(l - 1)) % MOD)
class ARC106:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
a = 1
while pow(3, a) <= n:
m = n - pow(3, a)
b = 1
while pow(5, b) <= m:
if pow(5, b) == m:
print(a, b)
return
b += 1
a += 1
print(-1)
@staticmethod
def b():
n, m = map(int, sys.stdin.readline().split())
a = [int(x) for x in sys.stdin.readline().split()]
b = [int(x) for x in sys.stdin.readline().split()]
uf = GeometryTopology.Graph(n)
uf.init_dsu()
for _ in range(m):
c, d = map(int, sys.stdin.readline().split())
c -= 1
d -= 1
uf.unite(c, d)
visited = [False] * n
ga = [[] for _ in range(n)]
gb = [[] for _ in range(n)]
for i in range(n):
r = uf.find(i)
ga[r].append(a[i])
gb[r].append(b[i])
print(
"Yes"
if all(sum(ga[i]) == sum(gb[i]) for i in range(n))
else "No"
)
@staticmethod
def c():
n, m = map(int, sys.stdin.readline().split())
if m < 0:
print(-1)
return
if n == 1:
if m != 0:
print(-1)
return
print(1, 2)
return
if m >= n - 1:
print(-1)
return
l, r = 1, 10**9
print(l, r)
for _ in range(n - 2 - m):
l += 1
r -= 1
print(l, r)
r = l
for _ in range(m + 1):
l, r = r + 1, r + 2
print(l, r)
@staticmethod
def d():
mod = 998244353
n, k, *a = map(int, sys.stdin.read().split())
a = np.array(a)
b = np.zeros((k + 1, n), dtype=np.int64)
b[0] = 1
for i in range(k):
b[i + 1] = b[i] * a % mod
s = b.sum(axis=1) % mod
inv_2 = pow(2, mod - 2, mod)
c = Combinatorics.CombinationsMod(mod=mod)
for x in range(1, k + 1):
l = np.arange(x + 1)
print(
(
(c(x, l) * s[l] % mod * s[l][::-1] % mod).sum() % mod
- pow(2, x, mod) * s[x]
)
% mod
* inv_2
% mod
)
@staticmethod
def e():
pass
@staticmethod
def f():
pass
class ACL001:
@staticmethod
def a():
n, *xy = map(int, sys.stdin.read().split())
(*xy,) = zip(*[iter(xy)] * 2)
print(xy)
pass
class TDPC:
@staticmethod
def t():
pass
    class MSolutions2020:
        # Solutions for M-SOLUTIONS Programming Contest 2020.
        @staticmethod
        def a():
            # A: map a rating (>= 400) to a color grade 1..8 — one grade per
            # 200-point band above 400.
            x = int(sys.stdin.readline().rstrip())
            x -= 400
            print(8 - x // 200)
        @staticmethod
        def b():
            # B: can we reach r < g < b by doubling g and then b, using at
            # most k doublings in total?  Greedy: fix g first, then b.
            r, g, b, k = map(int, sys.stdin.read().split())
            while k and g <= r:
                g *= 2
                k -= 1
            while k and b <= g:
                b *= 2
                k -= 1
            print("Yes" if r < g < b else "No")
        @staticmethod
        def c():
            # C: compare each term with the term k places earlier (the two
            # k-length windows share all other factors).
            n, k, *a = map(int, sys.stdin.read().split())
            for i in range(k, n):
                print("Yes" if a[i] > a[i - k] else "No")
        @staticmethod
        def d():
            # D: stock trading — buy as many shares as possible at each local
            # minimum and sell everything at each local maximum.
            n, *a = map(int, sys.stdin.read().split())
            a += [-1]  # sentinel below any price forces a final sell
            m = 1000  # money in hand (starting capital)
            s = 0  # shares held
            for i in range(n):
                if a[i + 1] == a[i]:
                    continue
                elif a[i + 1] > a[i]:
                    # price is about to rise: buy as much as possible
                    cnt = m // a[i]
                    m -= a[i] * cnt
                    s += cnt
                else:
                    # price is about to fall: liquidate
                    m += a[i] * s
                    s = 0
            print(m)
class Codeforces:
class CR676div2:
@staticmethod
def a():
t = int(sys.stdin.readline().rstrip())
for _ in range(t):
a, b = map(int, sys.stdin.readline().split())
print(a ^ b)
@staticmethod
def b():
t = int(sys.stdin.readline().rstrip())
for _ in range(t):
n = int(sys.stdin.readline().rstrip())
s = [list(sys.stdin.readline().rstrip()) for _ in range(n)]
s[0][0] = s[-1][-1] = "0"
for i in range(n):
for j in range(n):
s[i][j] = int(s[i][j])
def can_goal(g, c=0):
visited = [0] * n
stack = [(0, 0)]
visited[0] |= 1 << 0
while stack:
y, x = stack.pop()
for dy, dx in [(-1, 0), (0, -1), (1, 0), (0, 1)]:
i, j = y + dy, x + dx
if i < 0 or i >= n or j < 0 or j >= n:
continue
if i == j == n - 1:
return True
if visited[i] >> j & 1:
continue
visited[i] |= 1 << j
if g[i][j] != c:
continue
stack.append((i, j))
return False
if not (can_goal(s, 0) or can_goal(s, 1)):
print(0)
continue
flg = 0
for i in range(n):
for j in range(n):
if i == j == 0 or i == j == n - 1:
continue
s[i][j] ^= 1
if not (can_goal(s, 0) or can_goal(s, 1)):
print(1)
print(i + 1, j + 1)
flg = 1
break
s[i][j] ^= 1
if flg:
break
if flg:
continue
print(2)
if s[0][1] == s[1][0]:
print(n, n - 1)
print(n - 1, n)
continue
if s[0][1] == s[-1][-2]:
print(1, 2)
print(n - 1, n)
else:
print(1, 2)
print(n, n - 1)
@staticmethod
def c():
pass
class ProjectEuler:
@staticmethod
def p1():
def f(n, x):
return (x + n // x * x) * (n // x) // 2
n = 1000
ans = f(n - 1, 3) + f(n - 1, 5) - f(n - 1, 15)
print(ans)
@staticmethod
def p2():
fib = [1, 2]
while fib[-1] < 4 * 10**6:
fib.append(fib[-1] + fib[-2])
print(sum(fib[1:-1:3]))
@staticmethod
def p3():
pn = NumberTheory.PrimeNumbers()
res = pn.factorize(600851475143)
print(max(res.keys()))
@staticmethod
def p4():
def is_palindrome(n):
n = str(n)
return n == n[::-1]
cand = []
for a in range(100, 1000):
for b in range(a, 1000):
n = a * b
if is_palindrome(n):
cand.append(n)
print(max(cand))
@staticmethod
def p5():
pn = NumberTheory.PrimeNumbers()
res = defaultdict(int)
for i in range(1, 21):
for p, c in pn.factorize(i).items():
res[p] = max(res[p], c)
ans = 1
for p, c in res.items():
ans *= pow(p, c)
print(ans)
@staticmethod
def p6():
a = np.arange(101)
b = np.cumsum(a**2)
a = a.cumsum()
print(a[100] ** 2 - b[100])
@staticmethod
def p7():
nt = NumberTheory.PrimeNumbers()
print(sorted(nt)[10000])
@staticmethod
def p8():
n = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"
n = [int(d) for d in list(n)]
res = 0
for i in range(988):
x = 1
for j in range(13):
x *= n[i + j]
res = max(res, x)
print(res)
@staticmethod
def p9():
for a in range(1, 997):
for b in range(a, 998 - a):
c = 1000 - a - b
if a**2 + b**2 == c**2:
print(a * b * c)
return
@staticmethod
def p10():
pn = NumberTheory.PrimeNumbers(2 * 10**6 + 1)
print(sum(pn))
@staticmethod
def p11():
grid = "08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08 49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00 81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65 52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91 22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80 24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50 32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70 67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21 24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72 21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95 78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92 16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57 86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58 19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40 04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66 88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69 04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36 20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16 20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54 01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48"
print(grid)
pass
class Yukicoder:
    """Placeholder namespace for Yukicoder solutions.

    Instances are callable; calling one simply prints ``1``.
    """

    def __init__(self):
        # Nothing to initialize yet.
        return

    def __call__(self):
        # Stub solution: emit the constant answer.
        print(1)
class AOJ:
    # Solutions for Aizu Online Judge problems.
    @staticmethod
    def ALDS1_12_A():
        # Minimum spanning tree weight from an n x n adjacency matrix,
        # where -1 marks "no edge".  Edges are added in both directions.
        n, *a = map(int, sys.stdin.read().split())
        # GeometryTopology.Graph is a project helper — assumed to expose
        # add_edge and kruskal(); TODO confirm its return shape.
        g = GeometryTopology.Graph(n)
        for i in range(n - 1):
            for j in range(i + 1, n):
                if a[i * n + j] == -1:
                    continue
                g.add_edge(i, j, weight=a[i * n + j])
                g.add_edge(j, i, weight=a[i * n + j])
        # d: presumably the total MST weight (second item of kruskal()).
        _, d = g.kruskal()
        print(d)
    @staticmethod
    def GRL_3_C():
        # Strongly connected components: for each query (u, v), print 1 if
        # they lie in the same component, else 0.
        n, m = map(int, sys.stdin.readline().split())
        g = GeometryTopology.Graph(n)
        for _ in range(m):
            g.add_edge(*map(int, sys.stdin.readline().split()))
        # r: presumably maps node -> component label; verify against helper.
        r = g.scc()
        q, *uv = map(int, sys.stdin.read().split())
        for u, v in zip(*[iter(uv)] * 2):
            print(int(r[u] == r[v]))
class YosupoJudge:
    # Solutions for Library Checker (judge.yosupo.jp) problems.
    @staticmethod
    def Directed_MST():
        # Minimum spanning arborescence rooted at s: print total weight,
        # then each node's parent.
        # NOTE(review): this calls prim(); a *directed* MST normally needs
        # Chu-Liu/Edmonds — confirm the project helper handles direction.
        n, m, s, *abc = map(int, sys.stdin.read().split())
        g = GeometryTopology.Graph(n)
        for a, b, c in zip(*[iter(abc)] * 3):
            g.add_edge(a, b, weight=c)
        _, d, p = g.prim(src=s, return_parent=True)
        print(d)
        print(*p)
    @staticmethod
    def Manhattan_MST():
        # Unfinished stub: input is parsed and the graph allocated, but no
        # edges are added and nothing is printed.
        n, *xy = map(int, sys.stdin.read().split())
        g = GeometryTopology.Graph(n)
if __name__ == "__main__":
    # Manual entry point: invoke whichever solution is currently being
    # worked on (edit this call to switch problems).
    AtCoder.ABC081.d()
    pass
| true
| true
|
f71a85be328989ab5fd1d62bb8e59c2c2b19ba47
| 3,607
|
py
|
Python
|
src/ekpmeasure/experiments/ferroelectric/_switching/core.py
|
cjfinnell/ekpmeasure
|
e6611c053cad28e06f4f8a94764ebe3805cddb15
|
[
"MIT"
] | null | null | null |
src/ekpmeasure/experiments/ferroelectric/_switching/core.py
|
cjfinnell/ekpmeasure
|
e6611c053cad28e06f4f8a94764ebe3805cddb15
|
[
"MIT"
] | null | null | null |
src/ekpmeasure/experiments/ferroelectric/_switching/core.py
|
cjfinnell/ekpmeasure
|
e6611c053cad28e06f4f8a94764ebe3805cddb15
|
[
"MIT"
] | null | null | null |
from ....control import core
from ....control.instruments.berkeleynucleonics765 import stop
from ..switching import preset_run_function
import pandas as pd
import numpy as np
import os
import warnings
import time
__all__ = ("FE",)
class FE(core.experiment):
    """Experiment class for running pulsed Ferroelectric switching experiments like those shown `here <https://journals.aps.org/prl/abstract/10.1103/PhysRevLett.125.067601>`_

    args:
        pg (pyvisa.resources.gpib.GPIBInstrument): Berkeley Nucleonics 765
        scope (pyvisa.resources.gpib.GPIBInstrument): Tektronix TDS620B or Tektronix TDS6604
        scopetype (str): Specify scope. Only Tektronix TDS620B (``'620B'``) or Tektronix TDS6604 (``'6604'``) are supported
        run_function (function): Run function.

    returns:
        (FE): Experiment

    """

    def __init__(self, pg, scope, scopetype="6604", run_function=preset_run_function):
        super().__init__()

        if scopetype != "6604" and scopetype != "620B":
            raise ValueError(
                "must specify scope type as either 6604 or 620B (corresponding to the correct scope you are using)"
            )

        # Bug fix: honor the caller-supplied run_function.  Previously this
        # was hard-coded to preset_run_function, silently ignoring the
        # run_function argument.
        self.run_function = run_function
        self.pg = pg
        self.scope = scope
        self.scopetype = scopetype
        return

    def checks(self, params):
        """Verify that ``params`` destined for the run function agree with the
        instruments/settings this experiment was constructed with.

        args:
            params (dict): Keyword arguments for ``run_function``.  Expected
                keys: ``'pg'``, ``'scope'`` and, optionally, ``'scopetype'``.

        raises:
            ValueError: If ``'pg'``/``'scope'`` (or an explicitly supplied
                ``'scopetype'``) is missing or differs from the value given at
                initialization, or if ``'scopetype'`` is absent while a
                non-default scope type was configured.
        """

        def _check_match(name, expected):
            # params.get returns None for a missing key, so an absent key now
            # raises the intended "does not match (None)" ValueError.  (The
            # original indexed params[...] directly, letting a bare KeyError
            # escape before its except clause could run.)
            provided = params.get(name)
            if expected != provided:
                raise ValueError(
                    "{} provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
                        name, expected, provided
                    )
                )

        _check_match("pg", self.pg)
        _check_match("scope", self.scope)

        if "scopetype" in params:
            _check_match("scopetype", self.scopetype)
        elif self.scopetype != "6604":
            # scopetype was not passed through; only the default is safe to
            # assume, so anything else must be specified explicitly.
            raise ValueError(
                "check scopetype. If you think this is done correctly, please specify explicitly scopetype in params."
            )

    def terminate(self):
        """Terminate the experiment by stopping the pulse generator output."""
        stop(self.pg)
        return
| 36.806122
| 174
| 0.540061
|
from ....control import core
from ....control.instruments.berkeleynucleonics765 import stop
from ..switching import preset_run_function
import pandas as pd
import numpy as np
import os
import warnings
import time
__all__ = ("FE",)
class FE(core.experiment):
def __init__(self, pg, scope, scopetype="6604", run_function=preset_run_function):
super().__init__()
if scopetype != "6604" and scopetype != "620B":
raise ValueError(
"must specify scope type as either 6604 or 620B (corresponding to the correct scope you are using)"
)
self.run_function = preset_run_function
self.pg = pg
self.scope = scope
self.scopetype = scopetype
return
def checks(self, params):
if self.pg != params["pg"]:
try:
raise ValueError(
"pg provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.pg, params["pg"]
)
)
except KeyError:
raise ValueError(
"pg provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.pg, None
)
)
if self.scope != params["scope"]:
try:
raise ValueError(
"scope provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.scope, params["scope"]
)
)
except KeyError:
raise ValueError(
"scope provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.scope, None
)
)
try:
if self.scopetype != params["scopetype"]:
try:
raise ValueError(
"scopetype provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.scopetype, params["scopetype"]
)
)
except KeyError:
raise ValueError(
"scopetype provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.scopetype, None
)
)
except KeyError:
if self.scopetype != "6604":
raise ValueError(
"check scopetype. If you think this is done correctly, please specify explicitly scopetype in params."
)
def terminate(self):
stop(self.pg)
return
| true
| true
|
f71a85f2c607d66f8e6260e04b8ed45d7f51a744
| 116
|
py
|
Python
|
plots/w.py
|
Tethik/whistleblower
|
56747cbf3c4eda95cee7eded36b4a853d33d6ee3
|
[
"MIT"
] | 1
|
2016-06-20T12:35:42.000Z
|
2016-06-20T12:35:42.000Z
|
plots/w.py
|
Tethik/whistleblower
|
56747cbf3c4eda95cee7eded36b4a853d33d6ee3
|
[
"MIT"
] | null | null | null |
plots/w.py
|
Tethik/whistleblower
|
56747cbf3c4eda95cee7eded36b4a853d33d6ee3
|
[
"MIT"
] | null | null | null |
def w(j, p):
    """Return ``4 * j * (1 - p)``.

    NOTE(review): presumably an expected-wait quantity for the plots in this
    repo — the domain meaning is not evident from this script.
    """
    complement = 1 - p
    return 4 * j * complement


for p in [0.5, 0.75, 0.99]:
    row = [w(j, p) * 24 * 7 for j in [5, 20, 50]]
    print(row)
| 19.333333
| 46
| 0.448276
|
def w(j, p):
return 4 * j * (1 - p)
for p in [0.5, 0.75, 0.99]:
print([w(j, p)*24*7 for j in [5, 20, 50]])
| true
| true
|
f71a8614737b2fe5ad5b8e12f3668178f8d6c600
| 8,334
|
py
|
Python
|
mindspore/ops/operations/__init__.py
|
ZephyrChenzf/mindspore
|
8f191847cf71e12715ced96bc3575914f980127a
|
[
"Apache-2.0"
] | null | null | null |
mindspore/ops/operations/__init__.py
|
ZephyrChenzf/mindspore
|
8f191847cf71e12715ced96bc3575914f980127a
|
[
"Apache-2.0"
] | null | null | null |
mindspore/ops/operations/__init__.py
|
ZephyrChenzf/mindspore
|
8f191847cf71e12715ced96bc3575914f980127a
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
Primitive operator classes.
A collection of operators to build nerual networks or computing functions.
"""
from .array_ops import (Argmax, Argmin, Cast, Concat, Pack, Unpack,
Diag, DiagPart, DType, ExpandDims, Eye,
Fill, GatherNd, GatherV2, InvertPermutation,
IsInstance, IsSubClass, ArgMaxWithValue, OnesLike, ZerosLike,
Rank, Reshape, ResizeNearestNeighbor, ArgMinWithValue, Range,
SameTypeShape, ScatterAdd, ScatterMax, ScatterUpdate,
ScalarToArray, ScalarToTensor, ScatterNd, ScatterNdUpdate, Select,
Shape, Size, Slice, Split, EmbeddingLookup,
Squeeze, StridedSlice, Tile,
Transpose, TruncatedNormal, TupleToArray, UnsortedSegmentMin,
UnsortedSegmentSum, SpaceToDepth, DepthToSpace, SpaceToBatch, BatchToSpace,
SpaceToBatchND, BatchToSpaceND, BroadcastTo)
from .comm_ops import (AllGather, AllReduce, _AlltoAll, ReduceScatter, Broadcast,
_MirrorOperator, ReduceOp, _VirtualDataset,
_VirtualDiv, _GetTensorSlice,
HostAllGather, HostReduceScatter)
from .debug_ops import (ImageSummary, InsertGradientOf, HookBackward, ScalarSummary,
TensorSummary, HistogramSummary, Print)
from .control_ops import ControlDepend, GeSwitch, Merge
from .inner_ops import ScalarCast
from .math_ops import (Abs, ACos, Asin, Asinh, AddN, AssignAdd, AssignSub, Atan2, BatchMatMul, BitwiseAnd, BitwiseOr,
BitwiseXor, Inv, Invert,
ReduceMax, ReduceMin, ReduceMean, ReduceSum, ReduceAll, ReduceProd, CumProd,
Cos, Div, DivNoNan, Equal, EqualCount, Exp, Expm1, Erf, Erfc, Floor, FloorDiv, FloorMod, Ceil,
Acosh, Greater, GreaterEqual, Less, LessEqual, Log, Log1p, LogicalAnd,
LogicalNot, LogicalOr, MatMul, Maximum,
Minimum, Mul, Neg, NMSWithMask, NotEqual,
NPUAllocFloatStatus, NPUClearFloatStatus,
NPUGetFloatStatus, Pow, RealDiv, IsNan, IsInf, IsFinite, FloatStatus,
Reciprocal, CumSum,
Sin, Sqrt, Rsqrt, BesselI0e, BesselI1e,
Square, Sub, TensorAdd, Sign, Round, SquareSumAll, Atan, Atanh, Cosh, Sinh)
from .random_ops import (RandomChoiceWithMask)
from .nn_ops import (LSTM, SGD, Adam, ApplyMomentum, BatchNorm,
BiasAdd, Conv2D,
DepthwiseConv2dNative,
DropoutDoMask, DropoutGrad, Dropout,
DropoutGenMask, Flatten, FusedBatchNorm,
Gelu, Elu,
GetNext, L2Normalize, LayerNorm, L2Loss, CTCLoss,
LogSoftmax,
MaxPool,
AvgPool, Conv2DBackpropInput, ConfusionMulGrad,
MaxPoolWithArgmax, OneHot, Pad, MirrorPad, PReLU, ReLU, ReLU6, ReLUV2, HSwish, HSigmoid,
ResizeBilinear, Sigmoid,
SigmoidCrossEntropyWithLogits,
SmoothL1Loss, Softmax, Softplus,
SoftmaxCrossEntropyWithLogits, ROIAlign,
SparseSoftmaxCrossEntropyWithLogits, Tanh,
TopK, BinaryCrossEntropy, SparseApplyAdagrad, LARSUpdate, ApplyFtrl, SparseApplyFtrl,
ApplyProximalAdagrad, SparseApplyProximalAdagrad,
ApplyRMSProp, ApplyCenteredRMSProp, BasicLSTMCell)
from .other_ops import (Assign, IOU, BoundingBoxDecode, BoundingBoxEncode,
CheckValid, MakeRefKey, CheckBprop, ConfusionMatrix)
from . import _quant_ops
from ._quant_ops import *
from .thor_ops import *
__all__ = [
'TensorAdd',
'Argmax',
'Argmin',
'ArgMaxWithValue',
'ArgMinWithValue',
'AddN',
'Sub',
'CumSum',
'MatMul',
'BatchMatMul',
'Mul',
'Pow',
'Exp',
'Expm1',
'Rsqrt',
'Sqrt',
'Square',
'Conv2D',
'Flatten',
'MaxPoolWithArgmax',
'BatchNorm',
'MaxPool',
'TopK',
'Adam',
'Softplus',
'Softmax',
'LogSoftmax',
'SoftmaxCrossEntropyWithLogits',
'ROIAlign',
'ConfusionMulGrad',
'SparseSoftmaxCrossEntropyWithLogits',
'SGD',
'ApplyMomentum',
'ExpandDims',
'Cast',
'IsSubClass',
'IsInstance',
'Reshape',
'Squeeze',
'Transpose',
'OneHot',
'GatherV2',
'Concat',
'Pack',
'Unpack',
'Tile',
'BiasAdd',
'Gelu',
'Minimum',
'Maximum',
'StridedSlice',
'ReduceSum',
'ReduceMean',
'LayerNorm',
'EmbeddingLookup',
'Rank',
'Less',
'LessEqual',
'RealDiv',
'Div',
'DivNoNan',
'Inv',
'Invert',
'TruncatedNormal',
'Fill',
'OnesLike',
'ZerosLike',
'Select',
'Split',
'ReLU',
'ReLU6',
'Elu',
'Erf',
'Erfc',
'Sigmoid',
'HSwish',
'HSigmoid',
'Tanh',
'RandomChoiceWithMask',
'ResizeBilinear',
'ScalarSummary',
'ImageSummary',
'TensorSummary',
'HistogramSummary',
"Print",
'InsertGradientOf',
'HookBackward',
'InvertPermutation',
'Shape',
'DropoutDoMask',
'DropoutGenMask',
'DropoutGrad',
'Dropout',
'Neg',
'Slice',
'DType',
'NPUAllocFloatStatus',
'NPUGetFloatStatus',
'NPUClearFloatStatus',
'IsNan',
'IsFinite',
'IsInf',
'FloatStatus',
'Reciprocal',
'SmoothL1Loss',
'L2Loss',
'CTCLoss',
'ReduceAll',
'ScalarToArray',
'ScalarToTensor',
'TupleToArray',
'ControlDepend',
'GeSwitch',
'Merge',
'SameTypeShape',
'CheckBprop',
'CheckValid',
'BoundingBoxEncode',
'BoundingBoxDecode',
'L2Normalize',
'ScatterAdd',
'ScatterNd',
'ScatterMax',
'ResizeNearestNeighbor',
'Pad',
'MirrorPad',
'GatherNd',
'ScatterUpdate',
'ScatterNdUpdate',
'Floor',
'NMSWithMask',
'IOU',
'MakeRefKey',
'AvgPool',
# Back Primitive
'Equal',
'EqualCount',
'NotEqual',
'Greater',
'GreaterEqual',
'LogicalNot',
'LogicalAnd',
'LogicalOr',
'Size',
'DepthwiseConv2dNative',
'UnsortedSegmentSum',
'UnsortedSegmentMin',
"AllGather",
"HostAllGather",
"AllReduce",
"ReduceScatter",
"HostReduceScatter",
"Broadcast",
"ReduceOp",
'ScalarCast',
'GetNext',
'ReduceMax',
'ReduceMin',
'ReduceProd',
'CumProd',
'Log',
'Log1p',
'SigmoidCrossEntropyWithLogits',
'FloorDiv',
'FloorMod',
'Ceil',
'Acosh',
'Asinh',
"PReLU",
"Cos",
"Cosh",
"ACos",
"Diag",
"DiagPart",
'Eye',
'Assign',
'AssignAdd',
'AssignSub',
"Sin",
"Sinh",
"Asin",
"LSTM",
"Abs",
"BinaryCrossEntropy",
"SparseApplyAdagrad",
"SpaceToDepth",
"DepthToSpace",
"Conv2DBackpropInput",
"Sign",
"LARSUpdate",
"Round",
"ApplyFtrl",
"SpaceToBatch",
"SparseApplyFtrl",
"ApplyProximalAdagrad",
"SparseApplyProximalAdagrad",
"BatchToSpace",
"Atan2",
"ApplyRMSProp",
"ApplyCenteredRMSProp",
"SpaceToBatchND",
"BatchToSpaceND",
"SquareSumAll",
"BitwiseAnd",
"BitwiseOr",
"BitwiseXor",
"BesselI0e",
"BesselI1e",
"Atan",
"Atanh",
"BasicLSTMCell",
"ConfusionMatrix",
"BroadcastTo"
]
__all__.extend(_quant_ops.__all__)
__all__.sort()
| 27.78
| 117
| 0.583033
|
from .array_ops import (Argmax, Argmin, Cast, Concat, Pack, Unpack,
Diag, DiagPart, DType, ExpandDims, Eye,
Fill, GatherNd, GatherV2, InvertPermutation,
IsInstance, IsSubClass, ArgMaxWithValue, OnesLike, ZerosLike,
Rank, Reshape, ResizeNearestNeighbor, ArgMinWithValue, Range,
SameTypeShape, ScatterAdd, ScatterMax, ScatterUpdate,
ScalarToArray, ScalarToTensor, ScatterNd, ScatterNdUpdate, Select,
Shape, Size, Slice, Split, EmbeddingLookup,
Squeeze, StridedSlice, Tile,
Transpose, TruncatedNormal, TupleToArray, UnsortedSegmentMin,
UnsortedSegmentSum, SpaceToDepth, DepthToSpace, SpaceToBatch, BatchToSpace,
SpaceToBatchND, BatchToSpaceND, BroadcastTo)
from .comm_ops import (AllGather, AllReduce, _AlltoAll, ReduceScatter, Broadcast,
_MirrorOperator, ReduceOp, _VirtualDataset,
_VirtualDiv, _GetTensorSlice,
HostAllGather, HostReduceScatter)
from .debug_ops import (ImageSummary, InsertGradientOf, HookBackward, ScalarSummary,
TensorSummary, HistogramSummary, Print)
from .control_ops import ControlDepend, GeSwitch, Merge
from .inner_ops import ScalarCast
from .math_ops import (Abs, ACos, Asin, Asinh, AddN, AssignAdd, AssignSub, Atan2, BatchMatMul, BitwiseAnd, BitwiseOr,
BitwiseXor, Inv, Invert,
ReduceMax, ReduceMin, ReduceMean, ReduceSum, ReduceAll, ReduceProd, CumProd,
Cos, Div, DivNoNan, Equal, EqualCount, Exp, Expm1, Erf, Erfc, Floor, FloorDiv, FloorMod, Ceil,
Acosh, Greater, GreaterEqual, Less, LessEqual, Log, Log1p, LogicalAnd,
LogicalNot, LogicalOr, MatMul, Maximum,
Minimum, Mul, Neg, NMSWithMask, NotEqual,
NPUAllocFloatStatus, NPUClearFloatStatus,
NPUGetFloatStatus, Pow, RealDiv, IsNan, IsInf, IsFinite, FloatStatus,
Reciprocal, CumSum,
Sin, Sqrt, Rsqrt, BesselI0e, BesselI1e,
Square, Sub, TensorAdd, Sign, Round, SquareSumAll, Atan, Atanh, Cosh, Sinh)
from .random_ops import (RandomChoiceWithMask)
from .nn_ops import (LSTM, SGD, Adam, ApplyMomentum, BatchNorm,
BiasAdd, Conv2D,
DepthwiseConv2dNative,
DropoutDoMask, DropoutGrad, Dropout,
DropoutGenMask, Flatten, FusedBatchNorm,
Gelu, Elu,
GetNext, L2Normalize, LayerNorm, L2Loss, CTCLoss,
LogSoftmax,
MaxPool,
AvgPool, Conv2DBackpropInput, ConfusionMulGrad,
MaxPoolWithArgmax, OneHot, Pad, MirrorPad, PReLU, ReLU, ReLU6, ReLUV2, HSwish, HSigmoid,
ResizeBilinear, Sigmoid,
SigmoidCrossEntropyWithLogits,
SmoothL1Loss, Softmax, Softplus,
SoftmaxCrossEntropyWithLogits, ROIAlign,
SparseSoftmaxCrossEntropyWithLogits, Tanh,
TopK, BinaryCrossEntropy, SparseApplyAdagrad, LARSUpdate, ApplyFtrl, SparseApplyFtrl,
ApplyProximalAdagrad, SparseApplyProximalAdagrad,
ApplyRMSProp, ApplyCenteredRMSProp, BasicLSTMCell)
from .other_ops import (Assign, IOU, BoundingBoxDecode, BoundingBoxEncode,
CheckValid, MakeRefKey, CheckBprop, ConfusionMatrix)
from . import _quant_ops
from ._quant_ops import *
from .thor_ops import *
__all__ = [
'TensorAdd',
'Argmax',
'Argmin',
'ArgMaxWithValue',
'ArgMinWithValue',
'AddN',
'Sub',
'CumSum',
'MatMul',
'BatchMatMul',
'Mul',
'Pow',
'Exp',
'Expm1',
'Rsqrt',
'Sqrt',
'Square',
'Conv2D',
'Flatten',
'MaxPoolWithArgmax',
'BatchNorm',
'MaxPool',
'TopK',
'Adam',
'Softplus',
'Softmax',
'LogSoftmax',
'SoftmaxCrossEntropyWithLogits',
'ROIAlign',
'ConfusionMulGrad',
'SparseSoftmaxCrossEntropyWithLogits',
'SGD',
'ApplyMomentum',
'ExpandDims',
'Cast',
'IsSubClass',
'IsInstance',
'Reshape',
'Squeeze',
'Transpose',
'OneHot',
'GatherV2',
'Concat',
'Pack',
'Unpack',
'Tile',
'BiasAdd',
'Gelu',
'Minimum',
'Maximum',
'StridedSlice',
'ReduceSum',
'ReduceMean',
'LayerNorm',
'EmbeddingLookup',
'Rank',
'Less',
'LessEqual',
'RealDiv',
'Div',
'DivNoNan',
'Inv',
'Invert',
'TruncatedNormal',
'Fill',
'OnesLike',
'ZerosLike',
'Select',
'Split',
'ReLU',
'ReLU6',
'Elu',
'Erf',
'Erfc',
'Sigmoid',
'HSwish',
'HSigmoid',
'Tanh',
'RandomChoiceWithMask',
'ResizeBilinear',
'ScalarSummary',
'ImageSummary',
'TensorSummary',
'HistogramSummary',
"Print",
'InsertGradientOf',
'HookBackward',
'InvertPermutation',
'Shape',
'DropoutDoMask',
'DropoutGenMask',
'DropoutGrad',
'Dropout',
'Neg',
'Slice',
'DType',
'NPUAllocFloatStatus',
'NPUGetFloatStatus',
'NPUClearFloatStatus',
'IsNan',
'IsFinite',
'IsInf',
'FloatStatus',
'Reciprocal',
'SmoothL1Loss',
'L2Loss',
'CTCLoss',
'ReduceAll',
'ScalarToArray',
'ScalarToTensor',
'TupleToArray',
'ControlDepend',
'GeSwitch',
'Merge',
'SameTypeShape',
'CheckBprop',
'CheckValid',
'BoundingBoxEncode',
'BoundingBoxDecode',
'L2Normalize',
'ScatterAdd',
'ScatterNd',
'ScatterMax',
'ResizeNearestNeighbor',
'Pad',
'MirrorPad',
'GatherNd',
'ScatterUpdate',
'ScatterNdUpdate',
'Floor',
'NMSWithMask',
'IOU',
'MakeRefKey',
'AvgPool',
'Equal',
'EqualCount',
'NotEqual',
'Greater',
'GreaterEqual',
'LogicalNot',
'LogicalAnd',
'LogicalOr',
'Size',
'DepthwiseConv2dNative',
'UnsortedSegmentSum',
'UnsortedSegmentMin',
"AllGather",
"HostAllGather",
"AllReduce",
"ReduceScatter",
"HostReduceScatter",
"Broadcast",
"ReduceOp",
'ScalarCast',
'GetNext',
'ReduceMax',
'ReduceMin',
'ReduceProd',
'CumProd',
'Log',
'Log1p',
'SigmoidCrossEntropyWithLogits',
'FloorDiv',
'FloorMod',
'Ceil',
'Acosh',
'Asinh',
"PReLU",
"Cos",
"Cosh",
"ACos",
"Diag",
"DiagPart",
'Eye',
'Assign',
'AssignAdd',
'AssignSub',
"Sin",
"Sinh",
"Asin",
"LSTM",
"Abs",
"BinaryCrossEntropy",
"SparseApplyAdagrad",
"SpaceToDepth",
"DepthToSpace",
"Conv2DBackpropInput",
"Sign",
"LARSUpdate",
"Round",
"ApplyFtrl",
"SpaceToBatch",
"SparseApplyFtrl",
"ApplyProximalAdagrad",
"SparseApplyProximalAdagrad",
"BatchToSpace",
"Atan2",
"ApplyRMSProp",
"ApplyCenteredRMSProp",
"SpaceToBatchND",
"BatchToSpaceND",
"SquareSumAll",
"BitwiseAnd",
"BitwiseOr",
"BitwiseXor",
"BesselI0e",
"BesselI1e",
"Atan",
"Atanh",
"BasicLSTMCell",
"ConfusionMatrix",
"BroadcastTo"
]
__all__.extend(_quant_ops.__all__)
__all__.sort()
| true
| true
|
f71a86b512e65c17c14cf1e55832bd7b556b892d
| 284,631
|
py
|
Python
|
tt.py
|
someone120/some-py
|
a14732b9fde52d5476e4a433e3eecea8ea3eeaec
|
[
"Apache-2.0"
] | null | null | null |
tt.py
|
someone120/some-py
|
a14732b9fde52d5476e4a433e3eecea8ea3eeaec
|
[
"Apache-2.0"
] | null | null | null |
tt.py
|
someone120/some-py
|
a14732b9fde52d5476e4a433e3eecea8ea3eeaec
|
[
"Apache-2.0"
] | null | null | null |
import json
a="""
[
{
"_id": 1,
"id": 1,
"pid": 0,
"city_code": "101010100",
"city_name": "北京"
},
{
"_id": 2,
"id": 2,
"pid": 0,
"city_code": "",
"city_name": "安徽"
},
{
"_id": 3,
"id": 3,
"pid": 0,
"city_code": "",
"city_name": "福建"
},
{
"_id": 4,
"id": 4,
"pid": 0,
"city_code": "",
"city_name": "甘肃"
},
{
"_id": 5,
"id": 5,
"pid": 0,
"city_code": "",
"city_name": "广东"
},
{
"_id": 6,
"id": 6,
"pid": 0,
"city_code": "",
"city_name": "广西"
},
{
"_id": 7,
"id": 7,
"pid": 0,
"city_code": "",
"city_name": "贵州"
},
{
"_id": 8,
"id": 8,
"pid": 0,
"city_code": "",
"city_name": "海南"
},
{
"_id": 9,
"id": 9,
"pid": 0,
"city_code": "",
"city_name": "河北"
},
{
"_id": 10,
"id": 10,
"pid": 0,
"city_code": "",
"city_name": "河南"
},
{
"_id": 11,
"id": 11,
"pid": 0,
"city_code": "",
"city_name": "黑龙江"
},
{
"_id": 12,
"id": 12,
"pid": 0,
"city_code": "",
"city_name": "湖北"
},
{
"_id": 13,
"id": 13,
"pid": 0,
"city_code": "",
"city_name": "湖南"
},
{
"_id": 14,
"id": 14,
"pid": 0,
"city_code": "",
"city_name": "吉林"
},
{
"_id": 15,
"id": 15,
"pid": 0,
"city_code": "",
"city_name": "江苏"
},
{
"_id": 16,
"id": 16,
"pid": 0,
"city_code": "",
"city_name": "江西"
},
{
"_id": 17,
"id": 17,
"pid": 0,
"city_code": "",
"city_name": "辽宁"
},
{
"_id": 18,
"id": 18,
"pid": 0,
"city_code": "",
"city_name": "内蒙古"
},
{
"_id": 19,
"id": 19,
"pid": 0,
"city_code": "",
"city_name": "宁夏"
},
{
"_id": 20,
"id": 20,
"pid": 0,
"city_code": "",
"city_name": "青海"
},
{
"_id": 21,
"id": 21,
"pid": 0,
"city_code": "",
"city_name": "山东"
},
{
"_id": 22,
"id": 22,
"pid": 0,
"city_code": "",
"city_name": "山西"
},
{
"_id": 23,
"id": 23,
"pid": 0,
"city_code": "",
"city_name": "陕西"
},
{
"_id": 24,
"id": 24,
"pid": 0,
"city_code": "101020100",
"city_name": "上海"
},
{
"_id": 25,
"id": 25,
"pid": 0,
"city_code": "",
"city_name": "四川"
},
{
"_id": 26,
"id": 26,
"pid": 0,
"city_code": "101030100",
"city_name": "天津"
},
{
"_id": 27,
"id": 27,
"pid": 0,
"city_code": "",
"city_name": "西藏"
},
{
"_id": 28,
"id": 28,
"pid": 0,
"city_code": "",
"city_name": "新疆"
},
{
"_id": 29,
"id": 29,
"pid": 0,
"city_code": "",
"city_name": "云南"
},
{
"_id": 30,
"id": 30,
"pid": 0,
"city_code": "",
"city_name": "浙江"
},
{
"_id": 31,
"id": 31,
"pid": 0,
"city_code": "101040100",
"city_name": "重庆"
},
{
"_id": 32,
"id": 32,
"pid": 0,
"city_code": "101320101",
"city_name": "香港"
},
{
"_id": 33,
"id": 33,
"pid": 0,
"city_code": "101330101",
"city_name": "澳门"
},
{
"_id": 34,
"id": 34,
"pid": 0,
"city_code": "",
"city_name": "台湾"
},
{
"_id": 35,
"id": 35,
"pid": 2,
"city_code": "101220601",
"city_name": "安庆"
},
{
"_id": 36,
"id": 36,
"pid": 2,
"city_code": "101220201",
"city_name": "蚌埠"
},
{
"_id": 37,
"id": 37,
"pid": 3400,
"city_code": "101220105",
"city_name": "巢湖市"
},
{
"_id": 38,
"id": 38,
"pid": 2,
"city_code": "101221701",
"city_name": "池州"
},
{
"_id": 39,
"id": 39,
"pid": 2,
"city_code": "101221101",
"city_name": "滁州"
},
{
"_id": 40,
"id": 40,
"pid": 2,
"city_code": "101220801",
"city_name": "阜阳"
},
{
"_id": 41,
"id": 41,
"pid": 2,
"city_code": "101221201",
"city_name": "淮北"
},
{
"_id": 42,
"id": 42,
"pid": 2,
"city_code": "101220401",
"city_name": "淮南"
},
{
"_id": 43,
"id": 43,
"pid": 2,
"city_code": "101221001",
"city_name": "黄山"
},
{
"_id": 44,
"id": 44,
"pid": 2,
"city_code": "101221501",
"city_name": "六安"
},
{
"_id": 45,
"id": 45,
"pid": 2,
"city_code": "101220501",
"city_name": "马鞍山"
},
{
"_id": 46,
"id": 46,
"pid": 2,
"city_code": "101220701",
"city_name": "宿州"
},
{
"_id": 47,
"id": 47,
"pid": 2,
"city_code": "101221301",
"city_name": "铜陵"
},
{
"_id": 48,
"id": 48,
"pid": 2,
"city_code": "101220301",
"city_name": "芜湖"
},
{
"_id": 49,
"id": 49,
"pid": 2,
"city_code": "101221401",
"city_name": "宣城"
},
{
"_id": 50,
"id": 50,
"pid": 2,
"city_code": "101220901",
"city_name": "亳州"
},
{
"_id": 51,
"id": 52,
"pid": 3,
"city_code": "101230101",
"city_name": "福州"
},
{
"_id": 52,
"id": 53,
"pid": 3,
"city_code": "101230701",
"city_name": "龙岩"
},
{
"_id": 53,
"id": 54,
"pid": 3,
"city_code": "101230901",
"city_name": "南平"
},
{
"_id": 54,
"id": 55,
"pid": 3,
"city_code": "101230301",
"city_name": "宁德"
},
{
"_id": 55,
"id": 56,
"pid": 3,
"city_code": "101230401",
"city_name": "莆田"
},
{
"_id": 56,
"id": 57,
"pid": 3,
"city_code": "101230501",
"city_name": "泉州"
},
{
"_id": 57,
"id": 58,
"pid": 3,
"city_code": "101230801",
"city_name": "三明"
},
{
"_id": 58,
"id": 59,
"pid": 3,
"city_code": "101230201",
"city_name": "厦门"
},
{
"_id": 59,
"id": 60,
"pid": 3,
"city_code": "101230601",
"city_name": "漳州"
},
{
"_id": 60,
"id": 61,
"pid": 4,
"city_code": "101160101",
"city_name": "兰州"
},
{
"_id": 61,
"id": 62,
"pid": 4,
"city_code": "101161301",
"city_name": "白银"
},
{
"_id": 62,
"id": 63,
"pid": 4,
"city_code": "101160201",
"city_name": "定西"
},
{
"_id": 63,
"id": 64,
"pid": 4,
"city_code": "",
"city_name": "甘南州"
},
{
"_id": 64,
"id": 65,
"pid": 4,
"city_code": "101161401",
"city_name": "嘉峪关"
},
{
"_id": 65,
"id": 66,
"pid": 4,
"city_code": "101160601",
"city_name": "金昌"
},
{
"_id": 66,
"id": 67,
"pid": 4,
"city_code": "101160801",
"city_name": "酒泉"
},
{
"_id": 67,
"id": 68,
"pid": 4,
"city_code": "101161101",
"city_name": "临夏"
},
{
"_id": 68,
"id": 69,
"pid": 4,
"city_code": "101161010",
"city_name": "陇南市"
},
{
"_id": 69,
"id": 70,
"pid": 4,
"city_code": "101160301",
"city_name": "平凉"
},
{
"_id": 70,
"id": 71,
"pid": 4,
"city_code": "101160401",
"city_name": "庆阳"
},
{
"_id": 71,
"id": 72,
"pid": 4,
"city_code": "101160901",
"city_name": "天水"
},
{
"_id": 72,
"id": 73,
"pid": 4,
"city_code": "101160501",
"city_name": "武威"
},
{
"_id": 73,
"id": 74,
"pid": 4,
"city_code": "101160701",
"city_name": "张掖"
},
{
"_id": 74,
"id": 75,
"pid": 5,
"city_code": "101280101",
"city_name": "广州"
},
{
"_id": 75,
"id": 76,
"pid": 5,
"city_code": "101280601",
"city_name": "深圳"
},
{
"_id": 76,
"id": 77,
"pid": 5,
"city_code": "101281501",
"city_name": "潮州"
},
{
"_id": 77,
"id": 78,
"pid": 5,
"city_code": "101281601",
"city_name": "东莞"
},
{
"_id": 78,
"id": 79,
"pid": 5,
"city_code": "101280800",
"city_name": "佛山"
},
{
"_id": 79,
"id": 80,
"pid": 5,
"city_code": "101281201",
"city_name": "河源"
},
{
"_id": 80,
"id": 81,
"pid": 5,
"city_code": "101280301",
"city_name": "惠州"
},
{
"_id": 81,
"id": 82,
"pid": 5,
"city_code": "101281101",
"city_name": "江门"
},
{
"_id": 82,
"id": 83,
"pid": 5,
"city_code": "101281901",
"city_name": "揭阳"
},
{
"_id": 83,
"id": 84,
"pid": 5,
"city_code": "101282001",
"city_name": "茂名"
},
{
"_id": 84,
"id": 85,
"pid": 5,
"city_code": "101280401",
"city_name": "梅州"
},
{
"_id": 85,
"id": 86,
"pid": 5,
"city_code": "101281301",
"city_name": "清远"
},
{
"_id": 86,
"id": 87,
"pid": 5,
"city_code": "101280501",
"city_name": "汕头"
},
{
"_id": 87,
"id": 88,
"pid": 5,
"city_code": "101282101",
"city_name": "汕尾"
},
{
"_id": 88,
"id": 89,
"pid": 5,
"city_code": "101280201",
"city_name": "韶关"
},
{
"_id": 89,
"id": 90,
"pid": 5,
"city_code": "101281801",
"city_name": "阳江"
},
{
"_id": 90,
"id": 91,
"pid": 5,
"city_code": "101281401",
"city_name": "云浮"
},
{
"_id": 91,
"id": 92,
"pid": 5,
"city_code": "101281001",
"city_name": "湛江"
},
{
"_id": 92,
"id": 93,
"pid": 5,
"city_code": "101280901",
"city_name": "肇庆"
},
{
"_id": 93,
"id": 94,
"pid": 5,
"city_code": "101281701",
"city_name": "中山"
},
{
"_id": 94,
"id": 95,
"pid": 5,
"city_code": "101280701",
"city_name": "珠海"
},
{
"_id": 95,
"id": 96,
"pid": 6,
"city_code": "101300101",
"city_name": "南宁"
},
{
"_id": 96,
"id": 97,
"pid": 6,
"city_code": "101300501",
"city_name": "桂林"
},
{
"_id": 97,
"id": 98,
"pid": 6,
"city_code": "101301001",
"city_name": "百色"
},
{
"_id": 98,
"id": 99,
"pid": 6,
"city_code": "101301301",
"city_name": "北海"
},
{
"_id": 99,
"id": 100,
"pid": 6,
"city_code": "101300201",
"city_name": "崇左"
},
{
"_id": 100,
"id": 101,
"pid": 6,
"city_code": "101301401",
"city_name": "防城港"
},
{
"_id": 101,
"id": 102,
"pid": 6,
"city_code": "101300801",
"city_name": "贵港"
},
{
"_id": 102,
"id": 103,
"pid": 6,
"city_code": "101301201",
"city_name": "河池"
},
{
"_id": 103,
"id": 104,
"pid": 6,
"city_code": "101300701",
"city_name": "贺州"
},
{
"_id": 104,
"id": 105,
"pid": 6,
"city_code": "101300401",
"city_name": "来宾"
},
{
"_id": 105,
"id": 106,
"pid": 6,
"city_code": "101300301",
"city_name": "柳州"
},
{
"_id": 106,
"id": 107,
"pid": 6,
"city_code": "101301101",
"city_name": "钦州"
},
{
"_id": 107,
"id": 108,
"pid": 6,
"city_code": "101300601",
"city_name": "梧州"
},
{
"_id": 108,
"id": 109,
"pid": 6,
"city_code": "101300901",
"city_name": "玉林"
},
{
"_id": 109,
"id": 110,
"pid": 7,
"city_code": "101260101",
"city_name": "贵阳"
},
{
"_id": 110,
"id": 111,
"pid": 7,
"city_code": "101260301",
"city_name": "安顺"
},
{
"_id": 111,
"id": 112,
"pid": 7,
"city_code": "101260701",
"city_name": "毕节"
},
{
"_id": 112,
"id": 113,
"pid": 7,
"city_code": "101260801",
"city_name": "六盘水"
},
{
"_id": 113,
"id": 114,
"pid": 7,
"city_code": "101260506",
"city_name": "黔东南"
},
{
"_id": 114,
"id": 115,
"pid": 7,
"city_code": "101260413",
"city_name": "黔南"
},
{
"_id": 115,
"id": 116,
"pid": 7,
"city_code": "101260906",
"city_name": "黔西南"
},
{
"_id": 116,
"id": 117,
"pid": 7,
"city_code": "101260601",
"city_name": "铜仁"
},
{
"_id": 117,
"id": 118,
"pid": 7,
"city_code": "101260201",
"city_name": "遵义"
},
{
"_id": 118,
"id": 119,
"pid": 8,
"city_code": "101310101",
"city_name": "海口"
},
{
"_id": 119,
"id": 120,
"pid": 8,
"city_code": "101310201",
"city_name": "三亚"
},
{
"_id": 120,
"id": 121,
"pid": 8,
"city_code": "101310207",
"city_name": "白沙县"
},
{
"_id": 121,
"id": 122,
"pid": 8,
"city_code": "101310214",
"city_name": "保亭县"
},
{
"_id": 122,
"id": 123,
"pid": 8,
"city_code": "101310206",
"city_name": "昌江县"
},
{
"_id": 123,
"id": 124,
"pid": 8,
"city_code": "101310204",
"city_name": "澄迈县"
},
{
"_id": 124,
"id": 125,
"pid": 8,
"city_code": "101310209",
"city_name": "定安县"
},
{
"_id": 125,
"id": 126,
"pid": 8,
"city_code": "101310202",
"city_name": "东方"
},
{
"_id": 126,
"id": 127,
"pid": 8,
"city_code": "101310221",
"city_name": "乐东县"
},
{
"_id": 127,
"id": 128,
"pid": 8,
"city_code": "101310203",
"city_name": "临高县"
},
{
"_id": 128,
"id": 129,
"pid": 8,
"city_code": "101310216",
"city_name": "陵水县"
},
{
"_id": 129,
"id": 130,
"pid": 8,
"city_code": "101310211",
"city_name": "琼海"
},
{
"_id": 130,
"id": 131,
"pid": 8,
"city_code": "101310208",
"city_name": "琼中"
},
{
"_id": 131,
"id": 132,
"pid": 8,
"city_code": "101310210",
"city_name": "屯昌县"
},
{
"_id": 132,
"id": 133,
"pid": 8,
"city_code": "101310215",
"city_name": "万宁"
},
{
"_id": 133,
"id": 134,
"pid": 8,
"city_code": "101310212",
"city_name": "文昌"
},
{
"_id": 134,
"id": 135,
"pid": 8,
"city_code": "101310222",
"city_name": "五指山"
},
{
"_id": 135,
"id": 136,
"pid": 8,
"city_code": "101310205",
"city_name": "儋州"
},
{
"_id": 136,
"id": 137,
"pid": 9,
"city_code": "101090101",
"city_name": "石家庄"
},
{
"_id": 137,
"id": 138,
"pid": 9,
"city_code": "101090201",
"city_name": "保定"
},
{
"_id": 138,
"id": 139,
"pid": 9,
"city_code": "101090701",
"city_name": "沧州"
},
{
"_id": 139,
"id": 140,
"pid": 9,
"city_code": "101090402",
"city_name": "承德"
},
{
"_id": 140,
"id": 141,
"pid": 9,
"city_code": "101091001",
"city_name": "邯郸"
},
{
"_id": 141,
"id": 142,
"pid": 9,
"city_code": "101090801",
"city_name": "衡水"
},
{
"_id": 142,
"id": 143,
"pid": 9,
"city_code": "101090601",
"city_name": "廊坊"
},
{
"_id": 143,
"id": 144,
"pid": 9,
"city_code": "101091101",
"city_name": "秦皇岛"
},
{
"_id": 144,
"id": 145,
"pid": 9,
"city_code": "101090501",
"city_name": "唐山"
},
{
"_id": 145,
"id": 146,
"pid": 9,
"city_code": "101090901",
"city_name": "邢台"
},
{
"_id": 146,
"id": 147,
"pid": 9,
"city_code": "101090301",
"city_name": "张家口"
},
{
"_id": 147,
"id": 148,
"pid": 10,
"city_code": "101180101",
"city_name": "郑州"
},
{
"_id": 148,
"id": 149,
"pid": 10,
"city_code": "101180901",
"city_name": "洛阳"
},
{
"_id": 149,
"id": 150,
"pid": 10,
"city_code": "101180801",
"city_name": "开封"
},
{
"_id": 150,
"id": 151,
"pid": 10,
"city_code": "101180201",
"city_name": "安阳"
},
{
"_id": 151,
"id": 152,
"pid": 10,
"city_code": "101181201",
"city_name": "鹤壁"
},
{
"_id": 152,
"id": 153,
"pid": 10,
"city_code": "101181801",
"city_name": "济源"
},
{
"_id": 153,
"id": 154,
"pid": 10,
"city_code": "101181101",
"city_name": "焦作"
},
{
"_id": 154,
"id": 155,
"pid": 10,
"city_code": "101180701",
"city_name": "南阳"
},
{
"_id": 155,
"id": 156,
"pid": 10,
"city_code": "101180501",
"city_name": "平顶山"
},
{
"_id": 156,
"id": 157,
"pid": 10,
"city_code": "101181701",
"city_name": "三门峡"
},
{
"_id": 157,
"id": 158,
"pid": 10,
"city_code": "101181001",
"city_name": "商丘"
},
{
"_id": 158,
"id": 159,
"pid": 10,
"city_code": "101180301",
"city_name": "新乡"
},
{
"_id": 159,
"id": 160,
"pid": 10,
"city_code": "101180601",
"city_name": "信阳"
},
{
"_id": 160,
"id": 161,
"pid": 10,
"city_code": "101180401",
"city_name": "许昌"
},
{
"_id": 161,
"id": 162,
"pid": 10,
"city_code": "101181401",
"city_name": "周口"
},
{
"_id": 162,
"id": 163,
"pid": 10,
"city_code": "101181601",
"city_name": "驻马店"
},
{
"_id": 163,
"id": 164,
"pid": 10,
"city_code": "101181501",
"city_name": "漯河"
},
{
"_id": 164,
"id": 165,
"pid": 10,
"city_code": "101181301",
"city_name": "濮阳"
},
{
"_id": 165,
"id": 166,
"pid": 11,
"city_code": "101050101",
"city_name": "哈尔滨"
},
{
"_id": 166,
"id": 167,
"pid": 11,
"city_code": "101050901",
"city_name": "大庆"
},
{
"_id": 167,
"id": 168,
"pid": 11,
"city_code": "101050701",
"city_name": "大兴安岭"
},
{
"_id": 168,
"id": 169,
"pid": 11,
"city_code": "101051201",
"city_name": "鹤岗"
},
{
"_id": 169,
"id": 170,
"pid": 11,
"city_code": "101050601",
"city_name": "黑河"
},
{
"_id": 170,
"id": 171,
"pid": 11,
"city_code": "101051101",
"city_name": "鸡西"
},
{
"_id": 171,
"id": 172,
"pid": 11,
"city_code": "101050401",
"city_name": "佳木斯"
},
{
"_id": 172,
"id": 173,
"pid": 11,
"city_code": "101050301",
"city_name": "牡丹江"
},
{
"_id": 173,
"id": 174,
"pid": 11,
"city_code": "101051002",
"city_name": "七台河"
},
{
"_id": 174,
"id": 175,
"pid": 11,
"city_code": "101050201",
"city_name": "齐齐哈尔"
},
{
"_id": 175,
"id": 176,
"pid": 11,
"city_code": "101051301",
"city_name": "双鸭山"
},
{
"_id": 176,
"id": 177,
"pid": 11,
"city_code": "101050501",
"city_name": "绥化"
},
{
"_id": 177,
"id": 178,
"pid": 11,
"city_code": "101050801",
"city_name": "伊春"
},
{
"_id": 178,
"id": 179,
"pid": 12,
"city_code": "101200101",
"city_name": "武汉"
},
{
"_id": 179,
"id": 180,
"pid": 12,
"city_code": "101201601",
"city_name": "仙桃"
},
{
"_id": 180,
"id": 181,
"pid": 12,
"city_code": "101200301",
"city_name": "鄂州"
},
{
"_id": 181,
"id": 182,
"pid": 12,
"city_code": "101200501",
"city_name": "黄冈"
},
{
"_id": 182,
"id": 183,
"pid": 12,
"city_code": "101200601",
"city_name": "黄石"
},
{
"_id": 183,
"id": 184,
"pid": 12,
"city_code": "101201401",
"city_name": "荆门"
},
{
"_id": 184,
"id": 185,
"pid": 12,
"city_code": "101200801",
"city_name": "荆州"
},
{
"_id": 185,
"id": 186,
"pid": 12,
"city_code": "101201701",
"city_name": "潜江"
},
{
"_id": 186,
"id": 187,
"pid": 12,
"city_code": "101201201",
"city_name": "神农架林区"
},
{
"_id": 187,
"id": 188,
"pid": 12,
"city_code": "101201101",
"city_name": "十堰"
},
{
"_id": 188,
"id": 189,
"pid": 12,
"city_code": "101201301",
"city_name": "随州"
},
{
"_id": 189,
"id": 190,
"pid": 12,
"city_code": "101201501",
"city_name": "天门"
},
{
"_id": 190,
"id": 191,
"pid": 12,
"city_code": "101200701",
"city_name": "咸宁"
},
{
"_id": 191,
"id": 192,
"pid": 12,
"city_code": "101200202",
"city_name": "襄阳"
},
{
"_id": 192,
"id": 193,
"pid": 12,
"city_code": "101200401",
"city_name": "孝感"
},
{
"_id": 193,
"id": 194,
"pid": 12,
"city_code": "101200901",
"city_name": "宜昌"
},
{
"_id": 194,
"id": 195,
"pid": 12,
"city_code": "101201001",
"city_name": "恩施"
},
{
"_id": 195,
"id": 196,
"pid": 13,
"city_code": "101250101",
"city_name": "长沙"
},
{
"_id": 196,
"id": 197,
"pid": 13,
"city_code": "101251101",
"city_name": "张家界"
},
{
"_id": 197,
"id": 198,
"pid": 13,
"city_code": "101250601",
"city_name": "常德"
},
{
"_id": 198,
"id": 199,
"pid": 13,
"city_code": "101250501",
"city_name": "郴州"
},
{
"_id": 199,
"id": 200,
"pid": 13,
"city_code": "101250401",
"city_name": "衡阳"
},
{
"_id": 200,
"id": 201,
"pid": 13,
"city_code": "101251201",
"city_name": "怀化"
},
{
"_id": 201,
"id": 202,
"pid": 13,
"city_code": "101250801",
"city_name": "娄底"
},
{
"_id": 202,
"id": 203,
"pid": 13,
"city_code": "101250901",
"city_name": "邵阳"
},
{
"_id": 203,
"id": 204,
"pid": 13,
"city_code": "101250201",
"city_name": "湘潭"
},
{
"_id": 204,
"id": 205,
"pid": 13,
"city_code": "101251509",
"city_name": "湘西"
},
{
"_id": 205,
"id": 206,
"pid": 13,
"city_code": "101250700",
"city_name": "益阳"
},
{
"_id": 206,
"id": 207,
"pid": 13,
"city_code": "101251401",
"city_name": "永州"
},
{
"_id": 207,
"id": 208,
"pid": 13,
"city_code": "101251001",
"city_name": "岳阳"
},
{
"_id": 208,
"id": 209,
"pid": 13,
"city_code": "101250301",
"city_name": "株洲"
},
{
"_id": 209,
"id": 210,
"pid": 14,
"city_code": "101060101",
"city_name": "长春"
},
{
"_id": 210,
"id": 211,
"pid": 14,
"city_code": "101060201",
"city_name": "吉林市"
},
{
"_id": 211,
"id": 212,
"pid": 14,
"city_code": "101060601",
"city_name": "白城"
},
{
"_id": 212,
"id": 213,
"pid": 14,
"city_code": "101060901",
"city_name": "白山"
},
{
"_id": 213,
"id": 214,
"pid": 14,
"city_code": "101060701",
"city_name": "辽源"
},
{
"_id": 214,
"id": 215,
"pid": 14,
"city_code": "101060401",
"city_name": "四平"
},
{
"_id": 215,
"id": 216,
"pid": 14,
"city_code": "101060801",
"city_name": "松原"
},
{
"_id": 216,
"id": 217,
"pid": 14,
"city_code": "101060501",
"city_name": "通化"
},
{
"_id": 217,
"id": 218,
"pid": 14,
"city_code": "101060312",
"city_name": "延边"
},
{
"_id": 218,
"id": 219,
"pid": 15,
"city_code": "101190101",
"city_name": "南京"
},
{
"_id": 219,
"id": 220,
"pid": 15,
"city_code": "101190401",
"city_name": "苏州"
},
{
"_id": 220,
"id": 221,
"pid": 15,
"city_code": "101190201",
"city_name": "无锡"
},
{
"_id": 221,
"id": 222,
"pid": 15,
"city_code": "101191101",
"city_name": "常州"
},
{
"_id": 222,
"id": 223,
"pid": 15,
"city_code": "101190901",
"city_name": "淮安"
},
{
"_id": 223,
"id": 224,
"pid": 15,
"city_code": "101191001",
"city_name": "连云港"
},
{
"_id": 224,
"id": 225,
"pid": 15,
"city_code": "101190501",
"city_name": "南通"
},
{
"_id": 225,
"id": 226,
"pid": 15,
"city_code": "101191301",
"city_name": "宿迁"
},
{
"_id": 226,
"id": 227,
"pid": 15,
"city_code": "101191201",
"city_name": "泰州"
},
{
"_id": 227,
"id": 228,
"pid": 15,
"city_code": "101190801",
"city_name": "徐州"
},
{
"_id": 228,
"id": 229,
"pid": 15,
"city_code": "101190701",
"city_name": "盐城"
},
{
"_id": 229,
"id": 230,
"pid": 15,
"city_code": "101190601",
"city_name": "扬州"
},
{
"_id": 230,
"id": 231,
"pid": 15,
"city_code": "101190301",
"city_name": "镇江"
},
{
"_id": 231,
"id": 232,
"pid": 16,
"city_code": "101240101",
"city_name": "南昌"
},
{
"_id": 232,
"id": 233,
"pid": 16,
"city_code": "101240401",
"city_name": "抚州"
},
{
"_id": 233,
"id": 234,
"pid": 16,
"city_code": "101240701",
"city_name": "赣州"
},
{
"_id": 234,
"id": 235,
"pid": 16,
"city_code": "101240601",
"city_name": "吉安"
},
{
"_id": 235,
"id": 236,
"pid": 16,
"city_code": "101240801",
"city_name": "景德镇"
},
{
"_id": 236,
"id": 237,
"pid": 16,
"city_code": "101240201",
"city_name": "九江"
},
{
"_id": 237,
"id": 238,
"pid": 16,
"city_code": "101240901",
"city_name": "萍乡"
},
{
"_id": 238,
"id": 239,
"pid": 16,
"city_code": "101240301",
"city_name": "上饶"
},
{
"_id": 239,
"id": 240,
"pid": 16,
"city_code": "101241001",
"city_name": "新余"
},
{
"_id": 240,
"id": 241,
"pid": 16,
"city_code": "101240501",
"city_name": "宜春"
},
{
"_id": 241,
"id": 242,
"pid": 16,
"city_code": "101241101",
"city_name": "鹰潭"
},
{
"_id": 242,
"id": 243,
"pid": 17,
"city_code": "101070101",
"city_name": "沈阳"
},
{
"_id": 243,
"id": 244,
"pid": 17,
"city_code": "101070201",
"city_name": "大连"
},
{
"_id": 244,
"id": 245,
"pid": 17,
"city_code": "101070301",
"city_name": "鞍山"
},
{
"_id": 245,
"id": 246,
"pid": 17,
"city_code": "101070501",
"city_name": "本溪"
},
{
"_id": 246,
"id": 247,
"pid": 17,
"city_code": "101071201",
"city_name": "朝阳"
},
{
"_id": 247,
"id": 248,
"pid": 17,
"city_code": "101070601",
"city_name": "丹东"
},
{
"_id": 248,
"id": 249,
"pid": 17,
"city_code": "101070401",
"city_name": "抚顺"
},
{
"_id": 249,
"id": 250,
"pid": 17,
"city_code": "101070901",
"city_name": "阜新"
},
{
"_id": 250,
"id": 251,
"pid": 17,
"city_code": "101071401",
"city_name": "葫芦岛"
},
{
"_id": 251,
"id": 252,
"pid": 17,
"city_code": "101070701",
"city_name": "锦州"
},
{
"_id": 252,
"id": 253,
"pid": 17,
"city_code": "101071001",
"city_name": "辽阳"
},
{
"_id": 253,
"id": 254,
"pid": 17,
"city_code": "101071301",
"city_name": "盘锦"
},
{
"_id": 254,
"id": 255,
"pid": 17,
"city_code": "101071101",
"city_name": "铁岭"
},
{
"_id": 255,
"id": 256,
"pid": 17,
"city_code": "101070801",
"city_name": "营口"
},
{
"_id": 256,
"id": 257,
"pid": 18,
"city_code": "101080101",
"city_name": "呼和浩特"
},
{
"_id": 257,
"id": 258,
"pid": 18,
"city_code": "101081213",
"city_name": "阿拉善盟"
},
{
"_id": 258,
"id": 259,
"pid": 18,
"city_code": "101080801",
"city_name": "巴彦淖尔"
},
{
"_id": 259,
"id": 260,
"pid": 18,
"city_code": "101080201",
"city_name": "包头"
},
{
"_id": 260,
"id": 261,
"pid": 18,
"city_code": "101080601",
"city_name": "赤峰"
},
{
"_id": 261,
"id": 262,
"pid": 18,
"city_code": "101080701",
"city_name": "鄂尔多斯"
},
{
"_id": 262,
"id": 263,
"pid": 18,
"city_code": "101081001",
"city_name": "呼伦贝尔"
},
{
"_id": 263,
"id": 264,
"pid": 18,
"city_code": "101080501",
"city_name": "通辽"
},
{
"_id": 264,
"id": 265,
"pid": 18,
"city_code": "101080301",
"city_name": "乌海"
},
{
"_id": 265,
"id": 266,
"pid": 18,
"city_code": "101080405",
"city_name": "乌兰察布"
},
{
"_id": 266,
"id": 267,
"pid": 18,
"city_code": "101080902",
"city_name": "锡林郭勒"
},
{
"_id": 267,
"id": 268,
"pid": 18,
"city_code": "101081108",
"city_name": "兴安盟"
},
{
"_id": 268,
"id": 269,
"pid": 19,
"city_code": "101170101",
"city_name": "银川"
},
{
"_id": 269,
"id": 270,
"pid": 19,
"city_code": "101170401",
"city_name": "固原"
},
{
"_id": 270,
"id": 271,
"pid": 19,
"city_code": "101170201",
"city_name": "石嘴山"
},
{
"_id": 271,
"id": 272,
"pid": 19,
"city_code": "101170301",
"city_name": "吴忠"
},
{
"_id": 272,
"id": 273,
"pid": 19,
"city_code": "101170501",
"city_name": "中卫"
},
{
"_id": 273,
"id": 274,
"pid": 20,
"city_code": "101150101",
"city_name": "西宁"
},
{
"_id": 274,
"id": 275,
"pid": 20,
"city_code": "101150501",
"city_name": "果洛"
},
{
"_id": 275,
"id": 276,
"pid": 20,
"city_code": "101150801",
"city_name": "海北"
},
{
"_id": 276,
"id": 277,
"pid": 20,
"city_code": "101150201",
"city_name": "海东"
},
{
"_id": 277,
"id": 278,
"pid": 20,
"city_code": "101150401",
"city_name": "海南州"
},
{
"_id": 278,
"id": 279,
"pid": 20,
"city_code": "101150701",
"city_name": "海西"
},
{
"_id": 279,
"id": 280,
"pid": 20,
"city_code": "101150301",
"city_name": "黄南"
},
{
"_id": 280,
"id": 281,
"pid": 20,
"city_code": "101150601",
"city_name": "玉树"
},
{
"_id": 281,
"id": 282,
"pid": 21,
"city_code": "101120101",
"city_name": "济南"
},
{
"_id": 282,
"id": 283,
"pid": 21,
"city_code": "101120201",
"city_name": "青岛"
},
{
"_id": 283,
"id": 284,
"pid": 21,
"city_code": "101121101",
"city_name": "滨州"
},
{
"_id": 284,
"id": 285,
"pid": 21,
"city_code": "101120401",
"city_name": "德州"
},
{
"_id": 285,
"id": 286,
"pid": 21,
"city_code": "101121201",
"city_name": "东营"
},
{
"_id": 286,
"id": 287,
"pid": 21,
"city_code": "101121001",
"city_name": "菏泽"
},
{
"_id": 287,
"id": 288,
"pid": 21,
"city_code": "101120701",
"city_name": "济宁"
},
{
"_id": 288,
"id": 289,
"pid": 21,
"city_code": "101121601",
"city_name": "莱芜"
},
{
"_id": 289,
"id": 290,
"pid": 21,
"city_code": "101121701",
"city_name": "聊城"
},
{
"_id": 290,
"id": 291,
"pid": 21,
"city_code": "101120901",
"city_name": "临沂"
},
{
"_id": 291,
"id": 292,
"pid": 21,
"city_code": "101121501",
"city_name": "日照"
},
{
"_id": 292,
"id": 293,
"pid": 21,
"city_code": "101120801",
"city_name": "泰安"
},
{
"_id": 293,
"id": 294,
"pid": 21,
"city_code": "101121301",
"city_name": "威海"
},
{
"_id": 294,
"id": 295,
"pid": 21,
"city_code": "101120601",
"city_name": "潍坊"
},
{
"_id": 295,
"id": 296,
"pid": 21,
"city_code": "101120501",
"city_name": "烟台"
},
{
"_id": 296,
"id": 297,
"pid": 21,
"city_code": "101121401",
"city_name": "枣庄"
},
{
"_id": 297,
"id": 298,
"pid": 21,
"city_code": "101120301",
"city_name": "淄博"
},
{
"_id": 298,
"id": 299,
"pid": 22,
"city_code": "101100101",
"city_name": "太原"
},
{
"_id": 299,
"id": 300,
"pid": 22,
"city_code": "101100501",
"city_name": "长治"
},
{
"_id": 300,
"id": 301,
"pid": 22,
"city_code": "101100201",
"city_name": "大同"
},
{
"_id": 301,
"id": 302,
"pid": 22,
"city_code": "101100601",
"city_name": "晋城"
},
{
"_id": 302,
"id": 303,
"pid": 22,
"city_code": "101100401",
"city_name": "晋中"
},
{
"_id": 303,
"id": 304,
"pid": 22,
"city_code": "101100701",
"city_name": "临汾"
},
{
"_id": 304,
"id": 305,
"pid": 22,
"city_code": "101101100",
"city_name": "吕梁"
},
{
"_id": 305,
"id": 306,
"pid": 22,
"city_code": "101100901",
"city_name": "朔州"
},
{
"_id": 306,
"id": 307,
"pid": 22,
"city_code": "101101001",
"city_name": "忻州"
},
{
"_id": 307,
"id": 308,
"pid": 22,
"city_code": "101100301",
"city_name": "阳泉"
},
{
"_id": 308,
"id": 309,
"pid": 22,
"city_code": "101100801",
"city_name": "运城"
},
{
"_id": 309,
"id": 310,
"pid": 23,
"city_code": "101110101",
"city_name": "西安"
},
{
"_id": 310,
"id": 311,
"pid": 23,
"city_code": "101110701",
"city_name": "安康"
},
{
"_id": 311,
"id": 312,
"pid": 23,
"city_code": "101110901",
"city_name": "宝鸡"
},
{
"_id": 312,
"id": 313,
"pid": 23,
"city_code": "101110801",
"city_name": "汉中"
},
{
"_id": 313,
"id": 314,
"pid": 23,
"city_code": "101110601",
"city_name": "商洛"
},
{
"_id": 314,
"id": 315,
"pid": 23,
"city_code": "101111001",
"city_name": "铜川"
},
{
"_id": 315,
"id": 316,
"pid": 23,
"city_code": "101110501",
"city_name": "渭南"
},
{
"_id": 316,
"id": 317,
"pid": 23,
"city_code": "101110200",
"city_name": "咸阳"
},
{
"_id": 317,
"id": 318,
"pid": 23,
"city_code": "101110300",
"city_name": "延安"
},
{
"_id": 318,
"id": 319,
"pid": 23,
"city_code": "101110401",
"city_name": "榆林"
},
{
"_id": 319,
"id": 321,
"pid": 25,
"city_code": "101270101",
"city_name": "成都"
},
{
"_id": 320,
"id": 322,
"pid": 25,
"city_code": "101270401",
"city_name": "绵阳"
},
{
"_id": 321,
"id": 323,
"pid": 25,
"city_code": "101271901",
"city_name": "阿坝"
},
{
"_id": 322,
"id": 324,
"pid": 25,
"city_code": "101270901",
"city_name": "巴中"
},
{
"_id": 323,
"id": 325,
"pid": 25,
"city_code": "101270601",
"city_name": "达州"
},
{
"_id": 324,
"id": 326,
"pid": 25,
"city_code": "101272001",
"city_name": "德阳"
},
{
"_id": 325,
"id": 327,
"pid": 25,
"city_code": "101271801",
"city_name": "甘孜"
},
{
"_id": 326,
"id": 328,
"pid": 25,
"city_code": "101270801",
"city_name": "广安"
},
{
"_id": 327,
"id": 329,
"pid": 25,
"city_code": "101272101",
"city_name": "广元"
},
{
"_id": 328,
"id": 330,
"pid": 25,
"city_code": "101271401",
"city_name": "乐山"
},
{
"_id": 329,
"id": 331,
"pid": 25,
"city_code": "101271601",
"city_name": "凉山"
},
{
"_id": 330,
"id": 332,
"pid": 25,
"city_code": "101271501",
"city_name": "眉山"
},
{
"_id": 331,
"id": 333,
"pid": 25,
"city_code": "101270501",
"city_name": "南充"
},
{
"_id": 332,
"id": 334,
"pid": 25,
"city_code": "101271201",
"city_name": "内江"
},
{
"_id": 333,
"id": 335,
"pid": 25,
"city_code": "101270201",
"city_name": "攀枝花"
},
{
"_id": 334,
"id": 336,
"pid": 25,
"city_code": "101270701",
"city_name": "遂宁"
},
{
"_id": 335,
"id": 337,
"pid": 25,
"city_code": "101271701",
"city_name": "雅安"
},
{
"_id": 336,
"id": 338,
"pid": 25,
"city_code": "101271101",
"city_name": "宜宾"
},
{
"_id": 337,
"id": 339,
"pid": 25,
"city_code": "101271301",
"city_name": "资阳"
},
{
"_id": 338,
"id": 340,
"pid": 25,
"city_code": "101270301",
"city_name": "自贡"
},
{
"_id": 339,
"id": 341,
"pid": 25,
"city_code": "101271001",
"city_name": "泸州"
},
{
"_id": 340,
"id": 343,
"pid": 27,
"city_code": "101140101",
"city_name": "拉萨"
},
{
"_id": 341,
"id": 344,
"pid": 27,
"city_code": "101140701",
"city_name": "阿里"
},
{
"_id": 342,
"id": 345,
"pid": 27,
"city_code": "101140501",
"city_name": "昌都"
},
{
"_id": 343,
"id": 346,
"pid": 27,
"city_code": "101140401",
"city_name": "林芝"
},
{
"_id": 344,
"id": 347,
"pid": 27,
"city_code": "101140601",
"city_name": "那曲"
},
{
"_id": 345,
"id": 348,
"pid": 27,
"city_code": "101140201",
"city_name": "日喀则"
},
{
"_id": 346,
"id": 349,
"pid": 27,
"city_code": "101140301",
"city_name": "山南"
},
{
"_id": 347,
"id": 350,
"pid": 28,
"city_code": "101130101",
"city_name": "乌鲁木齐"
},
{
"_id": 348,
"id": 351,
"pid": 28,
"city_code": "101130801",
"city_name": "阿克苏"
},
{
"_id": 349,
"id": 352,
"pid": 28,
"city_code": "101130701",
"city_name": "阿拉尔"
},
{
"_id": 350,
"id": 353,
"pid": 28,
"city_code": "101130609",
"city_name": "巴音郭楞"
},
{
"_id": 351,
"id": 354,
"pid": 28,
"city_code": "101131604",
"city_name": "博尔塔拉"
},
{
"_id": 352,
"id": 355,
"pid": 28,
"city_code": "101130401",
"city_name": "昌吉"
},
{
"_id": 353,
"id": 356,
"pid": 28,
"city_code": "101131201",
"city_name": "哈密"
},
{
"_id": 354,
"id": 357,
"pid": 28,
"city_code": "101131301",
"city_name": "和田"
},
{
"_id": 355,
"id": 358,
"pid": 28,
"city_code": "101130901",
"city_name": "喀什"
},
{
"_id": 356,
"id": 359,
"pid": 28,
"city_code": "101130201",
"city_name": "克拉玛依"
},
{
"_id": 357,
"id": 360,
"pid": 28,
"city_code": "",
"city_name": "克孜勒苏"
},
{
"_id": 358,
"id": 361,
"pid": 28,
"city_code": "101130301",
"city_name": "石河子"
},
{
"_id": 359,
"id": 362,
"pid": 28,
"city_code": "",
"city_name": "图木舒克"
},
{
"_id": 360,
"id": 363,
"pid": 28,
"city_code": "101130501",
"city_name": "吐鲁番"
},
{
"_id": 361,
"id": 364,
"pid": 28,
"city_code": "",
"city_name": "五家渠"
},
{
"_id": 362,
"id": 365,
"pid": 28,
"city_code": "101131012",
"city_name": "伊犁"
},
{
"_id": 363,
"id": 366,
"pid": 29,
"city_code": "101290101",
"city_name": "昆明"
},
{
"_id": 364,
"id": 367,
"pid": 29,
"city_code": "101291201",
"city_name": "怒江"
},
{
"_id": 365,
"id": 368,
"pid": 29,
"city_code": "101290901",
"city_name": "普洱"
},
{
"_id": 366,
"id": 369,
"pid": 29,
"city_code": "101291401",
"city_name": "丽江"
},
{
"_id": 367,
"id": 370,
"pid": 29,
"city_code": "101290501",
"city_name": "保山"
},
{
"_id": 368,
"id": 371,
"pid": 29,
"city_code": "101290801",
"city_name": "楚雄"
},
{
"_id": 369,
"id": 372,
"pid": 29,
"city_code": "101290201",
"city_name": "大理"
},
{
"_id": 370,
"id": 373,
"pid": 29,
"city_code": "101291501",
"city_name": "德宏"
},
{
"_id": 371,
"id": 374,
"pid": 29,
"city_code": "101291305",
"city_name": "迪庆"
},
{
"_id": 372,
"id": 375,
"pid": 29,
"city_code": "101290301",
"city_name": "红河"
},
{
"_id": 373,
"id": 376,
"pid": 29,
"city_code": "101291101",
"city_name": "临沧"
},
{
"_id": 374,
"id": 377,
"pid": 29,
"city_code": "101290401",
"city_name": "曲靖"
},
{
"_id": 375,
"id": 378,
"pid": 29,
"city_code": "101290601",
"city_name": "文山"
},
{
"_id": 376,
"id": 379,
"pid": 29,
"city_code": "101291602",
"city_name": "西双版纳"
},
{
"_id": 377,
"id": 380,
"pid": 29,
"city_code": "101290701",
"city_name": "玉溪"
},
{
"_id": 378,
"id": 381,
"pid": 29,
"city_code": "101291001",
"city_name": "昭通"
},
{
"_id": 379,
"id": 382,
"pid": 30,
"city_code": "101210101",
"city_name": "杭州"
},
{
"_id": 380,
"id": 383,
"pid": 30,
"city_code": "101210201",
"city_name": "湖州"
},
{
"_id": 381,
"id": 384,
"pid": 30,
"city_code": "101210301",
"city_name": "嘉兴"
},
{
"_id": 382,
"id": 385,
"pid": 30,
"city_code": "101210901",
"city_name": "金华"
},
{
"_id": 383,
"id": 386,
"pid": 30,
"city_code": "101210801",
"city_name": "丽水"
},
{
"_id": 384,
"id": 387,
"pid": 30,
"city_code": "101210401",
"city_name": "宁波"
},
{
"_id": 385,
"id": 388,
"pid": 30,
"city_code": "101210501",
"city_name": "绍兴"
},
{
"_id": 386,
"id": 389,
"pid": 30,
"city_code": "101210601",
"city_name": "台州"
},
{
"_id": 387,
"id": 390,
"pid": 30,
"city_code": "101210701",
"city_name": "温州"
},
{
"_id": 388,
"id": 391,
"pid": 30,
"city_code": "101211101",
"city_name": "舟山"
},
{
"_id": 389,
"id": 392,
"pid": 30,
"city_code": "101211001",
"city_name": "衢州"
},
{
"_id": 390,
"id": 400,
"pid": 35,
"city_code": "101220609",
"city_name": "桐城市"
},
{
"_id": 391,
"id": 401,
"pid": 35,
"city_code": "101220605",
"city_name": "怀宁县"
},
{
"_id": 392,
"id": 402,
"pid": 47,
"city_code": "101220602",
"city_name": "枞阳县"
},
{
"_id": 393,
"id": 403,
"pid": 35,
"city_code": "101220604",
"city_name": "潜山县"
},
{
"_id": 394,
"id": 404,
"pid": 35,
"city_code": "101220603",
"city_name": "太湖县"
},
{
"_id": 395,
"id": 405,
"pid": 35,
"city_code": "101220606",
"city_name": "宿松县"
},
{
"_id": 396,
"id": 406,
"pid": 35,
"city_code": "101220607",
"city_name": "望江县"
},
{
"_id": 397,
"id": 407,
"pid": 35,
"city_code": "101220608",
"city_name": "岳西县"
},
{
"_id": 398,
"id": 412,
"pid": 36,
"city_code": "101220202",
"city_name": "怀远县"
},
{
"_id": 399,
"id": 413,
"pid": 36,
"city_code": "101220204",
"city_name": "五河县"
},
{
"_id": 400,
"id": 414,
"pid": 36,
"city_code": "101220203",
"city_name": "固镇县"
},
{
"_id": 401,
"id": 416,
"pid": 3400,
"city_code": "101220106",
"city_name": "庐江县"
},
{
"_id": 402,
"id": 417,
"pid": 48,
"city_code": "101220305",
"city_name": "无为县"
},
{
"_id": 403,
"id": 418,
"pid": 45,
"city_code": "101220503",
"city_name": "含山县"
},
{
"_id": 404,
"id": 419,
"pid": 45,
"city_code": "101220504",
"city_name": "和县"
},
{
"_id": 405,
"id": 421,
"pid": 38,
"city_code": "101221702",
"city_name": "东至县"
},
{
"_id": 406,
"id": 422,
"pid": 38,
"city_code": "101221705",
"city_name": "石台县"
},
{
"_id": 407,
"id": 423,
"pid": 38,
"city_code": "101221703",
"city_name": "青阳县"
},
{
"_id": 408,
"id": 426,
"pid": 39,
"city_code": "101221107",
"city_name": "天长市"
},
{
"_id": 409,
"id": 427,
"pid": 39,
"city_code": "101221103",
"city_name": "明光市"
},
{
"_id": 410,
"id": 428,
"pid": 39,
"city_code": "101221106",
"city_name": "来安县"
},
{
"_id": 411,
"id": 429,
"pid": 39,
"city_code": "101221105",
"city_name": "全椒县"
},
{
"_id": 412,
"id": 430,
"pid": 39,
"city_code": "101221104",
"city_name": "定远县"
},
{
"_id": 413,
"id": 431,
"pid": 39,
"city_code": "101221102",
"city_name": "凤阳县"
},
{
"_id": 414,
"id": 439,
"pid": 40,
"city_code": "101220805",
"city_name": "界首市"
},
{
"_id": 415,
"id": 440,
"pid": 40,
"city_code": "101220804",
"city_name": "临泉县"
},
{
"_id": 416,
"id": 441,
"pid": 40,
"city_code": "101220806",
"city_name": "太和县"
},
{
"_id": 417,
"id": 442,
"pid": 40,
"city_code": "101220802",
"city_name": "阜南县"
},
{
"_id": 418,
"id": 443,
"pid": 40,
"city_code": "101220803",
"city_name": "颍上县"
},
{
"_id": 419,
"id": 447,
"pid": 41,
"city_code": "101221202",
"city_name": "濉溪县"
},
{
"_id": 420,
"id": 452,
"pid": 42,
"city_code": "101220403",
"city_name": "潘集区"
},
{
"_id": 421,
"id": 453,
"pid": 42,
"city_code": "101220402",
"city_name": "凤台县"
},
{
"_id": 422,
"id": 454,
"pid": 43,
"city_code": "101221003",
"city_name": "屯溪区"
},
{
"_id": 423,
"id": 455,
"pid": 43,
"city_code": "101221002",
"city_name": "黄山区"
},
{
"_id": 424,
"id": 457,
"pid": 43,
"city_code": "101221006",
"city_name": "歙县"
},
{
"_id": 425,
"id": 458,
"pid": 43,
"city_code": "101221007",
"city_name": "休宁县"
},
{
"_id": 426,
"id": 459,
"pid": 43,
"city_code": "101221005",
"city_name": "黟县"
},
{
"_id": 427,
"id": 460,
"pid": 43,
"city_code": "101221004",
"city_name": "祁门县"
},
{
"_id": 428,
"id": 463,
"pid": 44,
"city_code": "101221503",
"city_name": "寿县"
},
{
"_id": 429,
"id": 464,
"pid": 44,
"city_code": "101221502",
"city_name": "霍邱县"
},
{
"_id": 430,
"id": 465,
"pid": 44,
"city_code": "101221507",
"city_name": "舒城县"
},
{
"_id": 431,
"id": 466,
"pid": 44,
"city_code": "101221505",
"city_name": "金寨县"
},
{
"_id": 432,
"id": 467,
"pid": 44,
"city_code": "101221506",
"city_name": "霍山县"
},
{
"_id": 433,
"id": 471,
"pid": 45,
"city_code": "101220502",
"city_name": "当涂县"
},
{
"_id": 434,
"id": 473,
"pid": 46,
"city_code": "101220702",
"city_name": "砀山县"
},
{
"_id": 435,
"id": 474,
"pid": 46,
"city_code": "101220705",
"city_name": "萧县"
},
{
"_id": 436,
"id": 475,
"pid": 46,
"city_code": "101220703",
"city_name": "灵璧县"
},
{
"_id": 437,
"id": 476,
"pid": 46,
"city_code": "101220704",
"city_name": "泗县"
},
{
"_id": 438,
"id": 480,
"pid": 47,
"city_code": "101221301",
"city_name": "义安区"
},
{
"_id": 439,
"id": 485,
"pid": 48,
"city_code": "101220303",
"city_name": "芜湖县"
},
{
"_id": 440,
"id": 486,
"pid": 48,
"city_code": "101220302",
"city_name": "繁昌县"
},
{
"_id": 441,
"id": 487,
"pid": 48,
"city_code": "101220304",
"city_name": "南陵县"
},
{
"_id": 442,
"id": 489,
"pid": 49,
"city_code": "101221404",
"city_name": "宁国市"
},
{
"_id": 443,
"id": 490,
"pid": 49,
"city_code": "101221407",
"city_name": "郎溪县"
},
{
"_id": 444,
"id": 491,
"pid": 49,
"city_code": "101221406",
"city_name": "广德县"
},
{
"_id": 445,
"id": 492,
"pid": 49,
"city_code": "101221402",
"city_name": "泾县"
},
{
"_id": 446,
"id": 493,
"pid": 49,
"city_code": "101221405",
"city_name": "绩溪县"
},
{
"_id": 447,
"id": 494,
"pid": 49,
"city_code": "101221403",
"city_name": "旌德县"
},
{
"_id": 448,
"id": 495,
"pid": 50,
"city_code": "101220902",
"city_name": "涡阳县"
},
{
"_id": 449,
"id": 496,
"pid": 50,
"city_code": "101220904",
"city_name": "蒙城县"
},
{
"_id": 450,
"id": 497,
"pid": 50,
"city_code": "101220903",
"city_name": "利辛县"
},
{
"_id": 451,
"id": 501,
"pid": 1,
"city_code": "101010200",
"city_name": "海淀区"
},
{
"_id": 452,
"id": 502,
"pid": 1,
"city_code": "101010300",
"city_name": "朝阳区"
},
{
"_id": 453,
"id": 505,
"pid": 1,
"city_code": "101010900",
"city_name": "丰台区"
},
{
"_id": 454,
"id": 506,
"pid": 1,
"city_code": "101011000",
"city_name": "石景山区"
},
{
"_id": 455,
"id": 507,
"pid": 1,
"city_code": "101011200",
"city_name": "房山区"
},
{
"_id": 456,
"id": 508,
"pid": 1,
"city_code": "101011400",
"city_name": "门头沟区"
},
{
"_id": 457,
"id": 509,
"pid": 1,
"city_code": "101010600",
"city_name": "通州区"
},
{
"_id": 458,
"id": 510,
"pid": 1,
"city_code": "101010400",
"city_name": "顺义区"
},
{
"_id": 459,
"id": 511,
"pid": 1,
"city_code": "101010700",
"city_name": "昌平区"
},
{
"_id": 460,
"id": 512,
"pid": 1,
"city_code": "101010500",
"city_name": "怀柔区"
},
{
"_id": 461,
"id": 513,
"pid": 1,
"city_code": "101011500",
"city_name": "平谷区"
},
{
"_id": 462,
"id": 514,
"pid": 1,
"city_code": "101011100",
"city_name": "大兴区"
},
{
"_id": 463,
"id": 515,
"pid": 1,
"city_code": "101011300",
"city_name": "密云县"
},
{
"_id": 464,
"id": 516,
"pid": 1,
"city_code": "101010800",
"city_name": "延庆县"
},
{
"_id": 465,
"id": 522,
"pid": 52,
"city_code": "101230111",
"city_name": "福清市"
},
{
"_id": 466,
"id": 523,
"pid": 52,
"city_code": "101230110",
"city_name": "长乐市"
},
{
"_id": 467,
"id": 524,
"pid": 52,
"city_code": "101230103",
"city_name": "闽侯县"
},
{
"_id": 468,
"id": 525,
"pid": 52,
"city_code": "101230105",
"city_name": "连江县"
},
{
"_id": 469,
"id": 526,
"pid": 52,
"city_code": "101230104",
"city_name": "罗源县"
},
{
"_id": 470,
"id": 527,
"pid": 52,
"city_code": "101230102",
"city_name": "闽清县"
},
{
"_id": 471,
"id": 528,
"pid": 52,
"city_code": "101230107",
"city_name": "永泰县"
},
{
"_id": 472,
"id": 529,
"pid": 52,
"city_code": "101230108",
"city_name": "平潭县"
},
{
"_id": 473,
"id": 531,
"pid": 53,
"city_code": "101230707",
"city_name": "漳平市"
},
{
"_id": 474,
"id": 532,
"pid": 53,
"city_code": "101230702",
"city_name": "长汀县"
},
{
"_id": 475,
"id": 533,
"pid": 53,
"city_code": "101230706",
"city_name": "永定县"
},
{
"_id": 476,
"id": 534,
"pid": 53,
"city_code": "101230705",
"city_name": "上杭县"
},
{
"_id": 477,
"id": 535,
"pid": 53,
"city_code": "101230704",
"city_name": "武平县"
},
{
"_id": 478,
"id": 536,
"pid": 53,
"city_code": "101230703",
"city_name": "连城县"
},
{
"_id": 479,
"id": 538,
"pid": 54,
"city_code": "101230904",
"city_name": "邵武市"
},
{
"_id": 480,
"id": 539,
"pid": 54,
"city_code": "101230905",
"city_name": "武夷山市"
},
{
"_id": 481,
"id": 540,
"pid": 54,
"city_code": "101230910",
"city_name": "建瓯市"
},
{
"_id": 482,
"id": 541,
"pid": 54,
"city_code": "101230907",
"city_name": "建阳市"
},
{
"_id": 483,
"id": 542,
"pid": 54,
"city_code": "101230902",
"city_name": "顺昌县"
},
{
"_id": 484,
"id": 543,
"pid": 54,
"city_code": "101230906",
"city_name": "浦城县"
},
{
"_id": 485,
"id": 544,
"pid": 54,
"city_code": "101230903",
"city_name": "光泽县"
},
{
"_id": 486,
"id": 545,
"pid": 54,
"city_code": "101230908",
"city_name": "松溪县"
},
{
"_id": 487,
"id": 546,
"pid": 54,
"city_code": "101230909",
"city_name": "政和县"
},
{
"_id": 488,
"id": 548,
"pid": 55,
"city_code": "101230306",
"city_name": "福安市"
},
{
"_id": 489,
"id": 549,
"pid": 55,
"city_code": "101230308",
"city_name": "福鼎市"
},
{
"_id": 490,
"id": 550,
"pid": 55,
"city_code": "101230303",
"city_name": "霞浦县"
},
{
"_id": 491,
"id": 551,
"pid": 55,
"city_code": "101230302",
"city_name": "古田县"
},
{
"_id": 492,
"id": 552,
"pid": 55,
"city_code": "101230309",
"city_name": "屏南县"
},
{
"_id": 493,
"id": 553,
"pid": 55,
"city_code": "101230304",
"city_name": "寿宁县"
},
{
"_id": 494,
"id": 554,
"pid": 55,
"city_code": "101230305",
"city_name": "周宁县"
},
{
"_id": 495,
"id": 555,
"pid": 55,
"city_code": "101230307",
"city_name": "柘荣县"
},
{
"_id": 496,
"id": 556,
"pid": 56,
"city_code": "101230407",
"city_name": "城厢区"
},
{
"_id": 497,
"id": 557,
"pid": 56,
"city_code": "101230404",
"city_name": "涵江区"
},
{
"_id": 498,
"id": 558,
"pid": 56,
"city_code": "101230406",
"city_name": "荔城区"
},
{
"_id": 499,
"id": 559,
"pid": 56,
"city_code": "101230405",
"city_name": "秀屿区"
},
{
"_id": 500,
"id": 560,
"pid": 56,
"city_code": "101230402",
"city_name": "仙游县"
},
{
"_id": 501,
"id": 566,
"pid": 57,
"city_code": "101230510",
"city_name": "石狮市"
},
{
"_id": 502,
"id": 567,
"pid": 57,
"city_code": "101230509",
"city_name": "晋江市"
},
{
"_id": 503,
"id": 568,
"pid": 57,
"city_code": "101230506",
"city_name": "南安市"
},
{
"_id": 504,
"id": 569,
"pid": 57,
"city_code": "101230508",
"city_name": "惠安县"
},
{
"_id": 505,
"id": 570,
"pid": 57,
"city_code": "101230502",
"city_name": "安溪县"
},
{
"_id": 506,
"id": 571,
"pid": 57,
"city_code": "101230504",
"city_name": "永春县"
},
{
"_id": 507,
"id": 572,
"pid": 57,
"city_code": "101230505",
"city_name": "德化县"
},
{
"_id": 508,
"id": 576,
"pid": 58,
"city_code": "101230810",
"city_name": "永安市"
},
{
"_id": 509,
"id": 577,
"pid": 58,
"city_code": "101230807",
"city_name": "明溪县"
},
{
"_id": 510,
"id": 578,
"pid": 58,
"city_code": "101230803",
"city_name": "清流县"
},
{
"_id": 511,
"id": 579,
"pid": 58,
"city_code": "101230802",
"city_name": "宁化县"
},
{
"_id": 512,
"id": 580,
"pid": 58,
"city_code": "101230811",
"city_name": "大田县"
},
{
"_id": 513,
"id": 581,
"pid": 58,
"city_code": "101230809",
"city_name": "尤溪县"
},
{
"_id": 514,
"id": 582,
"pid": 58,
"city_code": "101230808",
"city_name": "沙县"
},
{
"_id": 515,
"id": 583,
"pid": 58,
"city_code": "101230805",
"city_name": "将乐县"
},
{
"_id": 516,
"id": 584,
"pid": 58,
"city_code": "101230804",
"city_name": "泰宁县"
},
{
"_id": 517,
"id": 585,
"pid": 58,
"city_code": "101230806",
"city_name": "建宁县"
},
{
"_id": 518,
"id": 590,
"pid": 59,
"city_code": "101230202",
"city_name": "同安区"
},
{
"_id": 519,
"id": 594,
"pid": 60,
"city_code": "101230605",
"city_name": "龙海市"
},
{
"_id": 520,
"id": 595,
"pid": 60,
"city_code": "101230609",
"city_name": "云霄县"
},
{
"_id": 521,
"id": 596,
"pid": 60,
"city_code": "101230606",
"city_name": "漳浦县"
},
{
"_id": 522,
"id": 597,
"pid": 60,
"city_code": "101230607",
"city_name": "诏安县"
},
{
"_id": 523,
"id": 598,
"pid": 60,
"city_code": "101230602",
"city_name": "长泰县"
},
{
"_id": 524,
"id": 599,
"pid": 60,
"city_code": "101230608",
"city_name": "东山县"
},
{
"_id": 525,
"id": 600,
"pid": 60,
"city_code": "101230603",
"city_name": "南靖县"
},
{
"_id": 526,
"id": 601,
"pid": 60,
"city_code": "101230604",
"city_name": "平和县"
},
{
"_id": 527,
"id": 602,
"pid": 60,
"city_code": "101230610",
"city_name": "华安县"
},
{
"_id": 528,
"id": 603,
"pid": 61,
"city_code": "101160102",
"city_name": "皋兰县"
},
{
"_id": 529,
"id": 609,
"pid": 61,
"city_code": "101160103",
"city_name": "永登县"
},
{
"_id": 530,
"id": 610,
"pid": 61,
"city_code": "101160104",
"city_name": "榆中县"
},
{
"_id": 531,
"id": 611,
"pid": 62,
"city_code": "101161301",
"city_name": "白银区"
},
{
"_id": 532,
"id": 612,
"pid": 62,
"city_code": "101161304",
"city_name": "平川区"
},
{
"_id": 533,
"id": 613,
"pid": 62,
"city_code": "101161303",
"city_name": "会宁县"
},
{
"_id": 534,
"id": 614,
"pid": 62,
"city_code": "101161305",
"city_name": "景泰县"
},
{
"_id": 535,
"id": 615,
"pid": 62,
"city_code": "101161302",
"city_name": "靖远县"
},
{
"_id": 536,
"id": 616,
"pid": 63,
"city_code": "101160205",
"city_name": "临洮县"
},
{
"_id": 537,
"id": 617,
"pid": 63,
"city_code": "101160203",
"city_name": "陇西县"
},
{
"_id": 538,
"id": 618,
"pid": 63,
"city_code": "101160202",
"city_name": "通渭县"
},
{
"_id": 539,
"id": 619,
"pid": 63,
"city_code": "101160204",
"city_name": "渭源县"
},
{
"_id": 540,
"id": 620,
"pid": 63,
"city_code": "101160206",
"city_name": "漳县"
},
{
"_id": 541,
"id": 621,
"pid": 63,
"city_code": "101160207",
"city_name": "岷县"
},
{
"_id": 542,
"id": 624,
"pid": 64,
"city_code": "101161201",
"city_name": "合作市"
},
{
"_id": 543,
"id": 625,
"pid": 64,
"city_code": "101161202",
"city_name": "临潭县"
},
{
"_id": 544,
"id": 626,
"pid": 64,
"city_code": "101161203",
"city_name": "卓尼县"
},
{
"_id": 545,
"id": 627,
"pid": 64,
"city_code": "101161204",
"city_name": "舟曲县"
},
{
"_id": 546,
"id": 628,
"pid": 64,
"city_code": "101161205",
"city_name": "迭部县"
},
{
"_id": 547,
"id": 629,
"pid": 64,
"city_code": "101161206",
"city_name": "玛曲县"
},
{
"_id": 548,
"id": 630,
"pid": 64,
"city_code": "101161207",
"city_name": "碌曲县"
},
{
"_id": 549,
"id": 631,
"pid": 64,
"city_code": "101161208",
"city_name": "夏河县"
},
{
"_id": 550,
"id": 634,
"pid": 66,
"city_code": "101160602",
"city_name": "永昌县"
},
{
"_id": 551,
"id": 636,
"pid": 67,
"city_code": "101160807",
"city_name": "玉门市"
},
{
"_id": 552,
"id": 637,
"pid": 67,
"city_code": "101160808",
"city_name": "敦煌市"
},
{
"_id": 553,
"id": 638,
"pid": 67,
"city_code": "101160803",
"city_name": "金塔县"
},
{
"_id": 554,
"id": 639,
"pid": 67,
"city_code": "101160805",
"city_name": "瓜州县"
},
{
"_id": 555,
"id": 640,
"pid": 67,
"city_code": "101160806",
"city_name": "肃北县"
},
{
"_id": 556,
"id": 641,
"pid": 67,
"city_code": "101160804",
"city_name": "阿克塞"
},
{
"_id": 557,
"id": 642,
"pid": 68,
"city_code": "101161101",
"city_name": "临夏市"
},
{
"_id": 558,
"id": 643,
"pid": 68,
"city_code": "101161101",
"city_name": "临夏县"
},
{
"_id": 559,
"id": 644,
"pid": 68,
"city_code": "101161102",
"city_name": "康乐县"
},
{
"_id": 560,
"id": 645,
"pid": 68,
"city_code": "101161103",
"city_name": "永靖县"
},
{
"_id": 561,
"id": 646,
"pid": 68,
"city_code": "101161104",
"city_name": "广河县"
},
{
"_id": 562,
"id": 647,
"pid": 68,
"city_code": "101161105",
"city_name": "和政县"
},
{
"_id": 563,
"id": 648,
"pid": 68,
"city_code": "101161106",
"city_name": "东乡族自治县"
},
{
"_id": 564,
"id": 649,
"pid": 68,
"city_code": "101161107",
"city_name": "积石山"
},
{
"_id": 565,
"id": 650,
"pid": 69,
"city_code": "101161002",
"city_name": "成县"
},
{
"_id": 566,
"id": 651,
"pid": 69,
"city_code": "101161008",
"city_name": "徽县"
},
{
"_id": 567,
"id": 652,
"pid": 69,
"city_code": "101161005",
"city_name": "康县"
},
{
"_id": 568,
"id": 653,
"pid": 69,
"city_code": "101161007",
"city_name": "礼县"
},
{
"_id": 569,
"id": 654,
"pid": 69,
"city_code": "101161009",
"city_name": "两当县"
},
{
"_id": 570,
"id": 655,
"pid": 69,
"city_code": "101161003",
"city_name": "文县"
},
{
"_id": 571,
"id": 656,
"pid": 69,
"city_code": "101161006",
"city_name": "西和县"
},
{
"_id": 572,
"id": 657,
"pid": 69,
"city_code": "101161004",
"city_name": "宕昌县"
},
{
"_id": 573,
"id": 658,
"pid": 69,
"city_code": "101161001",
"city_name": "武都区"
},
{
"_id": 574,
"id": 659,
"pid": 70,
"city_code": "101160304",
"city_name": "崇信县"
},
{
"_id": 575,
"id": 660,
"pid": 70,
"city_code": "101160305",
"city_name": "华亭县"
},
{
"_id": 576,
"id": 661,
"pid": 70,
"city_code": "101160307",
"city_name": "静宁县"
},
{
"_id": 577,
"id": 662,
"pid": 70,
"city_code": "101160303",
"city_name": "灵台县"
},
{
"_id": 578,
"id": 663,
"pid": 70,
"city_code": "101160308",
"city_name": "崆峒区"
},
{
"_id": 579,
"id": 664,
"pid": 70,
"city_code": "101160306",
"city_name": "庄浪县"
},
{
"_id": 580,
"id": 665,
"pid": 70,
"city_code": "101160302",
"city_name": "泾川县"
},
{
"_id": 581,
"id": 666,
"pid": 71,
"city_code": "101160405",
"city_name": "合水县"
},
{
"_id": 582,
"id": 667,
"pid": 71,
"city_code": "101160404",
"city_name": "华池县"
},
{
"_id": 583,
"id": 668,
"pid": 71,
"city_code": "101160403",
"city_name": "环县"
},
{
"_id": 584,
"id": 669,
"pid": 71,
"city_code": "101160407",
"city_name": "宁县"
},
{
"_id": 585,
"id": 670,
"pid": 71,
"city_code": "101160409",
"city_name": "庆城县"
},
{
"_id": 586,
"id": 671,
"pid": 71,
"city_code": "101160402",
"city_name": "西峰区"
},
{
"_id": 587,
"id": 672,
"pid": 71,
"city_code": "101160408",
"city_name": "镇原县"
},
{
"_id": 588,
"id": 673,
"pid": 71,
"city_code": "101160406",
"city_name": "正宁县"
},
{
"_id": 589,
"id": 674,
"pid": 72,
"city_code": "101160905",
"city_name": "甘谷县"
},
{
"_id": 590,
"id": 675,
"pid": 72,
"city_code": "101160904",
"city_name": "秦安县"
},
{
"_id": 591,
"id": 676,
"pid": 72,
"city_code": "101160903",
"city_name": "清水县"
},
{
"_id": 592,
"id": 678,
"pid": 72,
"city_code": "101160908",
"city_name": "麦积区"
},
{
"_id": 593,
"id": 679,
"pid": 72,
"city_code": "101160906",
"city_name": "武山县"
},
{
"_id": 594,
"id": 680,
"pid": 72,
"city_code": "101160907",
"city_name": "张家川"
},
{
"_id": 595,
"id": 681,
"pid": 73,
"city_code": "101160503",
"city_name": "古浪县"
},
{
"_id": 596,
"id": 682,
"pid": 73,
"city_code": "101160502",
"city_name": "民勤县"
},
{
"_id": 597,
"id": 683,
"pid": 73,
"city_code": "101160505",
"city_name": "天祝县"
},
{
"_id": 598,
"id": 685,
"pid": 74,
"city_code": "101160705",
"city_name": "高台县"
},
{
"_id": 599,
"id": 686,
"pid": 74,
"city_code": "101160704",
"city_name": "临泽县"
},
{
"_id": 600,
"id": 687,
"pid": 74,
"city_code": "101160703",
"city_name": "民乐县"
},
{
"_id": 601,
"id": 688,
"pid": 74,
"city_code": "101160706",
"city_name": "山丹县"
},
{
"_id": 602,
"id": 689,
"pid": 74,
"city_code": "101160702",
"city_name": "肃南县"
},
{
"_id": 603,
"id": 691,
"pid": 75,
"city_code": "101280103",
"city_name": "从化区"
},
{
"_id": 604,
"id": 692,
"pid": 75,
"city_code": "101280106",
"city_name": "天河区"
},
{
"_id": 605,
"id": 699,
"pid": 75,
"city_code": "101280102",
"city_name": "番禺区"
},
{
"_id": 606,
"id": 700,
"pid": 75,
"city_code": "101280105",
"city_name": "花都区"
},
{
"_id": 607,
"id": 701,
"pid": 75,
"city_code": "101280104",
"city_name": "增城区"
},
{
"_id": 608,
"id": 706,
"pid": 76,
"city_code": "101280604",
"city_name": "南山区"
},
{
"_id": 609,
"id": 711,
"pid": 77,
"city_code": "101281503",
"city_name": "潮安县"
},
{
"_id": 610,
"id": 712,
"pid": 77,
"city_code": "101281502",
"city_name": "饶平县"
},
{
"_id": 611,
"id": 746,
"pid": 79,
"city_code": "101280803",
"city_name": "南海区"
},
{
"_id": 612,
"id": 747,
"pid": 79,
"city_code": "101280801",
"city_name": "顺德区"
},
{
"_id": 613,
"id": 748,
"pid": 79,
"city_code": "101280802",
"city_name": "三水区"
},
{
"_id": 614,
"id": 749,
"pid": 79,
"city_code": "101280804",
"city_name": "高明区"
},
{
"_id": 615,
"id": 750,
"pid": 80,
"city_code": "101281206",
"city_name": "东源县"
},
{
"_id": 616,
"id": 751,
"pid": 80,
"city_code": "101281204",
"city_name": "和平县"
},
{
"_id": 617,
"id": 753,
"pid": 80,
"city_code": "101281203",
"city_name": "连平县"
},
{
"_id": 618,
"id": 754,
"pid": 80,
"city_code": "101281205",
"city_name": "龙川县"
},
{
"_id": 619,
"id": 755,
"pid": 80,
"city_code": "101281202",
"city_name": "紫金县"
},
{
"_id": 620,
"id": 756,
"pid": 81,
"city_code": "101280303",
"city_name": "惠阳区"
},
{
"_id": 621,
"id": 759,
"pid": 81,
"city_code": "101280302",
"city_name": "博罗县"
},
{
"_id": 622,
"id": 760,
"pid": 81,
"city_code": "101280304",
"city_name": "惠东县"
},
{
"_id": 623,
"id": 761,
"pid": 81,
"city_code": "101280305",
"city_name": "龙门县"
},
{
"_id": 624,
"id": 762,
"pid": 82,
"city_code": "101281109",
"city_name": "江海区"
},
{
"_id": 625,
"id": 763,
"pid": 82,
"city_code": "101281107",
"city_name": "蓬江区"
},
{
"_id": 626,
"id": 764,
"pid": 82,
"city_code": "101281104",
"city_name": "新会区"
},
{
"_id": 627,
"id": 765,
"pid": 82,
"city_code": "101281106",
"city_name": "台山市"
},
{
"_id": 628,
"id": 766,
"pid": 82,
"city_code": "101281103",
"city_name": "开平市"
},
{
"_id": 629,
"id": 767,
"pid": 82,
"city_code": "101281108",
"city_name": "鹤山市"
},
{
"_id": 630,
"id": 768,
"pid": 82,
"city_code": "101281105",
"city_name": "恩平市"
},
{
"_id": 631,
"id": 770,
"pid": 83,
"city_code": "101281903",
"city_name": "普宁市"
},
{
"_id": 632,
"id": 771,
"pid": 83,
"city_code": "101281905",
"city_name": "揭东县"
},
{
"_id": 633,
"id": 772,
"pid": 83,
"city_code": "101281902",
"city_name": "揭西县"
},
{
"_id": 634,
"id": 773,
"pid": 83,
"city_code": "101281904",
"city_name": "惠来县"
},
{
"_id": 635,
"id": 775,
"pid": 84,
"city_code": "101282006",
"city_name": "茂港区"
},
{
"_id": 636,
"id": 776,
"pid": 84,
"city_code": "101282002",
"city_name": "高州市"
},
{
"_id": 637,
"id": 777,
"pid": 84,
"city_code": "101282003",
"city_name": "化州市"
},
{
"_id": 638,
"id": 778,
"pid": 84,
"city_code": "101282005",
"city_name": "信宜市"
},
{
"_id": 639,
"id": 779,
"pid": 84,
"city_code": "101282004",
"city_name": "电白县"
},
{
"_id": 640,
"id": 780,
"pid": 85,
"city_code": "101280409",
"city_name": "梅县"
},
{
"_id": 641,
"id": 782,
"pid": 85,
"city_code": "101280402",
"city_name": "兴宁市"
},
{
"_id": 642,
"id": 783,
"pid": 85,
"city_code": "101280404",
"city_name": "大埔县"
},
{
"_id": 643,
"id": 784,
"pid": 85,
"city_code": "101280406",
"city_name": "丰顺县"
},
{
"_id": 644,
"id": 785,
"pid": 85,
"city_code": "101280408",
"city_name": "五华县"
},
{
"_id": 645,
"id": 786,
"pid": 85,
"city_code": "101280407",
"city_name": "平远县"
},
{
"_id": 646,
"id": 787,
"pid": 85,
"city_code": "101280403",
"city_name": "蕉岭县"
},
{
"_id": 647,
"id": 789,
"pid": 86,
"city_code": "101281307",
"city_name": "英德市"
},
{
"_id": 648,
"id": 790,
"pid": 86,
"city_code": "101281303",
"city_name": "连州市"
},
{
"_id": 649,
"id": 791,
"pid": 86,
"city_code": "101281306",
"city_name": "佛冈县"
},
{
"_id": 650,
"id": 792,
"pid": 86,
"city_code": "101281305",
"city_name": "阳山县"
},
{
"_id": 651,
"id": 793,
"pid": 86,
"city_code": "101281308",
"city_name": "清新县"
},
{
"_id": 652,
"id": 794,
"pid": 86,
"city_code": "101281304",
"city_name": "连山县"
},
{
"_id": 653,
"id": 795,
"pid": 86,
"city_code": "101281302",
"city_name": "连南县"
},
{
"_id": 654,
"id": 796,
"pid": 87,
"city_code": "101280504",
"city_name": "南澳县"
},
{
"_id": 655,
"id": 797,
"pid": 87,
"city_code": "101280502",
"city_name": "潮阳区"
},
{
"_id": 656,
"id": 798,
"pid": 87,
"city_code": "101280503",
"city_name": "澄海区"
},
{
"_id": 657,
"id": 804,
"pid": 88,
"city_code": "101282103",
"city_name": "陆丰市"
},
{
"_id": 658,
"id": 805,
"pid": 88,
"city_code": "101282102",
"city_name": "海丰县"
},
{
"_id": 659,
"id": 806,
"pid": 88,
"city_code": "101282104",
"city_name": "陆河县"
},
{
"_id": 660,
"id": 807,
"pid": 89,
"city_code": "101280209",
"city_name": "曲江区"
},
{
"_id": 661,
"id": 808,
"pid": 89,
"city_code": "101280210",
"city_name": "浈江区"
},
{
"_id": 662,
"id": 809,
"pid": 89,
"city_code": "101280211",
"city_name": "武江区"
},
{
"_id": 663,
"id": 810,
"pid": 89,
"city_code": "101280209",
"city_name": "曲江区"
},
{
"_id": 664,
"id": 811,
"pid": 89,
"city_code": "101280205",
"city_name": "乐昌市"
},
{
"_id": 665,
"id": 812,
"pid": 89,
"city_code": "101280207",
"city_name": "南雄市"
},
{
"_id": 666,
"id": 813,
"pid": 89,
"city_code": "101280203",
"city_name": "始兴县"
},
{
"_id": 667,
"id": 814,
"pid": 89,
"city_code": "101280206",
"city_name": "仁化县"
},
{
"_id": 668,
"id": 815,
"pid": 89,
"city_code": "101280204",
"city_name": "翁源县"
},
{
"_id": 669,
"id": 816,
"pid": 89,
"city_code": "101280208",
"city_name": "新丰县"
},
{
"_id": 670,
"id": 817,
"pid": 89,
"city_code": "101280202",
"city_name": "乳源县"
},
{
"_id": 671,
"id": 819,
"pid": 90,
"city_code": "101281802",
"city_name": "阳春市"
},
{
"_id": 672,
"id": 820,
"pid": 90,
"city_code": "101281804",
"city_name": "阳西县"
},
{
"_id": 673,
"id": 821,
"pid": 90,
"city_code": "101281803",
"city_name": "阳东县"
},
{
"_id": 674,
"id": 823,
"pid": 91,
"city_code": "101281402",
"city_name": "罗定市"
},
{
"_id": 675,
"id": 824,
"pid": 91,
"city_code": "101281403",
"city_name": "新兴县"
},
{
"_id": 676,
"id": 825,
"pid": 91,
"city_code": "101281404",
"city_name": "郁南县"
},
{
"_id": 677,
"id": 826,
"pid": 91,
"city_code": "101281406",
"city_name": "云安县"
},
{
"_id": 678,
"id": 827,
"pid": 92,
"city_code": "101281006",
"city_name": "赤坎区"
},
{
"_id": 679,
"id": 828,
"pid": 92,
"city_code": "101281009",
"city_name": "霞山区"
},
{
"_id": 680,
"id": 829,
"pid": 92,
"city_code": "101281008",
"city_name": "坡头区"
},
{
"_id": 681,
"id": 830,
"pid": 92,
"city_code": "101281010",
"city_name": "麻章区"
},
{
"_id": 682,
"id": 831,
"pid": 92,
"city_code": "101281005",
"city_name": "廉江市"
},
{
"_id": 683,
"id": 832,
"pid": 92,
"city_code": "101281003",
"city_name": "雷州市"
},
{
"_id": 684,
"id": 833,
"pid": 92,
"city_code": "101281002",
"city_name": "吴川市"
},
{
"_id": 685,
"id": 834,
"pid": 92,
"city_code": "101281007",
"city_name": "遂溪县"
},
{
"_id": 686,
"id": 835,
"pid": 92,
"city_code": "101281004",
"city_name": "徐闻县"
},
{
"_id": 687,
"id": 837,
"pid": 93,
"city_code": "101280908",
"city_name": "高要区"
},
{
"_id": 688,
"id": 838,
"pid": 93,
"city_code": "101280903",
"city_name": "四会市"
},
{
"_id": 689,
"id": 839,
"pid": 93,
"city_code": "101280902",
"city_name": "广宁县"
},
{
"_id": 690,
"id": 840,
"pid": 93,
"city_code": "101280906",
"city_name": "怀集县"
},
{
"_id": 691,
"id": 841,
"pid": 93,
"city_code": "101280907",
"city_name": "封开县"
},
{
"_id": 692,
"id": 842,
"pid": 93,
"city_code": "101280905",
"city_name": "德庆县"
},
{
"_id": 693,
"id": 850,
"pid": 95,
"city_code": "101280702",
"city_name": "斗门区"
},
{
"_id": 694,
"id": 851,
"pid": 95,
"city_code": "101280703",
"city_name": "金湾区"
},
{
"_id": 695,
"id": 852,
"pid": 96,
"city_code": "101300103",
"city_name": "邕宁区"
},
{
"_id": 696,
"id": 858,
"pid": 96,
"city_code": "101300108",
"city_name": "武鸣县"
},
{
"_id": 697,
"id": 859,
"pid": 96,
"city_code": "101300105",
"city_name": "隆安县"
},
{
"_id": 698,
"id": 860,
"pid": 96,
"city_code": "101300106",
"city_name": "马山县"
},
{
"_id": 699,
"id": 861,
"pid": 96,
"city_code": "101300107",
"city_name": "上林县"
},
{
"_id": 700,
"id": 862,
"pid": 96,
"city_code": "101300109",
"city_name": "宾阳县"
},
{
"_id": 701,
"id": 863,
"pid": 96,
"city_code": "101300104",
"city_name": "横县"
},
{
"_id": 702,
"id": 869,
"pid": 97,
"city_code": "101300510",
"city_name": "阳朔县"
},
{
"_id": 703,
"id": 870,
"pid": 97,
"city_code": "101300505",
"city_name": "临桂县"
},
{
"_id": 704,
"id": 871,
"pid": 97,
"city_code": "101300507",
"city_name": "灵川县"
},
{
"_id": 705,
"id": 872,
"pid": 97,
"city_code": "101300508",
"city_name": "全州县"
},
{
"_id": 706,
"id": 873,
"pid": 97,
"city_code": "101300512",
"city_name": "平乐县"
},
{
"_id": 707,
"id": 874,
"pid": 97,
"city_code": "101300506",
"city_name": "兴安县"
},
{
"_id": 708,
"id": 875,
"pid": 97,
"city_code": "101300509",
"city_name": "灌阳县"
},
{
"_id": 709,
"id": 876,
"pid": 97,
"city_code": "101300513",
"city_name": "荔浦县"
},
{
"_id": 710,
"id": 877,
"pid": 97,
"city_code": "101300514",
"city_name": "资源县"
},
{
"_id": 711,
"id": 878,
"pid": 97,
"city_code": "101300504",
"city_name": "永福县"
},
{
"_id": 712,
"id": 879,
"pid": 97,
"city_code": "101300503",
"city_name": "龙胜县"
},
{
"_id": 713,
"id": 880,
"pid": 97,
"city_code": "101300511",
"city_name": "恭城县"
},
{
"_id": 714,
"id": 882,
"pid": 98,
"city_code": "101301011",
"city_name": "凌云县"
},
{
"_id": 715,
"id": 883,
"pid": 98,
"city_code": "101301007",
"city_name": "平果县"
},
{
"_id": 716,
"id": 884,
"pid": 98,
"city_code": "101301009",
"city_name": "西林县"
},
{
"_id": 717,
"id": 885,
"pid": 98,
"city_code": "101301010",
"city_name": "乐业县"
},
{
"_id": 718,
"id": 886,
"pid": 98,
"city_code": "101301004",
"city_name": "德保县"
},
{
"_id": 719,
"id": 887,
"pid": 98,
"city_code": "101301012",
"city_name": "田林县"
},
{
"_id": 720,
"id": 888,
"pid": 98,
"city_code": "101301003",
"city_name": "田阳县"
},
{
"_id": 721,
"id": 889,
"pid": 98,
"city_code": "101301005",
"city_name": "靖西县"
},
{
"_id": 722,
"id": 890,
"pid": 98,
"city_code": "101301006",
"city_name": "田东县"
},
{
"_id": 723,
"id": 891,
"pid": 98,
"city_code": "101301002",
"city_name": "那坡县"
},
{
"_id": 724,
"id": 892,
"pid": 98,
"city_code": "101301008",
"city_name": "隆林县"
},
{
"_id": 725,
"id": 896,
"pid": 99,
"city_code": "101301302",
"city_name": "合浦县"
},
{
"_id": 726,
"id": 898,
"pid": 100,
"city_code": "101300204",
"city_name": "凭祥市"
},
{
"_id": 727,
"id": 899,
"pid": 100,
"city_code": "101300207",
"city_name": "宁明县"
},
{
"_id": 728,
"id": 900,
"pid": 100,
"city_code": "101300206",
"city_name": "扶绥县"
},
{
"_id": 729,
"id": 901,
"pid": 100,
"city_code": "101300203",
"city_name": "龙州县"
},
{
"_id": 730,
"id": 902,
"pid": 100,
"city_code": "101300205",
"city_name": "大新县"
},
{
"_id": 731,
"id": 903,
"pid": 100,
"city_code": "101300202",
"city_name": "天等县"
},
{
"_id": 732,
"id": 905,
"pid": 101,
"city_code": "101301405",
"city_name": "防城区"
},
{
"_id": 733,
"id": 906,
"pid": 101,
"city_code": "101301403",
"city_name": "东兴市"
},
{
"_id": 734,
"id": 907,
"pid": 101,
"city_code": "101301402",
"city_name": "上思县"
},
{
"_id": 735,
"id": 911,
"pid": 102,
"city_code": "101300802",
"city_name": "桂平市"
},
{
"_id": 736,
"id": 912,
"pid": 102,
"city_code": "101300803",
"city_name": "平南县"
},
{
"_id": 737,
"id": 914,
"pid": 103,
"city_code": "101301207",
"city_name": "宜州市"
},
{
"_id": 738,
"id": 915,
"pid": 103,
"city_code": "101301202",
"city_name": "天峨县"
},
{
"_id": 739,
"id": 916,
"pid": 103,
"city_code": "101301208",
"city_name": "凤山县"
},
{
"_id": 740,
"id": 917,
"pid": 103,
"city_code": "101301209",
"city_name": "南丹县"
},
{
"_id": 741,
"id": 918,
"pid": 103,
"city_code": "101301203",
"city_name": "东兰县"
},
{
"_id": 742,
"id": 919,
"pid": 103,
"city_code": "101301210",
"city_name": "都安县"
},
{
"_id": 743,
"id": 920,
"pid": 103,
"city_code": "101301206",
"city_name": "罗城县"
},
{
"_id": 744,
"id": 921,
"pid": 103,
"city_code": "101301204",
"city_name": "巴马县"
},
{
"_id": 745,
"id": 922,
"pid": 103,
"city_code": "101301205",
"city_name": "环江县"
},
{
"_id": 746,
"id": 923,
"pid": 103,
"city_code": "101301211",
"city_name": "大化县"
},
{
"_id": 747,
"id": 925,
"pid": 104,
"city_code": "101300704",
"city_name": "钟山县"
},
{
"_id": 748,
"id": 926,
"pid": 104,
"city_code": "101300702",
"city_name": "昭平县"
},
{
"_id": 749,
"id": 927,
"pid": 104,
"city_code": "101300703",
"city_name": "富川县"
},
{
"_id": 750,
"id": 929,
"pid": 105,
"city_code": "101300406",
"city_name": "合山市"
},
{
"_id": 751,
"id": 930,
"pid": 105,
"city_code": "101300404",
"city_name": "象州县"
},
{
"_id": 752,
"id": 931,
"pid": 105,
"city_code": "101300405",
"city_name": "武宣县"
},
{
"_id": 753,
"id": 932,
"pid": 105,
"city_code": "101300402",
"city_name": "忻城县"
},
{
"_id": 754,
"id": 933,
"pid": 105,
"city_code": "101300403",
"city_name": "金秀县"
},
{
"_id": 755,
"id": 938,
"pid": 106,
"city_code": "101300305",
"city_name": "柳江县"
},
{
"_id": 756,
"id": 939,
"pid": 106,
"city_code": "101300302",
"city_name": "柳城县"
},
{
"_id": 757,
"id": 940,
"pid": 106,
"city_code": "101300304",
"city_name": "鹿寨县"
},
{
"_id": 758,
"id": 941,
"pid": 106,
"city_code": "101300306",
"city_name": "融安县"
},
{
"_id": 759,
"id": 942,
"pid": 106,
"city_code": "101300307",
"city_name": "融水县"
},
{
"_id": 760,
"id": 943,
"pid": 106,
"city_code": "101300308",
"city_name": "三江县"
},
{
"_id": 761,
"id": 946,
"pid": 107,
"city_code": "101301103",
"city_name": "灵山县"
},
{
"_id": 762,
"id": 947,
"pid": 107,
"city_code": "101301102",
"city_name": "浦北县"
},
{
"_id": 763,
"id": 950,
"pid": 108,
"city_code": "101300607",
"city_name": "长洲区"
},
{
"_id": 764,
"id": 951,
"pid": 108,
"city_code": "101300606",
"city_name": "岑溪市"
},
{
"_id": 765,
"id": 952,
"pid": 108,
"city_code": "101300604",
"city_name": "苍梧县"
},
{
"_id": 766,
"id": 953,
"pid": 108,
"city_code": "101300602",
"city_name": "藤县"
},
{
"_id": 767,
"id": 954,
"pid": 108,
"city_code": "101300605",
"city_name": "蒙山县"
},
{
"_id": 768,
"id": 956,
"pid": 109,
"city_code": "101300903",
"city_name": "北流市"
},
{
"_id": 769,
"id": 957,
"pid": 109,
"city_code": "101300904",
"city_name": "容县"
},
{
"_id": 770,
"id": 958,
"pid": 109,
"city_code": "101300905",
"city_name": "陆川县"
},
{
"_id": 771,
"id": 959,
"pid": 109,
"city_code": "101300902",
"city_name": "博白县"
},
{
"_id": 772,
"id": 960,
"pid": 109,
"city_code": "101300906",
"city_name": "兴业县"
},
{
"_id": 773,
"id": 961,
"pid": 110,
"city_code": "101260111",
"city_name": "南明区"
},
{
"_id": 774,
"id": 962,
"pid": 110,
"city_code": "101260110",
"city_name": "云岩区"
},
{
"_id": 775,
"id": 963,
"pid": 110,
"city_code": "101260103",
"city_name": "花溪区"
},
{
"_id": 776,
"id": 964,
"pid": 110,
"city_code": "101260104",
"city_name": "乌当区"
},
{
"_id": 777,
"id": 965,
"pid": 110,
"city_code": "101260102",
"city_name": "白云区"
},
{
"_id": 778,
"id": 966,
"pid": 110,
"city_code": "101260109",
"city_name": "小河区"
},
{
"_id": 779,
"id": 969,
"pid": 110,
"city_code": "101260108",
"city_name": "清镇市"
},
{
"_id": 780,
"id": 970,
"pid": 110,
"city_code": "101260106",
"city_name": "开阳县"
},
{
"_id": 781,
"id": 971,
"pid": 110,
"city_code": "101260107",
"city_name": "修文县"
},
{
"_id": 782,
"id": 972,
"pid": 110,
"city_code": "101260105",
"city_name": "息烽县"
},
{
"_id": 783,
"id": 974,
"pid": 111,
"city_code": "101260306",
"city_name": "关岭县"
},
{
"_id": 784,
"id": 976,
"pid": 111,
"city_code": "101260305",
"city_name": "紫云县"
},
{
"_id": 785,
"id": 977,
"pid": 111,
"city_code": "101260304",
"city_name": "平坝县"
},
{
"_id": 786,
"id": 978,
"pid": 111,
"city_code": "101260302",
"city_name": "普定县"
},
{
"_id": 787,
"id": 980,
"pid": 112,
"city_code": "101260705",
"city_name": "大方县"
},
{
"_id": 788,
"id": 981,
"pid": 112,
"city_code": "101260708",
"city_name": "黔西县"
},
{
"_id": 789,
"id": 982,
"pid": 112,
"city_code": "101260703",
"city_name": "金沙县"
},
{
"_id": 790,
"id": 983,
"pid": 112,
"city_code": "101260707",
"city_name": "织金县"
},
{
"_id": 791,
"id": 984,
"pid": 112,
"city_code": "101260706",
"city_name": "纳雍县"
},
{
"_id": 792,
"id": 985,
"pid": 112,
"city_code": "101260702",
"city_name": "赫章县"
},
{
"_id": 793,
"id": 986,
"pid": 112,
"city_code": "101260704",
"city_name": "威宁县"
},
{
"_id": 794,
"id": 989,
"pid": 113,
"city_code": "101260801",
"city_name": "水城县"
},
{
"_id": 795,
"id": 990,
"pid": 113,
"city_code": "101260804",
"city_name": "盘县"
},
{
"_id": 796,
"id": 991,
"pid": 114,
"city_code": "101260501",
"city_name": "凯里市"
},
{
"_id": 797,
"id": 992,
"pid": 114,
"city_code": "101260505",
"city_name": "黄平县"
},
{
"_id": 798,
"id": 993,
"pid": 114,
"city_code": "101260503",
"city_name": "施秉县"
},
{
"_id": 799,
"id": 994,
"pid": 114,
"city_code": "101260509",
"city_name": "三穗县"
},
{
"_id": 800,
"id": 995,
"pid": 114,
"city_code": "101260504",
"city_name": "镇远县"
},
{
"_id": 801,
"id": 996,
"pid": 114,
"city_code": "101260502",
"city_name": "岑巩县"
},
{
"_id": 802,
"id": 997,
"pid": 114,
"city_code": "101260514",
"city_name": "天柱县"
},
{
"_id": 803,
"id": 998,
"pid": 114,
"city_code": "101260515",
"city_name": "锦屏县"
},
{
"_id": 804,
"id": 999,
"pid": 114,
"city_code": "101260511",
"city_name": "剑河县"
},
{
"_id": 805,
"id": 1000,
"pid": 114,
"city_code": "101260510",
"city_name": "台江县"
},
{
"_id": 806,
"id": 1001,
"pid": 114,
"city_code": "101260513",
"city_name": "黎平县"
},
{
"_id": 807,
"id": 1002,
"pid": 114,
"city_code": "101260516",
"city_name": "榕江县"
},
{
"_id": 808,
"id": 1003,
"pid": 114,
"city_code": "101260517",
"city_name": "从江县"
},
{
"_id": 809,
"id": 1004,
"pid": 114,
"city_code": "101260512",
"city_name": "雷山县"
},
{
"_id": 810,
"id": 1005,
"pid": 114,
"city_code": "101260507",
"city_name": "麻江县"
},
{
"_id": 811,
"id": 1006,
"pid": 114,
"city_code": "101260508",
"city_name": "丹寨县"
},
{
"_id": 812,
"id": 1007,
"pid": 115,
"city_code": "101260401",
"city_name": "都匀市"
},
{
"_id": 813,
"id": 1008,
"pid": 115,
"city_code": "101260405",
"city_name": "福泉市"
},
{
"_id": 814,
"id": 1009,
"pid": 115,
"city_code": "101260412",
"city_name": "荔波县"
},
{
"_id": 815,
"id": 1010,
"pid": 115,
"city_code": "101260402",
"city_name": "贵定县"
},
{
"_id": 816,
"id": 1011,
"pid": 115,
"city_code": "101260403",
"city_name": "瓮安县"
},
{
"_id": 817,
"id": 1012,
"pid": 115,
"city_code": "101260410",
"city_name": "独山县"
},
{
"_id": 818,
"id": 1013,
"pid": 115,
"city_code": "101260409",
"city_name": "平塘县"
},
{
"_id": 819,
"id": 1014,
"pid": 115,
"city_code": "101260408",
"city_name": "罗甸县"
},
{
"_id": 820,
"id": 1015,
"pid": 115,
"city_code": "101260404",
"city_name": "长顺县"
},
{
"_id": 821,
"id": 1016,
"pid": 115,
"city_code": "101260407",
"city_name": "龙里县"
},
{
"_id": 822,
"id": 1017,
"pid": 115,
"city_code": "101260406",
"city_name": "惠水县"
},
{
"_id": 823,
"id": 1018,
"pid": 115,
"city_code": "101260411",
"city_name": "三都县"
},
{
"_id": 824,
"id": 1019,
"pid": 116,
"city_code": "101260906",
"city_name": "兴义市"
},
{
"_id": 825,
"id": 1020,
"pid": 116,
"city_code": "101260903",
"city_name": "兴仁县"
},
{
"_id": 826,
"id": 1021,
"pid": 116,
"city_code": "101260909",
"city_name": "普安县"
},
{
"_id": 827,
"id": 1022,
"pid": 116,
"city_code": "101260902",
"city_name": "晴隆县"
},
{
"_id": 828,
"id": 1023,
"pid": 116,
"city_code": "101260904",
"city_name": "贞丰县"
},
{
"_id": 829,
"id": 1024,
"pid": 116,
"city_code": "101260905",
"city_name": "望谟县"
},
{
"_id": 830,
"id": 1025,
"pid": 116,
"city_code": "101260908",
"city_name": "册亨县"
},
{
"_id": 831,
"id": 1026,
"pid": 116,
"city_code": "101260907",
"city_name": "安龙县"
},
{
"_id": 832,
"id": 1027,
"pid": 117,
"city_code": "101260601",
"city_name": "铜仁市"
},
{
"_id": 833,
"id": 1028,
"pid": 117,
"city_code": "101260602",
"city_name": "江口县"
},
{
"_id": 834,
"id": 1029,
"pid": 117,
"city_code": "101260608",
"city_name": "石阡县"
},
{
"_id": 835,
"id": 1030,
"pid": 117,
"city_code": "101260605",
"city_name": "思南县"
},
{
"_id": 836,
"id": 1031,
"pid": 117,
"city_code": "101260610",
"city_name": "德江县"
},
{
"_id": 837,
"id": 1032,
"pid": 117,
"city_code": "101260603",
"city_name": "玉屏县"
},
{
"_id": 838,
"id": 1033,
"pid": 117,
"city_code": "101260607",
"city_name": "印江县"
},
{
"_id": 839,
"id": 1034,
"pid": 117,
"city_code": "101260609",
"city_name": "沿河县"
},
{
"_id": 840,
"id": 1035,
"pid": 117,
"city_code": "101260611",
"city_name": "松桃县"
},
{
"_id": 841,
"id": 1037,
"pid": 118,
"city_code": "101260215",
"city_name": "红花岗区"
},
{
"_id": 842,
"id": 1038,
"pid": 118,
"city_code": "101260212",
"city_name": "务川县"
},
{
"_id": 843,
"id": 1039,
"pid": 118,
"city_code": "101260210",
"city_name": "道真县"
},
{
"_id": 844,
"id": 1040,
"pid": 118,
"city_code": "101260214",
"city_name": "汇川区"
},
{
"_id": 845,
"id": 1041,
"pid": 118,
"city_code": "101260208",
"city_name": "赤水市"
},
{
"_id": 846,
"id": 1042,
"pid": 118,
"city_code": "101260203",
"city_name": "仁怀市"
},
{
"_id": 847,
"id": 1043,
"pid": 118,
"city_code": "101260202",
"city_name": "遵义县"
},
{
"_id": 848,
"id": 1044,
"pid": 118,
"city_code": "101260207",
"city_name": "桐梓县"
},
{
"_id": 849,
"id": 1045,
"pid": 118,
"city_code": "101260204",
"city_name": "绥阳县"
},
{
"_id": 850,
"id": 1046,
"pid": 118,
"city_code": "101260211",
"city_name": "正安县"
},
{
"_id": 851,
"id": 1047,
"pid": 118,
"city_code": "101260206",
"city_name": "凤冈县"
},
{
"_id": 852,
"id": 1048,
"pid": 118,
"city_code": "101260205",
"city_name": "湄潭县"
},
{
"_id": 853,
"id": 1049,
"pid": 118,
"city_code": "101260213",
"city_name": "余庆县"
},
{
"_id": 854,
"id": 1050,
"pid": 118,
"city_code": "101260209",
"city_name": "习水县"
},
{
"_id": 855,
"id": 1055,
"pid": 119,
"city_code": "101310102",
"city_name": "琼山区"
},
{
"_id": 856,
"id": 1082,
"pid": 137,
"city_code": "101090102",
"city_name": "井陉矿区"
},
{
"_id": 857,
"id": 1084,
"pid": 137,
"city_code": "101090114",
"city_name": "辛集市"
},
{
"_id": 858,
"id": 1085,
"pid": 137,
"city_code": "101090115",
"city_name": "藁城市"
},
{
"_id": 859,
"id": 1086,
"pid": 137,
"city_code": "101090116",
"city_name": "晋州市"
},
{
"_id": 860,
"id": 1087,
"pid": 137,
"city_code": "101090117",
"city_name": "新乐市"
},
{
"_id": 861,
"id": 1088,
"pid": 137,
"city_code": "101090118",
"city_name": "鹿泉区"
},
{
"_id": 862,
"id": 1089,
"pid": 137,
"city_code": "101090102",
"city_name": "井陉县"
},
{
"_id": 863,
"id": 1090,
"pid": 137,
"city_code": "101090103",
"city_name": "正定县"
},
{
"_id": 864,
"id": 1091,
"pid": 137,
"city_code": "101090104",
"city_name": "栾城区"
},
{
"_id": 865,
"id": 1092,
"pid": 137,
"city_code": "101090105",
"city_name": "行唐县"
},
{
"_id": 866,
"id": 1093,
"pid": 137,
"city_code": "101090106",
"city_name": "灵寿县"
},
{
"_id": 867,
"id": 1094,
"pid": 137,
"city_code": "101090107",
"city_name": "高邑县"
},
{
"_id": 868,
"id": 1095,
"pid": 137,
"city_code": "101090108",
"city_name": "深泽县"
},
{
"_id": 869,
"id": 1096,
"pid": 137,
"city_code": "101090109",
"city_name": "赞皇县"
},
{
"_id": 870,
"id": 1097,
"pid": 137,
"city_code": "101090110",
"city_name": "无极县"
},
{
"_id": 871,
"id": 1098,
"pid": 137,
"city_code": "101090111",
"city_name": "平山县"
},
{
"_id": 872,
"id": 1099,
"pid": 137,
"city_code": "101090112",
"city_name": "元氏县"
},
{
"_id": 873,
"id": 1100,
"pid": 137,
"city_code": "101090113",
"city_name": "赵县"
},
{
"_id": 874,
"id": 1104,
"pid": 138,
"city_code": "101090218",
"city_name": "涿州市"
},
{
"_id": 875,
"id": 1105,
"pid": 138,
"city_code": "101090219",
"city_name": "定州市"
},
{
"_id": 876,
"id": 1106,
"pid": 138,
"city_code": "101090220",
"city_name": "安国市"
},
{
"_id": 877,
"id": 1107,
"pid": 138,
"city_code": "101090221",
"city_name": "高碑店市"
},
{
"_id": 878,
"id": 1108,
"pid": 138,
"city_code": "101090202",
"city_name": "满城县"
},
{
"_id": 879,
"id": 1109,
"pid": 138,
"city_code": "101090224",
"city_name": "清苑县"
},
{
"_id": 880,
"id": 1110,
"pid": 138,
"city_code": "101090213",
"city_name": "涞水县"
},
{
"_id": 881,
"id": 1111,
"pid": 138,
"city_code": "101090203",
"city_name": "阜平县"
},
{
"_id": 882,
"id": 1112,
"pid": 138,
"city_code": "101090204",
"city_name": "徐水县"
},
{
"_id": 883,
"id": 1113,
"pid": 138,
"city_code": "101090223",
"city_name": "定兴县"
},
{
"_id": 884,
"id": 1114,
"pid": 138,
"city_code": "101090205",
"city_name": "唐县"
},
{
"_id": 885,
"id": 1115,
"pid": 138,
"city_code": "101090206",
"city_name": "高阳县"
},
{
"_id": 886,
"id": 1116,
"pid": 138,
"city_code": "101090207",
"city_name": "容城县"
},
{
"_id": 887,
"id": 1117,
"pid": 138,
"city_code": "101090209",
"city_name": "涞源县"
},
{
"_id": 888,
"id": 1118,
"pid": 138,
"city_code": "101090210",
"city_name": "望都县"
},
{
"_id": 889,
"id": 1119,
"pid": 138,
"city_code": "101090211",
"city_name": "安新县"
},
{
"_id": 890,
"id": 1120,
"pid": 138,
"city_code": "101090212",
"city_name": "易县"
},
{
"_id": 891,
"id": 1121,
"pid": 138,
"city_code": "101090214",
"city_name": "曲阳县"
},
{
"_id": 892,
"id": 1122,
"pid": 138,
"city_code": "101090215",
"city_name": "蠡县"
},
{
"_id": 893,
"id": 1123,
"pid": 138,
"city_code": "101090216",
"city_name": "顺平县"
},
{
"_id": 894,
"id": 1124,
"pid": 138,
"city_code": "101090225",
"city_name": "博野县"
},
{
"_id": 895,
"id": 1125,
"pid": 138,
"city_code": "101090217",
"city_name": "雄县"
},
{
"_id": 896,
"id": 1128,
"pid": 139,
"city_code": "101090711",
"city_name": "泊头市"
},
{
"_id": 897,
"id": 1129,
"pid": 139,
"city_code": "101090712",
"city_name": "任丘市"
},
{
"_id": 898,
"id": 1130,
"pid": 139,
"city_code": "101090713",
"city_name": "黄骅市"
},
{
"_id": 899,
"id": 1131,
"pid": 139,
"city_code": "101090714",
"city_name": "河间市"
},
{
"_id": 900,
"id": 1132,
"pid": 139,
"city_code": "101090716",
"city_name": "沧县"
},
{
"_id": 901,
"id": 1133,
"pid": 139,
"city_code": "101090702",
"city_name": "青县"
},
{
"_id": 902,
"id": 1134,
"pid": 139,
"city_code": "101090703",
"city_name": "东光县"
},
{
"_id": 903,
"id": 1135,
"pid": 139,
"city_code": "101090704",
"city_name": "海兴县"
},
{
"_id": 904,
"id": 1136,
"pid": 139,
"city_code": "101090705",
"city_name": "盐山县"
},
{
"_id": 905,
"id": 1137,
"pid": 139,
"city_code": "101090706",
"city_name": "肃宁县"
},
{
"_id": 906,
"id": 1138,
"pid": 139,
"city_code": "101090707",
"city_name": "南皮县"
},
{
"_id": 907,
"id": 1139,
"pid": 139,
"city_code": "101090708",
"city_name": "吴桥县"
},
{
"_id": 908,
"id": 1140,
"pid": 139,
"city_code": "101090709",
"city_name": "献县"
},
{
"_id": 909,
"id": 1141,
"pid": 139,
"city_code": "101090710",
"city_name": "孟村县"
},
{
"_id": 910,
"id": 1145,
"pid": 140,
"city_code": "101090403",
"city_name": "承德县"
},
{
"_id": 911,
"id": 1146,
"pid": 140,
"city_code": "101090404",
"city_name": "兴隆县"
},
{
"_id": 912,
"id": 1147,
"pid": 140,
"city_code": "101090405",
"city_name": "平泉县"
},
{
"_id": 913,
"id": 1148,
"pid": 140,
"city_code": "101090406",
"city_name": "滦平县"
},
{
"_id": 914,
"id": 1149,
"pid": 140,
"city_code": "101090407",
"city_name": "隆化县"
},
{
"_id": 915,
"id": 1150,
"pid": 140,
"city_code": "101090408",
"city_name": "丰宁县"
},
{
"_id": 916,
"id": 1151,
"pid": 140,
"city_code": "101090409",
"city_name": "宽城县"
},
{
"_id": 917,
"id": 1152,
"pid": 140,
"city_code": "101090410",
"city_name": "围场县"
},
{
"_id": 918,
"id": 1156,
"pid": 141,
"city_code": "101091002",
"city_name": "峰峰矿区"
},
{
"_id": 919,
"id": 1157,
"pid": 141,
"city_code": "101091016",
"city_name": "武安市"
},
{
"_id": 920,
"id": 1158,
"pid": 141,
"city_code": "101091001",
"city_name": "邯郸县"
},
{
"_id": 921,
"id": 1159,
"pid": 141,
"city_code": "101091003",
"city_name": "临漳县"
},
{
"_id": 922,
"id": 1160,
"pid": 141,
"city_code": "101091004",
"city_name": "成安县"
},
{
"_id": 923,
"id": 1161,
"pid": 141,
"city_code": "101091005",
"city_name": "大名县"
},
{
"_id": 924,
"id": 1162,
"pid": 141,
"city_code": "101091006",
"city_name": "涉县"
},
{
"_id": 925,
"id": 1163,
"pid": 141,
"city_code": "101091007",
"city_name": "磁县"
},
{
"_id": 926,
"id": 1164,
"pid": 141,
"city_code": "101091008",
"city_name": "肥乡县"
},
{
"_id": 927,
"id": 1165,
"pid": 141,
"city_code": "101091009",
"city_name": "永年县"
},
{
"_id": 928,
"id": 1166,
"pid": 141,
"city_code": "101091010",
"city_name": "邱县"
},
{
"_id": 929,
"id": 1167,
"pid": 141,
"city_code": "101091011",
"city_name": "鸡泽县"
},
{
"_id": 930,
"id": 1168,
"pid": 141,
"city_code": "101091012",
"city_name": "广平县"
},
{
"_id": 931,
"id": 1169,
"pid": 141,
"city_code": "101091013",
"city_name": "馆陶县"
},
{
"_id": 932,
"id": 1170,
"pid": 141,
"city_code": "101091014",
"city_name": "魏县"
},
{
"_id": 933,
"id": 1171,
"pid": 141,
"city_code": "101091015",
"city_name": "曲周县"
},
{
"_id": 934,
"id": 1173,
"pid": 142,
"city_code": "101090810",
"city_name": "冀州市"
},
{
"_id": 935,
"id": 1174,
"pid": 142,
"city_code": "101090811",
"city_name": "深州市"
},
{
"_id": 936,
"id": 1175,
"pid": 142,
"city_code": "101090802",
"city_name": "枣强县"
},
{
"_id": 937,
"id": 1176,
"pid": 142,
"city_code": "101090803",
"city_name": "武邑县"
},
{
"_id": 938,
"id": 1177,
"pid": 142,
"city_code": "101090804",
"city_name": "武强县"
},
{
"_id": 939,
"id": 1178,
"pid": 142,
"city_code": "101090805",
"city_name": "饶阳县"
},
{
"_id": 940,
"id": 1179,
"pid": 142,
"city_code": "101090806",
"city_name": "安平县"
},
{
"_id": 941,
"id": 1180,
"pid": 142,
"city_code": "101090807",
"city_name": "故城县"
},
{
"_id": 942,
"id": 1181,
"pid": 142,
"city_code": "101090808",
"city_name": "景县"
},
{
"_id": 943,
"id": 1182,
"pid": 142,
"city_code": "101090809",
"city_name": "阜城县"
},
{
"_id": 944,
"id": 1185,
"pid": 143,
"city_code": "101090608",
"city_name": "霸州市"
},
{
"_id": 945,
"id": 1186,
"pid": 143,
"city_code": "101090609",
"city_name": "三河市"
},
{
"_id": 946,
"id": 1187,
"pid": 143,
"city_code": "101090602",
"city_name": "固安县"
},
{
"_id": 947,
"id": 1188,
"pid": 143,
"city_code": "101090603",
"city_name": "永清县"
},
{
"_id": 948,
"id": 1189,
"pid": 143,
"city_code": "101090604",
"city_name": "香河县"
},
{
"_id": 949,
"id": 1190,
"pid": 143,
"city_code": "101090605",
"city_name": "大城县"
},
{
"_id": 950,
"id": 1191,
"pid": 143,
"city_code": "101090606",
"city_name": "文安县"
},
{
"_id": 951,
"id": 1192,
"pid": 143,
"city_code": "101090607",
"city_name": "大厂县"
},
{
"_id": 952,
"id": 1195,
"pid": 144,
"city_code": "101091106",
"city_name": "北戴河区"
},
{
"_id": 953,
"id": 1196,
"pid": 144,
"city_code": "101091103",
"city_name": "昌黎县"
},
{
"_id": 954,
"id": 1197,
"pid": 144,
"city_code": "101091104",
"city_name": "抚宁县"
},
{
"_id": 955,
"id": 1198,
"pid": 144,
"city_code": "101091105",
"city_name": "卢龙县"
},
{
"_id": 956,
"id": 1199,
"pid": 144,
"city_code": "101091102",
"city_name": "青龙县"
},
{
"_id": 957,
"id": 1204,
"pid": 145,
"city_code": "101090502",
"city_name": "丰南区"
},
{
"_id": 958,
"id": 1205,
"pid": 145,
"city_code": "101090503",
"city_name": "丰润区"
},
{
"_id": 959,
"id": 1206,
"pid": 145,
"city_code": "101090510",
"city_name": "遵化市"
},
{
"_id": 960,
"id": 1207,
"pid": 145,
"city_code": "101090511",
"city_name": "迁安市"
},
{
"_id": 961,
"id": 1208,
"pid": 145,
"city_code": "101090504",
"city_name": "滦县"
},
{
"_id": 962,
"id": 1209,
"pid": 145,
"city_code": "101090505",
"city_name": "滦南县"
},
{
"_id": 963,
"id": 1210,
"pid": 145,
"city_code": "101090506",
"city_name": "乐亭县"
},
{
"_id": 964,
"id": 1211,
"pid": 145,
"city_code": "101090507",
"city_name": "迁西县"
},
{
"_id": 965,
"id": 1212,
"pid": 145,
"city_code": "101090508",
"city_name": "玉田县"
},
{
"_id": 966,
"id": 1213,
"pid": 145,
"city_code": "101090509",
"city_name": "唐海县"
},
{
"_id": 967,
"id": 1216,
"pid": 146,
"city_code": "101090916",
"city_name": "南宫市"
},
{
"_id": 968,
"id": 1217,
"pid": 146,
"city_code": "101090917",
"city_name": "沙河市"
},
{
"_id": 969,
"id": 1218,
"pid": 146,
"city_code": "101090901",
"city_name": "邢台县"
},
{
"_id": 970,
"id": 1219,
"pid": 146,
"city_code": "101090902",
"city_name": "临城县"
},
{
"_id": 971,
"id": 1220,
"pid": 146,
"city_code": "101090904",
"city_name": "内丘县"
},
{
"_id": 972,
"id": 1221,
"pid": 146,
"city_code": "101090905",
"city_name": "柏乡县"
},
{
"_id": 973,
"id": 1222,
"pid": 146,
"city_code": "101090906",
"city_name": "隆尧县"
},
{
"_id": 974,
"id": 1223,
"pid": 146,
"city_code": "101090918",
"city_name": "任县"
},
{
"_id": 975,
"id": 1224,
"pid": 146,
"city_code": "101090907",
"city_name": "南和县"
},
{
"_id": 976,
"id": 1225,
"pid": 146,
"city_code": "101090908",
"city_name": "宁晋县"
},
{
"_id": 977,
"id": 1226,
"pid": 146,
"city_code": "101090909",
"city_name": "巨鹿县"
},
{
"_id": 978,
"id": 1227,
"pid": 146,
"city_code": "101090910",
"city_name": "新河县"
},
{
"_id": 979,
"id": 1228,
"pid": 146,
"city_code": "101090911",
"city_name": "广宗县"
},
{
"_id": 980,
"id": 1229,
"pid": 146,
"city_code": "101090912",
"city_name": "平乡县"
},
{
"_id": 981,
"id": 1230,
"pid": 146,
"city_code": "101090913",
"city_name": "威县"
},
{
"_id": 982,
"id": 1231,
"pid": 146,
"city_code": "101090914",
"city_name": "清河县"
},
{
"_id": 983,
"id": 1232,
"pid": 146,
"city_code": "101090915",
"city_name": "临西县"
},
{
"_id": 984,
"id": 1235,
"pid": 147,
"city_code": "101090302",
"city_name": "宣化区"
},
{
"_id": 985,
"id": 1237,
"pid": 147,
"city_code": "101090302",
"city_name": "宣化县"
},
{
"_id": 986,
"id": 1238,
"pid": 147,
"city_code": "101090303",
"city_name": "张北县"
},
{
"_id": 987,
"id": 1239,
"pid": 147,
"city_code": "101090304",
"city_name": "康保县"
},
{
"_id": 988,
"id": 1240,
"pid": 147,
"city_code": "101090305",
"city_name": "沽源县"
},
{
"_id": 989,
"id": 1241,
"pid": 147,
"city_code": "101090306",
"city_name": "尚义县"
},
{
"_id": 990,
"id": 1242,
"pid": 147,
"city_code": "101090307",
"city_name": "蔚县"
},
{
"_id": 991,
"id": 1243,
"pid": 147,
"city_code": "101090308",
"city_name": "阳原县"
},
{
"_id": 992,
"id": 1244,
"pid": 147,
"city_code": "101090309",
"city_name": "怀安县"
},
{
"_id": 993,
"id": 1245,
"pid": 147,
"city_code": "101090310",
"city_name": "万全县"
},
{
"_id": 994,
"id": 1246,
"pid": 147,
"city_code": "101090311",
"city_name": "怀来县"
},
{
"_id": 995,
"id": 1247,
"pid": 147,
"city_code": "101090312",
"city_name": "涿鹿县"
},
{
"_id": 996,
"id": 1248,
"pid": 147,
"city_code": "101090313",
"city_name": "赤城县"
},
{
"_id": 997,
"id": 1249,
"pid": 147,
"city_code": "101090314",
"city_name": "崇礼县"
},
{
"_id": 998,
"id": 1255,
"pid": 148,
"city_code": "101180108",
"city_name": "上街区"
},
{
"_id": 999,
"id": 1261,
"pid": 148,
"city_code": "101180102",
"city_name": "巩义市"
},
{
"_id": 1000,
"id": 1262,
"pid": 148,
"city_code": "101180103",
"city_name": "荥阳市"
},
{
"_id": 1001,
"id": 1263,
"pid": 148,
"city_code": "101180105",
"city_name": "新密市"
},
{
"_id": 1002,
"id": 1264,
"pid": 148,
"city_code": "101180106",
"city_name": "新郑市"
},
{
"_id": 1003,
"id": 1265,
"pid": 148,
"city_code": "101180104",
"city_name": "登封市"
},
{
"_id": 1004,
"id": 1266,
"pid": 148,
"city_code": "101180107",
"city_name": "中牟县"
},
{
"_id": 1005,
"id": 1272,
"pid": 149,
"city_code": "101180911",
"city_name": "吉利区"
},
{
"_id": 1006,
"id": 1273,
"pid": 149,
"city_code": "101180908",
"city_name": "偃师市"
},
{
"_id": 1007,
"id": 1274,
"pid": 149,
"city_code": "101180903",
"city_name": "孟津县"
},
{
"_id": 1008,
"id": 1275,
"pid": 149,
"city_code": "101180902",
"city_name": "新安县"
},
{
"_id": 1009,
"id": 1276,
"pid": 149,
"city_code": "101180909",
"city_name": "栾川县"
},
{
"_id": 1010,
"id": 1277,
"pid": 149,
"city_code": "101180907",
"city_name": "嵩县"
},
{
"_id": 1011,
"id": 1278,
"pid": 149,
"city_code": "101180910",
"city_name": "汝阳县"
},
{
"_id": 1012,
"id": 1279,
"pid": 149,
"city_code": "101180904",
"city_name": "宜阳县"
},
{
"_id": 1013,
"id": 1280,
"pid": 149,
"city_code": "101180905",
"city_name": "洛宁县"
},
{
"_id": 1014,
"id": 1281,
"pid": 149,
"city_code": "101180906",
"city_name": "伊川县"
},
{
"_id": 1015,
"id": 1287,
"pid": 150,
"city_code": "101180802",
"city_name": "杞县"
},
{
"_id": 1016,
"id": 1288,
"pid": 150,
"city_code": "101180804",
"city_name": "通许县"
},
{
"_id": 1017,
"id": 1289,
"pid": 150,
"city_code": "101180803",
"city_name": "尉氏县"
},
{
"_id": 1018,
"id": 1290,
"pid": 150,
"city_code": "101180801",
"city_name": "开封县"
},
{
"_id": 1019,
"id": 1291,
"pid": 150,
"city_code": "101180805",
"city_name": "兰考县"
},
{
"_id": 1020,
"id": 1296,
"pid": 151,
"city_code": "101180205",
"city_name": "林州市"
},
{
"_id": 1021,
"id": 1297,
"pid": 151,
"city_code": "101180201",
"city_name": "安阳县"
},
{
"_id": 1022,
"id": 1298,
"pid": 151,
"city_code": "101180202",
"city_name": "汤阴县"
},
{
"_id": 1023,
"id": 1299,
"pid": 151,
"city_code": "101180203",
"city_name": "滑县"
},
{
"_id": 1024,
"id": 1300,
"pid": 151,
"city_code": "101180204",
"city_name": "内黄县"
},
{
"_id": 1025,
"id": 1304,
"pid": 152,
"city_code": "101181202",
"city_name": "浚县"
},
{
"_id": 1026,
"id": 1305,
"pid": 152,
"city_code": "101181203",
"city_name": "淇县"
},
{
"_id": 1027,
"id": 1306,
"pid": 153,
"city_code": "101181801",
"city_name": "济源市"
},
{
"_id": 1028,
"id": 1311,
"pid": 154,
"city_code": "101181104",
"city_name": "沁阳市"
},
{
"_id": 1029,
"id": 1312,
"pid": 154,
"city_code": "101181108",
"city_name": "孟州市"
},
{
"_id": 1030,
"id": 1313,
"pid": 154,
"city_code": "101181102",
"city_name": "修武县"
},
{
"_id": 1031,
"id": 1314,
"pid": 154,
"city_code": "101181106",
"city_name": "博爱县"
},
{
"_id": 1032,
"id": 1315,
"pid": 154,
"city_code": "101181103",
"city_name": "武陟县"
},
{
"_id": 1033,
"id": 1316,
"pid": 154,
"city_code": "101181107",
"city_name": "温县"
},
{
"_id": 1034,
"id": 1319,
"pid": 155,
"city_code": "101180711",
"city_name": "邓州市"
},
{
"_id": 1035,
"id": 1320,
"pid": 155,
"city_code": "101180702",
"city_name": "南召县"
},
{
"_id": 1036,
"id": 1321,
"pid": 155,
"city_code": "101180703",
"city_name": "方城县"
},
{
"_id": 1037,
"id": 1322,
"pid": 155,
"city_code": "101180705",
"city_name": "西峡县"
},
{
"_id": 1038,
"id": 1323,
"pid": 155,
"city_code": "101180707",
"city_name": "镇平县"
},
{
"_id": 1039,
"id": 1324,
"pid": 155,
"city_code": "101180706",
"city_name": "内乡县"
},
{
"_id": 1040,
"id": 1325,
"pid": 155,
"city_code": "101180708",
"city_name": "淅川县"
},
{
"_id": 1041,
"id": 1326,
"pid": 155,
"city_code": "101180704",
"city_name": "社旗县"
},
{
"_id": 1042,
"id": 1327,
"pid": 155,
"city_code": "101180710",
"city_name": "唐河县"
},
{
"_id": 1043,
"id": 1328,
"pid": 155,
"city_code": "101180709",
"city_name": "新野县"
},
{
"_id": 1044,
"id": 1329,
"pid": 155,
"city_code": "101180712",
"city_name": "桐柏县"
},
{
"_id": 1045,
"id": 1333,
"pid": 156,
"city_code": "101180508",
"city_name": "石龙区"
},
{
"_id": 1046,
"id": 1334,
"pid": 156,
"city_code": "101180506",
"city_name": "舞钢市"
},
{
"_id": 1047,
"id": 1335,
"pid": 156,
"city_code": "101180504",
"city_name": "汝州市"
},
{
"_id": 1048,
"id": 1336,
"pid": 156,
"city_code": "101180503",
"city_name": "宝丰县"
},
{
"_id": 1049,
"id": 1337,
"pid": 156,
"city_code": "101180505",
"city_name": "叶县"
},
{
"_id": 1050,
"id": 1338,
"pid": 156,
"city_code": "101180507",
"city_name": "鲁山县"
},
{
"_id": 1051,
"id": 1339,
"pid": 156,
"city_code": "101180502",
"city_name": "郏县"
},
{
"_id": 1052,
"id": 1341,
"pid": 157,
"city_code": "101181705",
"city_name": "义马市"
},
{
"_id": 1053,
"id": 1342,
"pid": 157,
"city_code": "101181702",
"city_name": "灵宝市"
},
{
"_id": 1054,
"id": 1343,
"pid": 157,
"city_code": "101181703",
"city_name": "渑池县"
},
{
"_id": 1055,
"id": 1344,
"pid": 157,
"city_code": "101181706",
"city_name": "陕县"
},
{
"_id": 1056,
"id": 1345,
"pid": 157,
"city_code": "101181704",
"city_name": "卢氏县"
},
{
"_id": 1057,
"id": 1347,
"pid": 158,
"city_code": "101181002",
"city_name": "睢阳区"
},
{
"_id": 1058,
"id": 1348,
"pid": 158,
"city_code": "101181009",
"city_name": "永城市"
},
{
"_id": 1059,
"id": 1349,
"pid": 158,
"city_code": "101181004",
"city_name": "民权县"
},
{
"_id": 1060,
"id": 1350,
"pid": 158,
"city_code": "101181003",
"city_name": "睢县"
},
{
"_id": 1061,
"id": 1351,
"pid": 158,
"city_code": "101181007",
"city_name": "宁陵县"
},
{
"_id": 1062,
"id": 1352,
"pid": 158,
"city_code": "101181005",
"city_name": "虞城县"
},
{
"_id": 1063,
"id": 1353,
"pid": 158,
"city_code": "101181006",
"city_name": "柘城县"
},
{
"_id": 1064,
"id": 1354,
"pid": 158,
"city_code": "101181008",
"city_name": "夏邑县"
},
{
"_id": 1065,
"id": 1359,
"pid": 159,
"city_code": "101180305",
"city_name": "卫辉市"
},
{
"_id": 1066,
"id": 1360,
"pid": 159,
"city_code": "101180304",
"city_name": "辉县市"
},
{
"_id": 1067,
"id": 1361,
"pid": 159,
"city_code": "101180301",
"city_name": "新乡县"
},
{
"_id": 1068,
"id": 1362,
"pid": 159,
"city_code": "101180302",
"city_name": "获嘉县"
},
{
"_id": 1069,
"id": 1363,
"pid": 159,
"city_code": "101180303",
"city_name": "原阳县"
},
{
"_id": 1070,
"id": 1364,
"pid": 159,
"city_code": "101180306",
"city_name": "延津县"
},
{
"_id": 1071,
"id": 1365,
"pid": 159,
"city_code": "101180307",
"city_name": "封丘县"
},
{
"_id": 1072,
"id": 1366,
"pid": 159,
"city_code": "101180308",
"city_name": "长垣县"
},
{
"_id": 1073,
"id": 1369,
"pid": 160,
"city_code": "101180603",
"city_name": "罗山县"
},
{
"_id": 1074,
"id": 1370,
"pid": 160,
"city_code": "101180604",
"city_name": "光山县"
},
{
"_id": 1075,
"id": 1371,
"pid": 160,
"city_code": "101180605",
"city_name": "新县"
},
{
"_id": 1076,
"id": 1372,
"pid": 160,
"city_code": "101180609",
"city_name": "商城县"
},
{
"_id": 1077,
"id": 1373,
"pid": 160,
"city_code": "101180608",
"city_name": "固始县"
},
{
"_id": 1078,
"id": 1374,
"pid": 160,
"city_code": "101180607",
"city_name": "潢川县"
},
{
"_id": 1079,
"id": 1375,
"pid": 160,
"city_code": "101180606",
"city_name": "淮滨县"
},
{
"_id": 1080,
"id": 1376,
"pid": 160,
"city_code": "101180602",
"city_name": "息县"
},
{
"_id": 1081,
"id": 1378,
"pid": 161,
"city_code": "101180405",
"city_name": "禹州市"
},
{
"_id": 1082,
"id": 1379,
"pid": 161,
"city_code": "101180404",
"city_name": "长葛市"
},
{
"_id": 1083,
"id": 1380,
"pid": 161,
"city_code": "101180401",
"city_name": "许昌县"
},
{
"_id": 1084,
"id": 1381,
"pid": 161,
"city_code": "101180402",
"city_name": "鄢陵县"
},
{
"_id": 1085,
"id": 1382,
"pid": 161,
"city_code": "101180403",
"city_name": "襄城县"
},
{
"_id": 1086,
"id": 1384,
"pid": 162,
"city_code": "101181407",
"city_name": "项城市"
},
{
"_id": 1087,
"id": 1385,
"pid": 162,
"city_code": "101181402",
"city_name": "扶沟县"
},
{
"_id": 1088,
"id": 1386,
"pid": 162,
"city_code": "101181405",
"city_name": "西华县"
},
{
"_id": 1089,
"id": 1387,
"pid": 162,
"city_code": "101181406",
"city_name": "商水县"
},
{
"_id": 1090,
"id": 1388,
"pid": 162,
"city_code": "101181410",
"city_name": "沈丘县"
},
{
"_id": 1091,
"id": 1389,
"pid": 162,
"city_code": "101181408",
"city_name": "郸城县"
},
{
"_id": 1092,
"id": 1390,
"pid": 162,
"city_code": "101181404",
"city_name": "淮阳县"
},
{
"_id": 1093,
"id": 1391,
"pid": 162,
"city_code": "101181403",
"city_name": "太康县"
},
{
"_id": 1094,
"id": 1392,
"pid": 162,
"city_code": "101181409",
"city_name": "鹿邑县"
},
{
"_id": 1095,
"id": 1394,
"pid": 163,
"city_code": "101181602",
"city_name": "西平县"
},
{
"_id": 1096,
"id": 1395,
"pid": 163,
"city_code": "101181604",
"city_name": "上蔡县"
},
{
"_id": 1097,
"id": 1396,
"pid": 163,
"city_code": "101181607",
"city_name": "平舆县"
},
{
"_id": 1098,
"id": 1397,
"pid": 163,
"city_code": "101181610",
"city_name": "正阳县"
},
{
"_id": 1099,
"id": 1398,
"pid": 163,
"city_code": "101181609",
"city_name": "确山县"
},
{
"_id": 1100,
"id": 1399,
"pid": 163,
"city_code": "101181606",
"city_name": "泌阳县"
},
{
"_id": 1101,
"id": 1400,
"pid": 163,
"city_code": "101181605",
"city_name": "汝南县"
},
{
"_id": 1102,
"id": 1401,
"pid": 163,
"city_code": "101181603",
"city_name": "遂平县"
},
{
"_id": 1103,
"id": 1402,
"pid": 163,
"city_code": "101181608",
"city_name": "新蔡县"
},
{
"_id": 1104,
"id": 1406,
"pid": 164,
"city_code": "101181503",
"city_name": "舞阳县"
},
{
"_id": 1105,
"id": 1407,
"pid": 164,
"city_code": "101181502",
"city_name": "临颍县"
},
{
"_id": 1106,
"id": 1409,
"pid": 165,
"city_code": "101181304",
"city_name": "清丰县"
},
{
"_id": 1107,
"id": 1410,
"pid": 165,
"city_code": "101181303",
"city_name": "南乐县"
},
{
"_id": 1108,
"id": 1411,
"pid": 165,
"city_code": "101181305",
"city_name": "范县"
},
{
"_id": 1109,
"id": 1412,
"pid": 165,
"city_code": "101181302",
"city_name": "台前县"
},
{
"_id": 1110,
"id": 1413,
"pid": 165,
"city_code": "101181301",
"city_name": "濮阳县"
},
{
"_id": 1111,
"id": 1421,
"pid": 166,
"city_code": "101050104",
"city_name": "阿城区"
},
{
"_id": 1112,
"id": 1422,
"pid": 166,
"city_code": "101050103",
"city_name": "呼兰区"
},
{
"_id": 1113,
"id": 1424,
"pid": 166,
"city_code": "101050111",
"city_name": "尚志市"
},
{
"_id": 1114,
"id": 1425,
"pid": 166,
"city_code": "101050102",
"city_name": "双城市"
},
{
"_id": 1115,
"id": 1426,
"pid": 166,
"city_code": "101050112",
"city_name": "五常市"
},
{
"_id": 1116,
"id": 1427,
"pid": 166,
"city_code": "101050109",
"city_name": "方正县"
},
{
"_id": 1117,
"id": 1428,
"pid": 166,
"city_code": "101050105",
"city_name": "宾县"
},
{
"_id": 1118,
"id": 1429,
"pid": 166,
"city_code": "101050106",
"city_name": "依兰县"
},
{
"_id": 1119,
"id": 1430,
"pid": 166,
"city_code": "101050107",
"city_name": "巴彦县"
},
{
"_id": 1120,
"id": 1431,
"pid": 166,
"city_code": "101050108",
"city_name": "通河县"
},
{
"_id": 1121,
"id": 1432,
"pid": 166,
"city_code": "101050113",
"city_name": "木兰县"
},
{
"_id": 1122,
"id": 1433,
"pid": 166,
"city_code": "101050110",
"city_name": "延寿县"
},
{
"_id": 1123,
"id": 1439,
"pid": 167,
"city_code": "101050903",
"city_name": "肇州县"
},
{
"_id": 1124,
"id": 1440,
"pid": 167,
"city_code": "101050904",
"city_name": "肇源县"
},
{
"_id": 1125,
"id": 1441,
"pid": 167,
"city_code": "101050902",
"city_name": "林甸县"
},
{
"_id": 1126,
"id": 1442,
"pid": 167,
"city_code": "101050905",
"city_name": "杜尔伯特"
},
{
"_id": 1127,
"id": 1443,
"pid": 168,
"city_code": "101050704",
"city_name": "呼玛县"
},
{
"_id": 1128,
"id": 1444,
"pid": 168,
"city_code": "101050703",
"city_name": "漠河县"
},
{
"_id": 1129,
"id": 1445,
"pid": 168,
"city_code": "101050702",
"city_name": "塔河县"
},
{
"_id": 1130,
"id": 1448,
"pid": 169,
"city_code": "101051206",
"city_name": "南山区"
},
{
"_id": 1131,
"id": 1452,
"pid": 169,
"city_code": "101051203",
"city_name": "萝北县"
},
{
"_id": 1132,
"id": 1453,
"pid": 169,
"city_code": "101051202",
"city_name": "绥滨县"
},
{
"_id": 1133,
"id": 1455,
"pid": 170,
"city_code": "101050605",
"city_name": "五大连池市"
},
{
"_id": 1134,
"id": 1456,
"pid": 170,
"city_code": "101050606",
"city_name": "北安市"
},
{
"_id": 1135,
"id": 1457,
"pid": 170,
"city_code": "101050602",
"city_name": "嫩江县"
},
{
"_id": 1136,
"id": 1458,
"pid": 170,
"city_code": "101050604",
"city_name": "逊克县"
},
{
"_id": 1137,
"id": 1459,
"pid": 170,
"city_code": "101050603",
"city_name": "孙吴县"
},
{
"_id": 1138,
"id": 1465,
"pid": 171,
"city_code": "101051102",
"city_name": "虎林市"
},
{
"_id": 1139,
"id": 1466,
"pid": 171,
"city_code": "101051103",
"city_name": "密山市"
},
{
"_id": 1140,
"id": 1467,
"pid": 171,
"city_code": "101051104",
"city_name": "鸡东县"
},
{
"_id": 1141,
"id": 1472,
"pid": 172,
"city_code": "101050406",
"city_name": "同江市"
},
{
"_id": 1142,
"id": 1473,
"pid": 172,
"city_code": "101050407",
"city_name": "富锦市"
},
{
"_id": 1143,
"id": 1474,
"pid": 172,
"city_code": "101050405",
"city_name": "桦南县"
},
{
"_id": 1144,
"id": 1475,
"pid": 172,
"city_code": "101050404",
"city_name": "桦川县"
},
{
"_id": 1145,
"id": 1476,
"pid": 172,
"city_code": "101050402",
"city_name": "汤原县"
},
{
"_id": 1146,
"id": 1477,
"pid": 172,
"city_code": "101050403",
"city_name": "抚远县"
},
{
"_id": 1147,
"id": 1482,
"pid": 173,
"city_code": "101050305",
"city_name": "绥芬河市"
},
{
"_id": 1148,
"id": 1483,
"pid": 173,
"city_code": "101050302",
"city_name": "海林市"
},
{
"_id": 1149,
"id": 1484,
"pid": 173,
"city_code": "101050306",
"city_name": "宁安市"
},
{
"_id": 1150,
"id": 1485,
"pid": 173,
"city_code": "101050303",
"city_name": "穆棱市"
},
{
"_id": 1151,
"id": 1486,
"pid": 173,
"city_code": "101050307",
"city_name": "东宁县"
},
{
"_id": 1152,
"id": 1487,
"pid": 173,
"city_code": "101050304",
"city_name": "林口县"
},
{
"_id": 1153,
"id": 1491,
"pid": 174,
"city_code": "101051002",
"city_name": "勃利县"
},
{
"_id": 1154,
"id": 1499,
"pid": 175,
"city_code": "101050202",
"city_name": "讷河市"
},
{
"_id": 1155,
"id": 1500,
"pid": 175,
"city_code": "101050203",
"city_name": "龙江县"
},
{
"_id": 1156,
"id": 1501,
"pid": 175,
"city_code": "101050206",
"city_name": "依安县"
},
{
"_id": 1157,
"id": 1502,
"pid": 175,
"city_code": "101050210",
"city_name": "泰来县"
},
{
"_id": 1158,
"id": 1503,
"pid": 175,
"city_code": "101050204",
"city_name": "甘南县"
},
{
"_id": 1159,
"id": 1504,
"pid": 175,
"city_code": "101050205",
"city_name": "富裕县"
},
{
"_id": 1160,
"id": 1505,
"pid": 175,
"city_code": "101050208",
"city_name": "克山县"
},
{
"_id": 1161,
"id": 1506,
"pid": 175,
"city_code": "101050209",
"city_name": "克东县"
},
{
"_id": 1162,
"id": 1507,
"pid": 175,
"city_code": "101050207",
"city_name": "拜泉县"
},
{
"_id": 1163,
"id": 1512,
"pid": 176,
"city_code": "101051302",
"city_name": "集贤县"
},
{
"_id": 1164,
"id": 1513,
"pid": 176,
"city_code": "101051305",
"city_name": "友谊县"
},
{
"_id": 1165,
"id": 1514,
"pid": 176,
"city_code": "101051303",
"city_name": "宝清县"
},
{
"_id": 1166,
"id": 1515,
"pid": 176,
"city_code": "101051304",
"city_name": "饶河县"
},
{
"_id": 1167,
"id": 1517,
"pid": 177,
"city_code": "101050503",
"city_name": "安达市"
},
{
"_id": 1168,
"id": 1518,
"pid": 177,
"city_code": "101050502",
"city_name": "肇东市"
},
{
"_id": 1169,
"id": 1519,
"pid": 177,
"city_code": "101050504",
"city_name": "海伦市"
},
{
"_id": 1170,
"id": 1520,
"pid": 177,
"city_code": "101050506",
"city_name": "望奎县"
},
{
"_id": 1171,
"id": 1521,
"pid": 177,
"city_code": "101050507",
"city_name": "兰西县"
},
{
"_id": 1172,
"id": 1522,
"pid": 177,
"city_code": "101050508",
"city_name": "青冈县"
},
{
"_id": 1173,
"id": 1523,
"pid": 177,
"city_code": "101050509",
"city_name": "庆安县"
},
{
"_id": 1174,
"id": 1524,
"pid": 177,
"city_code": "101050505",
"city_name": "明水县"
},
{
"_id": 1175,
"id": 1525,
"pid": 177,
"city_code": "101050510",
"city_name": "绥棱县"
},
{
"_id": 1176,
"id": 1526,
"pid": 178,
"city_code": "101050801",
"city_name": "伊春区"
},
{
"_id": 1177,
"id": 1536,
"pid": 178,
"city_code": "101050803",
"city_name": "五营区"
},
{
"_id": 1178,
"id": 1540,
"pid": 178,
"city_code": "101050802",
"city_name": "乌伊岭区"
},
{
"_id": 1179,
"id": 1541,
"pid": 178,
"city_code": "101050804",
"city_name": "铁力市"
},
{
"_id": 1180,
"id": 1542,
"pid": 178,
"city_code": "101050805",
"city_name": "嘉荫县"
},
{
"_id": 1181,
"id": 1550,
"pid": 179,
"city_code": "101200106",
"city_name": "东西湖区"
},
{
"_id": 1182,
"id": 1552,
"pid": 179,
"city_code": "101200102",
"city_name": "蔡甸区"
},
{
"_id": 1183,
"id": 1553,
"pid": 179,
"city_code": "101200105",
"city_name": "江夏区"
},
{
"_id": 1184,
"id": 1554,
"pid": 179,
"city_code": "101200103",
"city_name": "黄陂区"
},
{
"_id": 1185,
"id": 1555,
"pid": 179,
"city_code": "101200104",
"city_name": "新洲区"
},
{
"_id": 1186,
"id": 1560,
"pid": 181,
"city_code": "101200302",
"city_name": "梁子湖区"
},
{
"_id": 1187,
"id": 1562,
"pid": 182,
"city_code": "101200503",
"city_name": "麻城市"
},
{
"_id": 1188,
"id": 1563,
"pid": 182,
"city_code": "101200509",
"city_name": "武穴市"
},
{
"_id": 1189,
"id": 1564,
"pid": 182,
"city_code": "101200510",
"city_name": "团风县"
},
{
"_id": 1190,
"id": 1565,
"pid": 182,
"city_code": "101200502",
"city_name": "红安县"
},
{
"_id": 1191,
"id": 1566,
"pid": 182,
"city_code": "101200504",
"city_name": "罗田县"
},
{
"_id": 1192,
"id": 1567,
"pid": 182,
"city_code": "101200505",
"city_name": "英山县"
},
{
"_id": 1193,
"id": 1568,
"pid": 182,
"city_code": "101200506",
"city_name": "浠水县"
},
{
"_id": 1194,
"id": 1569,
"pid": 182,
"city_code": "101200507",
"city_name": "蕲春县"
},
{
"_id": 1195,
"id": 1570,
"pid": 182,
"city_code": "101200508",
"city_name": "黄梅县"
},
{
"_id": 1196,
"id": 1572,
"pid": 183,
"city_code": "101200606",
"city_name": "西塞山区"
},
{
"_id": 1197,
"id": 1573,
"pid": 183,
"city_code": "101200605",
"city_name": "下陆区"
},
{
"_id": 1198,
"id": 1574,
"pid": 183,
"city_code": "101200604",
"city_name": "铁山区"
},
{
"_id": 1199,
"id": 1575,
"pid": 183,
"city_code": "101200602",
"city_name": "大冶市"
},
{
"_id": 1200,
"id": 1576,
"pid": 183,
"city_code": "101200603",
"city_name": "阳新县"
},
{
"_id": 1201,
"id": 1578,
"pid": 184,
"city_code": "101201404",
"city_name": "掇刀区"
},
{
"_id": 1202,
"id": 1579,
"pid": 184,
"city_code": "101201402",
"city_name": "钟祥市"
},
{
"_id": 1203,
"id": 1580,
"pid": 184,
"city_code": "101201403",
"city_name": "京山县"
},
{
"_id": 1204,
"id": 1581,
"pid": 184,
"city_code": "101201405",
"city_name": "沙洋县"
},
{
"_id": 1205,
"id": 1583,
"pid": 185,
"city_code": "101200801",
"city_name": "荆州区"
},
{
"_id": 1206,
"id": 1584,
"pid": 185,
"city_code": "101200804",
"city_name": "石首市"
},
{
"_id": 1207,
"id": 1585,
"pid": 185,
"city_code": "101200806",
"city_name": "洪湖市"
},
{
"_id": 1208,
"id": 1586,
"pid": 185,
"city_code": "101200807",
"city_name": "松滋市"
},
{
"_id": 1209,
"id": 1587,
"pid": 185,
"city_code": "101200803",
"city_name": "公安县"
},
{
"_id": 1210,
"id": 1588,
"pid": 185,
"city_code": "101200805",
"city_name": "监利县"
},
{
"_id": 1211,
"id": 1589,
"pid": 185,
"city_code": "101200802",
"city_name": "江陵县"
},
{
"_id": 1212,
"id": 1590,
"pid": 186,
"city_code": "101201701",
"city_name": "潜江市"
},
{
"_id": 1213,
"id": 1592,
"pid": 188,
"city_code": "101201109",
"city_name": "张湾区"
},
{
"_id": 1214,
"id": 1593,
"pid": 188,
"city_code": "101201108",
"city_name": "茅箭区"
},
{
"_id": 1215,
"id": 1594,
"pid": 188,
"city_code": "101201107",
"city_name": "丹江口市"
},
{
"_id": 1216,
"id": 1595,
"pid": 188,
"city_code": "101201104",
"city_name": "郧县"
},
{
"_id": 1217,
"id": 1596,
"pid": 188,
"city_code": "101201103",
"city_name": "郧西县"
},
{
"_id": 1218,
"id": 1597,
"pid": 188,
"city_code": "101201105",
"city_name": "竹山县"
},
{
"_id": 1219,
"id": 1598,
"pid": 188,
"city_code": "101201102",
"city_name": "竹溪县"
},
{
"_id": 1220,
"id": 1599,
"pid": 188,
"city_code": "101201106",
"city_name": "房县"
},
{
"_id": 1221,
"id": 1601,
"pid": 189,
"city_code": "101201302",
"city_name": "广水市"
},
{
"_id": 1222,
"id": 1602,
"pid": 190,
"city_code": "101201501",
"city_name": "天门市"
},
{
"_id": 1223,
"id": 1604,
"pid": 191,
"city_code": "101200702",
"city_name": "赤壁市"
},
{
"_id": 1224,
"id": 1605,
"pid": 191,
"city_code": "101200703",
"city_name": "嘉鱼县"
},
{
"_id": 1225,
"id": 1606,
"pid": 191,
"city_code": "101200705",
"city_name": "通城县"
},
{
"_id": 1226,
"id": 1607,
"pid": 191,
"city_code": "101200704",
"city_name": "崇阳县"
},
{
"_id": 1227,
"id": 1608,
"pid": 191,
"city_code": "101200706",
"city_name": "通山县"
},
{
"_id": 1228,
"id": 1611,
"pid": 192,
"city_code": "101200202",
"city_name": "襄州区"
},
{
"_id": 1229,
"id": 1612,
"pid": 192,
"city_code": "101200206",
"city_name": "老河口市"
},
{
"_id": 1230,
"id": 1613,
"pid": 192,
"city_code": "101200208",
"city_name": "枣阳市"
},
{
"_id": 1231,
"id": 1614,
"pid": 192,
"city_code": "101200205",
"city_name": "宜城市"
},
{
"_id": 1232,
"id": 1615,
"pid": 192,
"city_code": "101200204",
"city_name": "南漳县"
},
{
"_id": 1233,
"id": 1616,
"pid": 192,
"city_code": "101200207",
"city_name": "谷城县"
},
{
"_id": 1234,
"id": 1617,
"pid": 192,
"city_code": "101200203",
"city_name": "保康县"
},
{
"_id": 1235,
"id": 1619,
"pid": 193,
"city_code": "101200405",
"city_name": "应城市"
},
{
"_id": 1236,
"id": 1620,
"pid": 193,
"city_code": "101200402",
"city_name": "安陆市"
},
{
"_id": 1237,
"id": 1621,
"pid": 193,
"city_code": "101200406",
"city_name": "汉川市"
},
{
"_id": 1238,
"id": 1622,
"pid": 193,
"city_code": "101200407",
"city_name": "孝昌县"
},
{
"_id": 1239,
"id": 1623,
"pid": 193,
"city_code": "101200404",
"city_name": "大悟县"
},
{
"_id": 1240,
"id": 1624,
"pid": 193,
"city_code": "101200403",
"city_name": "云梦县"
},
{
"_id": 1241,
"id": 1625,
"pid": 194,
"city_code": "101200908",
"city_name": "长阳县"
},
{
"_id": 1242,
"id": 1626,
"pid": 194,
"city_code": "101200906",
"city_name": "五峰县"
},
{
"_id": 1243,
"id": 1631,
"pid": 194,
"city_code": "101200912",
"city_name": "夷陵区"
},
{
"_id": 1244,
"id": 1632,
"pid": 194,
"city_code": "101200909",
"city_name": "宜都市"
},
{
"_id": 1245,
"id": 1633,
"pid": 194,
"city_code": "101200907",
"city_name": "当阳市"
},
{
"_id": 1246,
"id": 1634,
"pid": 194,
"city_code": "101200910",
"city_name": "枝江市"
},
{
"_id": 1247,
"id": 1635,
"pid": 194,
"city_code": "101200902",
"city_name": "远安县"
},
{
"_id": 1248,
"id": 1636,
"pid": 194,
"city_code": "101200904",
"city_name": "兴山县"
},
{
"_id": 1249,
"id": 1637,
"pid": 194,
"city_code": "101200903",
"city_name": "秭归县"
},
{
"_id": 1250,
"id": 1638,
"pid": 195,
"city_code": "101201001",
"city_name": "恩施市"
},
{
"_id": 1251,
"id": 1639,
"pid": 195,
"city_code": "101201002",
"city_name": "利川市"
},
{
"_id": 1252,
"id": 1640,
"pid": 195,
"city_code": "101201003",
"city_name": "建始县"
},
{
"_id": 1253,
"id": 1641,
"pid": 195,
"city_code": "101201008",
"city_name": "巴东县"
},
{
"_id": 1254,
"id": 1642,
"pid": 195,
"city_code": "101201005",
"city_name": "宣恩县"
},
{
"_id": 1255,
"id": 1643,
"pid": 195,
"city_code": "101201004",
"city_name": "咸丰县"
},
{
"_id": 1256,
"id": 1644,
"pid": 195,
"city_code": "101201007",
"city_name": "来凤县"
},
{
"_id": 1257,
"id": 1645,
"pid": 195,
"city_code": "101201006",
"city_name": "鹤峰县"
},
{
"_id": 1258,
"id": 1652,
"pid": 196,
"city_code": "101250103",
"city_name": "浏阳市"
},
{
"_id": 1259,
"id": 1653,
"pid": 196,
"city_code": "101250101",
"city_name": "长沙县"
},
{
"_id": 1260,
"id": 1654,
"pid": 196,
"city_code": "101250105",
"city_name": "望城县"
},
{
"_id": 1261,
"id": 1655,
"pid": 196,
"city_code": "101250102",
"city_name": "宁乡县"
},
{
"_id": 1262,
"id": 1657,
"pid": 197,
"city_code": "101251104",
"city_name": "武陵源区"
},
{
"_id": 1263,
"id": 1658,
"pid": 197,
"city_code": "101251103",
"city_name": "慈利县"
},
{
"_id": 1264,
"id": 1659,
"pid": 197,
"city_code": "101251102",
"city_name": "桑植县"
},
{
"_id": 1265,
"id": 1662,
"pid": 198,
"city_code": "101250608",
"city_name": "津市市"
},
{
"_id": 1266,
"id": 1663,
"pid": 198,
"city_code": "101250602",
"city_name": "安乡县"
},
{
"_id": 1267,
"id": 1664,
"pid": 198,
"city_code": "101250604",
"city_name": "汉寿县"
},
{
"_id": 1268,
"id": 1665,
"pid": 198,
"city_code": "101250605",
"city_name": "澧县"
},
{
"_id": 1269,
"id": 1666,
"pid": 198,
"city_code": "101250606",
"city_name": "临澧县"
},
{
"_id": 1270,
"id": 1667,
"pid": 198,
"city_code": "101250603",
"city_name": "桃源县"
},
{
"_id": 1271,
"id": 1668,
"pid": 198,
"city_code": "101250607",
"city_name": "石门县"
},
{
"_id": 1272,
"id": 1670,
"pid": 199,
"city_code": "101250512",
"city_name": "苏仙区"
},
{
"_id": 1273,
"id": 1671,
"pid": 199,
"city_code": "101250507",
"city_name": "资兴市"
},
{
"_id": 1274,
"id": 1672,
"pid": 199,
"city_code": "101250502",
"city_name": "桂阳县"
},
{
"_id": 1275,
"id": 1673,
"pid": 199,
"city_code": "101250504",
"city_name": "宜章县"
},
{
"_id": 1276,
"id": 1674,
"pid": 199,
"city_code": "101250510",
"city_name": "永兴县"
},
{
"_id": 1277,
"id": 1675,
"pid": 199,
"city_code": "101250503",
"city_name": "嘉禾县"
},
{
"_id": 1278,
"id": 1676,
"pid": 199,
"city_code": "101250505",
"city_name": "临武县"
},
{
"_id": 1279,
"id": 1677,
"pid": 199,
"city_code": "101250508",
"city_name": "汝城县"
},
{
"_id": 1280,
"id": 1678,
"pid": 199,
"city_code": "101250511",
"city_name": "桂东县"
},
{
"_id": 1281,
"id": 1679,
"pid": 199,
"city_code": "101250509",
"city_name": "安仁县"
},
{
"_id": 1282,
"id": 1684,
"pid": 200,
"city_code": "101250409",
"city_name": "南岳区"
},
{
"_id": 1283,
"id": 1685,
"pid": 200,
"city_code": "101250408",
"city_name": "耒阳市"
},
{
"_id": 1284,
"id": 1686,
"pid": 200,
"city_code": "101250406",
"city_name": "常宁市"
},
{
"_id": 1285,
"id": 1687,
"pid": 200,
"city_code": "101250405",
"city_name": "衡阳县"
},
{
"_id": 1286,
"id": 1688,
"pid": 200,
"city_code": "101250407",
"city_name": "衡南县"
},
{
"_id": 1287,
"id": 1689,
"pid": 200,
"city_code": "101250402",
"city_name": "衡山县"
},
{
"_id": 1288,
"id": 1690,
"pid": 200,
"city_code": "101250403",
"city_name": "衡东县"
},
{
"_id": 1289,
"id": 1691,
"pid": 200,
"city_code": "101250404",
"city_name": "祁东县"
},
{
"_id": 1290,
"id": 1692,
"pid": 201,
"city_code": "101251202",
"city_name": "鹤城区"
},
{
"_id": 1291,
"id": 1693,
"pid": 201,
"city_code": "101251205",
"city_name": "靖州县"
},
{
"_id": 1292,
"id": 1694,
"pid": 201,
"city_code": "101251208",
"city_name": "麻阳县"
},
{
"_id": 1293,
"id": 1695,
"pid": 201,
"city_code": "101251207",
"city_name": "通道县"
},
{
"_id": 1294,
"id": 1696,
"pid": 201,
"city_code": "101251209",
"city_name": "新晃县"
},
{
"_id": 1295,
"id": 1697,
"pid": 201,
"city_code": "101251210",
"city_name": "芷江县"
},
{
"_id": 1296,
"id": 1698,
"pid": 201,
"city_code": "101251203",
"city_name": "沅陵县"
},
{
"_id": 1297,
"id": 1699,
"pid": 201,
"city_code": "101251204",
"city_name": "辰溪县"
},
{
"_id": 1298,
"id": 1700,
"pid": 201,
"city_code": "101251211",
"city_name": "溆浦县"
},
{
"_id": 1299,
"id": 1701,
"pid": 201,
"city_code": "101251212",
"city_name": "中方县"
},
{
"_id": 1300,
"id": 1702,
"pid": 201,
"city_code": "101251206",
"city_name": "会同县"
},
{
"_id": 1301,
"id": 1703,
"pid": 201,
"city_code": "101251213",
"city_name": "洪江市"
},
{
"_id": 1302,
"id": 1705,
"pid": 202,
"city_code": "101250803",
"city_name": "冷水江市"
},
{
"_id": 1303,
"id": 1706,
"pid": 202,
"city_code": "101250806",
"city_name": "涟源市"
},
{
"_id": 1304,
"id": 1707,
"pid": 202,
"city_code": "101250802",
"city_name": "双峰县"
},
{
"_id": 1305,
"id": 1708,
"pid": 202,
"city_code": "101250805",
"city_name": "新化县"
},
{
"_id": 1306,
"id": 1709,
"pid": 203,
"city_code": "101250909",
"city_name": "城步县"
},
{
"_id": 1307,
"id": 1713,
"pid": 203,
"city_code": "101250908",
"city_name": "武冈市"
},
{
"_id": 1308,
"id": 1714,
"pid": 203,
"city_code": "101250905",
"city_name": "邵东县"
},
{
"_id": 1309,
"id": 1715,
"pid": 203,
"city_code": "101250904",
"city_name": "新邵县"
},
{
"_id": 1310,
"id": 1716,
"pid": 203,
"city_code": "101250910",
"city_name": "邵阳县"
},
{
"_id": 1311,
"id": 1717,
"pid": 203,
"city_code": "101250902",
"city_name": "隆回县"
},
{
"_id": 1312,
"id": 1718,
"pid": 203,
"city_code": "101250903",
"city_name": "洞口县"
},
{
"_id": 1313,
"id": 1719,
"pid": 203,
"city_code": "101250906",
"city_name": "绥宁县"
},
{
"_id": 1314,
"id": 1720,
"pid": 203,
"city_code": "101250907",
"city_name": "新宁县"
},
{
"_id": 1315,
"id": 1723,
"pid": 204,
"city_code": "101250203",
"city_name": "湘乡市"
},
{
"_id": 1316,
"id": 1724,
"pid": 204,
"city_code": "101250202",
"city_name": "韶山市"
},
{
"_id": 1317,
"id": 1725,
"pid": 204,
"city_code": "101250201",
"city_name": "湘潭县"
},
{
"_id": 1318,
"id": 1726,
"pid": 205,
"city_code": "101251501",
"city_name": "吉首市"
},
{
"_id": 1319,
"id": 1727,
"pid": 205,
"city_code": "101251506",
"city_name": "泸溪县"
},
{
"_id": 1320,
"id": 1728,
"pid": 205,
"city_code": "101251505",
"city_name": "凤凰县"
},
{
"_id": 1321,
"id": 1729,
"pid": 205,
"city_code": "101251508",
"city_name": "花垣县"
},
{
"_id": 1322,
"id": 1730,
"pid": 205,
"city_code": "101251502",
"city_name": "保靖县"
},
{
"_id": 1323,
"id": 1731,
"pid": 205,
"city_code": "101251504",
"city_name": "古丈县"
},
{
"_id": 1324,
"id": 1732,
"pid": 205,
"city_code": "101251503",
"city_name": "永顺县"
},
{
"_id": 1325,
"id": 1733,
"pid": 205,
"city_code": "101251507",
"city_name": "龙山县"
},
{
"_id": 1326,
"id": 1734,
"pid": 206,
"city_code": "101250701",
"city_name": "赫山区"
},
{
"_id": 1327,
"id": 1736,
"pid": 206,
"city_code": "101250705",
"city_name": "沅江市"
},
{
"_id": 1328,
"id": 1737,
"pid": 206,
"city_code": "101250702",
"city_name": "南县"
},
{
"_id": 1329,
"id": 1738,
"pid": 206,
"city_code": "101250703",
"city_name": "桃江县"
},
{
"_id": 1330,
"id": 1739,
"pid": 206,
"city_code": "101250704",
"city_name": "安化县"
},
{
"_id": 1331,
"id": 1740,
"pid": 207,
"city_code": "101251410",
"city_name": "江华县"
},
{
"_id": 1332,
"id": 1743,
"pid": 207,
"city_code": "101251402",
"city_name": "祁阳县"
},
{
"_id": 1333,
"id": 1744,
"pid": 207,
"city_code": "101251403",
"city_name": "东安县"
},
{
"_id": 1334,
"id": 1745,
"pid": 207,
"city_code": "101251404",
"city_name": "双牌县"
},
{
"_id": 1335,
"id": 1746,
"pid": 207,
"city_code": "101251405",
"city_name": "道县"
},
{
"_id": 1336,
"id": 1747,
"pid": 207,
"city_code": "101251407",
"city_name": "江永县"
},
{
"_id": 1337,
"id": 1748,
"pid": 207,
"city_code": "101251406",
"city_name": "宁远县"
},
{
"_id": 1338,
"id": 1749,
"pid": 207,
"city_code": "101251408",
"city_name": "蓝山县"
},
{
"_id": 1339,
"id": 1750,
"pid": 207,
"city_code": "101251409",
"city_name": "新田县"
},
{
"_id": 1340,
"id": 1754,
"pid": 208,
"city_code": "101251004",
"city_name": "汨罗市"
},
{
"_id": 1341,
"id": 1755,
"pid": 208,
"city_code": "101251006",
"city_name": "临湘市"
},
{
"_id": 1342,
"id": 1756,
"pid": 208,
"city_code": "101251001",
"city_name": "岳阳县"
},
{
"_id": 1343,
"id": 1757,
"pid": 208,
"city_code": "101251002",
"city_name": "华容县"
},
{
"_id": 1344,
"id": 1758,
"pid": 208,
"city_code": "101251003",
"city_name": "湘阴县"
},
{
"_id": 1345,
"id": 1759,
"pid": 208,
"city_code": "101251005",
"city_name": "平江县"
},
{
"_id": 1346,
"id": 1764,
"pid": 209,
"city_code": "101250303",
"city_name": "醴陵市"
},
{
"_id": 1347,
"id": 1765,
"pid": 209,
"city_code": "101250304",
"city_name": "株洲县"
},
{
"_id": 1348,
"id": 1766,
"pid": 209,
"city_code": "101250302",
"city_name": "攸县"
},
{
"_id": 1349,
"id": 1767,
"pid": 209,
"city_code": "101250305",
"city_name": "茶陵县"
},
{
"_id": 1350,
"id": 1768,
"pid": 209,
"city_code": "101250306",
"city_name": "炎陵县"
},
{
"_id": 1351,
"id": 1774,
"pid": 210,
"city_code": "101060106",
"city_name": "双阳区"
},
{
"_id": 1352,
"id": 1779,
"pid": 210,
"city_code": "101060103",
"city_name": "德惠市"
},
{
"_id": 1353,
"id": 1780,
"pid": 210,
"city_code": "101060104",
"city_name": "九台市"
},
{
"_id": 1354,
"id": 1781,
"pid": 210,
"city_code": "101060105",
"city_name": "榆树市"
},
{
"_id": 1355,
"id": 1782,
"pid": 210,
"city_code": "101060102",
"city_name": "农安县"
},
{
"_id": 1356,
"id": 1787,
"pid": 211,
"city_code": "101060204",
"city_name": "蛟河市"
},
{
"_id": 1357,
"id": 1788,
"pid": 211,
"city_code": "101060206",
"city_name": "桦甸市"
},
{
"_id": 1358,
"id": 1789,
"pid": 211,
"city_code": "101060202",
"city_name": "舒兰市"
},
{
"_id": 1359,
"id": 1790,
"pid": 211,
"city_code": "101060205",
"city_name": "磐石市"
},
{
"_id": 1360,
"id": 1791,
"pid": 211,
"city_code": "101060203",
"city_name": "永吉县"
},
{
"_id": 1361,
"id": 1793,
"pid": 212,
"city_code": "101060602",
"city_name": "洮南市"
},
{
"_id": 1362,
"id": 1794,
"pid": 212,
"city_code": "101060603",
"city_name": "大安市"
},
{
"_id": 1363,
"id": 1795,
"pid": 212,
"city_code": "101060604",
"city_name": "镇赉县"
},
{
"_id": 1364,
"id": 1796,
"pid": 212,
"city_code": "101060605",
"city_name": "通榆县"
},
{
"_id": 1365,
"id": 1797,
"pid": 213,
"city_code": "101060907",
"city_name": "江源区"
},
{
"_id": 1366,
"id": 1799,
"pid": 213,
"city_code": "101060905",
"city_name": "长白县"
},
{
"_id": 1367,
"id": 1800,
"pid": 213,
"city_code": "101060903",
"city_name": "临江市"
},
{
"_id": 1368,
"id": 1801,
"pid": 213,
"city_code": "101060906",
"city_name": "抚松县"
},
{
"_id": 1369,
"id": 1802,
"pid": 213,
"city_code": "101060902",
"city_name": "靖宇县"
},
{
"_id": 1370,
"id": 1805,
"pid": 214,
"city_code": "101060702",
"city_name": "东丰县"
},
{
"_id": 1371,
"id": 1806,
"pid": 214,
"city_code": "101060703",
"city_name": "东辽县"
},
{
"_id": 1372,
"id": 1809,
"pid": 215,
"city_code": "101060405",
"city_name": "伊通县"
},
{
"_id": 1373,
"id": 1810,
"pid": 215,
"city_code": "101060404",
"city_name": "公主岭市"
},
{
"_id": 1374,
"id": 1811,
"pid": 215,
"city_code": "101060402",
"city_name": "双辽市"
},
{
"_id": 1375,
"id": 1812,
"pid": 215,
"city_code": "101060403",
"city_name": "梨树县"
},
{
"_id": 1376,
"id": 1813,
"pid": 216,
"city_code": "101060803",
"city_name": "前郭尔罗斯"
},
{
"_id": 1377,
"id": 1815,
"pid": 216,
"city_code": "101060804",
"city_name": "长岭县"
},
{
"_id": 1378,
"id": 1816,
"pid": 216,
"city_code": "101060802",
"city_name": "乾安县"
},
{
"_id": 1379,
"id": 1817,
"pid": 216,
"city_code": "101060805",
"city_name": "扶余市"
},
{
"_id": 1380,
"id": 1820,
"pid": 217,
"city_code": "101060502",
"city_name": "梅河口市"
},
{
"_id": 1381,
"id": 1821,
"pid": 217,
"city_code": "101060505",
"city_name": "集安市"
},
{
"_id": 1382,
"id": 1822,
"pid": 217,
"city_code": "101060506",
"city_name": "通化县"
},
{
"_id": 1383,
"id": 1823,
"pid": 217,
"city_code": "101060504",
"city_name": "辉南县"
},
{
"_id": 1384,
"id": 1824,
"pid": 217,
"city_code": "101060503",
"city_name": "柳河县"
},
{
"_id": 1385,
"id": 1825,
"pid": 218,
"city_code": "101060301",
"city_name": "延吉市"
},
{
"_id": 1386,
"id": 1826,
"pid": 218,
"city_code": "101060309",
"city_name": "图们市"
},
{
"_id": 1387,
"id": 1827,
"pid": 218,
"city_code": "101060302",
"city_name": "敦化市"
},
{
"_id": 1388,
"id": 1828,
"pid": 218,
"city_code": "101060308",
"city_name": "珲春市"
},
{
"_id": 1389,
"id": 1829,
"pid": 218,
"city_code": "101060307",
"city_name": "龙井市"
},
{
"_id": 1390,
"id": 1830,
"pid": 218,
"city_code": "101060305",
"city_name": "和龙市"
},
{
"_id": 1391,
"id": 1831,
"pid": 218,
"city_code": "101060303",
"city_name": "安图县"
},
{
"_id": 1392,
"id": 1832,
"pid": 218,
"city_code": "101060304",
"city_name": "汪清县"
},
{
"_id": 1393,
"id": 1841,
"pid": 219,
"city_code": "101190107",
"city_name": "浦口区"
},
{
"_id": 1394,
"id": 1842,
"pid": 219,
"city_code": "101190104",
"city_name": "江宁区"
},
{
"_id": 1395,
"id": 1843,
"pid": 219,
"city_code": "101190105",
"city_name": "六合区"
},
{
"_id": 1396,
"id": 1844,
"pid": 219,
"city_code": "101190102",
"city_name": "溧水区"
},
{
"_id": 1397,
"id": 1845,
"pid": 219,
"city_code": "101190103",
"city_name": "高淳县"
},
{
"_id": 1398,
"id": 1850,
"pid": 220,
"city_code": "101190405",
"city_name": "吴中区"
},
{
"_id": 1399,
"id": 1853,
"pid": 220,
"city_code": "101190404",
"city_name": "昆山市"
},
{
"_id": 1400,
"id": 1854,
"pid": 220,
"city_code": "101190402",
"city_name": "常熟市"
},
{
"_id": 1401,
"id": 1855,
"pid": 220,
"city_code": "101190403",
"city_name": "张家港市"
},
{
"_id": 1402,
"id": 1867,
"pid": 220,
"city_code": "101190407",
"city_name": "吴江区"
},
{
"_id": 1403,
"id": 1868,
"pid": 220,
"city_code": "101190408",
"city_name": "太仓市"
},
{
"_id": 1404,
"id": 1872,
"pid": 221,
"city_code": "101190204",
"city_name": "锡山区"
},
{
"_id": 1405,
"id": 1876,
"pid": 221,
"city_code": "101190202",
"city_name": "江阴市"
},
{
"_id": 1406,
"id": 1877,
"pid": 221,
"city_code": "101190203",
"city_name": "宜兴市"
},
{
"_id": 1407,
"id": 1883,
"pid": 222,
"city_code": "101191104",
"city_name": "武进区"
},
{
"_id": 1408,
"id": 1884,
"pid": 222,
"city_code": "101191102",
"city_name": "溧阳市"
},
{
"_id": 1409,
"id": 1885,
"pid": 222,
"city_code": "101191103",
"city_name": "金坛区"
},
{
"_id": 1410,
"id": 1888,
"pid": 223,
"city_code": "101190908",
"city_name": "楚州区"
},
{
"_id": 1411,
"id": 1889,
"pid": 223,
"city_code": "101190907",
"city_name": "淮阴区"
},
{
"_id": 1412,
"id": 1890,
"pid": 223,
"city_code": "101190905",
"city_name": "涟水县"
},
{
"_id": 1413,
"id": 1891,
"pid": 223,
"city_code": "101190904",
"city_name": "洪泽县"
},
{
"_id": 1414,
"id": 1892,
"pid": 223,
"city_code": "101190903",
"city_name": "盱眙县"
},
{
"_id": 1415,
"id": 1893,
"pid": 223,
"city_code": "101190902",
"city_name": "金湖县"
},
{
"_id": 1416,
"id": 1897,
"pid": 224,
"city_code": "101191003",
"city_name": "赣榆县"
},
{
"_id": 1417,
"id": 1898,
"pid": 224,
"city_code": "101191002",
"city_name": "东海县"
},
{
"_id": 1418,
"id": 1899,
"pid": 224,
"city_code": "101191004",
"city_name": "灌云县"
},
{
"_id": 1419,
"id": 1900,
"pid": 224,
"city_code": "101191005",
"city_name": "灌南县"
},
{
"_id": 1420,
"id": 1904,
"pid": 225,
"city_code": "101190507",
"city_name": "启东市"
},
{
"_id": 1421,
"id": 1905,
"pid": 225,
"city_code": "101190503",
"city_name": "如皋市"
},
{
"_id": 1422,
"id": 1906,
"pid": 225,
"city_code": "101190509",
"city_name": "通州区"
},
{
"_id": 1423,
"id": 1907,
"pid": 225,
"city_code": "101190508",
"city_name": "海门市"
},
{
"_id": 1424,
"id": 1908,
"pid": 225,
"city_code": "101190502",
"city_name": "海安县"
},
{
"_id": 1425,
"id": 1909,
"pid": 225,
"city_code": "101190504",
"city_name": "如东县"
},
{
"_id": 1426,
"id": 1911,
"pid": 226,
"city_code": "101191305",
"city_name": "宿豫区"
},
{
"_id": 1427,
"id": 1912,
"pid": 226,
"city_code": "101191305",
"city_name": "宿豫县"
},
{
"_id": 1428,
"id": 1913,
"pid": 226,
"city_code": "101191302",
"city_name": "沭阳县"
},
{
"_id": 1429,
"id": 1914,
"pid": 226,
"city_code": "101191303",
"city_name": "泗阳县"
},
{
"_id": 1430,
"id": 1915,
"pid": 226,
"city_code": "101191304",
"city_name": "泗洪县"
},
{
"_id": 1431,
"id": 1918,
"pid": 227,
"city_code": "101191202",
"city_name": "兴化市"
},
{
"_id": 1432,
"id": 1919,
"pid": 227,
"city_code": "101191205",
"city_name": "靖江市"
},
{
"_id": 1433,
"id": 1920,
"pid": 227,
"city_code": "101191203",
"city_name": "泰兴市"
},
{
"_id": 1434,
"id": 1921,
"pid": 227,
"city_code": "101191204",
"city_name": "姜堰区"
},
{
"_id": 1435,
"id": 1927,
"pid": 228,
"city_code": "101190807",
"city_name": "新沂市"
},
{
"_id": 1436,
"id": 1928,
"pid": 228,
"city_code": "101190805",
"city_name": "邳州市"
},
{
"_id": 1437,
"id": 1929,
"pid": 228,
"city_code": "101190803",
"city_name": "丰县"
},
{
"_id": 1438,
"id": 1930,
"pid": 228,
"city_code": "101190804",
"city_name": "沛县"
},
{
"_id": 1439,
"id": 1931,
"pid": 228,
"city_code": "101190802",
"city_name": "铜山区"
},
{
"_id": 1440,
"id": 1932,
"pid": 228,
"city_code": "101190806",
"city_name": "睢宁县"
},
{
"_id": 1441,
"id": 1935,
"pid": 229,
"city_code": "101190709",
"city_name": "盐都区"
},
{
"_id": 1442,
"id": 1937,
"pid": 229,
"city_code": "101190707",
"city_name": "东台市"
},
{
"_id": 1443,
"id": 1938,
"pid": 229,
"city_code": "101190708",
"city_name": "大丰区"
},
{
"_id": 1444,
"id": 1939,
"pid": 229,
"city_code": "101190702",
"city_name": "响水县"
},
{
"_id": 1445,
"id": 1940,
"pid": 229,
"city_code": "101190703",
"city_name": "滨海县"
},
{
"_id": 1446,
"id": 1941,
"pid": 229,
"city_code": "101190704",
"city_name": "阜宁县"
},
{
"_id": 1447,
"id": 1942,
"pid": 229,
"city_code": "101190705",
"city_name": "射阳县"
},
{
"_id": 1448,
"id": 1943,
"pid": 229,
"city_code": "101190706",
"city_name": "建湖县"
},
{
"_id": 1449,
"id": 1946,
"pid": 230,
"city_code": "101190606",
"city_name": "邗江区"
},
{
"_id": 1450,
"id": 1947,
"pid": 230,
"city_code": "101190603",
"city_name": "仪征市"
},
{
"_id": 1451,
"id": 1948,
"pid": 230,
"city_code": "101190604",
"city_name": "高邮市"
},
{
"_id": 1452,
"id": 1949,
"pid": 230,
"city_code": "101190605",
"city_name": "江都市"
},
{
"_id": 1453,
"id": 1950,
"pid": 230,
"city_code": "101190602",
"city_name": "宝应县"
},
{
"_id": 1454,
"id": 1953,
"pid": 231,
"city_code": "101190305",
"city_name": "丹徒区"
},
{
"_id": 1455,
"id": 1954,
"pid": 231,
"city_code": "101190302",
"city_name": "丹阳市"
},
{
"_id": 1456,
"id": 1955,
"pid": 231,
"city_code": "101190303",
"city_name": "扬中市"
},
{
"_id": 1457,
"id": 1956,
"pid": 231,
"city_code": "101190304",
"city_name": "句容市"
},
{
"_id": 1458,
"id": 1965,
"pid": 232,
"city_code": "101240103",
"city_name": "南昌县"
},
{
"_id": 1459,
"id": 1966,
"pid": 232,
"city_code": "101240102",
"city_name": "新建县"
},
{
"_id": 1460,
"id": 1967,
"pid": 232,
"city_code": "101240104",
"city_name": "安义县"
},
{
"_id": 1461,
"id": 1968,
"pid": 232,
"city_code": "101240105",
"city_name": "进贤县"
},
{
"_id": 1462,
"id": 1970,
"pid": 233,
"city_code": "101240408",
"city_name": "南城县"
},
{
"_id": 1463,
"id": 1971,
"pid": 233,
"city_code": "101240410",
"city_name": "黎川县"
},
{
"_id": 1464,
"id": 1972,
"pid": 233,
"city_code": "101240409",
"city_name": "南丰县"
},
{
"_id": 1465,
"id": 1973,
"pid": 233,
"city_code": "101240404",
"city_name": "崇仁县"
},
{
"_id": 1466,
"id": 1974,
"pid": 233,
"city_code": "101240403",
"city_name": "乐安县"
},
{
"_id": 1467,
"id": 1975,
"pid": 233,
"city_code": "101240407",
"city_name": "宜黄县"
},
{
"_id": 1468,
"id": 1976,
"pid": 233,
"city_code": "101240405",
"city_name": "金溪县"
},
{
"_id": 1469,
"id": 1977,
"pid": 233,
"city_code": "101240406",
"city_name": "资溪县"
},
{
"_id": 1470,
"id": 1978,
"pid": 233,
"city_code": "101240411",
"city_name": "东乡县"
},
{
"_id": 1471,
"id": 1979,
"pid": 233,
"city_code": "101240402",
"city_name": "广昌县"
},
{
"_id": 1472,
"id": 1981,
"pid": 234,
"city_code": "101240710",
"city_name": "于都县"
},
{
"_id": 1473,
"id": 1982,
"pid": 234,
"city_code": "101240709",
"city_name": "瑞金市"
},
{
"_id": 1474,
"id": 1983,
"pid": 234,
"city_code": "101240704",
"city_name": "南康市"
},
{
"_id": 1475,
"id": 1984,
"pid": 234,
"city_code": "101240718",
"city_name": "赣县"
},
{
"_id": 1476,
"id": 1985,
"pid": 234,
"city_code": "101240706",
"city_name": "信丰县"
},
{
"_id": 1477,
"id": 1986,
"pid": 234,
"city_code": "101240705",
"city_name": "大余县"
},
{
"_id": 1478,
"id": 1987,
"pid": 234,
"city_code": "101240703",
"city_name": "上犹县"
},
{
"_id": 1479,
"id": 1988,
"pid": 234,
"city_code": "101240702",
"city_name": "崇义县"
},
{
"_id": 1480,
"id": 1989,
"pid": 234,
"city_code": "101240712",
"city_name": "安远县"
},
{
"_id": 1481,
"id": 1990,
"pid": 234,
"city_code": "101240714",
"city_name": "龙南县"
},
{
"_id": 1482,
"id": 1991,
"pid": 234,
"city_code": "101240715",
"city_name": "定南县"
},
{
"_id": 1483,
"id": 1992,
"pid": 234,
"city_code": "101240713",
"city_name": "全南县"
},
{
"_id": 1484,
"id": 1993,
"pid": 234,
"city_code": "101240707",
"city_name": "宁都县"
},
{
"_id": 1485,
"id": 1994,
"pid": 234,
"city_code": "101240717",
"city_name": "兴国县"
},
{
"_id": 1486,
"id": 1995,
"pid": 234,
"city_code": "101240711",
"city_name": "会昌县"
},
{
"_id": 1487,
"id": 1996,
"pid": 234,
"city_code": "101240716",
"city_name": "寻乌县"
},
{
"_id": 1488,
"id": 1997,
"pid": 234,
"city_code": "101240708",
"city_name": "石城县"
},
{
"_id": 1489,
"id": 1998,
"pid": 235,
"city_code": "101240612",
"city_name": "安福县"
},
{
"_id": 1490,
"id": 2001,
"pid": 235,
"city_code": "101240608",
"city_name": "井冈山市"
},
{
"_id": 1491,
"id": 2002,
"pid": 235,
"city_code": "101240602",
"city_name": "吉安县"
},
{
"_id": 1492,
"id": 2003,
"pid": 235,
"city_code": "101240603",
"city_name": "吉水县"
},
{
"_id": 1493,
"id": 2004,
"pid": 235,
"city_code": "101240605",
"city_name": "峡江县"
},
{
"_id": 1494,
"id": 2005,
"pid": 235,
"city_code": "101240604",
"city_name": "新干县"
},
{
"_id": 1495,
"id": 2006,
"pid": 235,
"city_code": "101240606",
"city_name": "永丰县"
},
{
"_id": 1496,
"id": 2007,
"pid": 235,
"city_code": "101240611",
"city_name": "泰和县"
},
{
"_id": 1497,
"id": 2008,
"pid": 235,
"city_code": "101240610",
"city_name": "遂川县"
},
{
"_id": 1498,
"id": 2009,
"pid": 235,
"city_code": "101240609",
"city_name": "万安县"
},
{
"_id": 1499,
"id": 2010,
"pid": 235,
"city_code": "101240607",
"city_name": "永新县"
},
{
"_id": 1500,
"id": 2013,
"pid": 236,
"city_code": "101240802",
"city_name": "乐平市"
},
{
"_id": 1501,
"id": 2014,
"pid": 236,
"city_code": "101240803",
"city_name": "浮梁县"
},
{
"_id": 1502,
"id": 2016,
"pid": 237,
"city_code": "101240203",
"city_name": "庐山区"
},
{
"_id": 1503,
"id": 2017,
"pid": 237,
"city_code": "101240202",
"city_name": "瑞昌市"
},
{
"_id": 1504,
"id": 2018,
"pid": 237,
"city_code": "101240201",
"city_name": "九江县"
},
{
"_id": 1505,
"id": 2019,
"pid": 237,
"city_code": "101240204",
"city_name": "武宁县"
},
{
"_id": 1506,
"id": 2020,
"pid": 237,
"city_code": "101240212",
"city_name": "修水县"
},
{
"_id": 1507,
"id": 2021,
"pid": 237,
"city_code": "101240206",
"city_name": "永修县"
},
{
"_id": 1508,
"id": 2022,
"pid": 237,
"city_code": "101240205",
"city_name": "德安县"
},
{
"_id": 1509,
"id": 2023,
"pid": 237,
"city_code": "101240209",
"city_name": "星子县"
},
{
"_id": 1510,
"id": 2024,
"pid": 237,
"city_code": "101240210",
"city_name": "都昌县"
},
{
"_id": 1511,
"id": 2025,
"pid": 237,
"city_code": "101240207",
"city_name": "湖口县"
},
{
"_id": 1512,
"id": 2026,
"pid": 237,
"city_code": "101240208",
"city_name": "彭泽县"
},
{
"_id": 1513,
"id": 2027,
"pid": 238,
"city_code": "101240904",
"city_name": "安源区"
},
{
"_id": 1514,
"id": 2028,
"pid": 238,
"city_code": "101240906",
"city_name": "湘东区"
},
{
"_id": 1515,
"id": 2029,
"pid": 238,
"city_code": "101240902",
"city_name": "莲花县"
},
{
"_id": 1516,
"id": 2030,
"pid": 238,
"city_code": "101240905",
"city_name": "芦溪县"
},
{
"_id": 1517,
"id": 2031,
"pid": 238,
"city_code": "101240903",
"city_name": "上栗县"
},
{
"_id": 1518,
"id": 2033,
"pid": 239,
"city_code": "101240307",
"city_name": "德兴市"
},
{
"_id": 1519,
"id": 2034,
"pid": 239,
"city_code": "101240308",
"city_name": "上饶县"
},
{
"_id": 1520,
"id": 2035,
"pid": 239,
"city_code": "101240313",
"city_name": "广丰县"
},
{
"_id": 1521,
"id": 2036,
"pid": 239,
"city_code": "101240312",
"city_name": "玉山县"
},
{
"_id": 1522,
"id": 2037,
"pid": 239,
"city_code": "101240311",
"city_name": "铅山县"
},
{
"_id": 1523,
"id": 2038,
"pid": 239,
"city_code": "101240310",
"city_name": "横峰县"
},
{
"_id": 1524,
"id": 2039,
"pid": 239,
"city_code": "101240309",
"city_name": "弋阳县"
},
{
"_id": 1525,
"id": 2040,
"pid": 239,
"city_code": "101240305",
"city_name": "余干县"
},
{
"_id": 1526,
"id": 2041,
"pid": 239,
"city_code": "101240302",
"city_name": "鄱阳县"
},
{
"_id": 1527,
"id": 2042,
"pid": 239,
"city_code": "101240306",
"city_name": "万年县"
},
{
"_id": 1528,
"id": 2043,
"pid": 239,
"city_code": "101240303",
"city_name": "婺源县"
},
{
"_id": 1529,
"id": 2045,
"pid": 240,
"city_code": "101241002",
"city_name": "分宜县"
},
{
"_id": 1530,
"id": 2047,
"pid": 241,
"city_code": "101240510",
"city_name": "丰城市"
},
{
"_id": 1531,
"id": 2048,
"pid": 241,
"city_code": "101240509",
"city_name": "樟树市"
},
{
"_id": 1532,
"id": 2049,
"pid": 241,
"city_code": "101240508",
"city_name": "高安市"
},
{
"_id": 1533,
"id": 2050,
"pid": 241,
"city_code": "101240507",
"city_name": "奉新县"
},
{
"_id": 1534,
"id": 2051,
"pid": 241,
"city_code": "101240504",
"city_name": "万载县"
},
{
"_id": 1535,
"id": 2052,
"pid": 241,
"city_code": "101240505",
"city_name": "上高县"
},
{
"_id": 1536,
"id": 2053,
"pid": 241,
"city_code": "101240503",
"city_name": "宜丰县"
},
{
"_id": 1537,
"id": 2054,
"pid": 241,
"city_code": "101240506",
"city_name": "靖安县"
},
{
"_id": 1538,
"id": 2055,
"pid": 241,
"city_code": "101240502",
"city_name": "铜鼓县"
},
{
"_id": 1539,
"id": 2057,
"pid": 242,
"city_code": "101241103",
"city_name": "贵溪市"
},
{
"_id": 1540,
"id": 2058,
"pid": 242,
"city_code": "101241102",
"city_name": "余江县"
},
{
"_id": 1541,
"id": 2064,
"pid": 243,
"city_code": "101070102",
"city_name": "苏家屯区"
},
{
"_id": 1542,
"id": 2067,
"pid": 243,
"city_code": "101070107",
"city_name": "于洪区"
},
{
"_id": 1543,
"id": 2069,
"pid": 243,
"city_code": "101070106",
"city_name": "新民市"
},
{
"_id": 1544,
"id": 2070,
"pid": 243,
"city_code": "101070103",
"city_name": "辽中县"
},
{
"_id": 1545,
"id": 2071,
"pid": 243,
"city_code": "101070104",
"city_name": "康平县"
},
{
"_id": 1546,
"id": 2072,
"pid": 243,
"city_code": "101070105",
"city_name": "法库县"
},
{
"_id": 1547,
"id": 2077,
"pid": 244,
"city_code": "101070205",
"city_name": "旅顺口区"
},
{
"_id": 1548,
"id": 2078,
"pid": 244,
"city_code": "101070203",
"city_name": "金州区"
},
{
"_id": 1549,
"id": 2080,
"pid": 244,
"city_code": "101070202",
"city_name": "瓦房店市"
},
{
"_id": 1550,
"id": 2081,
"pid": 244,
"city_code": "101070204",
"city_name": "普兰店市"
},
{
"_id": 1551,
"id": 2082,
"pid": 244,
"city_code": "101070207",
"city_name": "庄河市"
},
{
"_id": 1552,
"id": 2083,
"pid": 244,
"city_code": "101070206",
"city_name": "长海县"
},
{
"_id": 1553,
"id": 2088,
"pid": 245,
"city_code": "101070303",
"city_name": "岫岩县"
},
{
"_id": 1554,
"id": 2089,
"pid": 245,
"city_code": "101070304",
"city_name": "海城市"
},
{
"_id": 1555,
"id": 2090,
"pid": 245,
"city_code": "101070302",
"city_name": "台安县"
},
{
"_id": 1556,
"id": 2091,
"pid": 246,
"city_code": "101070502",
"city_name": "本溪县"
},
{
"_id": 1557,
"id": 2096,
"pid": 246,
"city_code": "101070504",
"city_name": "桓仁县"
},
{
"_id": 1558,
"id": 2099,
"pid": 247,
"city_code": "101071204",
"city_name": "喀喇沁左翼蒙古族自治县"
},
{
"_id": 1559,
"id": 2100,
"pid": 247,
"city_code": "101071205",
"city_name": "北票市"
},
{
"_id": 1560,
"id": 2101,
"pid": 247,
"city_code": "101071203",
"city_name": "凌源市"
},
{
"_id": 1561,
"id": 2103,
"pid": 247,
"city_code": "101071207",
"city_name": "建平县"
},
{
"_id": 1562,
"id": 2107,
"pid": 248,
"city_code": "101070603",
"city_name": "宽甸县"
},
{
"_id": 1563,
"id": 2108,
"pid": 248,
"city_code": "101070604",
"city_name": "东港市"
},
{
"_id": 1564,
"id": 2109,
"pid": 248,
"city_code": "101070602",
"city_name": "凤城市"
},
{
"_id": 1565,
"id": 2114,
"pid": 249,
"city_code": "101070403",
"city_name": "清原县"
},
{
"_id": 1566,
"id": 2115,
"pid": 249,
"city_code": "101070402",
"city_name": "新宾县"
},
{
"_id": 1567,
"id": 2116,
"pid": 249,
"city_code": "101070401",
"city_name": "抚顺县"
},
{
"_id": 1568,
"id": 2123,
"pid": 250,
"city_code": "101070902",
"city_name": "彰武县"
},
{
"_id": 1569,
"id": 2127,
"pid": 251,
"city_code": "101071404",
"city_name": "兴城市"
},
{
"_id": 1570,
"id": 2128,
"pid": 251,
"city_code": "101071403",
"city_name": "绥中县"
},
{
"_id": 1571,
"id": 2129,
"pid": 251,
"city_code": "101071402",
"city_name": "建昌县"
},
{
"_id": 1572,
"id": 2133,
"pid": 252,
"city_code": "101070702",
"city_name": "凌海市"
},
{
"_id": 1573,
"id": 2134,
"pid": 252,
"city_code": "101070706",
"city_name": "北镇市"
},
{
"_id": 1574,
"id": 2135,
"pid": 252,
"city_code": "101070705",
"city_name": "黑山县"
},
{
"_id": 1575,
"id": 2136,
"pid": 252,
"city_code": "101070704",
"city_name": "义县"
},
{
"_id": 1576,
"id": 2141,
"pid": 253,
"city_code": "101071004",
"city_name": "弓长岭区"
},
{
"_id": 1577,
"id": 2142,
"pid": 253,
"city_code": "101071003",
"city_name": "灯塔市"
},
{
"_id": 1578,
"id": 2143,
"pid": 253,
"city_code": "101071002",
"city_name": "辽阳县"
},
{
"_id": 1579,
"id": 2146,
"pid": 254,
"city_code": "101071302",
"city_name": "大洼县"
},
{
"_id": 1580,
"id": 2147,
"pid": 254,
"city_code": "101071303",
"city_name": "盘山县"
},
{
"_id": 1581,
"id": 2150,
"pid": 255,
"city_code": "101071105",
"city_name": "调兵山市"
},
{
"_id": 1582,
"id": 2151,
"pid": 255,
"city_code": "101071102",
"city_name": "开原市"
},
{
"_id": 1583,
"id": 2152,
"pid": 255,
"city_code": "101071101",
"city_name": "铁岭县"
},
{
"_id": 1584,
"id": 2153,
"pid": 255,
"city_code": "101071104",
"city_name": "西丰县"
},
{
"_id": 1585,
"id": 2154,
"pid": 255,
"city_code": "101071103",
"city_name": "昌图县"
},
{
"_id": 1586,
"id": 2159,
"pid": 256,
"city_code": "101070803",
"city_name": "盖州市"
},
{
"_id": 1587,
"id": 2160,
"pid": 256,
"city_code": "101070802",
"city_name": "大石桥市"
},
{
"_id": 1588,
"id": 2165,
"pid": 257,
"city_code": "101080105",
"city_name": "清水河县"
},
{
"_id": 1589,
"id": 2166,
"pid": 257,
"city_code": "101080102",
"city_name": "土默特左旗"
},
{
"_id": 1590,
"id": 2167,
"pid": 257,
"city_code": "101080103",
"city_name": "托克托县"
},
{
"_id": 1591,
"id": 2168,
"pid": 257,
"city_code": "101080104",
"city_name": "和林格尔县"
},
{
"_id": 1592,
"id": 2169,
"pid": 257,
"city_code": "101080107",
"city_name": "武川县"
},
{
"_id": 1593,
"id": 2170,
"pid": 258,
"city_code": "101081201",
"city_name": "阿拉善左旗"
},
{
"_id": 1594,
"id": 2171,
"pid": 258,
"city_code": "101081202",
"city_name": "阿拉善右旗"
},
{
"_id": 1595,
"id": 2172,
"pid": 258,
"city_code": "101081203",
"city_name": "额济纳旗"
},
{
"_id": 1596,
"id": 2173,
"pid": 259,
"city_code": "101080801",
"city_name": "临河区"
},
{
"_id": 1597,
"id": 2174,
"pid": 259,
"city_code": "101080802",
"city_name": "五原县"
},
{
"_id": 1598,
"id": 2175,
"pid": 259,
"city_code": "101080803",
"city_name": "磴口县"
},
{
"_id": 1599,
"id": 2176,
"pid": 259,
"city_code": "101080804",
"city_name": "乌拉特前旗"
},
{
"_id": 1600,
"id": 2177,
"pid": 259,
"city_code": "101080806",
"city_name": "乌拉特中旗"
},
{
"_id": 1601,
"id": 2178,
"pid": 259,
"city_code": "101080807",
"city_name": "乌拉特后旗"
},
{
"_id": 1602,
"id": 2179,
"pid": 259,
"city_code": "101080810",
"city_name": "杭锦后旗"
},
{
"_id": 1603,
"id": 2184,
"pid": 260,
"city_code": "101080207",
"city_name": "石拐区"
},
{
"_id": 1604,
"id": 2185,
"pid": 260,
"city_code": "101080202",
"city_name": "白云鄂博"
},
{
"_id": 1605,
"id": 2186,
"pid": 260,
"city_code": "101080204",
"city_name": "土默特右旗"
},
{
"_id": 1606,
"id": 2187,
"pid": 260,
"city_code": "101080205",
"city_name": "固阳县"
},
{
"_id": 1607,
"id": 2188,
"pid": 260,
"city_code": "101080206",
"city_name": "达尔罕茂明安联合旗"
},
{
"_id": 1608,
"id": 2192,
"pid": 261,
"city_code": "101080603",
"city_name": "阿鲁科尔沁旗"
},
{
"_id": 1609,
"id": 2193,
"pid": 261,
"city_code": "101080605",
"city_name": "巴林左旗"
},
{
"_id": 1610,
"id": 2194,
"pid": 261,
"city_code": "101080606",
"city_name": "巴林右旗"
},
{
"_id": 1611,
"id": 2195,
"pid": 261,
"city_code": "101080607",
"city_name": "林西县"
},
{
"_id": 1612,
"id": 2196,
"pid": 261,
"city_code": "101080608",
"city_name": "克什克腾旗"
},
{
"_id": 1613,
"id": 2197,
"pid": 261,
"city_code": "101080609",
"city_name": "翁牛特旗"
},
{
"_id": 1614,
"id": 2198,
"pid": 261,
"city_code": "101080611",
"city_name": "喀喇沁旗"
},
{
"_id": 1615,
"id": 2199,
"pid": 261,
"city_code": "101080613",
"city_name": "宁城县"
},
{
"_id": 1616,
"id": 2200,
"pid": 261,
"city_code": "101080614",
"city_name": "敖汉旗"
},
{
"_id": 1617,
"id": 2201,
"pid": 262,
"city_code": "101080713",
"city_name": "东胜区"
},
{
"_id": 1618,
"id": 2202,
"pid": 262,
"city_code": "101080703",
"city_name": "达拉特旗"
},
{
"_id": 1619,
"id": 2203,
"pid": 262,
"city_code": "101080704",
"city_name": "准格尔旗"
},
{
"_id": 1620,
"id": 2204,
"pid": 262,
"city_code": "101080705",
"city_name": "鄂托克前旗"
},
{
"_id": 1621,
"id": 2205,
"pid": 262,
"city_code": "101080708",
"city_name": "鄂托克旗"
},
{
"_id": 1622,
"id": 2206,
"pid": 262,
"city_code": "101080709",
"city_name": "杭锦旗"
},
{
"_id": 1623,
"id": 2207,
"pid": 262,
"city_code": "101080710",
"city_name": "乌审旗"
},
{
"_id": 1624,
"id": 2208,
"pid": 262,
"city_code": "101080711",
"city_name": "伊金霍洛旗"
},
{
"_id": 1625,
"id": 2209,
"pid": 263,
"city_code": "101081001",
"city_name": "海拉尔区"
},
{
"_id": 1626,
"id": 2210,
"pid": 263,
"city_code": "101081004",
"city_name": "莫力达瓦"
},
{
"_id": 1627,
"id": 2211,
"pid": 263,
"city_code": "101081010",
"city_name": "满洲里市"
},
{
"_id": 1628,
"id": 2212,
"pid": 263,
"city_code": "101081011",
"city_name": "牙克石市"
},
{
"_id": 1629,
"id": 2213,
"pid": 263,
"city_code": "101081012",
"city_name": "扎兰屯市"
},
{
"_id": 1630,
"id": 2214,
"pid": 263,
"city_code": "101081014",
"city_name": "额尔古纳市"
},
{
"_id": 1631,
"id": 2215,
"pid": 263,
"city_code": "101081015",
"city_name": "根河市"
},
{
"_id": 1632,
"id": 2216,
"pid": 263,
"city_code": "101081003",
"city_name": "阿荣旗"
},
{
"_id": 1633,
"id": 2217,
"pid": 263,
"city_code": "101081005",
"city_name": "鄂伦春自治旗"
},
{
"_id": 1634,
"id": 2218,
"pid": 263,
"city_code": "101081006",
"city_name": "鄂温克族自治旗"
},
{
"_id": 1635,
"id": 2219,
"pid": 263,
"city_code": "101081007",
"city_name": "陈巴尔虎旗"
},
{
"_id": 1636,
"id": 2220,
"pid": 263,
"city_code": "101081008",
"city_name": "新巴尔虎左旗"
},
{
"_id": 1637,
"id": 2221,
"pid": 263,
"city_code": "101081009",
"city_name": "新巴尔虎右旗"
},
{
"_id": 1638,
"id": 2223,
"pid": 264,
"city_code": "101080512",
"city_name": "霍林郭勒市"
},
{
"_id": 1639,
"id": 2224,
"pid": 264,
"city_code": "101080503",
"city_name": "科尔沁左翼中旗"
},
{
"_id": 1640,
"id": 2225,
"pid": 264,
"city_code": "101080504",
"city_name": "科尔沁左翼后旗"
},
{
"_id": 1641,
"id": 2226,
"pid": 264,
"city_code": "101080506",
"city_name": "开鲁县"
},
{
"_id": 1642,
"id": 2227,
"pid": 264,
"city_code": "101080507",
"city_name": "库伦旗"
},
{
"_id": 1643,
"id": 2228,
"pid": 264,
"city_code": "101080508",
"city_name": "奈曼旗"
},
{
"_id": 1644,
"id": 2229,
"pid": 264,
"city_code": "101080509",
"city_name": "扎鲁特旗"
},
{
"_id": 1645,
"id": 2233,
"pid": 266,
"city_code": "101080403",
"city_name": "化德县"
},
{
"_id": 1646,
"id": 2234,
"pid": 266,
"city_code": "101080401",
"city_name": "集宁区"
},
{
"_id": 1647,
"id": 2235,
"pid": 266,
"city_code": "101080412",
"city_name": "丰镇市"
},
{
"_id": 1648,
"id": 2236,
"pid": 266,
"city_code": "101080402",
"city_name": "卓资县"
},
{
"_id": 1649,
"id": 2237,
"pid": 266,
"city_code": "101080404",
"city_name": "商都县"
},
{
"_id": 1650,
"id": 2238,
"pid": 266,
"city_code": "101080406",
"city_name": "兴和县"
},
{
"_id": 1651,
"id": 2239,
"pid": 266,
"city_code": "101080407",
"city_name": "凉城县"
},
{
"_id": 1652,
"id": 2240,
"pid": 266,
"city_code": "101080408",
"city_name": "察哈尔右翼前旗"
},
{
"_id": 1653,
"id": 2241,
"pid": 266,
"city_code": "101080409",
"city_name": "察哈尔右翼中旗"
},
{
"_id": 1654,
"id": 2242,
"pid": 266,
"city_code": "101080410",
"city_name": "察哈尔右翼后旗"
},
{
"_id": 1655,
"id": 2243,
"pid": 266,
"city_code": "101080411",
"city_name": "四子王旗"
},
{
"_id": 1656,
"id": 2244,
"pid": 267,
"city_code": "101080903",
"city_name": "二连浩特市"
},
{
"_id": 1657,
"id": 2245,
"pid": 267,
"city_code": "101080901",
"city_name": "锡林浩特市"
},
{
"_id": 1658,
"id": 2246,
"pid": 267,
"city_code": "101080904",
"city_name": "阿巴嘎旗"
},
{
"_id": 1659,
"id": 2247,
"pid": 267,
"city_code": "101080906",
"city_name": "苏尼特左旗"
},
{
"_id": 1660,
"id": 2248,
"pid": 267,
"city_code": "101080907",
"city_name": "苏尼特右旗"
},
{
"_id": 1661,
"id": 2249,
"pid": 267,
"city_code": "101080909",
"city_name": "东乌珠穆沁旗"
},
{
"_id": 1662,
"id": 2250,
"pid": 267,
"city_code": "101080910",
"city_name": "西乌珠穆沁旗"
},
{
"_id": 1663,
"id": 2251,
"pid": 267,
"city_code": "101080911",
"city_name": "太仆寺旗"
},
{
"_id": 1664,
"id": 2252,
"pid": 267,
"city_code": "101080912",
"city_name": "镶黄旗"
},
{
"_id": 1665,
"id": 2253,
"pid": 267,
"city_code": "101080913",
"city_name": "正镶白旗"
},
{
"_id": 1666,
"id": 2255,
"pid": 267,
"city_code": "101080915",
"city_name": "多伦县"
},
{
"_id": 1667,
"id": 2256,
"pid": 268,
"city_code": "101081101",
"city_name": "乌兰浩特市"
},
{
"_id": 1668,
"id": 2257,
"pid": 268,
"city_code": "101081102",
"city_name": "阿尔山市"
},
{
"_id": 1669,
"id": 2258,
"pid": 268,
"city_code": "101081109",
"city_name": "科尔沁右翼前旗"
},
{
"_id": 1670,
"id": 2259,
"pid": 268,
"city_code": "101081103",
"city_name": "科尔沁右翼中旗"
},
{
"_id": 1671,
"id": 2260,
"pid": 268,
"city_code": "101081105",
"city_name": "扎赉特旗"
},
{
"_id": 1672,
"id": 2261,
"pid": 268,
"city_code": "101081107",
"city_name": "突泉县"
},
{
"_id": 1673,
"id": 2265,
"pid": 269,
"city_code": "101170103",
"city_name": "灵武市"
},
{
"_id": 1674,
"id": 2266,
"pid": 269,
"city_code": "101170102",
"city_name": "永宁县"
},
{
"_id": 1675,
"id": 2267,
"pid": 269,
"city_code": "101170104",
"city_name": "贺兰县"
},
{
"_id": 1676,
"id": 2270,
"pid": 270,
"city_code": "101170402",
"city_name": "西吉县"
},
{
"_id": 1677,
"id": 2271,
"pid": 270,
"city_code": "101170403",
"city_name": "隆德县"
},
{
"_id": 1678,
"id": 2272,
"pid": 270,
"city_code": "101170404",
"city_name": "泾源县"
},
{
"_id": 1679,
"id": 2273,
"pid": 270,
"city_code": "101170406",
"city_name": "彭阳县"
},
{
"_id": 1680,
"id": 2274,
"pid": 271,
"city_code": "101170202",
"city_name": "惠农区"
},
{
"_id": 1681,
"id": 2275,
"pid": 271,
"city_code": "101170206",
"city_name": "大武口区"
},
{
"_id": 1682,
"id": 2276,
"pid": 271,
"city_code": "101170202",
"city_name": "惠农区"
},
{
"_id": 1683,
"id": 2277,
"pid": 271,
"city_code": "101170204",
"city_name": "陶乐县"
},
{
"_id": 1684,
"id": 2278,
"pid": 271,
"city_code": "101170203",
"city_name": "平罗县"
},
{
"_id": 1685,
"id": 2281,
"pid": 272,
"city_code": "101170306",
"city_name": "青铜峡市"
},
{
"_id": 1686,
"id": 2283,
"pid": 272,
"city_code": "101170303",
"city_name": "盐池县"
},
{
"_id": 1687,
"id": 2284,
"pid": 272,
"city_code": "101170302",
"city_name": "同心县"
},
{
"_id": 1688,
"id": 2286,
"pid": 273,
"city_code": "101170504",
"city_name": "海原县"
},
{
"_id": 1689,
"id": 2287,
"pid": 273,
"city_code": "101170502",
"city_name": "中宁县"
},
{
"_id": 1690,
"id": 2292,
"pid": 274,
"city_code": "101150104",
"city_name": "湟中县"
},
{
"_id": 1691,
"id": 2293,
"pid": 274,
"city_code": "101150103",
"city_name": "湟源县"
},
{
"_id": 1692,
"id": 2294,
"pid": 274,
"city_code": "101150102",
"city_name": "大通县"
},
{
"_id": 1693,
"id": 2295,
"pid": 275,
"city_code": "101150508",
"city_name": "玛沁县"
},
{
"_id": 1694,
"id": 2296,
"pid": 275,
"city_code": "101150502",
"city_name": "班玛县"
},
{
"_id": 1695,
"id": 2297,
"pid": 275,
"city_code": "101150503",
"city_name": "甘德县"
},
{
"_id": 1696,
"id": 2298,
"pid": 275,
"city_code": "101150504",
"city_name": "达日县"
},
{
"_id": 1697,
"id": 2299,
"pid": 275,
"city_code": "101150505",
"city_name": "久治县"
},
{
"_id": 1698,
"id": 2300,
"pid": 275,
"city_code": "101150506",
"city_name": "玛多县"
},
{
"_id": 1699,
"id": 2301,
"pid": 276,
"city_code": "101150804",
"city_name": "海晏县"
},
{
"_id": 1700,
"id": 2302,
"pid": 276,
"city_code": "101150803",
"city_name": "祁连县"
},
{
"_id": 1701,
"id": 2303,
"pid": 276,
"city_code": "101150806",
"city_name": "刚察县"
},
{
"_id": 1702,
"id": 2304,
"pid": 276,
"city_code": "101150802",
"city_name": "门源县"
},
{
"_id": 1703,
"id": 2305,
"pid": 277,
"city_code": "101150208",
"city_name": "平安县"
},
{
"_id": 1704,
"id": 2306,
"pid": 277,
"city_code": "101150202",
"city_name": "乐都县"
},
{
"_id": 1705,
"id": 2307,
"pid": 277,
"city_code": "101150203",
"city_name": "民和县"
},
{
"_id": 1706,
"id": 2308,
"pid": 277,
"city_code": "101150204",
"city_name": "互助县"
},
{
"_id": 1707,
"id": 2309,
"pid": 277,
"city_code": "101150205",
"city_name": "化隆县"
},
{
"_id": 1708,
"id": 2310,
"pid": 277,
"city_code": "101150206",
"city_name": "循化县"
},
{
"_id": 1709,
"id": 2311,
"pid": 278,
"city_code": "101150409",
"city_name": "共和县"
},
{
"_id": 1710,
"id": 2312,
"pid": 278,
"city_code": "101150408",
"city_name": "同德县"
},
{
"_id": 1711,
"id": 2313,
"pid": 278,
"city_code": "101150404",
"city_name": "贵德县"
},
{
"_id": 1712,
"id": 2314,
"pid": 278,
"city_code": "101150406",
"city_name": "兴海县"
},
{
"_id": 1713,
"id": 2315,
"pid": 278,
"city_code": "101150407",
"city_name": "贵南县"
},
{
"_id": 1714,
"id": 2316,
"pid": 279,
"city_code": "101150716",
"city_name": "德令哈市"
},
{
"_id": 1715,
"id": 2317,
"pid": 279,
"city_code": "101150702",
"city_name": "格尔木市"
},
{
"_id": 1716,
"id": 2318,
"pid": 279,
"city_code": "101150709",
"city_name": "乌兰县"
},
{
"_id": 1717,
"id": 2319,
"pid": 279,
"city_code": "101150710",
"city_name": "都兰县"
},
{
"_id": 1718,
"id": 2320,
"pid": 279,
"city_code": "101150708",
"city_name": "天峻县"
},
{
"_id": 1719,
"id": 2321,
"pid": 280,
"city_code": "101150305",
"city_name": "同仁县"
},
{
"_id": 1720,
"id": 2322,
"pid": 280,
"city_code": "101150302",
"city_name": "尖扎县"
},
{
"_id": 1721,
"id": 2323,
"pid": 280,
"city_code": "101150303",
"city_name": "泽库县"
},
{
"_id": 1722,
"id": 2324,
"pid": 280,
"city_code": "101150304",
"city_name": "河南蒙古族自治县"
},
{
"_id": 1723,
"id": 2325,
"pid": 281,
"city_code": "101150601",
"city_name": "玉树县"
},
{
"_id": 1724,
"id": 2326,
"pid": 281,
"city_code": "101150604",
"city_name": "杂多县"
},
{
"_id": 1725,
"id": 2327,
"pid": 281,
"city_code": "101150602",
"city_name": "称多县"
},
{
"_id": 1726,
"id": 2328,
"pid": 281,
"city_code": "101150603",
"city_name": "治多县"
},
{
"_id": 1727,
"id": 2329,
"pid": 281,
"city_code": "101150605",
"city_name": "囊谦县"
},
{
"_id": 1728,
"id": 2330,
"pid": 281,
"city_code": "101150606",
"city_name": "曲麻莱县"
},
{
"_id": 1729,
"id": 2336,
"pid": 282,
"city_code": "101120102",
"city_name": "长清区"
},
{
"_id": 1730,
"id": 2337,
"pid": 282,
"city_code": "101120104",
"city_name": "章丘市"
},
{
"_id": 1731,
"id": 2338,
"pid": 282,
"city_code": "101120105",
"city_name": "平阴县"
},
{
"_id": 1732,
"id": 2339,
"pid": 282,
"city_code": "101120106",
"city_name": "济阳县"
},
{
"_id": 1733,
"id": 2340,
"pid": 282,
"city_code": "101120103",
"city_name": "商河县"
},
{
"_id": 1734,
"id": 2347,
"pid": 283,
"city_code": "101120202",
"city_name": "崂山区"
},
{
"_id": 1735,
"id": 2348,
"pid": 283,
"city_code": "101120205",
"city_name": "胶州市"
},
{
"_id": 1736,
"id": 2349,
"pid": 283,
"city_code": "101120204",
"city_name": "即墨市"
},
{
"_id": 1737,
"id": 2350,
"pid": 283,
"city_code": "101120208",
"city_name": "平度市"
},
{
"_id": 1738,
"id": 2351,
"pid": 283,
"city_code": "101120206",
"city_name": "胶南市"
},
{
"_id": 1739,
"id": 2352,
"pid": 283,
"city_code": "101120207",
"city_name": "莱西市"
},
{
"_id": 1740,
"id": 2354,
"pid": 284,
"city_code": "101121105",
"city_name": "惠民县"
},
{
"_id": 1741,
"id": 2355,
"pid": 284,
"city_code": "101121104",
"city_name": "阳信县"
},
{
"_id": 1742,
"id": 2356,
"pid": 284,
"city_code": "101121103",
"city_name": "无棣县"
},
{
"_id": 1743,
"id": 2357,
"pid": 284,
"city_code": "101121106",
"city_name": "沾化县"
},
{
"_id": 1744,
"id": 2358,
"pid": 284,
"city_code": "101121102",
"city_name": "博兴县"
},
{
"_id": 1745,
"id": 2359,
"pid": 284,
"city_code": "101121107",
"city_name": "邹平县"
},
{
"_id": 1746,
"id": 2361,
"pid": 285,
"city_code": "101120404",
"city_name": "陵县"
},
{
"_id": 1747,
"id": 2362,
"pid": 285,
"city_code": "101120406",
"city_name": "乐陵市"
},
{
"_id": 1748,
"id": 2363,
"pid": 285,
"city_code": "101120411",
"city_name": "禹城市"
},
{
"_id": 1749,
"id": 2364,
"pid": 285,
"city_code": "101120409",
"city_name": "宁津县"
},
{
"_id": 1750,
"id": 2365,
"pid": 285,
"city_code": "101120407",
"city_name": "庆云县"
},
{
"_id": 1751,
"id": 2366,
"pid": 285,
"city_code": "101120403",
"city_name": "临邑县"
},
{
"_id": 1752,
"id": 2367,
"pid": 285,
"city_code": "101120405",
"city_name": "齐河县"
},
{
"_id": 1753,
"id": 2368,
"pid": 285,
"city_code": "101120408",
"city_name": "平原县"
},
{
"_id": 1754,
"id": 2369,
"pid": 285,
"city_code": "101120410",
"city_name": "夏津县"
},
{
"_id": 1755,
"id": 2370,
"pid": 285,
"city_code": "101120402",
"city_name": "武城县"
},
{
"_id": 1756,
"id": 2371,
"pid": 286,
"city_code": "101121201",
"city_name": "东营区"
},
{
"_id": 1757,
"id": 2372,
"pid": 286,
"city_code": "101121202",
"city_name": "河口区"
},
{
"_id": 1758,
"id": 2373,
"pid": 286,
"city_code": "101121203",
"city_name": "垦利县"
},
{
"_id": 1759,
"id": 2374,
"pid": 286,
"city_code": "101121204",
"city_name": "利津县"
},
{
"_id": 1760,
"id": 2375,
"pid": 286,
"city_code": "101121205",
"city_name": "广饶县"
},
{
"_id": 1761,
"id": 2377,
"pid": 287,
"city_code": "101121007",
"city_name": "曹县"
},
{
"_id": 1762,
"id": 2378,
"pid": 287,
"city_code": "101121009",
"city_name": "单县"
},
{
"_id": 1763,
"id": 2379,
"pid": 287,
"city_code": "101121008",
"city_name": "成武县"
},
{
"_id": 1764,
"id": 2380,
"pid": 287,
"city_code": "101121006",
"city_name": "巨野县"
},
{
"_id": 1765,
"id": 2381,
"pid": 287,
"city_code": "101121003",
"city_name": "郓城县"
},
{
"_id": 1766,
"id": 2382,
"pid": 287,
"city_code": "101121002",
"city_name": "鄄城县"
},
{
"_id": 1767,
"id": 2383,
"pid": 287,
"city_code": "101121005",
"city_name": "定陶县"
},
{
"_id": 1768,
"id": 2384,
"pid": 287,
"city_code": "101121004",
"city_name": "东明县"
},
{
"_id": 1769,
"id": 2387,
"pid": 288,
"city_code": "101120710",
"city_name": "曲阜市"
},
{
"_id": 1770,
"id": 2388,
"pid": 288,
"city_code": "101120705",
"city_name": "兖州市"
},
{
"_id": 1771,
"id": 2389,
"pid": 288,
"city_code": "101120711",
"city_name": "邹城市"
},
{
"_id": 1772,
"id": 2390,
"pid": 288,
"city_code": "101120703",
"city_name": "微山县"
},
{
"_id": 1773,
"id": 2391,
"pid": 288,
"city_code": "101120704",
"city_name": "鱼台县"
},
{
"_id": 1774,
"id": 2392,
"pid": 288,
"city_code": "101120706",
"city_name": "金乡县"
},
{
"_id": 1775,
"id": 2393,
"pid": 288,
"city_code": "101120702",
"city_name": "嘉祥县"
},
{
"_id": 1776,
"id": 2394,
"pid": 288,
"city_code": "101120707",
"city_name": "汶上县"
},
{
"_id": 1777,
"id": 2395,
"pid": 288,
"city_code": "101120708",
"city_name": "泗水县"
},
{
"_id": 1778,
"id": 2396,
"pid": 288,
"city_code": "101120709",
"city_name": "梁山县"
},
{
"_id": 1779,
"id": 2400,
"pid": 290,
"city_code": "101121707",
"city_name": "临清市"
},
{
"_id": 1780,
"id": 2401,
"pid": 290,
"city_code": "101121703",
"city_name": "阳谷县"
},
{
"_id": 1781,
"id": 2402,
"pid": 290,
"city_code": "101121709",
"city_name": "莘县"
},
{
"_id": 1782,
"id": 2403,
"pid": 290,
"city_code": "101121705",
"city_name": "茌平县"
},
{
"_id": 1783,
"id": 2404,
"pid": 290,
"city_code": "101121706",
"city_name": "东阿县"
},
{
"_id": 1784,
"id": 2405,
"pid": 290,
"city_code": "101121702",
"city_name": "冠县"
},
{
"_id": 1785,
"id": 2406,
"pid": 290,
"city_code": "101121704",
"city_name": "高唐县"
},
{
"_id": 1786,
"id": 2410,
"pid": 291,
"city_code": "101120903",
"city_name": "沂南县"
},
{
"_id": 1787,
"id": 2411,
"pid": 291,
"city_code": "101120906",
"city_name": "郯城县"
},
{
"_id": 1788,
"id": 2412,
"pid": 291,
"city_code": "101120910",
"city_name": "沂水县"
},
{
"_id": 1789,
"id": 2413,
"pid": 291,
"city_code": "101120904",
"city_name": "兰陵县"
},
{
"_id": 1790,
"id": 2414,
"pid": 291,
"city_code": "101120909",
"city_name": "费县"
},
{
"_id": 1791,
"id": 2415,
"pid": 291,
"city_code": "101120908",
"city_name": "平邑县"
},
{
"_id": 1792,
"id": 2416,
"pid": 291,
"city_code": "101120902",
"city_name": "莒南县"
},
{
"_id": 1793,
"id": 2417,
"pid": 291,
"city_code": "101120907",
"city_name": "蒙阴县"
},
{
"_id": 1794,
"id": 2418,
"pid": 291,
"city_code": "101120905",
"city_name": "临沭县"
},
{
"_id": 1795,
"id": 2421,
"pid": 292,
"city_code": "101121502",
"city_name": "五莲县"
},
{
"_id": 1796,
"id": 2422,
"pid": 292,
"city_code": "101121503",
"city_name": "莒县"
},
{
"_id": 1797,
"id": 2423,
"pid": 293,
"city_code": "101120803",
"city_name": "泰山区"
},
{
"_id": 1798,
"id": 2425,
"pid": 293,
"city_code": "101120802",
"city_name": "新泰市"
},
{
"_id": 1799,
"id": 2426,
"pid": 293,
"city_code": "101120804",
"city_name": "肥城市"
},
{
"_id": 1800,
"id": 2427,
"pid": 293,
"city_code": "101120806",
"city_name": "宁阳县"
},
{
"_id": 1801,
"id": 2428,
"pid": 293,
"city_code": "101120805",
"city_name": "东平县"
},
{
"_id": 1802,
"id": 2429,
"pid": 294,
"city_code": "101121303",
"city_name": "荣成市"
},
{
"_id": 1803,
"id": 2430,
"pid": 294,
"city_code": "101121304",
"city_name": "乳山市"
},
{
"_id": 1804,
"id": 2432,
"pid": 294,
"city_code": "101121302",
"city_name": "文登市"
},
{
"_id": 1805,
"id": 2437,
"pid": 295,
"city_code": "101120602",
"city_name": "青州市"
},
{
"_id": 1806,
"id": 2438,
"pid": 295,
"city_code": "101120609",
"city_name": "诸城市"
},
{
"_id": 1807,
"id": 2439,
"pid": 295,
"city_code": "101120603",
"city_name": "寿光市"
},
{
"_id": 1808,
"id": 2440,
"pid": 295,
"city_code": "101120607",
"city_name": "安丘市"
},
{
"_id": 1809,
"id": 2441,
"pid": 295,
"city_code": "101120608",
"city_name": "高密市"
},
{
"_id": 1810,
"id": 2442,
"pid": 295,
"city_code": "101120606",
"city_name": "昌邑市"
},
{
"_id": 1811,
"id": 2443,
"pid": 295,
"city_code": "101120604",
"city_name": "临朐县"
},
{
"_id": 1812,
"id": 2444,
"pid": 295,
"city_code": "101120605",
"city_name": "昌乐县"
},
{
"_id": 1813,
"id": 2446,
"pid": 296,
"city_code": "101120508",
"city_name": "福山区"
},
{
"_id": 1814,
"id": 2447,
"pid": 296,
"city_code": "101120509",
"city_name": "牟平区"
},
{
"_id": 1815,
"id": 2450,
"pid": 296,
"city_code": "101120505",
"city_name": "龙口市"
},
{
"_id": 1816,
"id": 2451,
"pid": 296,
"city_code": "101120510",
"city_name": "莱阳市"
},
{
"_id": 1817,
"id": 2452,
"pid": 296,
"city_code": "101120502",
"city_name": "莱州市"
},
{
"_id": 1818,
"id": 2453,
"pid": 296,
"city_code": "101120504",
"city_name": "蓬莱市"
},
{
"_id": 1819,
"id": 2454,
"pid": 296,
"city_code": "101120506",
"city_name": "招远市"
},
{
"_id": 1820,
"id": 2455,
"pid": 296,
"city_code": "101120507",
"city_name": "栖霞市"
},
{
"_id": 1821,
"id": 2456,
"pid": 296,
"city_code": "101120511",
"city_name": "海阳市"
},
{
"_id": 1822,
"id": 2457,
"pid": 296,
"city_code": "101120503",
"city_name": "长岛县"
},
{
"_id": 1823,
"id": 2460,
"pid": 297,
"city_code": "101121403",
"city_name": "峄城区"
},
{
"_id": 1824,
"id": 2461,
"pid": 297,
"city_code": "101121404",
"city_name": "台儿庄区"
},
{
"_id": 1825,
"id": 2462,
"pid": 297,
"city_code": "101121402",
"city_name": "薛城区"
},
{
"_id": 1826,
"id": 2463,
"pid": 297,
"city_code": "101121405",
"city_name": "滕州市"
},
{
"_id": 1827,
"id": 2465,
"pid": 298,
"city_code": "101120308",
"city_name": "临淄区"
},
{
"_id": 1828,
"id": 2466,
"pid": 298,
"city_code": "101120302",
"city_name": "淄川区"
},
{
"_id": 1829,
"id": 2467,
"pid": 298,
"city_code": "101120303",
"city_name": "博山区"
},
{
"_id": 1830,
"id": 2468,
"pid": 298,
"city_code": "101120305",
"city_name": "周村区"
},
{
"_id": 1831,
"id": 2469,
"pid": 298,
"city_code": "101120307",
"city_name": "桓台县"
},
{
"_id": 1832,
"id": 2470,
"pid": 298,
"city_code": "101120304",
"city_name": "高青县"
},
{
"_id": 1833,
"id": 2471,
"pid": 298,
"city_code": "101120306",
"city_name": "沂源县"
},
{
"_id": 1834,
"id": 2481,
"pid": 299,
"city_code": "101100102",
"city_name": "清徐县"
},
{
"_id": 1835,
"id": 2482,
"pid": 299,
"city_code": "101100103",
"city_name": "阳曲县"
},
{
"_id": 1836,
"id": 2483,
"pid": 299,
"city_code": "101100104",
"city_name": "娄烦县"
},
{
"_id": 1837,
"id": 2484,
"pid": 299,
"city_code": "101100105",
"city_name": "古交市"
},
{
"_id": 1838,
"id": 2487,
"pid": 300,
"city_code": "101100508",
"city_name": "沁县"
},
{
"_id": 1839,
"id": 2488,
"pid": 300,
"city_code": "101100504",
"city_name": "潞城市"
},
{
"_id": 1840,
"id": 2489,
"pid": 300,
"city_code": "101100501",
"city_name": "长治县"
},
{
"_id": 1841,
"id": 2490,
"pid": 300,
"city_code": "101100505",
"city_name": "襄垣县"
},
{
"_id": 1842,
"id": 2491,
"pid": 300,
"city_code": "101100503",
"city_name": "屯留县"
},
{
"_id": 1843,
"id": 2492,
"pid": 300,
"city_code": "101100506",
"city_name": "平顺县"
},
{
"_id": 1844,
"id": 2493,
"pid": 300,
"city_code": "101100502",
"city_name": "黎城县"
},
{
"_id": 1845,
"id": 2494,
"pid": 300,
"city_code": "101100511",
"city_name": "壶关县"
},
{
"_id": 1846,
"id": 2495,
"pid": 300,
"city_code": "101100509",
"city_name": "长子县"
},
{
"_id": 1847,
"id": 2496,
"pid": 300,
"city_code": "101100507",
"city_name": "武乡县"
},
{
"_id": 1848,
"id": 2497,
"pid": 300,
"city_code": "101100510",
"city_name": "沁源县"
},
{
"_id": 1849,
"id": 2502,
"pid": 301,
"city_code": "101100202",
"city_name": "阳高县"
},
{
"_id": 1850,
"id": 2503,
"pid": 301,
"city_code": "101100204",
"city_name": "天镇县"
},
{
"_id": 1851,
"id": 2504,
"pid": 301,
"city_code": "101100205",
"city_name": "广灵县"
},
{
"_id": 1852,
"id": 2505,
"pid": 301,
"city_code": "101100206",
"city_name": "灵丘县"
},
{
"_id": 1853,
"id": 2506,
"pid": 301,
"city_code": "101100207",
"city_name": "浑源县"
},
{
"_id": 1854,
"id": 2507,
"pid": 301,
"city_code": "101100208",
"city_name": "左云县"
},
{
"_id": 1855,
"id": 2508,
"pid": 301,
"city_code": "101100203",
"city_name": "大同县"
},
{
"_id": 1856,
"id": 2510,
"pid": 302,
"city_code": "101100605",
"city_name": "高平市"
},
{
"_id": 1857,
"id": 2511,
"pid": 302,
"city_code": "101100602",
"city_name": "沁水县"
},
{
"_id": 1858,
"id": 2512,
"pid": 302,
"city_code": "101100603",
"city_name": "阳城县"
},
{
"_id": 1859,
"id": 2513,
"pid": 302,
"city_code": "101100604",
"city_name": "陵川县"
},
{
"_id": 1860,
"id": 2514,
"pid": 302,
"city_code": "101100606",
"city_name": "泽州县"
},
{
"_id": 1861,
"id": 2515,
"pid": 303,
"city_code": "101100402",
"city_name": "榆次区"
},
{
"_id": 1862,
"id": 2516,
"pid": 303,
"city_code": "101100412",
"city_name": "介休市"
},
{
"_id": 1863,
"id": 2517,
"pid": 303,
"city_code": "101100403",
"city_name": "榆社县"
},
{
"_id": 1864,
"id": 2518,
"pid": 303,
"city_code": "101100404",
"city_name": "左权县"
},
{
"_id": 1865,
"id": 2519,
"pid": 303,
"city_code": "101100405",
"city_name": "和顺县"
},
{
"_id": 1866,
"id": 2520,
"pid": 303,
"city_code": "101100406",
"city_name": "昔阳县"
},
{
"_id": 1867,
"id": 2521,
"pid": 303,
"city_code": "101100407",
"city_name": "寿阳县"
},
{
"_id": 1868,
"id": 2522,
"pid": 303,
"city_code": "101100408",
"city_name": "太谷县"
},
{
"_id": 1869,
"id": 2523,
"pid": 303,
"city_code": "101100409",
"city_name": "祁县"
},
{
"_id": 1870,
"id": 2524,
"pid": 303,
"city_code": "101100410",
"city_name": "平遥县"
},
{
"_id": 1871,
"id": 2525,
"pid": 303,
"city_code": "101100411",
"city_name": "灵石县"
},
{
"_id": 1872,
"id": 2527,
"pid": 304,
"city_code": "101100714",
"city_name": "侯马市"
},
{
"_id": 1873,
"id": 2528,
"pid": 304,
"city_code": "101100711",
"city_name": "霍州市"
},
{
"_id": 1874,
"id": 2529,
"pid": 304,
"city_code": "101100702",
"city_name": "曲沃县"
},
{
"_id": 1875,
"id": 2530,
"pid": 304,
"city_code": "101100713",
"city_name": "翼城县"
},
{
"_id": 1876,
"id": 2531,
"pid": 304,
"city_code": "101100707",
"city_name": "襄汾县"
},
{
"_id": 1877,
"id": 2532,
"pid": 304,
"city_code": "101100710",
"city_name": "洪洞县"
},
{
"_id": 1878,
"id": 2533,
"pid": 304,
"city_code": "101100706",
"city_name": "吉县"
},
{
"_id": 1879,
"id": 2534,
"pid": 304,
"city_code": "101100716",
"city_name": "安泽县"
},
{
"_id": 1880,
"id": 2535,
"pid": 304,
"city_code": "101100715",
"city_name": "浮山县"
},
{
"_id": 1881,
"id": 2536,
"pid": 304,
"city_code": "101100717",
"city_name": "古县"
},
{
"_id": 1882,
"id": 2537,
"pid": 304,
"city_code": "101100712",
"city_name": "乡宁县"
},
{
"_id": 1883,
"id": 2538,
"pid": 304,
"city_code": "101100705",
"city_name": "大宁县"
},
{
"_id": 1884,
"id": 2539,
"pid": 304,
"city_code": "101100704",
"city_name": "隰县"
},
{
"_id": 1885,
"id": 2540,
"pid": 304,
"city_code": "101100703",
"city_name": "永和县"
},
{
"_id": 1886,
"id": 2541,
"pid": 304,
"city_code": "101100708",
"city_name": "蒲县"
},
{
"_id": 1887,
"id": 2542,
"pid": 304,
"city_code": "101100709",
"city_name": "汾西县"
},
{
"_id": 1888,
"id": 2543,
"pid": 305,
"city_code": "101101101",
"city_name": "离石市"
},
{
"_id": 1889,
"id": 2544,
"pid": 305,
"city_code": "101101101",
"city_name": "离石区"
},
{
"_id": 1890,
"id": 2545,
"pid": 305,
"city_code": "101101110",
"city_name": "孝义市"
},
{
"_id": 1891,
"id": 2546,
"pid": 305,
"city_code": "101101111",
"city_name": "汾阳市"
},
{
"_id": 1892,
"id": 2547,
"pid": 305,
"city_code": "101101112",
"city_name": "文水县"
},
{
"_id": 1893,
"id": 2548,
"pid": 305,
"city_code": "101101113",
"city_name": "交城县"
},
{
"_id": 1894,
"id": 2549,
"pid": 305,
"city_code": "101101103",
"city_name": "兴县"
},
{
"_id": 1895,
"id": 2550,
"pid": 305,
"city_code": "101101102",
"city_name": "临县"
},
{
"_id": 1896,
"id": 2551,
"pid": 305,
"city_code": "101101105",
"city_name": "柳林县"
},
{
"_id": 1897,
"id": 2552,
"pid": 305,
"city_code": "101101106",
"city_name": "石楼县"
},
{
"_id": 1898,
"id": 2553,
"pid": 305,
"city_code": "101101104",
"city_name": "岚县"
},
{
"_id": 1899,
"id": 2554,
"pid": 305,
"city_code": "101101107",
"city_name": "方山县"
},
{
"_id": 1900,
"id": 2555,
"pid": 305,
"city_code": "101101109",
"city_name": "中阳县"
},
{
"_id": 1901,
"id": 2556,
"pid": 305,
"city_code": "101101108",
"city_name": "交口县"
},
{
"_id": 1902,
"id": 2558,
"pid": 306,
"city_code": "101100902",
"city_name": "平鲁区"
},
{
"_id": 1903,
"id": 2559,
"pid": 306,
"city_code": "101100903",
"city_name": "山阴县"
},
{
"_id": 1904,
"id": 2560,
"pid": 306,
"city_code": "101100905",
"city_name": "应县"
},
{
"_id": 1905,
"id": 2561,
"pid": 306,
"city_code": "101100904",
"city_name": "右玉县"
},
{
"_id": 1906,
"id": 2562,
"pid": 306,
"city_code": "101100906",
"city_name": "怀仁县"
},
{
"_id": 1907,
"id": 2564,
"pid": 307,
"city_code": "101101015",
"city_name": "原平市"
},
{
"_id": 1908,
"id": 2565,
"pid": 307,
"city_code": "101101002",
"city_name": "定襄县"
},
{
"_id": 1909,
"id": 2566,
"pid": 307,
"city_code": "101101003",
"city_name": "五台县"
},
{
"_id": 1910,
"id": 2567,
"pid": 307,
"city_code": "101101008",
"city_name": "代县"
},
{
"_id": 1911,
"id": 2568,
"pid": 307,
"city_code": "101101009",
"city_name": "繁峙县"
},
{
"_id": 1912,
"id": 2569,
"pid": 307,
"city_code": "101101007",
"city_name": "宁武县"
},
{
"_id": 1913,
"id": 2570,
"pid": 307,
"city_code": "101101012",
"city_name": "静乐县"
},
{
"_id": 1914,
"id": 2571,
"pid": 307,
"city_code": "101101006",
"city_name": "神池县"
},
{
"_id": 1915,
"id": 2572,
"pid": 307,
"city_code": "101101014",
"city_name": "五寨县"
},
{
"_id": 1916,
"id": 2573,
"pid": 307,
"city_code": "101101013",
"city_name": "岢岚县"
},
{
"_id": 1917,
"id": 2574,
"pid": 307,
"city_code": "101101004",
"city_name": "河曲县"
},
{
"_id": 1918,
"id": 2575,
"pid": 307,
"city_code": "101101011",
"city_name": "保德县"
},
{
"_id": 1919,
"id": 2576,
"pid": 307,
"city_code": "101101005",
"city_name": "偏关县"
},
{
"_id": 1920,
"id": 2580,
"pid": 308,
"city_code": "101100303",
"city_name": "平定县"
},
{
"_id": 1921,
"id": 2581,
"pid": 308,
"city_code": "101100302",
"city_name": "盂县"
},
{
"_id": 1922,
"id": 2583,
"pid": 309,
"city_code": "101100810",
"city_name": "永济市"
},
{
"_id": 1923,
"id": 2584,
"pid": 309,
"city_code": "101100805",
"city_name": "河津市"
},
{
"_id": 1924,
"id": 2585,
"pid": 309,
"city_code": "101100802",
"city_name": "临猗县"
},
{
"_id": 1925,
"id": 2586,
"pid": 309,
"city_code": "101100804",
"city_name": "万荣县"
},
{
"_id": 1926,
"id": 2587,
"pid": 309,
"city_code": "101100808",
"city_name": "闻喜县"
},
{
"_id": 1927,
"id": 2588,
"pid": 309,
"city_code": "101100803",
"city_name": "稷山县"
},
{
"_id": 1928,
"id": 2589,
"pid": 309,
"city_code": "101100806",
"city_name": "新绛县"
},
{
"_id": 1929,
"id": 2590,
"pid": 309,
"city_code": "101100807",
"city_name": "绛县"
},
{
"_id": 1930,
"id": 2591,
"pid": 309,
"city_code": "101100809",
"city_name": "垣曲县"
},
{
"_id": 1931,
"id": 2592,
"pid": 309,
"city_code": "101100812",
"city_name": "夏县"
},
{
"_id": 1932,
"id": 2593,
"pid": 309,
"city_code": "101100813",
"city_name": "平陆县"
},
{
"_id": 1933,
"id": 2594,
"pid": 309,
"city_code": "101100811",
"city_name": "芮城县"
},
{
"_id": 1934,
"id": 2602,
"pid": 310,
"city_code": "101110103",
"city_name": "临潼区"
},
{
"_id": 1935,
"id": 2603,
"pid": 310,
"city_code": "101110102",
"city_name": "长安区"
},
{
"_id": 1936,
"id": 2604,
"pid": 310,
"city_code": "101110104",
"city_name": "蓝田县"
},
{
"_id": 1937,
"id": 2605,
"pid": 310,
"city_code": "101110105",
"city_name": "周至县"
},
{
"_id": 1938,
"id": 2606,
"pid": 310,
"city_code": "101110106",
"city_name": "户县"
},
{
"_id": 1939,
"id": 2607,
"pid": 310,
"city_code": "101110107",
"city_name": "高陵县"
},
{
"_id": 1940,
"id": 2609,
"pid": 311,
"city_code": "101110704",
"city_name": "汉阴县"
},
{
"_id": 1941,
"id": 2610,
"pid": 311,
"city_code": "101110703",
"city_name": "石泉县"
},
{
"_id": 1942,
"id": 2611,
"pid": 311,
"city_code": "101110710",
"city_name": "宁陕县"
},
{
"_id": 1943,
"id": 2612,
"pid": 311,
"city_code": "101110702",
"city_name": "紫阳县"
},
{
"_id": 1944,
"id": 2613,
"pid": 311,
"city_code": "101110706",
"city_name": "岚皋县"
},
{
"_id": 1945,
"id": 2614,
"pid": 311,
"city_code": "101110707",
"city_name": "平利县"
},
{
"_id": 1946,
"id": 2615,
"pid": 311,
"city_code": "101110709",
"city_name": "镇坪县"
},
{
"_id": 1947,
"id": 2616,
"pid": 311,
"city_code": "101110705",
"city_name": "旬阳县"
},
{
"_id": 1948,
"id": 2617,
"pid": 311,
"city_code": "101110708",
"city_name": "白河县"
},
{
"_id": 1949,
"id": 2618,
"pid": 312,
"city_code": "101110912",
"city_name": "陈仓区"
},
{
"_id": 1950,
"id": 2621,
"pid": 312,
"city_code": "101110906",
"city_name": "凤翔县"
},
{
"_id": 1951,
"id": 2622,
"pid": 312,
"city_code": "101110905",
"city_name": "岐山县"
},
{
"_id": 1952,
"id": 2623,
"pid": 312,
"city_code": "101110907",
"city_name": "扶风县"
},
{
"_id": 1953,
"id": 2624,
"pid": 312,
"city_code": "101110908",
"city_name": "眉县"
},
{
"_id": 1954,
"id": 2625,
"pid": 312,
"city_code": "101110911",
"city_name": "陇县"
},
{
"_id": 1955,
"id": 2626,
"pid": 312,
"city_code": "101110903",
"city_name": "千阳县"
},
{
"_id": 1956,
"id": 2627,
"pid": 312,
"city_code": "101110904",
"city_name": "麟游县"
},
{
"_id": 1957,
"id": 2628,
"pid": 312,
"city_code": "101110910",
"city_name": "凤县"
},
{
"_id": 1958,
"id": 2629,
"pid": 312,
"city_code": "101110909",
"city_name": "太白县"
},
{
"_id": 1959,
"id": 2631,
"pid": 313,
"city_code": "101110810",
"city_name": "南郑县"
},
{
"_id": 1960,
"id": 2632,
"pid": 313,
"city_code": "101110806",
"city_name": "城固县"
},
{
"_id": 1961,
"id": 2633,
"pid": 313,
"city_code": "101110805",
"city_name": "洋县"
},
{
"_id": 1962,
"id": 2634,
"pid": 313,
"city_code": "101110807",
"city_name": "西乡县"
},
{
"_id": 1963,
"id": 2635,
"pid": 313,
"city_code": "101110803",
"city_name": "勉县"
},
{
"_id": 1964,
"id": 2636,
"pid": 313,
"city_code": "101110809",
"city_name": "宁强县"
},
{
"_id": 1965,
"id": 2637,
"pid": 313,
"city_code": "101110802",
"city_name": "略阳县"
},
{
"_id": 1966,
"id": 2638,
"pid": 313,
"city_code": "101110811",
"city_name": "镇巴县"
},
{
"_id": 1967,
"id": 2639,
"pid": 313,
"city_code": "101110804",
"city_name": "留坝县"
},
{
"_id": 1968,
"id": 2640,
"pid": 313,
"city_code": "101110808",
"city_name": "佛坪县"
},
{
"_id": 1969,
"id": 2641,
"pid": 314,
"city_code": "101110604",
"city_name": "商州区"
},
{
"_id": 1970,
"id": 2642,
"pid": 314,
"city_code": "101110602",
"city_name": "洛南县"
},
{
"_id": 1971,
"id": 2643,
"pid": 314,
"city_code": "101110606",
"city_name": "丹凤县"
},
{
"_id": 1972,
"id": 2644,
"pid": 314,
"city_code": "101110607",
"city_name": "商南县"
},
{
"_id": 1973,
"id": 2645,
"pid": 314,
"city_code": "101110608",
"city_name": "山阳县"
},
{
"_id": 1974,
"id": 2646,
"pid": 314,
"city_code": "101110605",
"city_name": "镇安县"
},
{
"_id": 1975,
"id": 2647,
"pid": 314,
"city_code": "101110603",
"city_name": "柞水县"
},
{
"_id": 1976,
"id": 2648,
"pid": 315,
"city_code": "101111004",
"city_name": "耀州区"
},
{
"_id": 1977,
"id": 2651,
"pid": 315,
"city_code": "101111003",
"city_name": "宜君县"
},
{
"_id": 1978,
"id": 2653,
"pid": 316,
"city_code": "101110510",
"city_name": "韩城市"
},
{
"_id": 1979,
"id": 2654,
"pid": 316,
"city_code": "101110511",
"city_name": "华阴市"
},
{
"_id": 1980,
"id": 2655,
"pid": 316,
"city_code": "101110502",
"city_name": "华县"
},
{
"_id": 1981,
"id": 2656,
"pid": 316,
"city_code": "101110503",
"city_name": "潼关县"
},
{
"_id": 1982,
"id": 2657,
"pid": 316,
"city_code": "101110504",
"city_name": "大荔县"
},
{
"_id": 1983,
"id": 2658,
"pid": 316,
"city_code": "101110509",
"city_name": "合阳县"
},
{
"_id": 1984,
"id": 2659,
"pid": 316,
"city_code": "101110508",
"city_name": "澄城县"
},
{
"_id": 1985,
"id": 2660,
"pid": 316,
"city_code": "101110507",
"city_name": "蒲城县"
},
{
"_id": 1986,
"id": 2661,
"pid": 316,
"city_code": "101110505",
"city_name": "白水县"
},
{
"_id": 1987,
"id": 2662,
"pid": 316,
"city_code": "101110506",
"city_name": "富平县"
},
{
"_id": 1988,
"id": 2666,
"pid": 317,
"city_code": "101110211",
"city_name": "兴平市"
},
{
"_id": 1989,
"id": 2667,
"pid": 317,
"city_code": "101110201",
"city_name": "三原县"
},
{
"_id": 1990,
"id": 2668,
"pid": 317,
"city_code": "101110205",
"city_name": "泾阳县"
},
{
"_id": 1991,
"id": 2669,
"pid": 317,
"city_code": "101110207",
"city_name": "乾县"
},
{
"_id": 1992,
"id": 2670,
"pid": 317,
"city_code": "101110202",
"city_name": "礼泉县"
},
{
"_id": 1993,
"id": 2671,
"pid": 317,
"city_code": "101110203",
"city_name": "永寿县"
},
{
"_id": 1994,
"id": 2672,
"pid": 317,
"city_code": "101110208",
"city_name": "彬县"
},
{
"_id": 1995,
"id": 2673,
"pid": 317,
"city_code": "101110209",
"city_name": "长武县"
},
{
"_id": 1996,
"id": 2674,
"pid": 317,
"city_code": "101110210",
"city_name": "旬邑县"
},
{
"_id": 1997,
"id": 2675,
"pid": 317,
"city_code": "101110204",
"city_name": "淳化县"
},
{
"_id": 1998,
"id": 2676,
"pid": 317,
"city_code": "101110206",
"city_name": "武功县"
},
{
"_id": 1999,
"id": 2677,
"pid": 318,
"city_code": "101110312",
"city_name": "吴起县"
},
{
"_id": 2000,
"id": 2679,
"pid": 318,
"city_code": "101110301",
"city_name": "延长县"
},
{
"_id": 2001,
"id": 2680,
"pid": 318,
"city_code": "101110302",
"city_name": "延川县"
},
{
"_id": 2002,
"id": 2681,
"pid": 318,
"city_code": "101110303",
"city_name": "子长县"
},
{
"_id": 2003,
"id": 2682,
"pid": 318,
"city_code": "101110307",
"city_name": "安塞县"
},
{
"_id": 2004,
"id": 2683,
"pid": 318,
"city_code": "101110306",
"city_name": "志丹县"
},
{
"_id": 2005,
"id": 2684,
"pid": 318,
"city_code": "101110308",
"city_name": "甘泉县"
},
{
"_id": 2006,
"id": 2685,
"pid": 318,
"city_code": "101110305",
"city_name": "富县"
},
{
"_id": 2007,
"id": 2686,
"pid": 318,
"city_code": "101110309",
"city_name": "洛川县"
},
{
"_id": 2008,
"id": 2687,
"pid": 318,
"city_code": "101110304",
"city_name": "宜川县"
},
{
"_id": 2009,
"id": 2688,
"pid": 318,
"city_code": "101110311",
"city_name": "黄龙县"
},
{
"_id": 2010,
"id": 2689,
"pid": 318,
"city_code": "101110310",
"city_name": "黄陵县"
},
{
"_id": 2011,
"id": 2690,
"pid": 319,
"city_code": "101110413",
"city_name": "榆阳区"
},
{
"_id": 2012,
"id": 2691,
"pid": 319,
"city_code": "101110403",
"city_name": "神木县"
},
{
"_id": 2013,
"id": 2692,
"pid": 319,
"city_code": "101110402",
"city_name": "府谷县"
},
{
"_id": 2014,
"id": 2693,
"pid": 319,
"city_code": "101110407",
"city_name": "横山县"
},
{
"_id": 2015,
"id": 2694,
"pid": 319,
"city_code": "101110406",
"city_name": "靖边县"
},
{
"_id": 2016,
"id": 2695,
"pid": 319,
"city_code": "101110405",
"city_name": "定边县"
},
{
"_id": 2017,
"id": 2696,
"pid": 319,
"city_code": "101110410",
"city_name": "绥德县"
},
{
"_id": 2018,
"id": 2697,
"pid": 319,
"city_code": "101110408",
"city_name": "米脂县"
},
{
"_id": 2019,
"id": 2698,
"pid": 319,
"city_code": "101110404",
"city_name": "佳县"
},
{
"_id": 2020,
"id": 2699,
"pid": 319,
"city_code": "101110411",
"city_name": "吴堡县"
},
{
"_id": 2021,
"id": 2700,
"pid": 319,
"city_code": "101110412",
"city_name": "清涧县"
},
{
"_id": 2022,
"id": 2701,
"pid": 319,
"city_code": "101110409",
"city_name": "子洲县"
},
{
"_id": 2023,
"id": 2704,
"pid": 24,
"city_code": "101020200",
"city_name": "闵行区"
},
{
"_id": 2024,
"id": 2706,
"pid": 24,
"city_code": "101021300",
"city_name": "浦东新区"
},
{
"_id": 2025,
"id": 2714,
"pid": 24,
"city_code": "101020900",
"city_name": "松江区"
},
{
"_id": 2026,
"id": 2715,
"pid": 24,
"city_code": "101020500",
"city_name": "嘉定区"
},
{
"_id": 2027,
"id": 2716,
"pid": 24,
"city_code": "101020300",
"city_name": "宝山区"
},
{
"_id": 2028,
"id": 2717,
"pid": 24,
"city_code": "101020800",
"city_name": "青浦区"
},
{
"_id": 2029,
"id": 2718,
"pid": 24,
"city_code": "101020700",
"city_name": "金山区"
},
{
"_id": 2030,
"id": 2719,
"pid": 24,
"city_code": "101021000",
"city_name": "奉贤区"
},
{
"_id": 2031,
"id": 2720,
"pid": 24,
"city_code": "101021100",
"city_name": "崇明区"
},
{
"_id": 2032,
"id": 2726,
"pid": 321,
"city_code": "101270102",
"city_name": "龙泉驿区"
},
{
"_id": 2033,
"id": 2727,
"pid": 321,
"city_code": "101270115",
"city_name": "青白江区"
},
{
"_id": 2034,
"id": 2728,
"pid": 321,
"city_code": "101270103",
"city_name": "新都区"
},
{
"_id": 2035,
"id": 2729,
"pid": 321,
"city_code": "101270104",
"city_name": "温江区"
},
{
"_id": 2036,
"id": 2732,
"pid": 321,
"city_code": "101270111",
"city_name": "都江堰市"
},
{
"_id": 2037,
"id": 2733,
"pid": 321,
"city_code": "101270112",
"city_name": "彭州市"
},
{
"_id": 2038,
"id": 2734,
"pid": 321,
"city_code": "101270113",
"city_name": "邛崃市"
},
{
"_id": 2039,
"id": 2735,
"pid": 321,
"city_code": "101270114",
"city_name": "崇州市"
},
{
"_id": 2040,
"id": 2736,
"pid": 321,
"city_code": "101270105",
"city_name": "金堂县"
},
{
"_id": 2041,
"id": 2737,
"pid": 321,
"city_code": "101270106",
"city_name": "双流县"
},
{
"_id": 2042,
"id": 2738,
"pid": 321,
"city_code": "101270107",
"city_name": "郫县"
},
{
"_id": 2043,
"id": 2739,
"pid": 321,
"city_code": "101270108",
"city_name": "大邑县"
},
{
"_id": 2044,
"id": 2740,
"pid": 321,
"city_code": "101270109",
"city_name": "蒲江县"
},
{
"_id": 2045,
"id": 2741,
"pid": 321,
"city_code": "101270110",
"city_name": "新津县"
},
{
"_id": 2046,
"id": 2754,
"pid": 322,
"city_code": "101270408",
"city_name": "江油市"
},
{
"_id": 2047,
"id": 2755,
"pid": 322,
"city_code": "101270403",
"city_name": "盐亭县"
},
{
"_id": 2048,
"id": 2756,
"pid": 322,
"city_code": "101270402",
"city_name": "三台县"
},
{
"_id": 2049,
"id": 2757,
"pid": 322,
"city_code": "101270407",
"city_name": "平武县"
},
{
"_id": 2050,
"id": 2758,
"pid": 322,
"city_code": "101270404",
"city_name": "安县"
},
{
"_id": 2051,
"id": 2759,
"pid": 322,
"city_code": "101270405",
"city_name": "梓潼县"
},
{
"_id": 2052,
"id": 2760,
"pid": 322,
"city_code": "101270406",
"city_name": "北川县"
},
{
"_id": 2053,
"id": 2761,
"pid": 323,
"city_code": "101271910",
"city_name": "马尔康县"
},
{
"_id": 2054,
"id": 2762,
"pid": 323,
"city_code": "101271902",
"city_name": "汶川县"
},
{
"_id": 2055,
"id": 2763,
"pid": 323,
"city_code": "101271903",
"city_name": "理县"
},
{
"_id": 2056,
"id": 2764,
"pid": 323,
"city_code": "101271904",
"city_name": "茂县"
},
{
"_id": 2057,
"id": 2765,
"pid": 323,
"city_code": "101271905",
"city_name": "松潘县"
},
{
"_id": 2058,
"id": 2766,
"pid": 323,
"city_code": "101271906",
"city_name": "九寨沟县"
},
{
"_id": 2059,
"id": 2767,
"pid": 323,
"city_code": "101271907",
"city_name": "金川县"
},
{
"_id": 2060,
"id": 2768,
"pid": 323,
"city_code": "101271908",
"city_name": "小金县"
},
{
"_id": 2061,
"id": 2769,
"pid": 323,
"city_code": "101271909",
"city_name": "黑水县"
},
{
"_id": 2062,
"id": 2770,
"pid": 323,
"city_code": "101271911",
"city_name": "壤塘县"
},
{
"_id": 2063,
"id": 2771,
"pid": 323,
"city_code": "101271901",
"city_name": "阿坝县"
},
{
"_id": 2064,
"id": 2772,
"pid": 323,
"city_code": "101271912",
"city_name": "若尔盖县"
},
{
"_id": 2065,
"id": 2773,
"pid": 323,
"city_code": "101271913",
"city_name": "红原县"
},
{
"_id": 2066,
"id": 2775,
"pid": 324,
"city_code": "101270902",
"city_name": "通江县"
},
{
"_id": 2067,
"id": 2776,
"pid": 324,
"city_code": "101270903",
"city_name": "南江县"
},
{
"_id": 2068,
"id": 2777,
"pid": 324,
"city_code": "101270904",
"city_name": "平昌县"
},
{
"_id": 2069,
"id": 2779,
"pid": 325,
"city_code": "101270606",
"city_name": "万源市"
},
{
"_id": 2070,
"id": 2780,
"pid": 325,
"city_code": "101270608",
"city_name": "达川区"
},
{
"_id": 2071,
"id": 2781,
"pid": 325,
"city_code": "101270602",
"city_name": "宣汉县"
},
{
"_id": 2072,
"id": 2782,
"pid": 325,
"city_code": "101270603",
"city_name": "开江县"
},
{
"_id": 2073,
"id": 2783,
"pid": 325,
"city_code": "101270604",
"city_name": "大竹县"
},
{
"_id": 2074,
"id": 2784,
"pid": 325,
"city_code": "101270605",
"city_name": "渠县"
},
{
"_id": 2075,
"id": 2786,
"pid": 326,
"city_code": "101272003",
"city_name": "广汉市"
},
{
"_id": 2076,
"id": 2787,
"pid": 326,
"city_code": "101272004",
"city_name": "什邡市"
},
{
"_id": 2077,
"id": 2788,
"pid": 326,
"city_code": "101272005",
"city_name": "绵竹市"
},
{
"_id": 2078,
"id": 2789,
"pid": 326,
"city_code": "101272006",
"city_name": "罗江县"
},
{
"_id": 2079,
"id": 2790,
"pid": 326,
"city_code": "101272002",
"city_name": "中江县"
},
{
"_id": 2080,
"id": 2791,
"pid": 327,
"city_code": "101271802",
"city_name": "康定县"
},
{
"_id": 2081,
"id": 2792,
"pid": 327,
"city_code": "101271804",
"city_name": "丹巴县"
},
{
"_id": 2082,
"id": 2793,
"pid": 327,
"city_code": "101271803",
"city_name": "泸定县"
},
{
"_id": 2083,
"id": 2794,
"pid": 327,
"city_code": "101271808",
"city_name": "炉霍县"
},
{
"_id": 2084,
"id": 2795,
"pid": 327,
"city_code": "101271805",
"city_name": "九龙县"
},
{
"_id": 2085,
"id": 2796,
"pid": 327,
"city_code": "101271801",
"city_name": "甘孜县"
},
{
"_id": 2086,
"id": 2797,
"pid": 327,
"city_code": "101271806",
"city_name": "雅江县"
},
{
"_id": 2087,
"id": 2798,
"pid": 327,
"city_code": "101271809",
"city_name": "新龙县"
},
{
"_id": 2088,
"id": 2799,
"pid": 327,
"city_code": "101271807",
"city_name": "道孚县"
},
{
"_id": 2089,
"id": 2800,
"pid": 327,
"city_code": "101271811",
"city_name": "白玉县"
},
{
"_id": 2090,
"id": 2801,
"pid": 327,
"city_code": "101271814",
"city_name": "理塘县"
},
{
"_id": 2091,
"id": 2802,
"pid": 327,
"city_code": "101271810",
"city_name": "德格县"
},
{
"_id": 2092,
"id": 2803,
"pid": 327,
"city_code": "101271816",
"city_name": "乡城县"
},
{
"_id": 2093,
"id": 2804,
"pid": 327,
"city_code": "101271812",
"city_name": "石渠县"
},
{
"_id": 2094,
"id": 2805,
"pid": 327,
"city_code": "101271817",
"city_name": "稻城县"
},
{
"_id": 2095,
"id": 2806,
"pid": 327,
"city_code": "101271813",
"city_name": "色达县"
},
{
"_id": 2096,
"id": 2807,
"pid": 327,
"city_code": "101271815",
"city_name": "巴塘县"
},
{
"_id": 2097,
"id": 2808,
"pid": 327,
"city_code": "101271818",
"city_name": "得荣县"
},
{
"_id": 2098,
"id": 2809,
"pid": 328,
"city_code": "101270801",
"city_name": "广安区"
},
{
"_id": 2099,
"id": 2810,
"pid": 328,
"city_code": "101270805",
"city_name": "华蓥市"
},
{
"_id": 2100,
"id": 2811,
"pid": 328,
"city_code": "101270802",
"city_name": "岳池县"
},
{
"_id": 2101,
"id": 2812,
"pid": 328,
"city_code": "101270803",
"city_name": "武胜县"
},
{
"_id": 2102,
"id": 2813,
"pid": 328,
"city_code": "101270804",
"city_name": "邻水县"
},
{
"_id": 2103,
"id": 2817,
"pid": 329,
"city_code": "101272102",
"city_name": "旺苍县"
},
{
"_id": 2104,
"id": 2818,
"pid": 329,
"city_code": "101272103",
"city_name": "青川县"
},
{
"_id": 2105,
"id": 2819,
"pid": 329,
"city_code": "101272104",
"city_name": "剑阁县"
},
{
"_id": 2106,
"id": 2820,
"pid": 329,
"city_code": "101272105",
"city_name": "苍溪县"
},
{
"_id": 2107,
"id": 2821,
"pid": 330,
"city_code": "101271409",
"city_name": "峨眉山市"
},
{
"_id": 2108,
"id": 2823,
"pid": 330,
"city_code": "101271402",
"city_name": "犍为县"
},
{
"_id": 2109,
"id": 2824,
"pid": 330,
"city_code": "101271403",
"city_name": "井研县"
},
{
"_id": 2110,
"id": 2825,
"pid": 330,
"city_code": "101271404",
"city_name": "夹江县"
},
{
"_id": 2111,
"id": 2826,
"pid": 330,
"city_code": "101271405",
"city_name": "沐川县"
},
{
"_id": 2112,
"id": 2827,
"pid": 330,
"city_code": "101271406",
"city_name": "峨边县"
},
{
"_id": 2113,
"id": 2828,
"pid": 330,
"city_code": "101271407",
"city_name": "马边县"
},
{
"_id": 2114,
"id": 2829,
"pid": 331,
"city_code": "101271610",
"city_name": "西昌市"
},
{
"_id": 2115,
"id": 2830,
"pid": 331,
"city_code": "101271604",
"city_name": "盐源县"
},
{
"_id": 2116,
"id": 2831,
"pid": 331,
"city_code": "101271605",
"city_name": "德昌县"
},
{
"_id": 2117,
"id": 2832,
"pid": 331,
"city_code": "101271606",
"city_name": "会理县"
},
{
"_id": 2118,
"id": 2833,
"pid": 331,
"city_code": "101271607",
"city_name": "会东县"
},
{
"_id": 2119,
"id": 2834,
"pid": 331,
"city_code": "101271608",
"city_name": "宁南县"
},
{
"_id": 2120,
"id": 2835,
"pid": 331,
"city_code": "101271609",
"city_name": "普格县"
},
{
"_id": 2121,
"id": 2836,
"pid": 331,
"city_code": "101271619",
"city_name": "布拖县"
},
{
"_id": 2122,
"id": 2837,
"pid": 331,
"city_code": "101271611",
"city_name": "金阳县"
},
{
"_id": 2123,
"id": 2838,
"pid": 331,
"city_code": "101271612",
"city_name": "昭觉县"
},
{
"_id": 2124,
"id": 2839,
"pid": 331,
"city_code": "101271613",
"city_name": "喜德县"
},
{
"_id": 2125,
"id": 2840,
"pid": 331,
"city_code": "101271614",
"city_name": "冕宁县"
},
{
"_id": 2126,
"id": 2841,
"pid": 331,
"city_code": "101271615",
"city_name": "越西县"
},
{
"_id": 2127,
"id": 2842,
"pid": 331,
"city_code": "101271616",
"city_name": "甘洛县"
},
{
"_id": 2128,
"id": 2843,
"pid": 331,
"city_code": "101271618",
"city_name": "美姑县"
},
{
"_id": 2129,
"id": 2844,
"pid": 331,
"city_code": "101271617",
"city_name": "雷波县"
},
{
"_id": 2130,
"id": 2845,
"pid": 331,
"city_code": "101271603",
"city_name": "木里县"
},
{
"_id": 2131,
"id": 2847,
"pid": 332,
"city_code": "101271502",
"city_name": "仁寿县"
},
{
"_id": 2132,
"id": 2848,
"pid": 332,
"city_code": "101271503",
"city_name": "彭山县"
},
{
"_id": 2133,
"id": 2849,
"pid": 332,
"city_code": "101271504",
"city_name": "洪雅县"
},
{
"_id": 2134,
"id": 2850,
"pid": 332,
"city_code": "101271505",
"city_name": "丹棱县"
},
{
"_id": 2135,
"id": 2851,
"pid": 332,
"city_code": "101271506",
"city_name": "青神县"
},
{
"_id": 2136,
"id": 2852,
"pid": 333,
"city_code": "101270507",
"city_name": "阆中市"
},
{
"_id": 2137,
"id": 2853,
"pid": 333,
"city_code": "101270502",
"city_name": "南部县"
},
{
"_id": 2138,
"id": 2854,
"pid": 333,
"city_code": "101270503",
"city_name": "营山县"
},
{
"_id": 2139,
"id": 2855,
"pid": 333,
"city_code": "101270504",
"city_name": "蓬安县"
},
{
"_id": 2140,
"id": 2856,
"pid": 333,
"city_code": "101270505",
"city_name": "仪陇县"
},
{
"_id": 2141,
"id": 2860,
"pid": 333,
"city_code": "101270506",
"city_name": "西充县"
},
{
"_id": 2142,
"id": 2862,
"pid": 334,
"city_code": "101271202",
"city_name": "东兴区"
},
{
"_id": 2143,
"id": 2863,
"pid": 334,
"city_code": "101271203",
"city_name": "威远县"
},
{
"_id": 2144,
"id": 2864,
"pid": 334,
"city_code": "101271204",
"city_name": "资中县"
},
{
"_id": 2145,
"id": 2865,
"pid": 334,
"city_code": "101271205",
"city_name": "隆昌县"
},
{
"_id": 2146,
"id": 2868,
"pid": 335,
"city_code": "101270202",
"city_name": "仁和区"
},
{
"_id": 2147,
"id": 2869,
"pid": 335,
"city_code": "101270203",
"city_name": "米易县"
},
{
"_id": 2148,
"id": 2870,
"pid": 335,
"city_code": "101270204",
"city_name": "盐边县"
},
{
"_id": 2149,
"id": 2873,
"pid": 336,
"city_code": "101270702",
"city_name": "蓬溪县"
},
{
"_id": 2150,
"id": 2874,
"pid": 336,
"city_code": "101270703",
"city_name": "射洪县"
},
{
"_id": 2151,
"id": 2877,
"pid": 337,
"city_code": "101271702",
"city_name": "名山县"
},
{
"_id": 2152,
"id": 2878,
"pid": 337,
"city_code": "101271703",
"city_name": "荥经县"
},
{
"_id": 2153,
"id": 2879,
"pid": 337,
"city_code": "101271704",
"city_name": "汉源县"
},
{
"_id": 2154,
"id": 2880,
"pid": 337,
"city_code": "101271705",
"city_name": "石棉县"
},
{
"_id": 2155,
"id": 2881,
"pid": 337,
"city_code": "101271706",
"city_name": "天全县"
},
{
"_id": 2156,
"id": 2882,
"pid": 337,
"city_code": "101271707",
"city_name": "芦山县"
},
{
"_id": 2157,
"id": 2883,
"pid": 337,
"city_code": "101271708",
"city_name": "宝兴县"
},
{
"_id": 2158,
"id": 2885,
"pid": 338,
"city_code": "101271103",
"city_name": "宜宾县"
},
{
"_id": 2159,
"id": 2886,
"pid": 338,
"city_code": "101271104",
"city_name": "南溪县"
},
{
"_id": 2160,
"id": 2887,
"pid": 338,
"city_code": "101271105",
"city_name": "江安县"
},
{
"_id": 2161,
"id": 2888,
"pid": 338,
"city_code": "101271106",
"city_name": "长宁县"
},
{
"_id": 2162,
"id": 2889,
"pid": 338,
"city_code": "101271107",
"city_name": "高县"
},
{
"_id": 2163,
"id": 2890,
"pid": 338,
"city_code": "101271108",
"city_name": "珙县"
},
{
"_id": 2164,
"id": 2891,
"pid": 338,
"city_code": "101271109",
"city_name": "筠连县"
},
{
"_id": 2165,
"id": 2892,
"pid": 338,
"city_code": "101271110",
"city_name": "兴文县"
},
{
"_id": 2166,
"id": 2893,
"pid": 338,
"city_code": "101271111",
"city_name": "屏山县"
},
{
"_id": 2167,
"id": 2895,
"pid": 321,
"city_code": "101271304",
"city_name": "简阳市"
},
{
"_id": 2168,
"id": 2896,
"pid": 339,
"city_code": "101271302",
"city_name": "安岳县"
},
{
"_id": 2169,
"id": 2897,
"pid": 339,
"city_code": "101271303",
"city_name": "乐至县"
},
{
"_id": 2170,
"id": 2902,
"pid": 340,
"city_code": "101270303",
"city_name": "荣县"
},
{
"_id": 2171,
"id": 2903,
"pid": 340,
"city_code": "101270302",
"city_name": "富顺县"
},
{
"_id": 2172,
"id": 2905,
"pid": 341,
"city_code": "101271007",
"city_name": "纳溪区"
},
{
"_id": 2173,
"id": 2907,
"pid": 341,
"city_code": "101271003",
"city_name": "泸县"
},
{
"_id": 2174,
"id": 2908,
"pid": 341,
"city_code": "101271004",
"city_name": "合江县"
},
{
"_id": 2175,
"id": 2909,
"pid": 341,
"city_code": "101271005",
"city_name": "叙永县"
},
{
"_id": 2176,
"id": 2910,
"pid": 341,
"city_code": "101271006",
"city_name": "古蔺县"
},
{
"_id": 2177,
"id": 2917,
"pid": 26,
"city_code": "101030400",
"city_name": "东丽区"
},
{
"_id": 2178,
"id": 2918,
"pid": 26,
"city_code": "101031000",
"city_name": "津南区"
},
{
"_id": 2179,
"id": 2919,
"pid": 26,
"city_code": "101030500",
"city_name": "西青区"
},
{
"_id": 2180,
"id": 2920,
"pid": 26,
"city_code": "101030600",
"city_name": "北辰区"
},
{
"_id": 2181,
"id": 2921,
"pid": 26,
"city_code": "101031100",
"city_name": "塘沽区"
},
{
"_id": 2182,
"id": 2922,
"pid": 26,
"city_code": "101030800",
"city_name": "汉沽区"
},
{
"_id": 2183,
"id": 2923,
"pid": 26,
"city_code": "101031200",
"city_name": "大港区"
},
{
"_id": 2184,
"id": 2924,
"pid": 26,
"city_code": "101030200",
"city_name": "武清区"
},
{
"_id": 2185,
"id": 2925,
"pid": 26,
"city_code": "101030300",
"city_name": "宝坻区"
},
{
"_id": 2186,
"id": 2927,
"pid": 26,
"city_code": "101030700",
"city_name": "宁河区"
},
{
"_id": 2187,
"id": 2928,
"pid": 26,
"city_code": "101030900",
"city_name": "静海区"
},
{
"_id": 2188,
"id": 2929,
"pid": 26,
"city_code": "101031400",
"city_name": "蓟州区"
},
{
"_id": 2189,
"id": 2931,
"pid": 343,
"city_code": "101140104",
"city_name": "林周县"
},
{
"_id": 2190,
"id": 2932,
"pid": 343,
"city_code": "101140102",
"city_name": "当雄县"
},
{
"_id": 2191,
"id": 2933,
"pid": 343,
"city_code": "101140103",
"city_name": "尼木县"
},
{
"_id": 2192,
"id": 2934,
"pid": 343,
"city_code": "101140106",
"city_name": "曲水县"
},
{
"_id": 2193,
"id": 2935,
"pid": 343,
"city_code": "101140105",
"city_name": "堆龙德庆县"
},
{
"_id": 2194,
"id": 2936,
"pid": 343,
"city_code": "101140107",
"city_name": "达孜县"
},
{
"_id": 2195,
"id": 2937,
"pid": 343,
"city_code": "101140108",
"city_name": "墨竹工卡县"
},
{
"_id": 2196,
"id": 2938,
"pid": 344,
"city_code": "101140707",
"city_name": "噶尔县"
},
{
"_id": 2197,
"id": 2939,
"pid": 344,
"city_code": "101140705",
"city_name": "普兰县"
},
{
"_id": 2198,
"id": 2940,
"pid": 344,
"city_code": "101140706",
"city_name": "札达县"
},
{
"_id": 2199,
"id": 2941,
"pid": 344,
"city_code": "101140708",
"city_name": "日土县"
},
{
"_id": 2200,
"id": 2942,
"pid": 344,
"city_code": "101140709",
"city_name": "革吉县"
},
{
"_id": 2201,
"id": 2943,
"pid": 344,
"city_code": "101140702",
"city_name": "改则县"
},
{
"_id": 2202,
"id": 2944,
"pid": 344,
"city_code": "101140710",
"city_name": "措勤县"
},
{
"_id": 2203,
"id": 2945,
"pid": 345,
"city_code": "101140501",
"city_name": "昌都县"
},
{
"_id": 2204,
"id": 2946,
"pid": 345,
"city_code": "101140509",
"city_name": "江达县"
},
{
"_id": 2205,
"id": 2947,
"pid": 345,
"city_code": "101140511",
"city_name": "贡觉县"
},
{
"_id": 2206,
"id": 2948,
"pid": 345,
"city_code": "101140503",
"city_name": "类乌齐县"
},
{
"_id": 2207,
"id": 2949,
"pid": 345,
"city_code": "101140502",
"city_name": "丁青县"
},
{
"_id": 2208,
"id": 2950,
"pid": 345,
"city_code": "101140510",
"city_name": "察雅县"
},
{
"_id": 2209,
"id": 2951,
"pid": 345,
"city_code": "101140507",
"city_name": "八宿县"
},
{
"_id": 2210,
"id": 2952,
"pid": 345,
"city_code": "101140505",
"city_name": "左贡县"
},
{
"_id": 2211,
"id": 2953,
"pid": 345,
"city_code": "101140506",
"city_name": "芒康县"
},
{
"_id": 2212,
"id": 2954,
"pid": 345,
"city_code": "101140504",
"city_name": "洛隆县"
},
{
"_id": 2213,
"id": 2955,
"pid": 345,
"city_code": "101140503",
"city_name": "边坝县"
},
{
"_id": 2214,
"id": 2956,
"pid": 346,
"city_code": "101140401",
"city_name": "林芝县"
},
{
"_id": 2215,
"id": 2957,
"pid": 346,
"city_code": "101140405",
"city_name": "工布江达县"
},
{
"_id": 2216,
"id": 2958,
"pid": 346,
"city_code": "101140403",
"city_name": "米林县"
},
{
"_id": 2217,
"id": 2959,
"pid": 346,
"city_code": "101140407",
"city_name": "墨脱县"
},
{
"_id": 2218,
"id": 2960,
"pid": 346,
"city_code": "101140402",
"city_name": "波密县"
},
{
"_id": 2219,
"id": 2961,
"pid": 346,
"city_code": "101140404",
"city_name": "察隅县"
},
{
"_id": 2220,
"id": 2962,
"pid": 346,
"city_code": "101140406",
"city_name": "朗县"
},
{
"_id": 2221,
"id": 2963,
"pid": 347,
"city_code": "101140601",
"city_name": "那曲县"
},
{
"_id": 2222,
"id": 2964,
"pid": 347,
"city_code": "101140603",
"city_name": "嘉黎县"
},
{
"_id": 2223,
"id": 2965,
"pid": 347,
"city_code": "101140607",
"city_name": "比如县"
},
{
"_id": 2224,
"id": 2966,
"pid": 347,
"city_code": "101140607",
"city_name": "聂荣县"
},
{
"_id": 2225,
"id": 2967,
"pid": 347,
"city_code": "101140605",
"city_name": "安多县"
},
{
"_id": 2226,
"id": 2968,
"pid": 347,
"city_code": "101140703",
"city_name": "申扎县"
},
{
"_id": 2227,
"id": 2969,
"pid": 347,
"city_code": "101140606",
"city_name": "索县"
},
{
"_id": 2228,
"id": 2970,
"pid": 347,
"city_code": "101140604",
"city_name": "班戈县"
},
{
"_id": 2229,
"id": 2971,
"pid": 347,
"city_code": "101140608",
"city_name": "巴青县"
},
{
"_id": 2230,
"id": 2972,
"pid": 347,
"city_code": "101140602",
"city_name": "尼玛县"
},
{
"_id": 2231,
"id": 2973,
"pid": 348,
"city_code": "101140201",
"city_name": "日喀则市"
},
{
"_id": 2232,
"id": 2974,
"pid": 348,
"city_code": "101140203",
"city_name": "南木林县"
},
{
"_id": 2233,
"id": 2975,
"pid": 348,
"city_code": "101140206",
"city_name": "江孜县"
},
{
"_id": 2234,
"id": 2976,
"pid": 348,
"city_code": "101140205",
"city_name": "定日县"
},
{
"_id": 2235,
"id": 2977,
"pid": 348,
"city_code": "101140213",
"city_name": "萨迦县"
},
{
"_id": 2236,
"id": 2978,
"pid": 348,
"city_code": "101140202",
"city_name": "拉孜县"
},
{
"_id": 2237,
"id": 2979,
"pid": 348,
"city_code": "101140211",
"city_name": "昂仁县"
},
{
"_id": 2238,
"id": 2980,
"pid": 348,
"city_code": "101140214",
"city_name": "谢通门县"
},
{
"_id": 2239,
"id": 2981,
"pid": 348,
"city_code": "101140217",
"city_name": "白朗县"
},
{
"_id": 2240,
"id": 2982,
"pid": 348,
"city_code": "101140220",
"city_name": "仁布县"
},
{
"_id": 2241,
"id": 2983,
"pid": 348,
"city_code": "101140219",
"city_name": "康马县"
},
{
"_id": 2242,
"id": 2984,
"pid": 348,
"city_code": "101140212",
"city_name": "定结县"
},
{
"_id": 2243,
"id": 2985,
"pid": 348,
"city_code": "101140208",
"city_name": "仲巴县"
},
{
"_id": 2244,
"id": 2986,
"pid": 348,
"city_code": "101140218",
"city_name": "亚东县"
},
{
"_id": 2245,
"id": 2987,
"pid": 348,
"city_code": "101140210",
"city_name": "吉隆县"
},
{
"_id": 2246,
"id": 2988,
"pid": 348,
"city_code": "101140204",
"city_name": "聂拉木县"
},
{
"_id": 2247,
"id": 2989,
"pid": 348,
"city_code": "101140209",
"city_name": "萨嘎县"
},
{
"_id": 2248,
"id": 2990,
"pid": 348,
"city_code": "101140216",
"city_name": "岗巴县"
},
{
"_id": 2249,
"id": 2991,
"pid": 349,
"city_code": "101140309",
"city_name": "乃东县"
},
{
"_id": 2250,
"id": 2992,
"pid": 349,
"city_code": "101140303",
"city_name": "扎囊县"
},
{
"_id": 2251,
"id": 2993,
"pid": 349,
"city_code": "101140302",
"city_name": "贡嘎县"
},
{
"_id": 2252,
"id": 2994,
"pid": 349,
"city_code": "101140310",
"city_name": "桑日县"
},
{
"_id": 2253,
"id": 2995,
"pid": 349,
"city_code": "101140303",
"city_name": "琼结县"
},
{
"_id": 2254,
"id": 2996,
"pid": 349,
"city_code": "101140314",
"city_name": "曲松县"
},
{
"_id": 2255,
"id": 2997,
"pid": 349,
"city_code": "101140312",
"city_name": "措美县"
},
{
"_id": 2256,
"id": 2998,
"pid": 349,
"city_code": "101140311",
"city_name": "洛扎县"
},
{
"_id": 2257,
"id": 2999,
"pid": 349,
"city_code": "101140304",
"city_name": "加查县"
},
{
"_id": 2258,
"id": 3000,
"pid": 349,
"city_code": "101140307",
"city_name": "隆子县"
},
{
"_id": 2259,
"id": 3001,
"pid": 349,
"city_code": "101140306",
"city_name": "错那县"
},
{
"_id": 2260,
"id": 3002,
"pid": 349,
"city_code": "101140305",
"city_name": "浪卡子县"
},
{
"_id": 2261,
"id": 3008,
"pid": 350,
"city_code": "101130105",
"city_name": "达坂城区"
},
{
"_id": 2262,
"id": 3010,
"pid": 350,
"city_code": "101130101",
"city_name": "乌鲁木齐县"
},
{
"_id": 2263,
"id": 3011,
"pid": 351,
"city_code": "101130801",
"city_name": "阿克苏市"
},
{
"_id": 2264,
"id": 3012,
"pid": 351,
"city_code": "101130803",
"city_name": "温宿县"
},
{
"_id": 2265,
"id": 3013,
"pid": 351,
"city_code": "101130807",
"city_name": "库车县"
},
{
"_id": 2266,
"id": 3014,
"pid": 351,
"city_code": "101130806",
"city_name": "沙雅县"
},
{
"_id": 2267,
"id": 3015,
"pid": 351,
"city_code": "101130805",
"city_name": "新和县"
},
{
"_id": 2268,
"id": 3016,
"pid": 351,
"city_code": "101130804",
"city_name": "拜城县"
},
{
"_id": 2269,
"id": 3017,
"pid": 351,
"city_code": "101130802",
"city_name": "乌什县"
},
{
"_id": 2270,
"id": 3018,
"pid": 351,
"city_code": "101130809",
"city_name": "阿瓦提县"
},
{
"_id": 2271,
"id": 3019,
"pid": 351,
"city_code": "101130808",
"city_name": "柯坪县"
},
{
"_id": 2272,
"id": 3020,
"pid": 352,
"city_code": "101130701",
"city_name": "阿拉尔市"
},
{
"_id": 2273,
"id": 3021,
"pid": 353,
"city_code": "101130601",
"city_name": "库尔勒"
},
{
"_id": 2274,
"id": 3022,
"pid": 353,
"city_code": "101130602",
"city_name": "轮台县"
},
{
"_id": 2275,
"id": 3023,
"pid": 353,
"city_code": "101130603",
"city_name": "尉犁县"
},
{
"_id": 2276,
"id": 3024,
"pid": 353,
"city_code": "101130604",
"city_name": "若羌县"
},
{
"_id": 2277,
"id": 3025,
"pid": 353,
"city_code": "101130605",
"city_name": "且末县"
},
{
"_id": 2278,
"id": 3026,
"pid": 353,
"city_code": "101130607",
"city_name": "焉耆县"
},
{
"_id": 2279,
"id": 3027,
"pid": 353,
"city_code": "101130606",
"city_name": "和静县"
},
{
"_id": 2280,
"id": 3028,
"pid": 353,
"city_code": "101130608",
"city_name": "和硕县"
},
{
"_id": 2281,
"id": 3029,
"pid": 353,
"city_code": "101130612",
"city_name": "博湖县"
},
{
"_id": 2282,
"id": 3030,
"pid": 354,
"city_code": "101131601",
"city_name": "博乐市"
},
{
"_id": 2283,
"id": 3031,
"pid": 354,
"city_code": "101131603",
"city_name": "精河县"
},
{
"_id": 2284,
"id": 3032,
"pid": 354,
"city_code": "101131602",
"city_name": "温泉县"
},
{
"_id": 2285,
"id": 3033,
"pid": 355,
"city_code": "101130402",
"city_name": "呼图壁县"
},
{
"_id": 2286,
"id": 3034,
"pid": 355,
"city_code": "101130403",
"city_name": "米泉市"
},
{
"_id": 2287,
"id": 3035,
"pid": 355,
"city_code": "101130401",
"city_name": "昌吉市"
},
{
"_id": 2288,
"id": 3036,
"pid": 355,
"city_code": "101130404",
"city_name": "阜康市"
},
{
"_id": 2289,
"id": 3037,
"pid": 355,
"city_code": "101130407",
"city_name": "玛纳斯县"
},
{
"_id": 2290,
"id": 3038,
"pid": 355,
"city_code": "101130406",
"city_name": "奇台县"
},
{
"_id": 2291,
"id": 3039,
"pid": 355,
"city_code": "101130405",
"city_name": "吉木萨尔县"
},
{
"_id": 2292,
"id": 3040,
"pid": 355,
"city_code": "101130408",
"city_name": "木垒县"
},
{
"_id": 2293,
"id": 3041,
"pid": 356,
"city_code": "101131201",
"city_name": "哈密市"
},
{
"_id": 2294,
"id": 3042,
"pid": 356,
"city_code": "101131204",
"city_name": "伊吾县"
},
{
"_id": 2295,
"id": 3043,
"pid": 356,
"city_code": "101131203",
"city_name": "巴里坤"
},
{
"_id": 2296,
"id": 3044,
"pid": 357,
"city_code": "101131301",
"city_name": "和田市"
},
{
"_id": 2297,
"id": 3045,
"pid": 357,
"city_code": "101131301",
"city_name": "和田县"
},
{
"_id": 2298,
"id": 3046,
"pid": 357,
"city_code": "101131304",
"city_name": "墨玉县"
},
{
"_id": 2299,
"id": 3047,
"pid": 357,
"city_code": "101131302",
"city_name": "皮山县"
},
{
"_id": 2300,
"id": 3048,
"pid": 357,
"city_code": "101131305",
"city_name": "洛浦县"
},
{
"_id": 2301,
"id": 3049,
"pid": 357,
"city_code": "101131303",
"city_name": "策勒县"
},
{
"_id": 2302,
"id": 3050,
"pid": 357,
"city_code": "101131307",
"city_name": "于田县"
},
{
"_id": 2303,
"id": 3051,
"pid": 357,
"city_code": "101131306",
"city_name": "民丰县"
},
{
"_id": 2304,
"id": 3052,
"pid": 358,
"city_code": "101130901",
"city_name": "喀什市"
},
{
"_id": 2305,
"id": 3053,
"pid": 358,
"city_code": "101130911",
"city_name": "疏附县"
},
{
"_id": 2306,
"id": 3054,
"pid": 358,
"city_code": "101130912",
"city_name": "疏勒县"
},
{
"_id": 2307,
"id": 3055,
"pid": 358,
"city_code": "101130902",
"city_name": "英吉沙县"
},
{
"_id": 2308,
"id": 3056,
"pid": 358,
"city_code": "101130907",
"city_name": "泽普县"
},
{
"_id": 2309,
"id": 3057,
"pid": 358,
"city_code": "101130905",
"city_name": "莎车县"
},
{
"_id": 2310,
"id": 3058,
"pid": 358,
"city_code": "101130906",
"city_name": "叶城县"
},
{
"_id": 2311,
"id": 3059,
"pid": 358,
"city_code": "101130904",
"city_name": "麦盖提县"
},
{
"_id": 2312,
"id": 3060,
"pid": 358,
"city_code": "101130909",
"city_name": "岳普湖县"
},
{
"_id": 2313,
"id": 3061,
"pid": 358,
"city_code": "101130910",
"city_name": "伽师县"
},
{
"_id": 2314,
"id": 3062,
"pid": 358,
"city_code": "101130908",
"city_name": "巴楚县"
},
{
"_id": 2315,
"id": 3063,
"pid": 358,
"city_code": "101130903",
"city_name": "塔什库尔干"
},
{
"_id": 2316,
"id": 3064,
"pid": 359,
"city_code": "101130201",
"city_name": "克拉玛依市"
},
{
"_id": 2317,
"id": 3065,
"pid": 360,
"city_code": "101131501",
"city_name": "阿图什市"
},
{
"_id": 2318,
"id": 3066,
"pid": 360,
"city_code": "101131503",
"city_name": "阿克陶县"
},
{
"_id": 2319,
"id": 3067,
"pid": 360,
"city_code": "101131504",
"city_name": "阿合奇县"
},
{
"_id": 2320,
"id": 3068,
"pid": 360,
"city_code": "101131502",
"city_name": "乌恰县"
},
{
"_id": 2321,
"id": 3069,
"pid": 361,
"city_code": "101130301",
"city_name": "石河子市"
},
{
"_id": 2322,
"id": 3071,
"pid": 363,
"city_code": "101130501",
"city_name": "吐鲁番市"
},
{
"_id": 2323,
"id": 3072,
"pid": 363,
"city_code": "101130504",
"city_name": "鄯善县"
},
{
"_id": 2324,
"id": 3073,
"pid": 363,
"city_code": "101130502",
"city_name": "托克逊县"
},
{
"_id": 2325,
"id": 3075,
"pid": 365,
"city_code": "101131401",
"city_name": "阿勒泰"
},
{
"_id": 2326,
"id": 3076,
"pid": 365,
"city_code": "101131104",
"city_name": "和布克赛尔"
},
{
"_id": 2327,
"id": 3077,
"pid": 365,
"city_code": "101131001",
"city_name": "伊宁市"
},
{
"_id": 2328,
"id": 3078,
"pid": 365,
"city_code": "101131406",
"city_name": "布尔津县"
},
{
"_id": 2329,
"id": 3079,
"pid": 365,
"city_code": "101131011",
"city_name": "奎屯市"
},
{
"_id": 2330,
"id": 3080,
"pid": 365,
"city_code": "101131106",
"city_name": "乌苏市"
},
{
"_id": 2331,
"id": 3081,
"pid": 365,
"city_code": "101131103",
"city_name": "额敏县"
},
{
"_id": 2332,
"id": 3082,
"pid": 365,
"city_code": "101131408",
"city_name": "富蕴县"
},
{
"_id": 2333,
"id": 3083,
"pid": 365,
"city_code": "101131004",
"city_name": "伊宁县"
},
{
"_id": 2334,
"id": 3084,
"pid": 365,
"city_code": "101131407",
"city_name": "福海县"
},
{
"_id": 2335,
"id": 3085,
"pid": 365,
"city_code": "101131009",
"city_name": "霍城县"
},
{
"_id": 2336,
"id": 3086,
"pid": 365,
"city_code": "101131107",
"city_name": "沙湾县"
},
{
"_id": 2337,
"id": 3087,
"pid": 365,
"city_code": "101131005",
"city_name": "巩留县"
},
{
"_id": 2338,
"id": 3088,
"pid": 365,
"city_code": "101131402",
"city_name": "哈巴河县"
},
{
"_id": 2339,
"id": 3089,
"pid": 365,
"city_code": "101131105",
"city_name": "托里县"
},
{
"_id": 2340,
"id": 3090,
"pid": 365,
"city_code": "101131409",
"city_name": "青河县"
},
{
"_id": 2341,
"id": 3091,
"pid": 365,
"city_code": "101131006",
"city_name": "新源县"
},
{
"_id": 2342,
"id": 3092,
"pid": 365,
"city_code": "101131102",
"city_name": "裕民县"
},
{
"_id": 2343,
"id": 3094,
"pid": 365,
"city_code": "101131405",
"city_name": "吉木乃县"
},
{
"_id": 2344,
"id": 3095,
"pid": 365,
"city_code": "101131007",
"city_name": "昭苏县"
},
{
"_id": 2345,
"id": 3096,
"pid": 365,
"city_code": "101131008",
"city_name": "特克斯县"
},
{
"_id": 2346,
"id": 3097,
"pid": 365,
"city_code": "101131003",
"city_name": "尼勒克县"
},
{
"_id": 2347,
"id": 3098,
"pid": 365,
"city_code": "101131002",
"city_name": "察布查尔"
},
{
"_id": 2348,
"id": 3103,
"pid": 366,
"city_code": "101290103",
"city_name": "东川区"
},
{
"_id": 2349,
"id": 3104,
"pid": 366,
"city_code": "101290112",
"city_name": "安宁市"
},
{
"_id": 2350,
"id": 3105,
"pid": 366,
"city_code": "101290108",
"city_name": "呈贡县"
},
{
"_id": 2351,
"id": 3106,
"pid": 366,
"city_code": "101290105",
"city_name": "晋宁县"
},
{
"_id": 2352,
"id": 3107,
"pid": 366,
"city_code": "101290109",
"city_name": "富民县"
},
{
"_id": 2353,
"id": 3108,
"pid": 366,
"city_code": "101290106",
"city_name": "宜良县"
},
{
"_id": 2354,
"id": 3109,
"pid": 366,
"city_code": "101290110",
"city_name": "嵩明县"
},
{
"_id": 2355,
"id": 3110,
"pid": 366,
"city_code": "101290107",
"city_name": "石林县"
},
{
"_id": 2356,
"id": 3111,
"pid": 366,
"city_code": "101290111",
"city_name": "禄劝县"
},
{
"_id": 2357,
"id": 3112,
"pid": 366,
"city_code": "101290104",
"city_name": "寻甸县"
},
{
"_id": 2358,
"id": 3113,
"pid": 367,
"city_code": "101291204",
"city_name": "兰坪县"
},
{
"_id": 2359,
"id": 3114,
"pid": 367,
"city_code": "101291205",
"city_name": "泸水县"
},
{
"_id": 2360,
"id": 3115,
"pid": 367,
"city_code": "101291203",
"city_name": "福贡县"
},
{
"_id": 2361,
"id": 3116,
"pid": 367,
"city_code": "101291207",
"city_name": "贡山县"
},
{
"_id": 2362,
"id": 3117,
"pid": 368,
"city_code": "101290912",
"city_name": "宁洱县"
},
{
"_id": 2363,
"id": 3118,
"pid": 368,
"city_code": "101290901",
"city_name": "思茅区"
},
{
"_id": 2364,
"id": 3119,
"pid": 368,
"city_code": "101290906",
"city_name": "墨江县"
},
{
"_id": 2365,
"id": 3120,
"pid": 368,
"city_code": "101290903",
"city_name": "景东县"
},
{
"_id": 2366,
"id": 3121,
"pid": 368,
"city_code": "101290902",
"city_name": "景谷县"
},
{
"_id": 2367,
"id": 3122,
"pid": 368,
"city_code": "101290911",
"city_name": "镇沅县"
},
{
"_id": 2368,
"id": 3123,
"pid": 368,
"city_code": "101290907",
"city_name": "江城县"
},
{
"_id": 2369,
"id": 3124,
"pid": 368,
"city_code": "101290908",
"city_name": "孟连县"
},
{
"_id": 2370,
"id": 3125,
"pid": 368,
"city_code": "101290904",
"city_name": "澜沧县"
},
{
"_id": 2371,
"id": 3126,
"pid": 368,
"city_code": "101290909",
"city_name": "西盟县"
},
{
"_id": 2372,
"id": 3128,
"pid": 369,
"city_code": "101291404",
"city_name": "宁蒗县"
},
{
"_id": 2373,
"id": 3130,
"pid": 369,
"city_code": "101291402",
"city_name": "永胜县"
},
{
"_id": 2374,
"id": 3131,
"pid": 369,
"city_code": "101291403",
"city_name": "华坪县"
},
{
"_id": 2375,
"id": 3133,
"pid": 370,
"city_code": "101290504",
"city_name": "施甸县"
},
{
"_id": 2376,
"id": 3134,
"pid": 370,
"city_code": "101290506",
"city_name": "腾冲县"
},
{
"_id": 2377,
"id": 3135,
"pid": 370,
"city_code": "101290503",
"city_name": "龙陵县"
},
{
"_id": 2378,
"id": 3136,
"pid": 370,
"city_code": "101290505",
"city_name": "昌宁县"
},
{
"_id": 2379,
"id": 3137,
"pid": 371,
"city_code": "101290801",
"city_name": "楚雄市"
},
{
"_id": 2380,
"id": 3138,
"pid": 371,
"city_code": "101290809",
"city_name": "双柏县"
},
{
"_id": 2381,
"id": 3139,
"pid": 371,
"city_code": "101290805",
"city_name": "牟定县"
},
{
"_id": 2382,
"id": 3140,
"pid": 371,
"city_code": "101290806",
"city_name": "南华县"
},
{
"_id": 2383,
"id": 3141,
"pid": 371,
"city_code": "101290804",
"city_name": "姚安县"
},
{
"_id": 2384,
"id": 3142,
"pid": 371,
"city_code": "101290802",
"city_name": "大姚县"
},
{
"_id": 2385,
"id": 3143,
"pid": 371,
"city_code": "101290810",
"city_name": "永仁县"
},
{
"_id": 2386,
"id": 3144,
"pid": 371,
"city_code": "101290803",
"city_name": "元谋县"
},
{
"_id": 2387,
"id": 3145,
"pid": 371,
"city_code": "101290807",
"city_name": "武定县"
},
{
"_id": 2388,
"id": 3146,
"pid": 371,
"city_code": "101290808",
"city_name": "禄丰县"
},
{
"_id": 2389,
"id": 3147,
"pid": 372,
"city_code": "101290201",
"city_name": "大理市"
},
{
"_id": 2390,
"id": 3148,
"pid": 372,
"city_code": "101290207",
"city_name": "祥云县"
},
{
"_id": 2391,
"id": 3149,
"pid": 372,
"city_code": "101290205",
"city_name": "宾川县"
},
{
"_id": 2392,
"id": 3150,
"pid": 372,
"city_code": "101290206",
"city_name": "弥渡县"
},
{
"_id": 2393,
"id": 3151,
"pid": 372,
"city_code": "101290204",
"city_name": "永平县"
},
{
"_id": 2394,
"id": 3152,
"pid": 372,
"city_code": "101290202",
"city_name": "云龙县"
},
{
"_id": 2395,
"id": 3153,
"pid": 372,
"city_code": "101290210",
"city_name": "洱源县"
},
{
"_id": 2396,
"id": 3154,
"pid": 372,
"city_code": "101290209",
"city_name": "剑川县"
},
{
"_id": 2397,
"id": 3155,
"pid": 372,
"city_code": "101290211",
"city_name": "鹤庆县"
},
{
"_id": 2398,
"id": 3156,
"pid": 372,
"city_code": "101290203",
"city_name": "漾濞县"
},
{
"_id": 2399,
"id": 3157,
"pid": 372,
"city_code": "101290212",
"city_name": "南涧县"
},
{
"_id": 2400,
"id": 3158,
"pid": 372,
"city_code": "101290208",
"city_name": "巍山县"
},
{
"_id": 2401,
"id": 3159,
"pid": 373,
"city_code": "101291508",
"city_name": "潞西市"
},
{
"_id": 2402,
"id": 3160,
"pid": 373,
"city_code": "101291506",
"city_name": "瑞丽市"
},
{
"_id": 2403,
"id": 3161,
"pid": 373,
"city_code": "101291507",
"city_name": "梁河县"
},
{
"_id": 2404,
"id": 3162,
"pid": 373,
"city_code": "101291504",
"city_name": "盈江县"
},
{
"_id": 2405,
"id": 3163,
"pid": 373,
"city_code": "101291503",
"city_name": "陇川县"
},
{
"_id": 2406,
"id": 3164,
"pid": 374,
"city_code": "101291301",
"city_name": "香格里拉县"
},
{
"_id": 2407,
"id": 3165,
"pid": 374,
"city_code": "101291302",
"city_name": "德钦县"
},
{
"_id": 2408,
"id": 3166,
"pid": 374,
"city_code": "101291303",
"city_name": "维西县"
},
{
"_id": 2409,
"id": 3167,
"pid": 375,
"city_code": "101290311",
"city_name": "泸西县"
},
{
"_id": 2410,
"id": 3168,
"pid": 375,
"city_code": "101290309",
"city_name": "蒙自市"
},
{
"_id": 2411,
"id": 3169,
"pid": 375,
"city_code": "101290308",
"city_name": "个旧市"
},
{
"_id": 2412,
"id": 3170,
"pid": 375,
"city_code": "101290307",
"city_name": "开远市"
},
{
"_id": 2413,
"id": 3171,
"pid": 375,
"city_code": "101290306",
"city_name": "绿春县"
},
{
"_id": 2414,
"id": 3172,
"pid": 375,
"city_code": "101290303",
"city_name": "建水县"
},
{
"_id": 2415,
"id": 3173,
"pid": 375,
"city_code": "101290302",
"city_name": "石屏县"
},
{
"_id": 2416,
"id": 3174,
"pid": 375,
"city_code": "101290304",
"city_name": "弥勒县"
},
{
"_id": 2417,
"id": 3175,
"pid": 375,
"city_code": "101290305",
"city_name": "元阳县"
},
{
"_id": 2418,
"id": 3176,
"pid": 375,
"city_code": "101290301",
"city_name": "红河县"
},
{
"_id": 2419,
"id": 3177,
"pid": 375,
"city_code": "101290312",
"city_name": "金平县"
},
{
"_id": 2420,
"id": 3178,
"pid": 375,
"city_code": "101290313",
"city_name": "河口县"
},
{
"_id": 2421,
"id": 3179,
"pid": 375,
"city_code": "101290310",
"city_name": "屏边县"
},
{
"_id": 2422,
"id": 3181,
"pid": 376,
"city_code": "101291105",
"city_name": "凤庆县"
},
{
"_id": 2423,
"id": 3182,
"pid": 376,
"city_code": "101291107",
"city_name": "云县"
},
{
"_id": 2424,
"id": 3183,
"pid": 376,
"city_code": "101291106",
"city_name": "永德县"
},
{
"_id": 2425,
"id": 3184,
"pid": 376,
"city_code": "101291108",
"city_name": "镇康县"
},
{
"_id": 2426,
"id": 3185,
"pid": 376,
"city_code": "101291104",
"city_name": "双江县"
},
{
"_id": 2427,
"id": 3186,
"pid": 376,
"city_code": "101291103",
"city_name": "耿马县"
},
{
"_id": 2428,
"id": 3187,
"pid": 376,
"city_code": "101291102",
"city_name": "沧源县"
},
{
"_id": 2429,
"id": 3189,
"pid": 377,
"city_code": "101290409",
"city_name": "宣威市"
},
{
"_id": 2430,
"id": 3190,
"pid": 377,
"city_code": "101290405",
"city_name": "马龙县"
},
{
"_id": 2431,
"id": 3191,
"pid": 377,
"city_code": "101290403",
"city_name": "陆良县"
},
{
"_id": 2432,
"id": 3192,
"pid": 377,
"city_code": "101290406",
"city_name": "师宗县"
},
{
"_id": 2433,
"id": 3193,
"pid": 377,
"city_code": "101290407",
"city_name": "罗平县"
},
{
"_id": 2434,
"id": 3194,
"pid": 377,
"city_code": "101290404",
"city_name": "富源县"
},
{
"_id": 2435,
"id": 3195,
"pid": 377,
"city_code": "101290408",
"city_name": "会泽县"
},
{
"_id": 2436,
"id": 3196,
"pid": 377,
"city_code": "101290402",
"city_name": "沾益县"
},
{
"_id": 2437,
"id": 3197,
"pid": 378,
"city_code": "101290601",
"city_name": "文山县"
},
{
"_id": 2438,
"id": 3198,
"pid": 378,
"city_code": "101290605",
"city_name": "砚山县"
},
{
"_id": 2439,
"id": 3199,
"pid": 378,
"city_code": "101290602",
"city_name": "西畴县"
},
{
"_id": 2440,
"id": 3200,
"pid": 378,
"city_code": "101290604",
"city_name": "麻栗坡县"
},
{
"_id": 2441,
"id": 3201,
"pid": 378,
"city_code": "101290603",
"city_name": "马关县"
},
{
"_id": 2442,
"id": 3202,
"pid": 378,
"city_code": "101290606",
"city_name": "丘北县"
},
{
"_id": 2443,
"id": 3203,
"pid": 378,
"city_code": "101290607",
"city_name": "广南县"
},
{
"_id": 2444,
"id": 3204,
"pid": 378,
"city_code": "101290608",
"city_name": "富宁县"
},
{
"_id": 2445,
"id": 3205,
"pid": 379,
"city_code": "101291601",
"city_name": "景洪市"
},
{
"_id": 2446,
"id": 3206,
"pid": 379,
"city_code": "101291603",
"city_name": "勐海县"
},
{
"_id": 2447,
"id": 3207,
"pid": 379,
"city_code": "101291605",
"city_name": "勐腊县"
},
{
"_id": 2448,
"id": 3209,
"pid": 380,
"city_code": "101290703",
"city_name": "江川县"
},
{
"_id": 2449,
"id": 3210,
"pid": 380,
"city_code": "101290702",
"city_name": "澄江县"
},
{
"_id": 2450,
"id": 3211,
"pid": 380,
"city_code": "101290704",
"city_name": "通海县"
},
{
"_id": 2451,
"id": 3212,
"pid": 380,
"city_code": "101290705",
"city_name": "华宁县"
},
{
"_id": 2452,
"id": 3213,
"pid": 380,
"city_code": "101290707",
"city_name": "易门县"
},
{
"_id": 2453,
"id": 3214,
"pid": 380,
"city_code": "101290708",
"city_name": "峨山县"
},
{
"_id": 2454,
"id": 3215,
"pid": 380,
"city_code": "101290706",
"city_name": "新平县"
},
{
"_id": 2455,
"id": 3216,
"pid": 380,
"city_code": "101290709",
"city_name": "元江县"
},
{
"_id": 2456,
"id": 3218,
"pid": 381,
"city_code": "101291002",
"city_name": "鲁甸县"
},
{
"_id": 2457,
"id": 3219,
"pid": 381,
"city_code": "101291006",
"city_name": "巧家县"
},
{
"_id": 2458,
"id": 3220,
"pid": 381,
"city_code": "101291009",
"city_name": "盐津县"
},
{
"_id": 2459,
"id": 3221,
"pid": 381,
"city_code": "101291010",
"city_name": "大关县"
},
{
"_id": 2460,
"id": 3222,
"pid": 381,
"city_code": "101291008",
"city_name": "永善县"
},
{
"_id": 2461,
"id": 3223,
"pid": 381,
"city_code": "101291007",
"city_name": "绥江县"
},
{
"_id": 2462,
"id": 3224,
"pid": 381,
"city_code": "101291004",
"city_name": "镇雄县"
},
{
"_id": 2463,
"id": 3225,
"pid": 381,
"city_code": "101291003",
"city_name": "彝良县"
},
{
"_id": 2464,
"id": 3226,
"pid": 381,
"city_code": "101291005",
"city_name": "威信县"
},
{
"_id": 2465,
"id": 3227,
"pid": 381,
"city_code": "101291011",
"city_name": "水富县"
},
{
"_id": 2466,
"id": 3234,
"pid": 382,
"city_code": "101210102",
"city_name": "萧山区"
},
{
"_id": 2467,
"id": 3235,
"pid": 382,
"city_code": "101210106",
"city_name": "余杭区"
},
{
"_id": 2468,
"id": 3237,
"pid": 382,
"city_code": "101210105",
"city_name": "建德市"
},
{
"_id": 2469,
"id": 3238,
"pid": 382,
"city_code": "101210108",
"city_name": "富阳区"
},
{
"_id": 2470,
"id": 3239,
"pid": 382,
"city_code": "101210107",
"city_name": "临安市"
},
{
"_id": 2471,
"id": 3240,
"pid": 382,
"city_code": "101210103",
"city_name": "桐庐县"
},
{
"_id": 2472,
"id": 3241,
"pid": 382,
"city_code": "101210104",
"city_name": "淳安县"
},
{
"_id": 2473,
"id": 3244,
"pid": 383,
"city_code": "101210204",
"city_name": "德清县"
},
{
"_id": 2474,
"id": 3245,
"pid": 383,
"city_code": "101210202",
"city_name": "长兴县"
},
{
"_id": 2475,
"id": 3246,
"pid": 383,
"city_code": "101210203",
"city_name": "安吉县"
},
{
"_id": 2476,
"id": 3249,
"pid": 384,
"city_code": "101210303",
"city_name": "海宁市"
},
{
"_id": 2477,
"id": 3250,
"pid": 384,
"city_code": "101210302",
"city_name": "嘉善县"
},
{
"_id": 2478,
"id": 3251,
"pid": 384,
"city_code": "101210305",
"city_name": "平湖市"
},
{
"_id": 2479,
"id": 3252,
"pid": 384,
"city_code": "101210304",
"city_name": "桐乡市"
},
{
"_id": 2480,
"id": 3253,
"pid": 384,
"city_code": "101210306",
"city_name": "海盐县"
},
{
"_id": 2481,
"id": 3256,
"pid": 385,
"city_code": "101210903",
"city_name": "兰溪市"
},
{
"_id": 2482,
"id": 3257,
"pid": 385,
"city_code": "101210904",
"city_name": "义乌市"
},
{
"_id": 2483,
"id": 3264,
"pid": 385,
"city_code": "101210905",
"city_name": "东阳市"
},
{
"_id": 2484,
"id": 3265,
"pid": 385,
"city_code": "101210907",
"city_name": "永康市"
},
{
"_id": 2485,
"id": 3266,
"pid": 385,
"city_code": "101210906",
"city_name": "武义县"
},
{
"_id": 2486,
"id": 3267,
"pid": 385,
"city_code": "101210902",
"city_name": "浦江县"
},
{
"_id": 2487,
"id": 3268,
"pid": 385,
"city_code": "101210908",
"city_name": "磐安县"
},
{
"_id": 2488,
"id": 3270,
"pid": 386,
"city_code": "101210803",
"city_name": "龙泉市"
},
{
"_id": 2489,
"id": 3271,
"pid": 386,
"city_code": "101210805",
"city_name": "青田县"
},
{
"_id": 2490,
"id": 3272,
"pid": 386,
"city_code": "101210804",
"city_name": "缙云县"
},
{
"_id": 2491,
"id": 3273,
"pid": 386,
"city_code": "101210802",
"city_name": "遂昌县"
},
{
"_id": 2492,
"id": 3274,
"pid": 386,
"city_code": "101210808",
"city_name": "松阳县"
},
{
"_id": 2493,
"id": 3275,
"pid": 386,
"city_code": "101210806",
"city_name": "云和县"
},
{
"_id": 2494,
"id": 3276,
"pid": 386,
"city_code": "101210807",
"city_name": "庆元县"
},
{
"_id": 2495,
"id": 3277,
"pid": 386,
"city_code": "101210809",
"city_name": "景宁县"
},
{
"_id": 2496,
"id": 3281,
"pid": 387,
"city_code": "101210412",
"city_name": "镇海区"
},
{
"_id": 2497,
"id": 3282,
"pid": 387,
"city_code": "101210410",
"city_name": "北仑区"
},
{
"_id": 2498,
"id": 3283,
"pid": 387,
"city_code": "101210411",
"city_name": "鄞州区"
},
{
"_id": 2499,
"id": 3284,
"pid": 387,
"city_code": "101210404",
"city_name": "余姚市"
},
{
"_id": 2500,
"id": 3285,
"pid": 387,
"city_code": "101210403",
"city_name": "慈溪市"
},
{
"_id": 2501,
"id": 3286,
"pid": 387,
"city_code": "101210405",
"city_name": "奉化区"
},
{
"_id": 2502,
"id": 3287,
"pid": 387,
"city_code": "101210406",
"city_name": "象山县"
},
{
"_id": 2503,
"id": 3288,
"pid": 387,
"city_code": "101210408",
"city_name": "宁海县"
},
{
"_id": 2504,
"id": 3290,
"pid": 388,
"city_code": "101210503",
"city_name": "上虞区"
},
{
"_id": 2505,
"id": 3291,
"pid": 388,
"city_code": "101210505",
"city_name": "嵊州市"
},
{
"_id": 2506,
"id": 3292,
"pid": 388,
"city_code": "101210501",
"city_name": "绍兴县"
},
{
"_id": 2507,
"id": 3293,
"pid": 388,
"city_code": "101210504",
"city_name": "新昌县"
},
{
"_id": 2508,
"id": 3294,
"pid": 388,
"city_code": "101210502",
"city_name": "诸暨市"
},
{
"_id": 2509,
"id": 3295,
"pid": 389,
"city_code": "101210611",
"city_name": "椒江区"
},
{
"_id": 2510,
"id": 3296,
"pid": 389,
"city_code": "101210612",
"city_name": "黄岩区"
},
{
"_id": 2511,
"id": 3297,
"pid": 389,
"city_code": "101210613",
"city_name": "路桥区"
},
{
"_id": 2512,
"id": 3298,
"pid": 389,
"city_code": "101210607",
"city_name": "温岭市"
},
{
"_id": 2513,
"id": 3299,
"pid": 389,
"city_code": "101210610",
"city_name": "临海市"
},
{
"_id": 2514,
"id": 3300,
"pid": 389,
"city_code": "101210603",
"city_name": "玉环县"
},
{
"_id": 2515,
"id": 3301,
"pid": 389,
"city_code": "101210604",
"city_name": "三门县"
},
{
"_id": 2516,
"id": 3302,
"pid": 389,
"city_code": "101210605",
"city_name": "天台县"
},
{
"_id": 2517,
"id": 3303,
"pid": 389,
"city_code": "101210606",
"city_name": "仙居县"
},
{
"_id": 2518,
"id": 3307,
"pid": 390,
"city_code": "101210705",
"city_name": "瑞安市"
},
{
"_id": 2519,
"id": 3308,
"pid": 390,
"city_code": "101210707",
"city_name": "乐清市"
},
{
"_id": 2520,
"id": 3309,
"pid": 390,
"city_code": "101210706",
"city_name": "洞头区"
},
{
"_id": 2521,
"id": 3310,
"pid": 390,
"city_code": "101210708",
"city_name": "永嘉县"
},
{
"_id": 2522,
"id": 3311,
"pid": 390,
"city_code": "101210704",
"city_name": "平阳县"
},
{
"_id": 2523,
"id": 3312,
"pid": 390,
"city_code": "101210709",
"city_name": "苍南县"
},
{
"_id": 2524,
"id": 3313,
"pid": 390,
"city_code": "101210703",
"city_name": "文成县"
},
{
"_id": 2525,
"id": 3314,
"pid": 390,
"city_code": "101210702",
"city_name": "泰顺县"
},
{
"_id": 2526,
"id": 3315,
"pid": 391,
"city_code": "101211106",
"city_name": "定海区"
},
{
"_id": 2527,
"id": 3316,
"pid": 391,
"city_code": "101211105",
"city_name": "普陀区"
},
{
"_id": 2528,
"id": 3317,
"pid": 391,
"city_code": "101211104",
"city_name": "岱山县"
},
{
"_id": 2529,
"id": 3318,
"pid": 391,
"city_code": "101211102",
"city_name": "嵊泗县"
},
{
"_id": 2530,
"id": 3319,
"pid": 392,
"city_code": "101211006",
"city_name": "衢江区"
},
{
"_id": 2531,
"id": 3320,
"pid": 392,
"city_code": "101211005",
"city_name": "江山市"
},
{
"_id": 2532,
"id": 3321,
"pid": 392,
"city_code": "101211002",
"city_name": "常山县"
},
{
"_id": 2533,
"id": 3322,
"pid": 392,
"city_code": "101211003",
"city_name": "开化县"
},
{
"_id": 2534,
"id": 3323,
"pid": 392,
"city_code": "101211004",
"city_name": "龙游县"
},
{
"_id": 2535,
"id": 3324,
"pid": 31,
"city_code": "101040300",
"city_name": "合川区"
},
{
"_id": 2536,
"id": 3325,
"pid": 31,
"city_code": "101040500",
"city_name": "江津区"
},
{
"_id": 2537,
"id": 3326,
"pid": 31,
"city_code": "101040400",
"city_name": "南川区"
},
{
"_id": 2538,
"id": 3327,
"pid": 31,
"city_code": "101040200",
"city_name": "永川区"
},
{
"_id": 2539,
"id": 3329,
"pid": 31,
"city_code": "101040700",
"city_name": "渝北区"
},
{
"_id": 2540,
"id": 3330,
"pid": 31,
"city_code": "101040600",
"city_name": "万盛区"
},
{
"_id": 2541,
"id": 3332,
"pid": 31,
"city_code": "101041300",
"city_name": "万州区"
},
{
"_id": 2542,
"id": 3333,
"pid": 31,
"city_code": "101040800",
"city_name": "北碚区"
},
{
"_id": 2543,
"id": 3334,
"pid": 31,
"city_code": "101043700",
"city_name": "沙坪坝区"
},
{
"_id": 2544,
"id": 3335,
"pid": 31,
"city_code": "101040900",
"city_name": "巴南区"
},
{
"_id": 2545,
"id": 3336,
"pid": 31,
"city_code": "101041400",
"city_name": "涪陵区"
},
{
"_id": 2546,
"id": 3340,
"pid": 31,
"city_code": "101041100",
"city_name": "黔江区"
},
{
"_id": 2547,
"id": 3341,
"pid": 31,
"city_code": "101041000",
"city_name": "长寿区"
},
{
"_id": 2548,
"id": 3343,
"pid": 31,
"city_code": "101043300",
"city_name": "綦江区"
},
{
"_id": 2549,
"id": 3344,
"pid": 31,
"city_code": "101042100",
"city_name": "潼南区"
},
{
"_id": 2550,
"id": 3345,
"pid": 31,
"city_code": "101042800",
"city_name": "铜梁区"
},
{
"_id": 2551,
"id": 3346,
"pid": 31,
"city_code": "101042600",
"city_name": "大足县"
},
{
"_id": 2552,
"id": 3347,
"pid": 31,
"city_code": "101042700",
"city_name": "荣昌区"
},
{
"_id": 2553,
"id": 3348,
"pid": 31,
"city_code": "101042900",
"city_name": "璧山区"
},
{
"_id": 2554,
"id": 3349,
"pid": 31,
"city_code": "101042200",
"city_name": "垫江县"
},
{
"_id": 2555,
"id": 3350,
"pid": 31,
"city_code": "101043100",
"city_name": "武隆县"
},
{
"_id": 2556,
"id": 3351,
"pid": 31,
"city_code": "101043000",
"city_name": "丰都县"
},
{
"_id": 2557,
"id": 3352,
"pid": 31,
"city_code": "101041600",
"city_name": "城口县"
},
{
"_id": 2558,
"id": 3353,
"pid": 31,
"city_code": "101042300",
"city_name": "梁平县"
},
{
"_id": 2559,
"id": 3354,
"pid": 31,
"city_code": "101041500",
"city_name": "开县"
},
{
"_id": 2560,
"id": 3355,
"pid": 31,
"city_code": "101041800",
"city_name": "巫溪县"
},
{
"_id": 2561,
"id": 3356,
"pid": 31,
"city_code": "101042000",
"city_name": "巫山县"
},
{
"_id": 2562,
"id": 3357,
"pid": 31,
"city_code": "101041900",
"city_name": "奉节县"
},
{
"_id": 2563,
"id": 3358,
"pid": 31,
"city_code": "101041700",
"city_name": "云阳县"
},
{
"_id": 2564,
"id": 3359,
"pid": 31,
"city_code": "101042400",
"city_name": "忠县"
},
{
"_id": 2565,
"id": 3360,
"pid": 31,
"city_code": "101042500",
"city_name": "石柱县"
},
{
"_id": 2566,
"id": 3361,
"pid": 31,
"city_code": "101043200",
"city_name": "彭水县"
},
{
"_id": 2567,
"id": 3362,
"pid": 31,
"city_code": "101043400",
"city_name": "酉阳县"
},
{
"_id": 2568,
"id": 3363,
"pid": 31,
"city_code": "101043600",
"city_name": "秀山县"
},
{
"_id": 2569,
"id": 3368,
"pid": 32,
"city_code": "101320102",
"city_name": "九龙城区"
},
{
"_id": 2570,
"id": 3383,
"pid": 34,
"city_code": "101340101",
"city_name": "台北"
},
{
"_id": 2571,
"id": 3384,
"pid": 34,
"city_code": "101340201",
"city_name": "高雄"
},
{
"_id": 2572,
"id": 3385,
"pid": 34,
"city_code": "CHTW0006",
"city_name": "基隆"
},
{
"_id": 2573,
"id": 3386,
"pid": 34,
"city_code": "101340401",
"city_name": "台中"
},
{
"_id": 2574,
"id": 3387,
"pid": 34,
"city_code": "101340301",
"city_name": "台南"
},
{
"_id": 2575,
"id": 3388,
"pid": 34,
"city_code": "101340103",
"city_name": "新竹"
},
{
"_id": 2576,
"id": 3389,
"pid": 34,
"city_code": "101340901",
"city_name": "嘉义"
},
{
"_id": 2577,
"id": 3390,
"pid": 34,
"city_code": "101340701",
"city_name": "宜兰县"
},
{
"_id": 2578,
"id": 3391,
"pid": 34,
"city_code": "101340102",
"city_name": "桃园县"
},
{
"_id": 2579,
"id": 3392,
"pid": 34,
"city_code": "CHTW0016",
"city_name": "苗栗县"
},
{
"_id": 2580,
"id": 3393,
"pid": 34,
"city_code": "CHTW0017",
"city_name": "彰化县"
},
{
"_id": 2581,
"id": 3394,
"pid": 34,
"city_code": "101340404",
"city_name": "南投县"
},
{
"_id": 2582,
"id": 3395,
"pid": 34,
"city_code": "101340406",
"city_name": "云林县"
},
{
"_id": 2583,
"id": 3396,
"pid": 34,
"city_code": "101340205",
"city_name": "屏东县"
},
{
"_id": 2584,
"id": 3397,
"pid": 34,
"city_code": "101341101",
"city_name": "台东县"
},
{
"_id": 2585,
"id": 3398,
"pid": 34,
"city_code": "101340405",
"city_name": "花莲县"
},
{
"_id": 2586,
"id": 3400,
"pid": 2,
"city_code": "101220101",
"city_name": "合肥"
},
{
"_id": 2587,
"id": 3405,
"pid": 3400,
"city_code": "101220102",
"city_name": "长丰县"
},
{
"_id": 2588,
"id": 3406,
"pid": 3400,
"city_code": "101220103",
"city_name": "肥东县"
},
{
"_id": 2589,
"id": 3407,
"pid": 3400,
"city_code": "101220104",
"city_name": "肥西县"
},
{
"_id": 2590,
"id": 3259,
"pid": 168,
"city_code": "101050708",
"city_name": "加格达奇区"
},
{
"_id": 2591,
"id": 3261,
"pid": 168,
"city_code": "101050706",
"city_name": "新林区"
},
{
"_id": 2592,
"id": 3262,
"pid": 168,
"city_code": "101050705",
"city_name": "呼中区"
},
{
"_id": 2593,
"id": 1856,
"pid": 365,
"city_code": "101131101",
"city_name": "塔城市"
},
{
"_id": 2594,
"id": 3657,
"pid": 28,
"city_code": "",
"city_name": "北屯"
},
{
"_id": 2595,
"id": 3661,
"pid": 8,
"city_code": "",
"city_name": "三沙"
}
]
"""
def print_c():
    """Parse the module-level JSON string ``a`` and return the result.

    Returns:
        list[dict]: the decoded city records, each with ``_id``, ``id``,
        ``pid``, ``city_code`` and ``city_name`` keys.

    Note: the string is re-parsed on every call, so each invocation
    returns a fresh, independently mutable list.
    """
    parsed = json.loads(a)
    return parsed
| 15.661439
| 30
| 0.434654
|
import json
a="""
[
{
"_id": 1,
"id": 1,
"pid": 0,
"city_code": "101010100",
"city_name": "北京"
},
{
"_id": 2,
"id": 2,
"pid": 0,
"city_code": "",
"city_name": "安徽"
},
{
"_id": 3,
"id": 3,
"pid": 0,
"city_code": "",
"city_name": "福建"
},
{
"_id": 4,
"id": 4,
"pid": 0,
"city_code": "",
"city_name": "甘肃"
},
{
"_id": 5,
"id": 5,
"pid": 0,
"city_code": "",
"city_name": "广东"
},
{
"_id": 6,
"id": 6,
"pid": 0,
"city_code": "",
"city_name": "广西"
},
{
"_id": 7,
"id": 7,
"pid": 0,
"city_code": "",
"city_name": "贵州"
},
{
"_id": 8,
"id": 8,
"pid": 0,
"city_code": "",
"city_name": "海南"
},
{
"_id": 9,
"id": 9,
"pid": 0,
"city_code": "",
"city_name": "河北"
},
{
"_id": 10,
"id": 10,
"pid": 0,
"city_code": "",
"city_name": "河南"
},
{
"_id": 11,
"id": 11,
"pid": 0,
"city_code": "",
"city_name": "黑龙江"
},
{
"_id": 12,
"id": 12,
"pid": 0,
"city_code": "",
"city_name": "湖北"
},
{
"_id": 13,
"id": 13,
"pid": 0,
"city_code": "",
"city_name": "湖南"
},
{
"_id": 14,
"id": 14,
"pid": 0,
"city_code": "",
"city_name": "吉林"
},
{
"_id": 15,
"id": 15,
"pid": 0,
"city_code": "",
"city_name": "江苏"
},
{
"_id": 16,
"id": 16,
"pid": 0,
"city_code": "",
"city_name": "江西"
},
{
"_id": 17,
"id": 17,
"pid": 0,
"city_code": "",
"city_name": "辽宁"
},
{
"_id": 18,
"id": 18,
"pid": 0,
"city_code": "",
"city_name": "内蒙古"
},
{
"_id": 19,
"id": 19,
"pid": 0,
"city_code": "",
"city_name": "宁夏"
},
{
"_id": 20,
"id": 20,
"pid": 0,
"city_code": "",
"city_name": "青海"
},
{
"_id": 21,
"id": 21,
"pid": 0,
"city_code": "",
"city_name": "山东"
},
{
"_id": 22,
"id": 22,
"pid": 0,
"city_code": "",
"city_name": "山西"
},
{
"_id": 23,
"id": 23,
"pid": 0,
"city_code": "",
"city_name": "陕西"
},
{
"_id": 24,
"id": 24,
"pid": 0,
"city_code": "101020100",
"city_name": "上海"
},
{
"_id": 25,
"id": 25,
"pid": 0,
"city_code": "",
"city_name": "四川"
},
{
"_id": 26,
"id": 26,
"pid": 0,
"city_code": "101030100",
"city_name": "天津"
},
{
"_id": 27,
"id": 27,
"pid": 0,
"city_code": "",
"city_name": "西藏"
},
{
"_id": 28,
"id": 28,
"pid": 0,
"city_code": "",
"city_name": "新疆"
},
{
"_id": 29,
"id": 29,
"pid": 0,
"city_code": "",
"city_name": "云南"
},
{
"_id": 30,
"id": 30,
"pid": 0,
"city_code": "",
"city_name": "浙江"
},
{
"_id": 31,
"id": 31,
"pid": 0,
"city_code": "101040100",
"city_name": "重庆"
},
{
"_id": 32,
"id": 32,
"pid": 0,
"city_code": "101320101",
"city_name": "香港"
},
{
"_id": 33,
"id": 33,
"pid": 0,
"city_code": "101330101",
"city_name": "澳门"
},
{
"_id": 34,
"id": 34,
"pid": 0,
"city_code": "",
"city_name": "台湾"
},
{
"_id": 35,
"id": 35,
"pid": 2,
"city_code": "101220601",
"city_name": "安庆"
},
{
"_id": 36,
"id": 36,
"pid": 2,
"city_code": "101220201",
"city_name": "蚌埠"
},
{
"_id": 37,
"id": 37,
"pid": 3400,
"city_code": "101220105",
"city_name": "巢湖市"
},
{
"_id": 38,
"id": 38,
"pid": 2,
"city_code": "101221701",
"city_name": "池州"
},
{
"_id": 39,
"id": 39,
"pid": 2,
"city_code": "101221101",
"city_name": "滁州"
},
{
"_id": 40,
"id": 40,
"pid": 2,
"city_code": "101220801",
"city_name": "阜阳"
},
{
"_id": 41,
"id": 41,
"pid": 2,
"city_code": "101221201",
"city_name": "淮北"
},
{
"_id": 42,
"id": 42,
"pid": 2,
"city_code": "101220401",
"city_name": "淮南"
},
{
"_id": 43,
"id": 43,
"pid": 2,
"city_code": "101221001",
"city_name": "黄山"
},
{
"_id": 44,
"id": 44,
"pid": 2,
"city_code": "101221501",
"city_name": "六安"
},
{
"_id": 45,
"id": 45,
"pid": 2,
"city_code": "101220501",
"city_name": "马鞍山"
},
{
"_id": 46,
"id": 46,
"pid": 2,
"city_code": "101220701",
"city_name": "宿州"
},
{
"_id": 47,
"id": 47,
"pid": 2,
"city_code": "101221301",
"city_name": "铜陵"
},
{
"_id": 48,
"id": 48,
"pid": 2,
"city_code": "101220301",
"city_name": "芜湖"
},
{
"_id": 49,
"id": 49,
"pid": 2,
"city_code": "101221401",
"city_name": "宣城"
},
{
"_id": 50,
"id": 50,
"pid": 2,
"city_code": "101220901",
"city_name": "亳州"
},
{
"_id": 51,
"id": 52,
"pid": 3,
"city_code": "101230101",
"city_name": "福州"
},
{
"_id": 52,
"id": 53,
"pid": 3,
"city_code": "101230701",
"city_name": "龙岩"
},
{
"_id": 53,
"id": 54,
"pid": 3,
"city_code": "101230901",
"city_name": "南平"
},
{
"_id": 54,
"id": 55,
"pid": 3,
"city_code": "101230301",
"city_name": "宁德"
},
{
"_id": 55,
"id": 56,
"pid": 3,
"city_code": "101230401",
"city_name": "莆田"
},
{
"_id": 56,
"id": 57,
"pid": 3,
"city_code": "101230501",
"city_name": "泉州"
},
{
"_id": 57,
"id": 58,
"pid": 3,
"city_code": "101230801",
"city_name": "三明"
},
{
"_id": 58,
"id": 59,
"pid": 3,
"city_code": "101230201",
"city_name": "厦门"
},
{
"_id": 59,
"id": 60,
"pid": 3,
"city_code": "101230601",
"city_name": "漳州"
},
{
"_id": 60,
"id": 61,
"pid": 4,
"city_code": "101160101",
"city_name": "兰州"
},
{
"_id": 61,
"id": 62,
"pid": 4,
"city_code": "101161301",
"city_name": "白银"
},
{
"_id": 62,
"id": 63,
"pid": 4,
"city_code": "101160201",
"city_name": "定西"
},
{
"_id": 63,
"id": 64,
"pid": 4,
"city_code": "",
"city_name": "甘南州"
},
{
"_id": 64,
"id": 65,
"pid": 4,
"city_code": "101161401",
"city_name": "嘉峪关"
},
{
"_id": 65,
"id": 66,
"pid": 4,
"city_code": "101160601",
"city_name": "金昌"
},
{
"_id": 66,
"id": 67,
"pid": 4,
"city_code": "101160801",
"city_name": "酒泉"
},
{
"_id": 67,
"id": 68,
"pid": 4,
"city_code": "101161101",
"city_name": "临夏"
},
{
"_id": 68,
"id": 69,
"pid": 4,
"city_code": "101161010",
"city_name": "陇南市"
},
{
"_id": 69,
"id": 70,
"pid": 4,
"city_code": "101160301",
"city_name": "平凉"
},
{
"_id": 70,
"id": 71,
"pid": 4,
"city_code": "101160401",
"city_name": "庆阳"
},
{
"_id": 71,
"id": 72,
"pid": 4,
"city_code": "101160901",
"city_name": "天水"
},
{
"_id": 72,
"id": 73,
"pid": 4,
"city_code": "101160501",
"city_name": "武威"
},
{
"_id": 73,
"id": 74,
"pid": 4,
"city_code": "101160701",
"city_name": "张掖"
},
{
"_id": 74,
"id": 75,
"pid": 5,
"city_code": "101280101",
"city_name": "广州"
},
{
"_id": 75,
"id": 76,
"pid": 5,
"city_code": "101280601",
"city_name": "深圳"
},
{
"_id": 76,
"id": 77,
"pid": 5,
"city_code": "101281501",
"city_name": "潮州"
},
{
"_id": 77,
"id": 78,
"pid": 5,
"city_code": "101281601",
"city_name": "东莞"
},
{
"_id": 78,
"id": 79,
"pid": 5,
"city_code": "101280800",
"city_name": "佛山"
},
{
"_id": 79,
"id": 80,
"pid": 5,
"city_code": "101281201",
"city_name": "河源"
},
{
"_id": 80,
"id": 81,
"pid": 5,
"city_code": "101280301",
"city_name": "惠州"
},
{
"_id": 81,
"id": 82,
"pid": 5,
"city_code": "101281101",
"city_name": "江门"
},
{
"_id": 82,
"id": 83,
"pid": 5,
"city_code": "101281901",
"city_name": "揭阳"
},
{
"_id": 83,
"id": 84,
"pid": 5,
"city_code": "101282001",
"city_name": "茂名"
},
{
"_id": 84,
"id": 85,
"pid": 5,
"city_code": "101280401",
"city_name": "梅州"
},
{
"_id": 85,
"id": 86,
"pid": 5,
"city_code": "101281301",
"city_name": "清远"
},
{
"_id": 86,
"id": 87,
"pid": 5,
"city_code": "101280501",
"city_name": "汕头"
},
{
"_id": 87,
"id": 88,
"pid": 5,
"city_code": "101282101",
"city_name": "汕尾"
},
{
"_id": 88,
"id": 89,
"pid": 5,
"city_code": "101280201",
"city_name": "韶关"
},
{
"_id": 89,
"id": 90,
"pid": 5,
"city_code": "101281801",
"city_name": "阳江"
},
{
"_id": 90,
"id": 91,
"pid": 5,
"city_code": "101281401",
"city_name": "云浮"
},
{
"_id": 91,
"id": 92,
"pid": 5,
"city_code": "101281001",
"city_name": "湛江"
},
{
"_id": 92,
"id": 93,
"pid": 5,
"city_code": "101280901",
"city_name": "肇庆"
},
{
"_id": 93,
"id": 94,
"pid": 5,
"city_code": "101281701",
"city_name": "中山"
},
{
"_id": 94,
"id": 95,
"pid": 5,
"city_code": "101280701",
"city_name": "珠海"
},
{
"_id": 95,
"id": 96,
"pid": 6,
"city_code": "101300101",
"city_name": "南宁"
},
{
"_id": 96,
"id": 97,
"pid": 6,
"city_code": "101300501",
"city_name": "桂林"
},
{
"_id": 97,
"id": 98,
"pid": 6,
"city_code": "101301001",
"city_name": "百色"
},
{
"_id": 98,
"id": 99,
"pid": 6,
"city_code": "101301301",
"city_name": "北海"
},
{
"_id": 99,
"id": 100,
"pid": 6,
"city_code": "101300201",
"city_name": "崇左"
},
{
"_id": 100,
"id": 101,
"pid": 6,
"city_code": "101301401",
"city_name": "防城港"
},
{
"_id": 101,
"id": 102,
"pid": 6,
"city_code": "101300801",
"city_name": "贵港"
},
{
"_id": 102,
"id": 103,
"pid": 6,
"city_code": "101301201",
"city_name": "河池"
},
{
"_id": 103,
"id": 104,
"pid": 6,
"city_code": "101300701",
"city_name": "贺州"
},
{
"_id": 104,
"id": 105,
"pid": 6,
"city_code": "101300401",
"city_name": "来宾"
},
{
"_id": 105,
"id": 106,
"pid": 6,
"city_code": "101300301",
"city_name": "柳州"
},
{
"_id": 106,
"id": 107,
"pid": 6,
"city_code": "101301101",
"city_name": "钦州"
},
{
"_id": 107,
"id": 108,
"pid": 6,
"city_code": "101300601",
"city_name": "梧州"
},
{
"_id": 108,
"id": 109,
"pid": 6,
"city_code": "101300901",
"city_name": "玉林"
},
{
"_id": 109,
"id": 110,
"pid": 7,
"city_code": "101260101",
"city_name": "贵阳"
},
{
"_id": 110,
"id": 111,
"pid": 7,
"city_code": "101260301",
"city_name": "安顺"
},
{
"_id": 111,
"id": 112,
"pid": 7,
"city_code": "101260701",
"city_name": "毕节"
},
{
"_id": 112,
"id": 113,
"pid": 7,
"city_code": "101260801",
"city_name": "六盘水"
},
{
"_id": 113,
"id": 114,
"pid": 7,
"city_code": "101260506",
"city_name": "黔东南"
},
{
"_id": 114,
"id": 115,
"pid": 7,
"city_code": "101260413",
"city_name": "黔南"
},
{
"_id": 115,
"id": 116,
"pid": 7,
"city_code": "101260906",
"city_name": "黔西南"
},
{
"_id": 116,
"id": 117,
"pid": 7,
"city_code": "101260601",
"city_name": "铜仁"
},
{
"_id": 117,
"id": 118,
"pid": 7,
"city_code": "101260201",
"city_name": "遵义"
},
{
"_id": 118,
"id": 119,
"pid": 8,
"city_code": "101310101",
"city_name": "海口"
},
{
"_id": 119,
"id": 120,
"pid": 8,
"city_code": "101310201",
"city_name": "三亚"
},
{
"_id": 120,
"id": 121,
"pid": 8,
"city_code": "101310207",
"city_name": "白沙县"
},
{
"_id": 121,
"id": 122,
"pid": 8,
"city_code": "101310214",
"city_name": "保亭县"
},
{
"_id": 122,
"id": 123,
"pid": 8,
"city_code": "101310206",
"city_name": "昌江县"
},
{
"_id": 123,
"id": 124,
"pid": 8,
"city_code": "101310204",
"city_name": "澄迈县"
},
{
"_id": 124,
"id": 125,
"pid": 8,
"city_code": "101310209",
"city_name": "定安县"
},
{
"_id": 125,
"id": 126,
"pid": 8,
"city_code": "101310202",
"city_name": "东方"
},
{
"_id": 126,
"id": 127,
"pid": 8,
"city_code": "101310221",
"city_name": "乐东县"
},
{
"_id": 127,
"id": 128,
"pid": 8,
"city_code": "101310203",
"city_name": "临高县"
},
{
"_id": 128,
"id": 129,
"pid": 8,
"city_code": "101310216",
"city_name": "陵水县"
},
{
"_id": 129,
"id": 130,
"pid": 8,
"city_code": "101310211",
"city_name": "琼海"
},
{
"_id": 130,
"id": 131,
"pid": 8,
"city_code": "101310208",
"city_name": "琼中"
},
{
"_id": 131,
"id": 132,
"pid": 8,
"city_code": "101310210",
"city_name": "屯昌县"
},
{
"_id": 132,
"id": 133,
"pid": 8,
"city_code": "101310215",
"city_name": "万宁"
},
{
"_id": 133,
"id": 134,
"pid": 8,
"city_code": "101310212",
"city_name": "文昌"
},
{
"_id": 134,
"id": 135,
"pid": 8,
"city_code": "101310222",
"city_name": "五指山"
},
{
"_id": 135,
"id": 136,
"pid": 8,
"city_code": "101310205",
"city_name": "儋州"
},
{
"_id": 136,
"id": 137,
"pid": 9,
"city_code": "101090101",
"city_name": "石家庄"
},
{
"_id": 137,
"id": 138,
"pid": 9,
"city_code": "101090201",
"city_name": "保定"
},
{
"_id": 138,
"id": 139,
"pid": 9,
"city_code": "101090701",
"city_name": "沧州"
},
{
"_id": 139,
"id": 140,
"pid": 9,
"city_code": "101090402",
"city_name": "承德"
},
{
"_id": 140,
"id": 141,
"pid": 9,
"city_code": "101091001",
"city_name": "邯郸"
},
{
"_id": 141,
"id": 142,
"pid": 9,
"city_code": "101090801",
"city_name": "衡水"
},
{
"_id": 142,
"id": 143,
"pid": 9,
"city_code": "101090601",
"city_name": "廊坊"
},
{
"_id": 143,
"id": 144,
"pid": 9,
"city_code": "101091101",
"city_name": "秦皇岛"
},
{
"_id": 144,
"id": 145,
"pid": 9,
"city_code": "101090501",
"city_name": "唐山"
},
{
"_id": 145,
"id": 146,
"pid": 9,
"city_code": "101090901",
"city_name": "邢台"
},
{
"_id": 146,
"id": 147,
"pid": 9,
"city_code": "101090301",
"city_name": "张家口"
},
{
"_id": 147,
"id": 148,
"pid": 10,
"city_code": "101180101",
"city_name": "郑州"
},
{
"_id": 148,
"id": 149,
"pid": 10,
"city_code": "101180901",
"city_name": "洛阳"
},
{
"_id": 149,
"id": 150,
"pid": 10,
"city_code": "101180801",
"city_name": "开封"
},
{
"_id": 150,
"id": 151,
"pid": 10,
"city_code": "101180201",
"city_name": "安阳"
},
{
"_id": 151,
"id": 152,
"pid": 10,
"city_code": "101181201",
"city_name": "鹤壁"
},
{
"_id": 152,
"id": 153,
"pid": 10,
"city_code": "101181801",
"city_name": "济源"
},
{
"_id": 153,
"id": 154,
"pid": 10,
"city_code": "101181101",
"city_name": "焦作"
},
{
"_id": 154,
"id": 155,
"pid": 10,
"city_code": "101180701",
"city_name": "南阳"
},
{
"_id": 155,
"id": 156,
"pid": 10,
"city_code": "101180501",
"city_name": "平顶山"
},
{
"_id": 156,
"id": 157,
"pid": 10,
"city_code": "101181701",
"city_name": "三门峡"
},
{
"_id": 157,
"id": 158,
"pid": 10,
"city_code": "101181001",
"city_name": "商丘"
},
{
"_id": 158,
"id": 159,
"pid": 10,
"city_code": "101180301",
"city_name": "新乡"
},
{
"_id": 159,
"id": 160,
"pid": 10,
"city_code": "101180601",
"city_name": "信阳"
},
{
"_id": 160,
"id": 161,
"pid": 10,
"city_code": "101180401",
"city_name": "许昌"
},
{
"_id": 161,
"id": 162,
"pid": 10,
"city_code": "101181401",
"city_name": "周口"
},
{
"_id": 162,
"id": 163,
"pid": 10,
"city_code": "101181601",
"city_name": "驻马店"
},
{
"_id": 163,
"id": 164,
"pid": 10,
"city_code": "101181501",
"city_name": "漯河"
},
{
"_id": 164,
"id": 165,
"pid": 10,
"city_code": "101181301",
"city_name": "濮阳"
},
{
"_id": 165,
"id": 166,
"pid": 11,
"city_code": "101050101",
"city_name": "哈尔滨"
},
{
"_id": 166,
"id": 167,
"pid": 11,
"city_code": "101050901",
"city_name": "大庆"
},
{
"_id": 167,
"id": 168,
"pid": 11,
"city_code": "101050701",
"city_name": "大兴安岭"
},
{
"_id": 168,
"id": 169,
"pid": 11,
"city_code": "101051201",
"city_name": "鹤岗"
},
{
"_id": 169,
"id": 170,
"pid": 11,
"city_code": "101050601",
"city_name": "黑河"
},
{
"_id": 170,
"id": 171,
"pid": 11,
"city_code": "101051101",
"city_name": "鸡西"
},
{
"_id": 171,
"id": 172,
"pid": 11,
"city_code": "101050401",
"city_name": "佳木斯"
},
{
"_id": 172,
"id": 173,
"pid": 11,
"city_code": "101050301",
"city_name": "牡丹江"
},
{
"_id": 173,
"id": 174,
"pid": 11,
"city_code": "101051002",
"city_name": "七台河"
},
{
"_id": 174,
"id": 175,
"pid": 11,
"city_code": "101050201",
"city_name": "齐齐哈尔"
},
{
"_id": 175,
"id": 176,
"pid": 11,
"city_code": "101051301",
"city_name": "双鸭山"
},
{
"_id": 176,
"id": 177,
"pid": 11,
"city_code": "101050501",
"city_name": "绥化"
},
{
"_id": 177,
"id": 178,
"pid": 11,
"city_code": "101050801",
"city_name": "伊春"
},
{
"_id": 178,
"id": 179,
"pid": 12,
"city_code": "101200101",
"city_name": "武汉"
},
{
"_id": 179,
"id": 180,
"pid": 12,
"city_code": "101201601",
"city_name": "仙桃"
},
{
"_id": 180,
"id": 181,
"pid": 12,
"city_code": "101200301",
"city_name": "鄂州"
},
{
"_id": 181,
"id": 182,
"pid": 12,
"city_code": "101200501",
"city_name": "黄冈"
},
{
"_id": 182,
"id": 183,
"pid": 12,
"city_code": "101200601",
"city_name": "黄石"
},
{
"_id": 183,
"id": 184,
"pid": 12,
"city_code": "101201401",
"city_name": "荆门"
},
{
"_id": 184,
"id": 185,
"pid": 12,
"city_code": "101200801",
"city_name": "荆州"
},
{
"_id": 185,
"id": 186,
"pid": 12,
"city_code": "101201701",
"city_name": "潜江"
},
{
"_id": 186,
"id": 187,
"pid": 12,
"city_code": "101201201",
"city_name": "神农架林区"
},
{
"_id": 187,
"id": 188,
"pid": 12,
"city_code": "101201101",
"city_name": "十堰"
},
{
"_id": 188,
"id": 189,
"pid": 12,
"city_code": "101201301",
"city_name": "随州"
},
{
"_id": 189,
"id": 190,
"pid": 12,
"city_code": "101201501",
"city_name": "天门"
},
{
"_id": 190,
"id": 191,
"pid": 12,
"city_code": "101200701",
"city_name": "咸宁"
},
{
"_id": 191,
"id": 192,
"pid": 12,
"city_code": "101200202",
"city_name": "襄阳"
},
{
"_id": 192,
"id": 193,
"pid": 12,
"city_code": "101200401",
"city_name": "孝感"
},
{
"_id": 193,
"id": 194,
"pid": 12,
"city_code": "101200901",
"city_name": "宜昌"
},
{
"_id": 194,
"id": 195,
"pid": 12,
"city_code": "101201001",
"city_name": "恩施"
},
{
"_id": 195,
"id": 196,
"pid": 13,
"city_code": "101250101",
"city_name": "长沙"
},
{
"_id": 196,
"id": 197,
"pid": 13,
"city_code": "101251101",
"city_name": "张家界"
},
{
"_id": 197,
"id": 198,
"pid": 13,
"city_code": "101250601",
"city_name": "常德"
},
{
"_id": 198,
"id": 199,
"pid": 13,
"city_code": "101250501",
"city_name": "郴州"
},
{
"_id": 199,
"id": 200,
"pid": 13,
"city_code": "101250401",
"city_name": "衡阳"
},
{
"_id": 200,
"id": 201,
"pid": 13,
"city_code": "101251201",
"city_name": "怀化"
},
{
"_id": 201,
"id": 202,
"pid": 13,
"city_code": "101250801",
"city_name": "娄底"
},
{
"_id": 202,
"id": 203,
"pid": 13,
"city_code": "101250901",
"city_name": "邵阳"
},
{
"_id": 203,
"id": 204,
"pid": 13,
"city_code": "101250201",
"city_name": "湘潭"
},
{
"_id": 204,
"id": 205,
"pid": 13,
"city_code": "101251509",
"city_name": "湘西"
},
{
"_id": 205,
"id": 206,
"pid": 13,
"city_code": "101250700",
"city_name": "益阳"
},
{
"_id": 206,
"id": 207,
"pid": 13,
"city_code": "101251401",
"city_name": "永州"
},
{
"_id": 207,
"id": 208,
"pid": 13,
"city_code": "101251001",
"city_name": "岳阳"
},
{
"_id": 208,
"id": 209,
"pid": 13,
"city_code": "101250301",
"city_name": "株洲"
},
{
"_id": 209,
"id": 210,
"pid": 14,
"city_code": "101060101",
"city_name": "长春"
},
{
"_id": 210,
"id": 211,
"pid": 14,
"city_code": "101060201",
"city_name": "吉林市"
},
{
"_id": 211,
"id": 212,
"pid": 14,
"city_code": "101060601",
"city_name": "白城"
},
{
"_id": 212,
"id": 213,
"pid": 14,
"city_code": "101060901",
"city_name": "白山"
},
{
"_id": 213,
"id": 214,
"pid": 14,
"city_code": "101060701",
"city_name": "辽源"
},
{
"_id": 214,
"id": 215,
"pid": 14,
"city_code": "101060401",
"city_name": "四平"
},
{
"_id": 215,
"id": 216,
"pid": 14,
"city_code": "101060801",
"city_name": "松原"
},
{
"_id": 216,
"id": 217,
"pid": 14,
"city_code": "101060501",
"city_name": "通化"
},
{
"_id": 217,
"id": 218,
"pid": 14,
"city_code": "101060312",
"city_name": "延边"
},
{
"_id": 218,
"id": 219,
"pid": 15,
"city_code": "101190101",
"city_name": "南京"
},
{
"_id": 219,
"id": 220,
"pid": 15,
"city_code": "101190401",
"city_name": "苏州"
},
{
"_id": 220,
"id": 221,
"pid": 15,
"city_code": "101190201",
"city_name": "无锡"
},
{
"_id": 221,
"id": 222,
"pid": 15,
"city_code": "101191101",
"city_name": "常州"
},
{
"_id": 222,
"id": 223,
"pid": 15,
"city_code": "101190901",
"city_name": "淮安"
},
{
"_id": 223,
"id": 224,
"pid": 15,
"city_code": "101191001",
"city_name": "连云港"
},
{
"_id": 224,
"id": 225,
"pid": 15,
"city_code": "101190501",
"city_name": "南通"
},
{
"_id": 225,
"id": 226,
"pid": 15,
"city_code": "101191301",
"city_name": "宿迁"
},
{
"_id": 226,
"id": 227,
"pid": 15,
"city_code": "101191201",
"city_name": "泰州"
},
{
"_id": 227,
"id": 228,
"pid": 15,
"city_code": "101190801",
"city_name": "徐州"
},
{
"_id": 228,
"id": 229,
"pid": 15,
"city_code": "101190701",
"city_name": "盐城"
},
{
"_id": 229,
"id": 230,
"pid": 15,
"city_code": "101190601",
"city_name": "扬州"
},
{
"_id": 230,
"id": 231,
"pid": 15,
"city_code": "101190301",
"city_name": "镇江"
},
{
"_id": 231,
"id": 232,
"pid": 16,
"city_code": "101240101",
"city_name": "南昌"
},
{
"_id": 232,
"id": 233,
"pid": 16,
"city_code": "101240401",
"city_name": "抚州"
},
{
"_id": 233,
"id": 234,
"pid": 16,
"city_code": "101240701",
"city_name": "赣州"
},
{
"_id": 234,
"id": 235,
"pid": 16,
"city_code": "101240601",
"city_name": "吉安"
},
{
"_id": 235,
"id": 236,
"pid": 16,
"city_code": "101240801",
"city_name": "景德镇"
},
{
"_id": 236,
"id": 237,
"pid": 16,
"city_code": "101240201",
"city_name": "九江"
},
{
"_id": 237,
"id": 238,
"pid": 16,
"city_code": "101240901",
"city_name": "萍乡"
},
{
"_id": 238,
"id": 239,
"pid": 16,
"city_code": "101240301",
"city_name": "上饶"
},
{
"_id": 239,
"id": 240,
"pid": 16,
"city_code": "101241001",
"city_name": "新余"
},
{
"_id": 240,
"id": 241,
"pid": 16,
"city_code": "101240501",
"city_name": "宜春"
},
{
"_id": 241,
"id": 242,
"pid": 16,
"city_code": "101241101",
"city_name": "鹰潭"
},
{
"_id": 242,
"id": 243,
"pid": 17,
"city_code": "101070101",
"city_name": "沈阳"
},
{
"_id": 243,
"id": 244,
"pid": 17,
"city_code": "101070201",
"city_name": "大连"
},
{
"_id": 244,
"id": 245,
"pid": 17,
"city_code": "101070301",
"city_name": "鞍山"
},
{
"_id": 245,
"id": 246,
"pid": 17,
"city_code": "101070501",
"city_name": "本溪"
},
{
"_id": 246,
"id": 247,
"pid": 17,
"city_code": "101071201",
"city_name": "朝阳"
},
{
"_id": 247,
"id": 248,
"pid": 17,
"city_code": "101070601",
"city_name": "丹东"
},
{
"_id": 248,
"id": 249,
"pid": 17,
"city_code": "101070401",
"city_name": "抚顺"
},
{
"_id": 249,
"id": 250,
"pid": 17,
"city_code": "101070901",
"city_name": "阜新"
},
{
"_id": 250,
"id": 251,
"pid": 17,
"city_code": "101071401",
"city_name": "葫芦岛"
},
{
"_id": 251,
"id": 252,
"pid": 17,
"city_code": "101070701",
"city_name": "锦州"
},
{
"_id": 252,
"id": 253,
"pid": 17,
"city_code": "101071001",
"city_name": "辽阳"
},
{
"_id": 253,
"id": 254,
"pid": 17,
"city_code": "101071301",
"city_name": "盘锦"
},
{
"_id": 254,
"id": 255,
"pid": 17,
"city_code": "101071101",
"city_name": "铁岭"
},
{
"_id": 255,
"id": 256,
"pid": 17,
"city_code": "101070801",
"city_name": "营口"
},
{
"_id": 256,
"id": 257,
"pid": 18,
"city_code": "101080101",
"city_name": "呼和浩特"
},
{
"_id": 257,
"id": 258,
"pid": 18,
"city_code": "101081213",
"city_name": "阿拉善盟"
},
{
"_id": 258,
"id": 259,
"pid": 18,
"city_code": "101080801",
"city_name": "巴彦淖尔"
},
{
"_id": 259,
"id": 260,
"pid": 18,
"city_code": "101080201",
"city_name": "包头"
},
{
"_id": 260,
"id": 261,
"pid": 18,
"city_code": "101080601",
"city_name": "赤峰"
},
{
"_id": 261,
"id": 262,
"pid": 18,
"city_code": "101080701",
"city_name": "鄂尔多斯"
},
{
"_id": 262,
"id": 263,
"pid": 18,
"city_code": "101081001",
"city_name": "呼伦贝尔"
},
{
"_id": 263,
"id": 264,
"pid": 18,
"city_code": "101080501",
"city_name": "通辽"
},
{
"_id": 264,
"id": 265,
"pid": 18,
"city_code": "101080301",
"city_name": "乌海"
},
{
"_id": 265,
"id": 266,
"pid": 18,
"city_code": "101080405",
"city_name": "乌兰察布"
},
{
"_id": 266,
"id": 267,
"pid": 18,
"city_code": "101080902",
"city_name": "锡林郭勒"
},
{
"_id": 267,
"id": 268,
"pid": 18,
"city_code": "101081108",
"city_name": "兴安盟"
},
{
"_id": 268,
"id": 269,
"pid": 19,
"city_code": "101170101",
"city_name": "银川"
},
{
"_id": 269,
"id": 270,
"pid": 19,
"city_code": "101170401",
"city_name": "固原"
},
{
"_id": 270,
"id": 271,
"pid": 19,
"city_code": "101170201",
"city_name": "石嘴山"
},
{
"_id": 271,
"id": 272,
"pid": 19,
"city_code": "101170301",
"city_name": "吴忠"
},
{
"_id": 272,
"id": 273,
"pid": 19,
"city_code": "101170501",
"city_name": "中卫"
},
{
"_id": 273,
"id": 274,
"pid": 20,
"city_code": "101150101",
"city_name": "西宁"
},
{
"_id": 274,
"id": 275,
"pid": 20,
"city_code": "101150501",
"city_name": "果洛"
},
{
"_id": 275,
"id": 276,
"pid": 20,
"city_code": "101150801",
"city_name": "海北"
},
{
"_id": 276,
"id": 277,
"pid": 20,
"city_code": "101150201",
"city_name": "海东"
},
{
"_id": 277,
"id": 278,
"pid": 20,
"city_code": "101150401",
"city_name": "海南州"
},
{
"_id": 278,
"id": 279,
"pid": 20,
"city_code": "101150701",
"city_name": "海西"
},
{
"_id": 279,
"id": 280,
"pid": 20,
"city_code": "101150301",
"city_name": "黄南"
},
{
"_id": 280,
"id": 281,
"pid": 20,
"city_code": "101150601",
"city_name": "玉树"
},
{
"_id": 281,
"id": 282,
"pid": 21,
"city_code": "101120101",
"city_name": "济南"
},
{
"_id": 282,
"id": 283,
"pid": 21,
"city_code": "101120201",
"city_name": "青岛"
},
{
"_id": 283,
"id": 284,
"pid": 21,
"city_code": "101121101",
"city_name": "滨州"
},
{
"_id": 284,
"id": 285,
"pid": 21,
"city_code": "101120401",
"city_name": "德州"
},
{
"_id": 285,
"id": 286,
"pid": 21,
"city_code": "101121201",
"city_name": "东营"
},
{
"_id": 286,
"id": 287,
"pid": 21,
"city_code": "101121001",
"city_name": "菏泽"
},
{
"_id": 287,
"id": 288,
"pid": 21,
"city_code": "101120701",
"city_name": "济宁"
},
{
"_id": 288,
"id": 289,
"pid": 21,
"city_code": "101121601",
"city_name": "莱芜"
},
{
"_id": 289,
"id": 290,
"pid": 21,
"city_code": "101121701",
"city_name": "聊城"
},
{
"_id": 290,
"id": 291,
"pid": 21,
"city_code": "101120901",
"city_name": "临沂"
},
{
"_id": 291,
"id": 292,
"pid": 21,
"city_code": "101121501",
"city_name": "日照"
},
{
"_id": 292,
"id": 293,
"pid": 21,
"city_code": "101120801",
"city_name": "泰安"
},
{
"_id": 293,
"id": 294,
"pid": 21,
"city_code": "101121301",
"city_name": "威海"
},
{
"_id": 294,
"id": 295,
"pid": 21,
"city_code": "101120601",
"city_name": "潍坊"
},
{
"_id": 295,
"id": 296,
"pid": 21,
"city_code": "101120501",
"city_name": "烟台"
},
{
"_id": 296,
"id": 297,
"pid": 21,
"city_code": "101121401",
"city_name": "枣庄"
},
{
"_id": 297,
"id": 298,
"pid": 21,
"city_code": "101120301",
"city_name": "淄博"
},
{
"_id": 298,
"id": 299,
"pid": 22,
"city_code": "101100101",
"city_name": "太原"
},
{
"_id": 299,
"id": 300,
"pid": 22,
"city_code": "101100501",
"city_name": "长治"
},
{
"_id": 300,
"id": 301,
"pid": 22,
"city_code": "101100201",
"city_name": "大同"
},
{
"_id": 301,
"id": 302,
"pid": 22,
"city_code": "101100601",
"city_name": "晋城"
},
{
"_id": 302,
"id": 303,
"pid": 22,
"city_code": "101100401",
"city_name": "晋中"
},
{
"_id": 303,
"id": 304,
"pid": 22,
"city_code": "101100701",
"city_name": "临汾"
},
{
"_id": 304,
"id": 305,
"pid": 22,
"city_code": "101101100",
"city_name": "吕梁"
},
{
"_id": 305,
"id": 306,
"pid": 22,
"city_code": "101100901",
"city_name": "朔州"
},
{
"_id": 306,
"id": 307,
"pid": 22,
"city_code": "101101001",
"city_name": "忻州"
},
{
"_id": 307,
"id": 308,
"pid": 22,
"city_code": "101100301",
"city_name": "阳泉"
},
{
"_id": 308,
"id": 309,
"pid": 22,
"city_code": "101100801",
"city_name": "运城"
},
{
"_id": 309,
"id": 310,
"pid": 23,
"city_code": "101110101",
"city_name": "西安"
},
{
"_id": 310,
"id": 311,
"pid": 23,
"city_code": "101110701",
"city_name": "安康"
},
{
"_id": 311,
"id": 312,
"pid": 23,
"city_code": "101110901",
"city_name": "宝鸡"
},
{
"_id": 312,
"id": 313,
"pid": 23,
"city_code": "101110801",
"city_name": "汉中"
},
{
"_id": 313,
"id": 314,
"pid": 23,
"city_code": "101110601",
"city_name": "商洛"
},
{
"_id": 314,
"id": 315,
"pid": 23,
"city_code": "101111001",
"city_name": "铜川"
},
{
"_id": 315,
"id": 316,
"pid": 23,
"city_code": "101110501",
"city_name": "渭南"
},
{
"_id": 316,
"id": 317,
"pid": 23,
"city_code": "101110200",
"city_name": "咸阳"
},
{
"_id": 317,
"id": 318,
"pid": 23,
"city_code": "101110300",
"city_name": "延安"
},
{
"_id": 318,
"id": 319,
"pid": 23,
"city_code": "101110401",
"city_name": "榆林"
},
{
"_id": 319,
"id": 321,
"pid": 25,
"city_code": "101270101",
"city_name": "成都"
},
{
"_id": 320,
"id": 322,
"pid": 25,
"city_code": "101270401",
"city_name": "绵阳"
},
{
"_id": 321,
"id": 323,
"pid": 25,
"city_code": "101271901",
"city_name": "阿坝"
},
{
"_id": 322,
"id": 324,
"pid": 25,
"city_code": "101270901",
"city_name": "巴中"
},
{
"_id": 323,
"id": 325,
"pid": 25,
"city_code": "101270601",
"city_name": "达州"
},
{
"_id": 324,
"id": 326,
"pid": 25,
"city_code": "101272001",
"city_name": "德阳"
},
{
"_id": 325,
"id": 327,
"pid": 25,
"city_code": "101271801",
"city_name": "甘孜"
},
{
"_id": 326,
"id": 328,
"pid": 25,
"city_code": "101270801",
"city_name": "广安"
},
{
"_id": 327,
"id": 329,
"pid": 25,
"city_code": "101272101",
"city_name": "广元"
},
{
"_id": 328,
"id": 330,
"pid": 25,
"city_code": "101271401",
"city_name": "乐山"
},
{
"_id": 329,
"id": 331,
"pid": 25,
"city_code": "101271601",
"city_name": "凉山"
},
{
"_id": 330,
"id": 332,
"pid": 25,
"city_code": "101271501",
"city_name": "眉山"
},
{
"_id": 331,
"id": 333,
"pid": 25,
"city_code": "101270501",
"city_name": "南充"
},
{
"_id": 332,
"id": 334,
"pid": 25,
"city_code": "101271201",
"city_name": "内江"
},
{
"_id": 333,
"id": 335,
"pid": 25,
"city_code": "101270201",
"city_name": "攀枝花"
},
{
"_id": 334,
"id": 336,
"pid": 25,
"city_code": "101270701",
"city_name": "遂宁"
},
{
"_id": 335,
"id": 337,
"pid": 25,
"city_code": "101271701",
"city_name": "雅安"
},
{
"_id": 336,
"id": 338,
"pid": 25,
"city_code": "101271101",
"city_name": "宜宾"
},
{
"_id": 337,
"id": 339,
"pid": 25,
"city_code": "101271301",
"city_name": "资阳"
},
{
"_id": 338,
"id": 340,
"pid": 25,
"city_code": "101270301",
"city_name": "自贡"
},
{
"_id": 339,
"id": 341,
"pid": 25,
"city_code": "101271001",
"city_name": "泸州"
},
{
"_id": 340,
"id": 343,
"pid": 27,
"city_code": "101140101",
"city_name": "拉萨"
},
{
"_id": 341,
"id": 344,
"pid": 27,
"city_code": "101140701",
"city_name": "阿里"
},
{
"_id": 342,
"id": 345,
"pid": 27,
"city_code": "101140501",
"city_name": "昌都"
},
{
"_id": 343,
"id": 346,
"pid": 27,
"city_code": "101140401",
"city_name": "林芝"
},
{
"_id": 344,
"id": 347,
"pid": 27,
"city_code": "101140601",
"city_name": "那曲"
},
{
"_id": 345,
"id": 348,
"pid": 27,
"city_code": "101140201",
"city_name": "日喀则"
},
{
"_id": 346,
"id": 349,
"pid": 27,
"city_code": "101140301",
"city_name": "山南"
},
{
"_id": 347,
"id": 350,
"pid": 28,
"city_code": "101130101",
"city_name": "乌鲁木齐"
},
{
"_id": 348,
"id": 351,
"pid": 28,
"city_code": "101130801",
"city_name": "阿克苏"
},
{
"_id": 349,
"id": 352,
"pid": 28,
"city_code": "101130701",
"city_name": "阿拉尔"
},
{
"_id": 350,
"id": 353,
"pid": 28,
"city_code": "101130609",
"city_name": "巴音郭楞"
},
{
"_id": 351,
"id": 354,
"pid": 28,
"city_code": "101131604",
"city_name": "博尔塔拉"
},
{
"_id": 352,
"id": 355,
"pid": 28,
"city_code": "101130401",
"city_name": "昌吉"
},
{
"_id": 353,
"id": 356,
"pid": 28,
"city_code": "101131201",
"city_name": "哈密"
},
{
"_id": 354,
"id": 357,
"pid": 28,
"city_code": "101131301",
"city_name": "和田"
},
{
"_id": 355,
"id": 358,
"pid": 28,
"city_code": "101130901",
"city_name": "喀什"
},
{
"_id": 356,
"id": 359,
"pid": 28,
"city_code": "101130201",
"city_name": "克拉玛依"
},
{
"_id": 357,
"id": 360,
"pid": 28,
"city_code": "",
"city_name": "克孜勒苏"
},
{
"_id": 358,
"id": 361,
"pid": 28,
"city_code": "101130301",
"city_name": "石河子"
},
{
"_id": 359,
"id": 362,
"pid": 28,
"city_code": "",
"city_name": "图木舒克"
},
{
"_id": 360,
"id": 363,
"pid": 28,
"city_code": "101130501",
"city_name": "吐鲁番"
},
{
"_id": 361,
"id": 364,
"pid": 28,
"city_code": "",
"city_name": "五家渠"
},
{
"_id": 362,
"id": 365,
"pid": 28,
"city_code": "101131012",
"city_name": "伊犁"
},
{
"_id": 363,
"id": 366,
"pid": 29,
"city_code": "101290101",
"city_name": "昆明"
},
{
"_id": 364,
"id": 367,
"pid": 29,
"city_code": "101291201",
"city_name": "怒江"
},
{
"_id": 365,
"id": 368,
"pid": 29,
"city_code": "101290901",
"city_name": "普洱"
},
{
"_id": 366,
"id": 369,
"pid": 29,
"city_code": "101291401",
"city_name": "丽江"
},
{
"_id": 367,
"id": 370,
"pid": 29,
"city_code": "101290501",
"city_name": "保山"
},
{
"_id": 368,
"id": 371,
"pid": 29,
"city_code": "101290801",
"city_name": "楚雄"
},
{
"_id": 369,
"id": 372,
"pid": 29,
"city_code": "101290201",
"city_name": "大理"
},
{
"_id": 370,
"id": 373,
"pid": 29,
"city_code": "101291501",
"city_name": "德宏"
},
{
"_id": 371,
"id": 374,
"pid": 29,
"city_code": "101291305",
"city_name": "迪庆"
},
{
"_id": 372,
"id": 375,
"pid": 29,
"city_code": "101290301",
"city_name": "红河"
},
{
"_id": 373,
"id": 376,
"pid": 29,
"city_code": "101291101",
"city_name": "临沧"
},
{
"_id": 374,
"id": 377,
"pid": 29,
"city_code": "101290401",
"city_name": "曲靖"
},
{
"_id": 375,
"id": 378,
"pid": 29,
"city_code": "101290601",
"city_name": "文山"
},
{
"_id": 376,
"id": 379,
"pid": 29,
"city_code": "101291602",
"city_name": "西双版纳"
},
{
"_id": 377,
"id": 380,
"pid": 29,
"city_code": "101290701",
"city_name": "玉溪"
},
{
"_id": 378,
"id": 381,
"pid": 29,
"city_code": "101291001",
"city_name": "昭通"
},
{
"_id": 379,
"id": 382,
"pid": 30,
"city_code": "101210101",
"city_name": "杭州"
},
{
"_id": 380,
"id": 383,
"pid": 30,
"city_code": "101210201",
"city_name": "湖州"
},
{
"_id": 381,
"id": 384,
"pid": 30,
"city_code": "101210301",
"city_name": "嘉兴"
},
{
"_id": 382,
"id": 385,
"pid": 30,
"city_code": "101210901",
"city_name": "金华"
},
{
"_id": 383,
"id": 386,
"pid": 30,
"city_code": "101210801",
"city_name": "丽水"
},
{
"_id": 384,
"id": 387,
"pid": 30,
"city_code": "101210401",
"city_name": "宁波"
},
{
"_id": 385,
"id": 388,
"pid": 30,
"city_code": "101210501",
"city_name": "绍兴"
},
{
"_id": 386,
"id": 389,
"pid": 30,
"city_code": "101210601",
"city_name": "台州"
},
{
"_id": 387,
"id": 390,
"pid": 30,
"city_code": "101210701",
"city_name": "温州"
},
{
"_id": 388,
"id": 391,
"pid": 30,
"city_code": "101211101",
"city_name": "舟山"
},
{
"_id": 389,
"id": 392,
"pid": 30,
"city_code": "101211001",
"city_name": "衢州"
},
{
"_id": 390,
"id": 400,
"pid": 35,
"city_code": "101220609",
"city_name": "桐城市"
},
{
"_id": 391,
"id": 401,
"pid": 35,
"city_code": "101220605",
"city_name": "怀宁县"
},
{
"_id": 392,
"id": 402,
"pid": 47,
"city_code": "101220602",
"city_name": "枞阳县"
},
{
"_id": 393,
"id": 403,
"pid": 35,
"city_code": "101220604",
"city_name": "潜山县"
},
{
"_id": 394,
"id": 404,
"pid": 35,
"city_code": "101220603",
"city_name": "太湖县"
},
{
"_id": 395,
"id": 405,
"pid": 35,
"city_code": "101220606",
"city_name": "宿松县"
},
{
"_id": 396,
"id": 406,
"pid": 35,
"city_code": "101220607",
"city_name": "望江县"
},
{
"_id": 397,
"id": 407,
"pid": 35,
"city_code": "101220608",
"city_name": "岳西县"
},
{
"_id": 398,
"id": 412,
"pid": 36,
"city_code": "101220202",
"city_name": "怀远县"
},
{
"_id": 399,
"id": 413,
"pid": 36,
"city_code": "101220204",
"city_name": "五河县"
},
{
"_id": 400,
"id": 414,
"pid": 36,
"city_code": "101220203",
"city_name": "固镇县"
},
{
"_id": 401,
"id": 416,
"pid": 3400,
"city_code": "101220106",
"city_name": "庐江县"
},
{
"_id": 402,
"id": 417,
"pid": 48,
"city_code": "101220305",
"city_name": "无为县"
},
{
"_id": 403,
"id": 418,
"pid": 45,
"city_code": "101220503",
"city_name": "含山县"
},
{
"_id": 404,
"id": 419,
"pid": 45,
"city_code": "101220504",
"city_name": "和县"
},
{
"_id": 405,
"id": 421,
"pid": 38,
"city_code": "101221702",
"city_name": "东至县"
},
{
"_id": 406,
"id": 422,
"pid": 38,
"city_code": "101221705",
"city_name": "石台县"
},
{
"_id": 407,
"id": 423,
"pid": 38,
"city_code": "101221703",
"city_name": "青阳县"
},
{
"_id": 408,
"id": 426,
"pid": 39,
"city_code": "101221107",
"city_name": "天长市"
},
{
"_id": 409,
"id": 427,
"pid": 39,
"city_code": "101221103",
"city_name": "明光市"
},
{
"_id": 410,
"id": 428,
"pid": 39,
"city_code": "101221106",
"city_name": "来安县"
},
{
"_id": 411,
"id": 429,
"pid": 39,
"city_code": "101221105",
"city_name": "全椒县"
},
{
"_id": 412,
"id": 430,
"pid": 39,
"city_code": "101221104",
"city_name": "定远县"
},
{
"_id": 413,
"id": 431,
"pid": 39,
"city_code": "101221102",
"city_name": "凤阳县"
},
{
"_id": 414,
"id": 439,
"pid": 40,
"city_code": "101220805",
"city_name": "界首市"
},
{
"_id": 415,
"id": 440,
"pid": 40,
"city_code": "101220804",
"city_name": "临泉县"
},
{
"_id": 416,
"id": 441,
"pid": 40,
"city_code": "101220806",
"city_name": "太和县"
},
{
"_id": 417,
"id": 442,
"pid": 40,
"city_code": "101220802",
"city_name": "阜南县"
},
{
"_id": 418,
"id": 443,
"pid": 40,
"city_code": "101220803",
"city_name": "颍上县"
},
{
"_id": 419,
"id": 447,
"pid": 41,
"city_code": "101221202",
"city_name": "濉溪县"
},
{
"_id": 420,
"id": 452,
"pid": 42,
"city_code": "101220403",
"city_name": "潘集区"
},
{
"_id": 421,
"id": 453,
"pid": 42,
"city_code": "101220402",
"city_name": "凤台县"
},
{
"_id": 422,
"id": 454,
"pid": 43,
"city_code": "101221003",
"city_name": "屯溪区"
},
{
"_id": 423,
"id": 455,
"pid": 43,
"city_code": "101221002",
"city_name": "黄山区"
},
{
"_id": 424,
"id": 457,
"pid": 43,
"city_code": "101221006",
"city_name": "歙县"
},
{
"_id": 425,
"id": 458,
"pid": 43,
"city_code": "101221007",
"city_name": "休宁县"
},
{
"_id": 426,
"id": 459,
"pid": 43,
"city_code": "101221005",
"city_name": "黟县"
},
{
"_id": 427,
"id": 460,
"pid": 43,
"city_code": "101221004",
"city_name": "祁门县"
},
{
"_id": 428,
"id": 463,
"pid": 44,
"city_code": "101221503",
"city_name": "寿县"
},
{
"_id": 429,
"id": 464,
"pid": 44,
"city_code": "101221502",
"city_name": "霍邱县"
},
{
"_id": 430,
"id": 465,
"pid": 44,
"city_code": "101221507",
"city_name": "舒城县"
},
{
"_id": 431,
"id": 466,
"pid": 44,
"city_code": "101221505",
"city_name": "金寨县"
},
{
"_id": 432,
"id": 467,
"pid": 44,
"city_code": "101221506",
"city_name": "霍山县"
},
{
"_id": 433,
"id": 471,
"pid": 45,
"city_code": "101220502",
"city_name": "当涂县"
},
{
"_id": 434,
"id": 473,
"pid": 46,
"city_code": "101220702",
"city_name": "砀山县"
},
{
"_id": 435,
"id": 474,
"pid": 46,
"city_code": "101220705",
"city_name": "萧县"
},
{
"_id": 436,
"id": 475,
"pid": 46,
"city_code": "101220703",
"city_name": "灵璧县"
},
{
"_id": 437,
"id": 476,
"pid": 46,
"city_code": "101220704",
"city_name": "泗县"
},
{
"_id": 438,
"id": 480,
"pid": 47,
"city_code": "101221301",
"city_name": "义安区"
},
{
"_id": 439,
"id": 485,
"pid": 48,
"city_code": "101220303",
"city_name": "芜湖县"
},
{
"_id": 440,
"id": 486,
"pid": 48,
"city_code": "101220302",
"city_name": "繁昌县"
},
{
"_id": 441,
"id": 487,
"pid": 48,
"city_code": "101220304",
"city_name": "南陵县"
},
{
"_id": 442,
"id": 489,
"pid": 49,
"city_code": "101221404",
"city_name": "宁国市"
},
{
"_id": 443,
"id": 490,
"pid": 49,
"city_code": "101221407",
"city_name": "郎溪县"
},
{
"_id": 444,
"id": 491,
"pid": 49,
"city_code": "101221406",
"city_name": "广德县"
},
{
"_id": 445,
"id": 492,
"pid": 49,
"city_code": "101221402",
"city_name": "泾县"
},
{
"_id": 446,
"id": 493,
"pid": 49,
"city_code": "101221405",
"city_name": "绩溪县"
},
{
"_id": 447,
"id": 494,
"pid": 49,
"city_code": "101221403",
"city_name": "旌德县"
},
{
"_id": 448,
"id": 495,
"pid": 50,
"city_code": "101220902",
"city_name": "涡阳县"
},
{
"_id": 449,
"id": 496,
"pid": 50,
"city_code": "101220904",
"city_name": "蒙城县"
},
{
"_id": 450,
"id": 497,
"pid": 50,
"city_code": "101220903",
"city_name": "利辛县"
},
{
"_id": 451,
"id": 501,
"pid": 1,
"city_code": "101010200",
"city_name": "海淀区"
},
{
"_id": 452,
"id": 502,
"pid": 1,
"city_code": "101010300",
"city_name": "朝阳区"
},
{
"_id": 453,
"id": 505,
"pid": 1,
"city_code": "101010900",
"city_name": "丰台区"
},
{
"_id": 454,
"id": 506,
"pid": 1,
"city_code": "101011000",
"city_name": "石景山区"
},
{
"_id": 455,
"id": 507,
"pid": 1,
"city_code": "101011200",
"city_name": "房山区"
},
{
"_id": 456,
"id": 508,
"pid": 1,
"city_code": "101011400",
"city_name": "门头沟区"
},
{
"_id": 457,
"id": 509,
"pid": 1,
"city_code": "101010600",
"city_name": "通州区"
},
{
"_id": 458,
"id": 510,
"pid": 1,
"city_code": "101010400",
"city_name": "顺义区"
},
{
"_id": 459,
"id": 511,
"pid": 1,
"city_code": "101010700",
"city_name": "昌平区"
},
{
"_id": 460,
"id": 512,
"pid": 1,
"city_code": "101010500",
"city_name": "怀柔区"
},
{
"_id": 461,
"id": 513,
"pid": 1,
"city_code": "101011500",
"city_name": "平谷区"
},
{
"_id": 462,
"id": 514,
"pid": 1,
"city_code": "101011100",
"city_name": "大兴区"
},
{
"_id": 463,
"id": 515,
"pid": 1,
"city_code": "101011300",
"city_name": "密云县"
},
{
"_id": 464,
"id": 516,
"pid": 1,
"city_code": "101010800",
"city_name": "延庆县"
},
{
"_id": 465,
"id": 522,
"pid": 52,
"city_code": "101230111",
"city_name": "福清市"
},
{
"_id": 466,
"id": 523,
"pid": 52,
"city_code": "101230110",
"city_name": "长乐市"
},
{
"_id": 467,
"id": 524,
"pid": 52,
"city_code": "101230103",
"city_name": "闽侯县"
},
{
"_id": 468,
"id": 525,
"pid": 52,
"city_code": "101230105",
"city_name": "连江县"
},
{
"_id": 469,
"id": 526,
"pid": 52,
"city_code": "101230104",
"city_name": "罗源县"
},
{
"_id": 470,
"id": 527,
"pid": 52,
"city_code": "101230102",
"city_name": "闽清县"
},
{
"_id": 471,
"id": 528,
"pid": 52,
"city_code": "101230107",
"city_name": "永泰县"
},
{
"_id": 472,
"id": 529,
"pid": 52,
"city_code": "101230108",
"city_name": "平潭县"
},
{
"_id": 473,
"id": 531,
"pid": 53,
"city_code": "101230707",
"city_name": "漳平市"
},
{
"_id": 474,
"id": 532,
"pid": 53,
"city_code": "101230702",
"city_name": "长汀县"
},
{
"_id": 475,
"id": 533,
"pid": 53,
"city_code": "101230706",
"city_name": "永定县"
},
{
"_id": 476,
"id": 534,
"pid": 53,
"city_code": "101230705",
"city_name": "上杭县"
},
{
"_id": 477,
"id": 535,
"pid": 53,
"city_code": "101230704",
"city_name": "武平县"
},
{
"_id": 478,
"id": 536,
"pid": 53,
"city_code": "101230703",
"city_name": "连城县"
},
{
"_id": 479,
"id": 538,
"pid": 54,
"city_code": "101230904",
"city_name": "邵武市"
},
{
"_id": 480,
"id": 539,
"pid": 54,
"city_code": "101230905",
"city_name": "武夷山市"
},
{
"_id": 481,
"id": 540,
"pid": 54,
"city_code": "101230910",
"city_name": "建瓯市"
},
{
"_id": 482,
"id": 541,
"pid": 54,
"city_code": "101230907",
"city_name": "建阳市"
},
{
"_id": 483,
"id": 542,
"pid": 54,
"city_code": "101230902",
"city_name": "顺昌县"
},
{
"_id": 484,
"id": 543,
"pid": 54,
"city_code": "101230906",
"city_name": "浦城县"
},
{
"_id": 485,
"id": 544,
"pid": 54,
"city_code": "101230903",
"city_name": "光泽县"
},
{
"_id": 486,
"id": 545,
"pid": 54,
"city_code": "101230908",
"city_name": "松溪县"
},
{
"_id": 487,
"id": 546,
"pid": 54,
"city_code": "101230909",
"city_name": "政和县"
},
{
"_id": 488,
"id": 548,
"pid": 55,
"city_code": "101230306",
"city_name": "福安市"
},
{
"_id": 489,
"id": 549,
"pid": 55,
"city_code": "101230308",
"city_name": "福鼎市"
},
{
"_id": 490,
"id": 550,
"pid": 55,
"city_code": "101230303",
"city_name": "霞浦县"
},
{
"_id": 491,
"id": 551,
"pid": 55,
"city_code": "101230302",
"city_name": "古田县"
},
{
"_id": 492,
"id": 552,
"pid": 55,
"city_code": "101230309",
"city_name": "屏南县"
},
{
"_id": 493,
"id": 553,
"pid": 55,
"city_code": "101230304",
"city_name": "寿宁县"
},
{
"_id": 494,
"id": 554,
"pid": 55,
"city_code": "101230305",
"city_name": "周宁县"
},
{
"_id": 495,
"id": 555,
"pid": 55,
"city_code": "101230307",
"city_name": "柘荣县"
},
{
"_id": 496,
"id": 556,
"pid": 56,
"city_code": "101230407",
"city_name": "城厢区"
},
{
"_id": 497,
"id": 557,
"pid": 56,
"city_code": "101230404",
"city_name": "涵江区"
},
{
"_id": 498,
"id": 558,
"pid": 56,
"city_code": "101230406",
"city_name": "荔城区"
},
{
"_id": 499,
"id": 559,
"pid": 56,
"city_code": "101230405",
"city_name": "秀屿区"
},
{
"_id": 500,
"id": 560,
"pid": 56,
"city_code": "101230402",
"city_name": "仙游县"
},
{
"_id": 501,
"id": 566,
"pid": 57,
"city_code": "101230510",
"city_name": "石狮市"
},
{
"_id": 502,
"id": 567,
"pid": 57,
"city_code": "101230509",
"city_name": "晋江市"
},
{
"_id": 503,
"id": 568,
"pid": 57,
"city_code": "101230506",
"city_name": "南安市"
},
{
"_id": 504,
"id": 569,
"pid": 57,
"city_code": "101230508",
"city_name": "惠安县"
},
{
"_id": 505,
"id": 570,
"pid": 57,
"city_code": "101230502",
"city_name": "安溪县"
},
{
"_id": 506,
"id": 571,
"pid": 57,
"city_code": "101230504",
"city_name": "永春县"
},
{
"_id": 507,
"id": 572,
"pid": 57,
"city_code": "101230505",
"city_name": "德化县"
},
{
"_id": 508,
"id": 576,
"pid": 58,
"city_code": "101230810",
"city_name": "永安市"
},
{
"_id": 509,
"id": 577,
"pid": 58,
"city_code": "101230807",
"city_name": "明溪县"
},
{
"_id": 510,
"id": 578,
"pid": 58,
"city_code": "101230803",
"city_name": "清流县"
},
{
"_id": 511,
"id": 579,
"pid": 58,
"city_code": "101230802",
"city_name": "宁化县"
},
{
"_id": 512,
"id": 580,
"pid": 58,
"city_code": "101230811",
"city_name": "大田县"
},
{
"_id": 513,
"id": 581,
"pid": 58,
"city_code": "101230809",
"city_name": "尤溪县"
},
{
"_id": 514,
"id": 582,
"pid": 58,
"city_code": "101230808",
"city_name": "沙县"
},
{
"_id": 515,
"id": 583,
"pid": 58,
"city_code": "101230805",
"city_name": "将乐县"
},
{
"_id": 516,
"id": 584,
"pid": 58,
"city_code": "101230804",
"city_name": "泰宁县"
},
{
"_id": 517,
"id": 585,
"pid": 58,
"city_code": "101230806",
"city_name": "建宁县"
},
{
"_id": 518,
"id": 590,
"pid": 59,
"city_code": "101230202",
"city_name": "同安区"
},
{
"_id": 519,
"id": 594,
"pid": 60,
"city_code": "101230605",
"city_name": "龙海市"
},
{
"_id": 520,
"id": 595,
"pid": 60,
"city_code": "101230609",
"city_name": "云霄县"
},
{
"_id": 521,
"id": 596,
"pid": 60,
"city_code": "101230606",
"city_name": "漳浦县"
},
{
"_id": 522,
"id": 597,
"pid": 60,
"city_code": "101230607",
"city_name": "诏安县"
},
{
"_id": 523,
"id": 598,
"pid": 60,
"city_code": "101230602",
"city_name": "长泰县"
},
{
"_id": 524,
"id": 599,
"pid": 60,
"city_code": "101230608",
"city_name": "东山县"
},
{
"_id": 525,
"id": 600,
"pid": 60,
"city_code": "101230603",
"city_name": "南靖县"
},
{
"_id": 526,
"id": 601,
"pid": 60,
"city_code": "101230604",
"city_name": "平和县"
},
{
"_id": 527,
"id": 602,
"pid": 60,
"city_code": "101230610",
"city_name": "华安县"
},
{
"_id": 528,
"id": 603,
"pid": 61,
"city_code": "101160102",
"city_name": "皋兰县"
},
{
"_id": 529,
"id": 609,
"pid": 61,
"city_code": "101160103",
"city_name": "永登县"
},
{
"_id": 530,
"id": 610,
"pid": 61,
"city_code": "101160104",
"city_name": "榆中县"
},
{
"_id": 531,
"id": 611,
"pid": 62,
"city_code": "101161301",
"city_name": "白银区"
},
{
"_id": 532,
"id": 612,
"pid": 62,
"city_code": "101161304",
"city_name": "平川区"
},
{
"_id": 533,
"id": 613,
"pid": 62,
"city_code": "101161303",
"city_name": "会宁县"
},
{
"_id": 534,
"id": 614,
"pid": 62,
"city_code": "101161305",
"city_name": "景泰县"
},
{
"_id": 535,
"id": 615,
"pid": 62,
"city_code": "101161302",
"city_name": "靖远县"
},
{
"_id": 536,
"id": 616,
"pid": 63,
"city_code": "101160205",
"city_name": "临洮县"
},
{
"_id": 537,
"id": 617,
"pid": 63,
"city_code": "101160203",
"city_name": "陇西县"
},
{
"_id": 538,
"id": 618,
"pid": 63,
"city_code": "101160202",
"city_name": "通渭县"
},
{
"_id": 539,
"id": 619,
"pid": 63,
"city_code": "101160204",
"city_name": "渭源县"
},
{
"_id": 540,
"id": 620,
"pid": 63,
"city_code": "101160206",
"city_name": "漳县"
},
{
"_id": 541,
"id": 621,
"pid": 63,
"city_code": "101160207",
"city_name": "岷县"
},
{
"_id": 542,
"id": 624,
"pid": 64,
"city_code": "101161201",
"city_name": "合作市"
},
{
"_id": 543,
"id": 625,
"pid": 64,
"city_code": "101161202",
"city_name": "临潭县"
},
{
"_id": 544,
"id": 626,
"pid": 64,
"city_code": "101161203",
"city_name": "卓尼县"
},
{
"_id": 545,
"id": 627,
"pid": 64,
"city_code": "101161204",
"city_name": "舟曲县"
},
{
"_id": 546,
"id": 628,
"pid": 64,
"city_code": "101161205",
"city_name": "迭部县"
},
{
"_id": 547,
"id": 629,
"pid": 64,
"city_code": "101161206",
"city_name": "玛曲县"
},
{
"_id": 548,
"id": 630,
"pid": 64,
"city_code": "101161207",
"city_name": "碌曲县"
},
{
"_id": 549,
"id": 631,
"pid": 64,
"city_code": "101161208",
"city_name": "夏河县"
},
{
"_id": 550,
"id": 634,
"pid": 66,
"city_code": "101160602",
"city_name": "永昌县"
},
{
"_id": 551,
"id": 636,
"pid": 67,
"city_code": "101160807",
"city_name": "玉门市"
},
{
"_id": 552,
"id": 637,
"pid": 67,
"city_code": "101160808",
"city_name": "敦煌市"
},
{
"_id": 553,
"id": 638,
"pid": 67,
"city_code": "101160803",
"city_name": "金塔县"
},
{
"_id": 554,
"id": 639,
"pid": 67,
"city_code": "101160805",
"city_name": "瓜州县"
},
{
"_id": 555,
"id": 640,
"pid": 67,
"city_code": "101160806",
"city_name": "肃北县"
},
{
"_id": 556,
"id": 641,
"pid": 67,
"city_code": "101160804",
"city_name": "阿克塞"
},
{
"_id": 557,
"id": 642,
"pid": 68,
"city_code": "101161101",
"city_name": "临夏市"
},
{
"_id": 558,
"id": 643,
"pid": 68,
"city_code": "101161101",
"city_name": "临夏县"
},
{
"_id": 559,
"id": 644,
"pid": 68,
"city_code": "101161102",
"city_name": "康乐县"
},
{
"_id": 560,
"id": 645,
"pid": 68,
"city_code": "101161103",
"city_name": "永靖县"
},
{
"_id": 561,
"id": 646,
"pid": 68,
"city_code": "101161104",
"city_name": "广河县"
},
{
"_id": 562,
"id": 647,
"pid": 68,
"city_code": "101161105",
"city_name": "和政县"
},
{
"_id": 563,
"id": 648,
"pid": 68,
"city_code": "101161106",
"city_name": "东乡族自治县"
},
{
"_id": 564,
"id": 649,
"pid": 68,
"city_code": "101161107",
"city_name": "积石山"
},
{
"_id": 565,
"id": 650,
"pid": 69,
"city_code": "101161002",
"city_name": "成县"
},
{
"_id": 566,
"id": 651,
"pid": 69,
"city_code": "101161008",
"city_name": "徽县"
},
{
"_id": 567,
"id": 652,
"pid": 69,
"city_code": "101161005",
"city_name": "康县"
},
{
"_id": 568,
"id": 653,
"pid": 69,
"city_code": "101161007",
"city_name": "礼县"
},
{
"_id": 569,
"id": 654,
"pid": 69,
"city_code": "101161009",
"city_name": "两当县"
},
{
"_id": 570,
"id": 655,
"pid": 69,
"city_code": "101161003",
"city_name": "文县"
},
{
"_id": 571,
"id": 656,
"pid": 69,
"city_code": "101161006",
"city_name": "西和县"
},
{
"_id": 572,
"id": 657,
"pid": 69,
"city_code": "101161004",
"city_name": "宕昌县"
},
{
"_id": 573,
"id": 658,
"pid": 69,
"city_code": "101161001",
"city_name": "武都区"
},
{
"_id": 574,
"id": 659,
"pid": 70,
"city_code": "101160304",
"city_name": "崇信县"
},
{
"_id": 575,
"id": 660,
"pid": 70,
"city_code": "101160305",
"city_name": "华亭县"
},
{
"_id": 576,
"id": 661,
"pid": 70,
"city_code": "101160307",
"city_name": "静宁县"
},
{
"_id": 577,
"id": 662,
"pid": 70,
"city_code": "101160303",
"city_name": "灵台县"
},
{
"_id": 578,
"id": 663,
"pid": 70,
"city_code": "101160308",
"city_name": "崆峒区"
},
{
"_id": 579,
"id": 664,
"pid": 70,
"city_code": "101160306",
"city_name": "庄浪县"
},
{
"_id": 580,
"id": 665,
"pid": 70,
"city_code": "101160302",
"city_name": "泾川县"
},
{
"_id": 581,
"id": 666,
"pid": 71,
"city_code": "101160405",
"city_name": "合水县"
},
{
"_id": 582,
"id": 667,
"pid": 71,
"city_code": "101160404",
"city_name": "华池县"
},
{
"_id": 583,
"id": 668,
"pid": 71,
"city_code": "101160403",
"city_name": "环县"
},
{
"_id": 584,
"id": 669,
"pid": 71,
"city_code": "101160407",
"city_name": "宁县"
},
{
"_id": 585,
"id": 670,
"pid": 71,
"city_code": "101160409",
"city_name": "庆城县"
},
{
"_id": 586,
"id": 671,
"pid": 71,
"city_code": "101160402",
"city_name": "西峰区"
},
{
"_id": 587,
"id": 672,
"pid": 71,
"city_code": "101160408",
"city_name": "镇原县"
},
{
"_id": 588,
"id": 673,
"pid": 71,
"city_code": "101160406",
"city_name": "正宁县"
},
{
"_id": 589,
"id": 674,
"pid": 72,
"city_code": "101160905",
"city_name": "甘谷县"
},
{
"_id": 590,
"id": 675,
"pid": 72,
"city_code": "101160904",
"city_name": "秦安县"
},
{
"_id": 591,
"id": 676,
"pid": 72,
"city_code": "101160903",
"city_name": "清水县"
},
{
"_id": 592,
"id": 678,
"pid": 72,
"city_code": "101160908",
"city_name": "麦积区"
},
{
"_id": 593,
"id": 679,
"pid": 72,
"city_code": "101160906",
"city_name": "武山县"
},
{
"_id": 594,
"id": 680,
"pid": 72,
"city_code": "101160907",
"city_name": "张家川"
},
{
"_id": 595,
"id": 681,
"pid": 73,
"city_code": "101160503",
"city_name": "古浪县"
},
{
"_id": 596,
"id": 682,
"pid": 73,
"city_code": "101160502",
"city_name": "民勤县"
},
{
"_id": 597,
"id": 683,
"pid": 73,
"city_code": "101160505",
"city_name": "天祝县"
},
{
"_id": 598,
"id": 685,
"pid": 74,
"city_code": "101160705",
"city_name": "高台县"
},
{
"_id": 599,
"id": 686,
"pid": 74,
"city_code": "101160704",
"city_name": "临泽县"
},
{
"_id": 600,
"id": 687,
"pid": 74,
"city_code": "101160703",
"city_name": "民乐县"
},
{
"_id": 601,
"id": 688,
"pid": 74,
"city_code": "101160706",
"city_name": "山丹县"
},
{
"_id": 602,
"id": 689,
"pid": 74,
"city_code": "101160702",
"city_name": "肃南县"
},
{
"_id": 603,
"id": 691,
"pid": 75,
"city_code": "101280103",
"city_name": "从化区"
},
{
"_id": 604,
"id": 692,
"pid": 75,
"city_code": "101280106",
"city_name": "天河区"
},
{
"_id": 605,
"id": 699,
"pid": 75,
"city_code": "101280102",
"city_name": "番禺区"
},
{
"_id": 606,
"id": 700,
"pid": 75,
"city_code": "101280105",
"city_name": "花都区"
},
{
"_id": 607,
"id": 701,
"pid": 75,
"city_code": "101280104",
"city_name": "增城区"
},
{
"_id": 608,
"id": 706,
"pid": 76,
"city_code": "101280604",
"city_name": "南山区"
},
{
"_id": 609,
"id": 711,
"pid": 77,
"city_code": "101281503",
"city_name": "潮安县"
},
{
"_id": 610,
"id": 712,
"pid": 77,
"city_code": "101281502",
"city_name": "饶平县"
},
{
"_id": 611,
"id": 746,
"pid": 79,
"city_code": "101280803",
"city_name": "南海区"
},
{
"_id": 612,
"id": 747,
"pid": 79,
"city_code": "101280801",
"city_name": "顺德区"
},
{
"_id": 613,
"id": 748,
"pid": 79,
"city_code": "101280802",
"city_name": "三水区"
},
{
"_id": 614,
"id": 749,
"pid": 79,
"city_code": "101280804",
"city_name": "高明区"
},
{
"_id": 615,
"id": 750,
"pid": 80,
"city_code": "101281206",
"city_name": "东源县"
},
{
"_id": 616,
"id": 751,
"pid": 80,
"city_code": "101281204",
"city_name": "和平县"
},
{
"_id": 617,
"id": 753,
"pid": 80,
"city_code": "101281203",
"city_name": "连平县"
},
{
"_id": 618,
"id": 754,
"pid": 80,
"city_code": "101281205",
"city_name": "龙川县"
},
{
"_id": 619,
"id": 755,
"pid": 80,
"city_code": "101281202",
"city_name": "紫金县"
},
{
"_id": 620,
"id": 756,
"pid": 81,
"city_code": "101280303",
"city_name": "惠阳区"
},
{
"_id": 621,
"id": 759,
"pid": 81,
"city_code": "101280302",
"city_name": "博罗县"
},
{
"_id": 622,
"id": 760,
"pid": 81,
"city_code": "101280304",
"city_name": "惠东县"
},
{
"_id": 623,
"id": 761,
"pid": 81,
"city_code": "101280305",
"city_name": "龙门县"
},
{
"_id": 624,
"id": 762,
"pid": 82,
"city_code": "101281109",
"city_name": "江海区"
},
{
"_id": 625,
"id": 763,
"pid": 82,
"city_code": "101281107",
"city_name": "蓬江区"
},
{
"_id": 626,
"id": 764,
"pid": 82,
"city_code": "101281104",
"city_name": "新会区"
},
{
"_id": 627,
"id": 765,
"pid": 82,
"city_code": "101281106",
"city_name": "台山市"
},
{
"_id": 628,
"id": 766,
"pid": 82,
"city_code": "101281103",
"city_name": "开平市"
},
{
"_id": 629,
"id": 767,
"pid": 82,
"city_code": "101281108",
"city_name": "鹤山市"
},
{
"_id": 630,
"id": 768,
"pid": 82,
"city_code": "101281105",
"city_name": "恩平市"
},
{
"_id": 631,
"id": 770,
"pid": 83,
"city_code": "101281903",
"city_name": "普宁市"
},
{
"_id": 632,
"id": 771,
"pid": 83,
"city_code": "101281905",
"city_name": "揭东县"
},
{
"_id": 633,
"id": 772,
"pid": 83,
"city_code": "101281902",
"city_name": "揭西县"
},
{
"_id": 634,
"id": 773,
"pid": 83,
"city_code": "101281904",
"city_name": "惠来县"
},
{
"_id": 635,
"id": 775,
"pid": 84,
"city_code": "101282006",
"city_name": "茂港区"
},
{
"_id": 636,
"id": 776,
"pid": 84,
"city_code": "101282002",
"city_name": "高州市"
},
{
"_id": 637,
"id": 777,
"pid": 84,
"city_code": "101282003",
"city_name": "化州市"
},
{
"_id": 638,
"id": 778,
"pid": 84,
"city_code": "101282005",
"city_name": "信宜市"
},
{
"_id": 639,
"id": 779,
"pid": 84,
"city_code": "101282004",
"city_name": "电白县"
},
{
"_id": 640,
"id": 780,
"pid": 85,
"city_code": "101280409",
"city_name": "梅县"
},
{
"_id": 641,
"id": 782,
"pid": 85,
"city_code": "101280402",
"city_name": "兴宁市"
},
{
"_id": 642,
"id": 783,
"pid": 85,
"city_code": "101280404",
"city_name": "大埔县"
},
{
"_id": 643,
"id": 784,
"pid": 85,
"city_code": "101280406",
"city_name": "丰顺县"
},
{
"_id": 644,
"id": 785,
"pid": 85,
"city_code": "101280408",
"city_name": "五华县"
},
{
"_id": 645,
"id": 786,
"pid": 85,
"city_code": "101280407",
"city_name": "平远县"
},
{
"_id": 646,
"id": 787,
"pid": 85,
"city_code": "101280403",
"city_name": "蕉岭县"
},
{
"_id": 647,
"id": 789,
"pid": 86,
"city_code": "101281307",
"city_name": "英德市"
},
{
"_id": 648,
"id": 790,
"pid": 86,
"city_code": "101281303",
"city_name": "连州市"
},
{
"_id": 649,
"id": 791,
"pid": 86,
"city_code": "101281306",
"city_name": "佛冈县"
},
{
"_id": 650,
"id": 792,
"pid": 86,
"city_code": "101281305",
"city_name": "阳山县"
},
{
"_id": 651,
"id": 793,
"pid": 86,
"city_code": "101281308",
"city_name": "清新县"
},
{
"_id": 652,
"id": 794,
"pid": 86,
"city_code": "101281304",
"city_name": "连山县"
},
{
"_id": 653,
"id": 795,
"pid": 86,
"city_code": "101281302",
"city_name": "连南县"
},
{
"_id": 654,
"id": 796,
"pid": 87,
"city_code": "101280504",
"city_name": "南澳县"
},
{
"_id": 655,
"id": 797,
"pid": 87,
"city_code": "101280502",
"city_name": "潮阳区"
},
{
"_id": 656,
"id": 798,
"pid": 87,
"city_code": "101280503",
"city_name": "澄海区"
},
{
"_id": 657,
"id": 804,
"pid": 88,
"city_code": "101282103",
"city_name": "陆丰市"
},
{
"_id": 658,
"id": 805,
"pid": 88,
"city_code": "101282102",
"city_name": "海丰县"
},
{
"_id": 659,
"id": 806,
"pid": 88,
"city_code": "101282104",
"city_name": "陆河县"
},
{
"_id": 660,
"id": 807,
"pid": 89,
"city_code": "101280209",
"city_name": "曲江区"
},
{
"_id": 661,
"id": 808,
"pid": 89,
"city_code": "101280210",
"city_name": "浈江区"
},
{
"_id": 662,
"id": 809,
"pid": 89,
"city_code": "101280211",
"city_name": "武江区"
},
{
"_id": 663,
"id": 810,
"pid": 89,
"city_code": "101280209",
"city_name": "曲江区"
},
{
"_id": 664,
"id": 811,
"pid": 89,
"city_code": "101280205",
"city_name": "乐昌市"
},
{
"_id": 665,
"id": 812,
"pid": 89,
"city_code": "101280207",
"city_name": "南雄市"
},
{
"_id": 666,
"id": 813,
"pid": 89,
"city_code": "101280203",
"city_name": "始兴县"
},
{
"_id": 667,
"id": 814,
"pid": 89,
"city_code": "101280206",
"city_name": "仁化县"
},
{
"_id": 668,
"id": 815,
"pid": 89,
"city_code": "101280204",
"city_name": "翁源县"
},
{
"_id": 669,
"id": 816,
"pid": 89,
"city_code": "101280208",
"city_name": "新丰县"
},
{
"_id": 670,
"id": 817,
"pid": 89,
"city_code": "101280202",
"city_name": "乳源县"
},
{
"_id": 671,
"id": 819,
"pid": 90,
"city_code": "101281802",
"city_name": "阳春市"
},
{
"_id": 672,
"id": 820,
"pid": 90,
"city_code": "101281804",
"city_name": "阳西县"
},
{
"_id": 673,
"id": 821,
"pid": 90,
"city_code": "101281803",
"city_name": "阳东县"
},
{
"_id": 674,
"id": 823,
"pid": 91,
"city_code": "101281402",
"city_name": "罗定市"
},
{
"_id": 675,
"id": 824,
"pid": 91,
"city_code": "101281403",
"city_name": "新兴县"
},
{
"_id": 676,
"id": 825,
"pid": 91,
"city_code": "101281404",
"city_name": "郁南县"
},
{
"_id": 677,
"id": 826,
"pid": 91,
"city_code": "101281406",
"city_name": "云安县"
},
{
"_id": 678,
"id": 827,
"pid": 92,
"city_code": "101281006",
"city_name": "赤坎区"
},
{
"_id": 679,
"id": 828,
"pid": 92,
"city_code": "101281009",
"city_name": "霞山区"
},
{
"_id": 680,
"id": 829,
"pid": 92,
"city_code": "101281008",
"city_name": "坡头区"
},
{
"_id": 681,
"id": 830,
"pid": 92,
"city_code": "101281010",
"city_name": "麻章区"
},
{
"_id": 682,
"id": 831,
"pid": 92,
"city_code": "101281005",
"city_name": "廉江市"
},
{
"_id": 683,
"id": 832,
"pid": 92,
"city_code": "101281003",
"city_name": "雷州市"
},
{
"_id": 684,
"id": 833,
"pid": 92,
"city_code": "101281002",
"city_name": "吴川市"
},
{
"_id": 685,
"id": 834,
"pid": 92,
"city_code": "101281007",
"city_name": "遂溪县"
},
{
"_id": 686,
"id": 835,
"pid": 92,
"city_code": "101281004",
"city_name": "徐闻县"
},
{
"_id": 687,
"id": 837,
"pid": 93,
"city_code": "101280908",
"city_name": "高要区"
},
{
"_id": 688,
"id": 838,
"pid": 93,
"city_code": "101280903",
"city_name": "四会市"
},
{
"_id": 689,
"id": 839,
"pid": 93,
"city_code": "101280902",
"city_name": "广宁县"
},
{
"_id": 690,
"id": 840,
"pid": 93,
"city_code": "101280906",
"city_name": "怀集县"
},
{
"_id": 691,
"id": 841,
"pid": 93,
"city_code": "101280907",
"city_name": "封开县"
},
{
"_id": 692,
"id": 842,
"pid": 93,
"city_code": "101280905",
"city_name": "德庆县"
},
{
"_id": 693,
"id": 850,
"pid": 95,
"city_code": "101280702",
"city_name": "斗门区"
},
{
"_id": 694,
"id": 851,
"pid": 95,
"city_code": "101280703",
"city_name": "金湾区"
},
{
"_id": 695,
"id": 852,
"pid": 96,
"city_code": "101300103",
"city_name": "邕宁区"
},
{
"_id": 696,
"id": 858,
"pid": 96,
"city_code": "101300108",
"city_name": "武鸣县"
},
{
"_id": 697,
"id": 859,
"pid": 96,
"city_code": "101300105",
"city_name": "隆安县"
},
{
"_id": 698,
"id": 860,
"pid": 96,
"city_code": "101300106",
"city_name": "马山县"
},
{
"_id": 699,
"id": 861,
"pid": 96,
"city_code": "101300107",
"city_name": "上林县"
},
{
"_id": 700,
"id": 862,
"pid": 96,
"city_code": "101300109",
"city_name": "宾阳县"
},
{
"_id": 701,
"id": 863,
"pid": 96,
"city_code": "101300104",
"city_name": "横县"
},
{
"_id": 702,
"id": 869,
"pid": 97,
"city_code": "101300510",
"city_name": "阳朔县"
},
{
"_id": 703,
"id": 870,
"pid": 97,
"city_code": "101300505",
"city_name": "临桂县"
},
{
"_id": 704,
"id": 871,
"pid": 97,
"city_code": "101300507",
"city_name": "灵川县"
},
{
"_id": 705,
"id": 872,
"pid": 97,
"city_code": "101300508",
"city_name": "全州县"
},
{
"_id": 706,
"id": 873,
"pid": 97,
"city_code": "101300512",
"city_name": "平乐县"
},
{
"_id": 707,
"id": 874,
"pid": 97,
"city_code": "101300506",
"city_name": "兴安县"
},
{
"_id": 708,
"id": 875,
"pid": 97,
"city_code": "101300509",
"city_name": "灌阳县"
},
{
"_id": 709,
"id": 876,
"pid": 97,
"city_code": "101300513",
"city_name": "荔浦县"
},
{
"_id": 710,
"id": 877,
"pid": 97,
"city_code": "101300514",
"city_name": "资源县"
},
{
"_id": 711,
"id": 878,
"pid": 97,
"city_code": "101300504",
"city_name": "永福县"
},
{
"_id": 712,
"id": 879,
"pid": 97,
"city_code": "101300503",
"city_name": "龙胜县"
},
{
"_id": 713,
"id": 880,
"pid": 97,
"city_code": "101300511",
"city_name": "恭城县"
},
{
"_id": 714,
"id": 882,
"pid": 98,
"city_code": "101301011",
"city_name": "凌云县"
},
{
"_id": 715,
"id": 883,
"pid": 98,
"city_code": "101301007",
"city_name": "平果县"
},
{
"_id": 716,
"id": 884,
"pid": 98,
"city_code": "101301009",
"city_name": "西林县"
},
{
"_id": 717,
"id": 885,
"pid": 98,
"city_code": "101301010",
"city_name": "乐业县"
},
{
"_id": 718,
"id": 886,
"pid": 98,
"city_code": "101301004",
"city_name": "德保县"
},
{
"_id": 719,
"id": 887,
"pid": 98,
"city_code": "101301012",
"city_name": "田林县"
},
{
"_id": 720,
"id": 888,
"pid": 98,
"city_code": "101301003",
"city_name": "田阳县"
},
{
"_id": 721,
"id": 889,
"pid": 98,
"city_code": "101301005",
"city_name": "靖西县"
},
{
"_id": 722,
"id": 890,
"pid": 98,
"city_code": "101301006",
"city_name": "田东县"
},
{
"_id": 723,
"id": 891,
"pid": 98,
"city_code": "101301002",
"city_name": "那坡县"
},
{
"_id": 724,
"id": 892,
"pid": 98,
"city_code": "101301008",
"city_name": "隆林县"
},
{
"_id": 725,
"id": 896,
"pid": 99,
"city_code": "101301302",
"city_name": "合浦县"
},
{
"_id": 726,
"id": 898,
"pid": 100,
"city_code": "101300204",
"city_name": "凭祥市"
},
{
"_id": 727,
"id": 899,
"pid": 100,
"city_code": "101300207",
"city_name": "宁明县"
},
{
"_id": 728,
"id": 900,
"pid": 100,
"city_code": "101300206",
"city_name": "扶绥县"
},
{
"_id": 729,
"id": 901,
"pid": 100,
"city_code": "101300203",
"city_name": "龙州县"
},
{
"_id": 730,
"id": 902,
"pid": 100,
"city_code": "101300205",
"city_name": "大新县"
},
{
"_id": 731,
"id": 903,
"pid": 100,
"city_code": "101300202",
"city_name": "天等县"
},
{
"_id": 732,
"id": 905,
"pid": 101,
"city_code": "101301405",
"city_name": "防城区"
},
{
"_id": 733,
"id": 906,
"pid": 101,
"city_code": "101301403",
"city_name": "东兴市"
},
{
"_id": 734,
"id": 907,
"pid": 101,
"city_code": "101301402",
"city_name": "上思县"
},
{
"_id": 735,
"id": 911,
"pid": 102,
"city_code": "101300802",
"city_name": "桂平市"
},
{
"_id": 736,
"id": 912,
"pid": 102,
"city_code": "101300803",
"city_name": "平南县"
},
{
"_id": 737,
"id": 914,
"pid": 103,
"city_code": "101301207",
"city_name": "宜州市"
},
{
"_id": 738,
"id": 915,
"pid": 103,
"city_code": "101301202",
"city_name": "天峨县"
},
{
"_id": 739,
"id": 916,
"pid": 103,
"city_code": "101301208",
"city_name": "凤山县"
},
{
"_id": 740,
"id": 917,
"pid": 103,
"city_code": "101301209",
"city_name": "南丹县"
},
{
"_id": 741,
"id": 918,
"pid": 103,
"city_code": "101301203",
"city_name": "东兰县"
},
{
"_id": 742,
"id": 919,
"pid": 103,
"city_code": "101301210",
"city_name": "都安县"
},
{
"_id": 743,
"id": 920,
"pid": 103,
"city_code": "101301206",
"city_name": "罗城县"
},
{
"_id": 744,
"id": 921,
"pid": 103,
"city_code": "101301204",
"city_name": "巴马县"
},
{
"_id": 745,
"id": 922,
"pid": 103,
"city_code": "101301205",
"city_name": "环江县"
},
{
"_id": 746,
"id": 923,
"pid": 103,
"city_code": "101301211",
"city_name": "大化县"
},
{
"_id": 747,
"id": 925,
"pid": 104,
"city_code": "101300704",
"city_name": "钟山县"
},
{
"_id": 748,
"id": 926,
"pid": 104,
"city_code": "101300702",
"city_name": "昭平县"
},
{
"_id": 749,
"id": 927,
"pid": 104,
"city_code": "101300703",
"city_name": "富川县"
},
{
"_id": 750,
"id": 929,
"pid": 105,
"city_code": "101300406",
"city_name": "合山市"
},
{
"_id": 751,
"id": 930,
"pid": 105,
"city_code": "101300404",
"city_name": "象州县"
},
{
"_id": 752,
"id": 931,
"pid": 105,
"city_code": "101300405",
"city_name": "武宣县"
},
{
"_id": 753,
"id": 932,
"pid": 105,
"city_code": "101300402",
"city_name": "忻城县"
},
{
"_id": 754,
"id": 933,
"pid": 105,
"city_code": "101300403",
"city_name": "金秀县"
},
{
"_id": 755,
"id": 938,
"pid": 106,
"city_code": "101300305",
"city_name": "柳江县"
},
{
"_id": 756,
"id": 939,
"pid": 106,
"city_code": "101300302",
"city_name": "柳城县"
},
{
"_id": 757,
"id": 940,
"pid": 106,
"city_code": "101300304",
"city_name": "鹿寨县"
},
{
"_id": 758,
"id": 941,
"pid": 106,
"city_code": "101300306",
"city_name": "融安县"
},
{
"_id": 759,
"id": 942,
"pid": 106,
"city_code": "101300307",
"city_name": "融水县"
},
{
"_id": 760,
"id": 943,
"pid": 106,
"city_code": "101300308",
"city_name": "三江县"
},
{
"_id": 761,
"id": 946,
"pid": 107,
"city_code": "101301103",
"city_name": "灵山县"
},
{
"_id": 762,
"id": 947,
"pid": 107,
"city_code": "101301102",
"city_name": "浦北县"
},
{
"_id": 763,
"id": 950,
"pid": 108,
"city_code": "101300607",
"city_name": "长洲区"
},
{
"_id": 764,
"id": 951,
"pid": 108,
"city_code": "101300606",
"city_name": "岑溪市"
},
{
"_id": 765,
"id": 952,
"pid": 108,
"city_code": "101300604",
"city_name": "苍梧县"
},
{
"_id": 766,
"id": 953,
"pid": 108,
"city_code": "101300602",
"city_name": "藤县"
},
{
"_id": 767,
"id": 954,
"pid": 108,
"city_code": "101300605",
"city_name": "蒙山县"
},
{
"_id": 768,
"id": 956,
"pid": 109,
"city_code": "101300903",
"city_name": "北流市"
},
{
"_id": 769,
"id": 957,
"pid": 109,
"city_code": "101300904",
"city_name": "容县"
},
{
"_id": 770,
"id": 958,
"pid": 109,
"city_code": "101300905",
"city_name": "陆川县"
},
{
"_id": 771,
"id": 959,
"pid": 109,
"city_code": "101300902",
"city_name": "博白县"
},
{
"_id": 772,
"id": 960,
"pid": 109,
"city_code": "101300906",
"city_name": "兴业县"
},
{
"_id": 773,
"id": 961,
"pid": 110,
"city_code": "101260111",
"city_name": "南明区"
},
{
"_id": 774,
"id": 962,
"pid": 110,
"city_code": "101260110",
"city_name": "云岩区"
},
{
"_id": 775,
"id": 963,
"pid": 110,
"city_code": "101260103",
"city_name": "花溪区"
},
{
"_id": 776,
"id": 964,
"pid": 110,
"city_code": "101260104",
"city_name": "乌当区"
},
{
"_id": 777,
"id": 965,
"pid": 110,
"city_code": "101260102",
"city_name": "白云区"
},
{
"_id": 778,
"id": 966,
"pid": 110,
"city_code": "101260109",
"city_name": "小河区"
},
{
"_id": 779,
"id": 969,
"pid": 110,
"city_code": "101260108",
"city_name": "清镇市"
},
{
"_id": 780,
"id": 970,
"pid": 110,
"city_code": "101260106",
"city_name": "开阳县"
},
{
"_id": 781,
"id": 971,
"pid": 110,
"city_code": "101260107",
"city_name": "修文县"
},
{
"_id": 782,
"id": 972,
"pid": 110,
"city_code": "101260105",
"city_name": "息烽县"
},
{
"_id": 783,
"id": 974,
"pid": 111,
"city_code": "101260306",
"city_name": "关岭县"
},
{
"_id": 784,
"id": 976,
"pid": 111,
"city_code": "101260305",
"city_name": "紫云县"
},
{
"_id": 785,
"id": 977,
"pid": 111,
"city_code": "101260304",
"city_name": "平坝县"
},
{
"_id": 786,
"id": 978,
"pid": 111,
"city_code": "101260302",
"city_name": "普定县"
},
{
"_id": 787,
"id": 980,
"pid": 112,
"city_code": "101260705",
"city_name": "大方县"
},
{
"_id": 788,
"id": 981,
"pid": 112,
"city_code": "101260708",
"city_name": "黔西县"
},
{
"_id": 789,
"id": 982,
"pid": 112,
"city_code": "101260703",
"city_name": "金沙县"
},
{
"_id": 790,
"id": 983,
"pid": 112,
"city_code": "101260707",
"city_name": "织金县"
},
{
"_id": 791,
"id": 984,
"pid": 112,
"city_code": "101260706",
"city_name": "纳雍县"
},
{
"_id": 792,
"id": 985,
"pid": 112,
"city_code": "101260702",
"city_name": "赫章县"
},
{
"_id": 793,
"id": 986,
"pid": 112,
"city_code": "101260704",
"city_name": "威宁县"
},
{
"_id": 794,
"id": 989,
"pid": 113,
"city_code": "101260801",
"city_name": "水城县"
},
{
"_id": 795,
"id": 990,
"pid": 113,
"city_code": "101260804",
"city_name": "盘县"
},
{
"_id": 796,
"id": 991,
"pid": 114,
"city_code": "101260501",
"city_name": "凯里市"
},
{
"_id": 797,
"id": 992,
"pid": 114,
"city_code": "101260505",
"city_name": "黄平县"
},
{
"_id": 798,
"id": 993,
"pid": 114,
"city_code": "101260503",
"city_name": "施秉县"
},
{
"_id": 799,
"id": 994,
"pid": 114,
"city_code": "101260509",
"city_name": "三穗县"
},
{
"_id": 800,
"id": 995,
"pid": 114,
"city_code": "101260504",
"city_name": "镇远县"
},
{
"_id": 801,
"id": 996,
"pid": 114,
"city_code": "101260502",
"city_name": "岑巩县"
},
{
"_id": 802,
"id": 997,
"pid": 114,
"city_code": "101260514",
"city_name": "天柱县"
},
{
"_id": 803,
"id": 998,
"pid": 114,
"city_code": "101260515",
"city_name": "锦屏县"
},
{
"_id": 804,
"id": 999,
"pid": 114,
"city_code": "101260511",
"city_name": "剑河县"
},
{
"_id": 805,
"id": 1000,
"pid": 114,
"city_code": "101260510",
"city_name": "台江县"
},
{
"_id": 806,
"id": 1001,
"pid": 114,
"city_code": "101260513",
"city_name": "黎平县"
},
{
"_id": 807,
"id": 1002,
"pid": 114,
"city_code": "101260516",
"city_name": "榕江县"
},
{
"_id": 808,
"id": 1003,
"pid": 114,
"city_code": "101260517",
"city_name": "从江县"
},
{
"_id": 809,
"id": 1004,
"pid": 114,
"city_code": "101260512",
"city_name": "雷山县"
},
{
"_id": 810,
"id": 1005,
"pid": 114,
"city_code": "101260507",
"city_name": "麻江县"
},
{
"_id": 811,
"id": 1006,
"pid": 114,
"city_code": "101260508",
"city_name": "丹寨县"
},
{
"_id": 812,
"id": 1007,
"pid": 115,
"city_code": "101260401",
"city_name": "都匀市"
},
{
"_id": 813,
"id": 1008,
"pid": 115,
"city_code": "101260405",
"city_name": "福泉市"
},
{
"_id": 814,
"id": 1009,
"pid": 115,
"city_code": "101260412",
"city_name": "荔波县"
},
{
"_id": 815,
"id": 1010,
"pid": 115,
"city_code": "101260402",
"city_name": "贵定县"
},
{
"_id": 816,
"id": 1011,
"pid": 115,
"city_code": "101260403",
"city_name": "瓮安县"
},
{
"_id": 817,
"id": 1012,
"pid": 115,
"city_code": "101260410",
"city_name": "独山县"
},
{
"_id": 818,
"id": 1013,
"pid": 115,
"city_code": "101260409",
"city_name": "平塘县"
},
{
"_id": 819,
"id": 1014,
"pid": 115,
"city_code": "101260408",
"city_name": "罗甸县"
},
{
"_id": 820,
"id": 1015,
"pid": 115,
"city_code": "101260404",
"city_name": "长顺县"
},
{
"_id": 821,
"id": 1016,
"pid": 115,
"city_code": "101260407",
"city_name": "龙里县"
},
{
"_id": 822,
"id": 1017,
"pid": 115,
"city_code": "101260406",
"city_name": "惠水县"
},
{
"_id": 823,
"id": 1018,
"pid": 115,
"city_code": "101260411",
"city_name": "三都县"
},
{
"_id": 824,
"id": 1019,
"pid": 116,
"city_code": "101260906",
"city_name": "兴义市"
},
{
"_id": 825,
"id": 1020,
"pid": 116,
"city_code": "101260903",
"city_name": "兴仁县"
},
{
"_id": 826,
"id": 1021,
"pid": 116,
"city_code": "101260909",
"city_name": "普安县"
},
{
"_id": 827,
"id": 1022,
"pid": 116,
"city_code": "101260902",
"city_name": "晴隆县"
},
{
"_id": 828,
"id": 1023,
"pid": 116,
"city_code": "101260904",
"city_name": "贞丰县"
},
{
"_id": 829,
"id": 1024,
"pid": 116,
"city_code": "101260905",
"city_name": "望谟县"
},
{
"_id": 830,
"id": 1025,
"pid": 116,
"city_code": "101260908",
"city_name": "册亨县"
},
{
"_id": 831,
"id": 1026,
"pid": 116,
"city_code": "101260907",
"city_name": "安龙县"
},
{
"_id": 832,
"id": 1027,
"pid": 117,
"city_code": "101260601",
"city_name": "铜仁市"
},
{
"_id": 833,
"id": 1028,
"pid": 117,
"city_code": "101260602",
"city_name": "江口县"
},
{
"_id": 834,
"id": 1029,
"pid": 117,
"city_code": "101260608",
"city_name": "石阡县"
},
{
"_id": 835,
"id": 1030,
"pid": 117,
"city_code": "101260605",
"city_name": "思南县"
},
{
"_id": 836,
"id": 1031,
"pid": 117,
"city_code": "101260610",
"city_name": "德江县"
},
{
"_id": 837,
"id": 1032,
"pid": 117,
"city_code": "101260603",
"city_name": "玉屏县"
},
{
"_id": 838,
"id": 1033,
"pid": 117,
"city_code": "101260607",
"city_name": "印江县"
},
{
"_id": 839,
"id": 1034,
"pid": 117,
"city_code": "101260609",
"city_name": "沿河县"
},
{
"_id": 840,
"id": 1035,
"pid": 117,
"city_code": "101260611",
"city_name": "松桃县"
},
{
"_id": 841,
"id": 1037,
"pid": 118,
"city_code": "101260215",
"city_name": "红花岗区"
},
{
"_id": 842,
"id": 1038,
"pid": 118,
"city_code": "101260212",
"city_name": "务川县"
},
{
"_id": 843,
"id": 1039,
"pid": 118,
"city_code": "101260210",
"city_name": "道真县"
},
{
"_id": 844,
"id": 1040,
"pid": 118,
"city_code": "101260214",
"city_name": "汇川区"
},
{
"_id": 845,
"id": 1041,
"pid": 118,
"city_code": "101260208",
"city_name": "赤水市"
},
{
"_id": 846,
"id": 1042,
"pid": 118,
"city_code": "101260203",
"city_name": "仁怀市"
},
{
"_id": 847,
"id": 1043,
"pid": 118,
"city_code": "101260202",
"city_name": "遵义县"
},
{
"_id": 848,
"id": 1044,
"pid": 118,
"city_code": "101260207",
"city_name": "桐梓县"
},
{
"_id": 849,
"id": 1045,
"pid": 118,
"city_code": "101260204",
"city_name": "绥阳县"
},
{
"_id": 850,
"id": 1046,
"pid": 118,
"city_code": "101260211",
"city_name": "正安县"
},
{
"_id": 851,
"id": 1047,
"pid": 118,
"city_code": "101260206",
"city_name": "凤冈县"
},
{
"_id": 852,
"id": 1048,
"pid": 118,
"city_code": "101260205",
"city_name": "湄潭县"
},
{
"_id": 853,
"id": 1049,
"pid": 118,
"city_code": "101260213",
"city_name": "余庆县"
},
{
"_id": 854,
"id": 1050,
"pid": 118,
"city_code": "101260209",
"city_name": "习水县"
},
{
"_id": 855,
"id": 1055,
"pid": 119,
"city_code": "101310102",
"city_name": "琼山区"
},
{
"_id": 856,
"id": 1082,
"pid": 137,
"city_code": "101090102",
"city_name": "井陉矿区"
},
{
"_id": 857,
"id": 1084,
"pid": 137,
"city_code": "101090114",
"city_name": "辛集市"
},
{
"_id": 858,
"id": 1085,
"pid": 137,
"city_code": "101090115",
"city_name": "藁城市"
},
{
"_id": 859,
"id": 1086,
"pid": 137,
"city_code": "101090116",
"city_name": "晋州市"
},
{
"_id": 860,
"id": 1087,
"pid": 137,
"city_code": "101090117",
"city_name": "新乐市"
},
{
"_id": 861,
"id": 1088,
"pid": 137,
"city_code": "101090118",
"city_name": "鹿泉区"
},
{
"_id": 862,
"id": 1089,
"pid": 137,
"city_code": "101090102",
"city_name": "井陉县"
},
{
"_id": 863,
"id": 1090,
"pid": 137,
"city_code": "101090103",
"city_name": "正定县"
},
{
"_id": 864,
"id": 1091,
"pid": 137,
"city_code": "101090104",
"city_name": "栾城区"
},
{
"_id": 865,
"id": 1092,
"pid": 137,
"city_code": "101090105",
"city_name": "行唐县"
},
{
"_id": 866,
"id": 1093,
"pid": 137,
"city_code": "101090106",
"city_name": "灵寿县"
},
{
"_id": 867,
"id": 1094,
"pid": 137,
"city_code": "101090107",
"city_name": "高邑县"
},
{
"_id": 868,
"id": 1095,
"pid": 137,
"city_code": "101090108",
"city_name": "深泽县"
},
{
"_id": 869,
"id": 1096,
"pid": 137,
"city_code": "101090109",
"city_name": "赞皇县"
},
{
"_id": 870,
"id": 1097,
"pid": 137,
"city_code": "101090110",
"city_name": "无极县"
},
{
"_id": 871,
"id": 1098,
"pid": 137,
"city_code": "101090111",
"city_name": "平山县"
},
{
"_id": 872,
"id": 1099,
"pid": 137,
"city_code": "101090112",
"city_name": "元氏县"
},
{
"_id": 873,
"id": 1100,
"pid": 137,
"city_code": "101090113",
"city_name": "赵县"
},
{
"_id": 874,
"id": 1104,
"pid": 138,
"city_code": "101090218",
"city_name": "涿州市"
},
{
"_id": 875,
"id": 1105,
"pid": 138,
"city_code": "101090219",
"city_name": "定州市"
},
{
"_id": 876,
"id": 1106,
"pid": 138,
"city_code": "101090220",
"city_name": "安国市"
},
{
"_id": 877,
"id": 1107,
"pid": 138,
"city_code": "101090221",
"city_name": "高碑店市"
},
{
"_id": 878,
"id": 1108,
"pid": 138,
"city_code": "101090202",
"city_name": "满城县"
},
{
"_id": 879,
"id": 1109,
"pid": 138,
"city_code": "101090224",
"city_name": "清苑县"
},
{
"_id": 880,
"id": 1110,
"pid": 138,
"city_code": "101090213",
"city_name": "涞水县"
},
{
"_id": 881,
"id": 1111,
"pid": 138,
"city_code": "101090203",
"city_name": "阜平县"
},
{
"_id": 882,
"id": 1112,
"pid": 138,
"city_code": "101090204",
"city_name": "徐水县"
},
{
"_id": 883,
"id": 1113,
"pid": 138,
"city_code": "101090223",
"city_name": "定兴县"
},
{
"_id": 884,
"id": 1114,
"pid": 138,
"city_code": "101090205",
"city_name": "唐县"
},
{
"_id": 885,
"id": 1115,
"pid": 138,
"city_code": "101090206",
"city_name": "高阳县"
},
{
"_id": 886,
"id": 1116,
"pid": 138,
"city_code": "101090207",
"city_name": "容城县"
},
{
"_id": 887,
"id": 1117,
"pid": 138,
"city_code": "101090209",
"city_name": "涞源县"
},
{
"_id": 888,
"id": 1118,
"pid": 138,
"city_code": "101090210",
"city_name": "望都县"
},
{
"_id": 889,
"id": 1119,
"pid": 138,
"city_code": "101090211",
"city_name": "安新县"
},
{
"_id": 890,
"id": 1120,
"pid": 138,
"city_code": "101090212",
"city_name": "易县"
},
{
"_id": 891,
"id": 1121,
"pid": 138,
"city_code": "101090214",
"city_name": "曲阳县"
},
{
"_id": 892,
"id": 1122,
"pid": 138,
"city_code": "101090215",
"city_name": "蠡县"
},
{
"_id": 893,
"id": 1123,
"pid": 138,
"city_code": "101090216",
"city_name": "顺平县"
},
{
"_id": 894,
"id": 1124,
"pid": 138,
"city_code": "101090225",
"city_name": "博野县"
},
{
"_id": 895,
"id": 1125,
"pid": 138,
"city_code": "101090217",
"city_name": "雄县"
},
{
"_id": 896,
"id": 1128,
"pid": 139,
"city_code": "101090711",
"city_name": "泊头市"
},
{
"_id": 897,
"id": 1129,
"pid": 139,
"city_code": "101090712",
"city_name": "任丘市"
},
{
"_id": 898,
"id": 1130,
"pid": 139,
"city_code": "101090713",
"city_name": "黄骅市"
},
{
"_id": 899,
"id": 1131,
"pid": 139,
"city_code": "101090714",
"city_name": "河间市"
},
{
"_id": 900,
"id": 1132,
"pid": 139,
"city_code": "101090716",
"city_name": "沧县"
},
{
"_id": 901,
"id": 1133,
"pid": 139,
"city_code": "101090702",
"city_name": "青县"
},
{
"_id": 902,
"id": 1134,
"pid": 139,
"city_code": "101090703",
"city_name": "东光县"
},
{
"_id": 903,
"id": 1135,
"pid": 139,
"city_code": "101090704",
"city_name": "海兴县"
},
{
"_id": 904,
"id": 1136,
"pid": 139,
"city_code": "101090705",
"city_name": "盐山县"
},
{
"_id": 905,
"id": 1137,
"pid": 139,
"city_code": "101090706",
"city_name": "肃宁县"
},
{
"_id": 906,
"id": 1138,
"pid": 139,
"city_code": "101090707",
"city_name": "南皮县"
},
{
"_id": 907,
"id": 1139,
"pid": 139,
"city_code": "101090708",
"city_name": "吴桥县"
},
{
"_id": 908,
"id": 1140,
"pid": 139,
"city_code": "101090709",
"city_name": "献县"
},
{
"_id": 909,
"id": 1141,
"pid": 139,
"city_code": "101090710",
"city_name": "孟村县"
},
{
"_id": 910,
"id": 1145,
"pid": 140,
"city_code": "101090403",
"city_name": "承德县"
},
{
"_id": 911,
"id": 1146,
"pid": 140,
"city_code": "101090404",
"city_name": "兴隆县"
},
{
"_id": 912,
"id": 1147,
"pid": 140,
"city_code": "101090405",
"city_name": "平泉县"
},
{
"_id": 913,
"id": 1148,
"pid": 140,
"city_code": "101090406",
"city_name": "滦平县"
},
{
"_id": 914,
"id": 1149,
"pid": 140,
"city_code": "101090407",
"city_name": "隆化县"
},
{
"_id": 915,
"id": 1150,
"pid": 140,
"city_code": "101090408",
"city_name": "丰宁县"
},
{
"_id": 916,
"id": 1151,
"pid": 140,
"city_code": "101090409",
"city_name": "宽城县"
},
{
"_id": 917,
"id": 1152,
"pid": 140,
"city_code": "101090410",
"city_name": "围场县"
},
{
"_id": 918,
"id": 1156,
"pid": 141,
"city_code": "101091002",
"city_name": "峰峰矿区"
},
{
"_id": 919,
"id": 1157,
"pid": 141,
"city_code": "101091016",
"city_name": "武安市"
},
{
"_id": 920,
"id": 1158,
"pid": 141,
"city_code": "101091001",
"city_name": "邯郸县"
},
{
"_id": 921,
"id": 1159,
"pid": 141,
"city_code": "101091003",
"city_name": "临漳县"
},
{
"_id": 922,
"id": 1160,
"pid": 141,
"city_code": "101091004",
"city_name": "成安县"
},
{
"_id": 923,
"id": 1161,
"pid": 141,
"city_code": "101091005",
"city_name": "大名县"
},
{
"_id": 924,
"id": 1162,
"pid": 141,
"city_code": "101091006",
"city_name": "涉县"
},
{
"_id": 925,
"id": 1163,
"pid": 141,
"city_code": "101091007",
"city_name": "磁县"
},
{
"_id": 926,
"id": 1164,
"pid": 141,
"city_code": "101091008",
"city_name": "肥乡县"
},
{
"_id": 927,
"id": 1165,
"pid": 141,
"city_code": "101091009",
"city_name": "永年县"
},
{
"_id": 928,
"id": 1166,
"pid": 141,
"city_code": "101091010",
"city_name": "邱县"
},
{
"_id": 929,
"id": 1167,
"pid": 141,
"city_code": "101091011",
"city_name": "鸡泽县"
},
{
"_id": 930,
"id": 1168,
"pid": 141,
"city_code": "101091012",
"city_name": "广平县"
},
{
"_id": 931,
"id": 1169,
"pid": 141,
"city_code": "101091013",
"city_name": "馆陶县"
},
{
"_id": 932,
"id": 1170,
"pid": 141,
"city_code": "101091014",
"city_name": "魏县"
},
{
"_id": 933,
"id": 1171,
"pid": 141,
"city_code": "101091015",
"city_name": "曲周县"
},
{
"_id": 934,
"id": 1173,
"pid": 142,
"city_code": "101090810",
"city_name": "冀州市"
},
{
"_id": 935,
"id": 1174,
"pid": 142,
"city_code": "101090811",
"city_name": "深州市"
},
{
"_id": 936,
"id": 1175,
"pid": 142,
"city_code": "101090802",
"city_name": "枣强县"
},
{
"_id": 937,
"id": 1176,
"pid": 142,
"city_code": "101090803",
"city_name": "武邑县"
},
{
"_id": 938,
"id": 1177,
"pid": 142,
"city_code": "101090804",
"city_name": "武强县"
},
{
"_id": 939,
"id": 1178,
"pid": 142,
"city_code": "101090805",
"city_name": "饶阳县"
},
{
"_id": 940,
"id": 1179,
"pid": 142,
"city_code": "101090806",
"city_name": "安平县"
},
{
"_id": 941,
"id": 1180,
"pid": 142,
"city_code": "101090807",
"city_name": "故城县"
},
{
"_id": 942,
"id": 1181,
"pid": 142,
"city_code": "101090808",
"city_name": "景县"
},
{
"_id": 943,
"id": 1182,
"pid": 142,
"city_code": "101090809",
"city_name": "阜城县"
},
{
"_id": 944,
"id": 1185,
"pid": 143,
"city_code": "101090608",
"city_name": "霸州市"
},
{
"_id": 945,
"id": 1186,
"pid": 143,
"city_code": "101090609",
"city_name": "三河市"
},
{
"_id": 946,
"id": 1187,
"pid": 143,
"city_code": "101090602",
"city_name": "固安县"
},
{
"_id": 947,
"id": 1188,
"pid": 143,
"city_code": "101090603",
"city_name": "永清县"
},
{
"_id": 948,
"id": 1189,
"pid": 143,
"city_code": "101090604",
"city_name": "香河县"
},
{
"_id": 949,
"id": 1190,
"pid": 143,
"city_code": "101090605",
"city_name": "大城县"
},
{
"_id": 950,
"id": 1191,
"pid": 143,
"city_code": "101090606",
"city_name": "文安县"
},
{
"_id": 951,
"id": 1192,
"pid": 143,
"city_code": "101090607",
"city_name": "大厂县"
},
{
"_id": 952,
"id": 1195,
"pid": 144,
"city_code": "101091106",
"city_name": "北戴河区"
},
{
"_id": 953,
"id": 1196,
"pid": 144,
"city_code": "101091103",
"city_name": "昌黎县"
},
{
"_id": 954,
"id": 1197,
"pid": 144,
"city_code": "101091104",
"city_name": "抚宁县"
},
{
"_id": 955,
"id": 1198,
"pid": 144,
"city_code": "101091105",
"city_name": "卢龙县"
},
{
"_id": 956,
"id": 1199,
"pid": 144,
"city_code": "101091102",
"city_name": "青龙县"
},
{
"_id": 957,
"id": 1204,
"pid": 145,
"city_code": "101090502",
"city_name": "丰南区"
},
{
"_id": 958,
"id": 1205,
"pid": 145,
"city_code": "101090503",
"city_name": "丰润区"
},
{
"_id": 959,
"id": 1206,
"pid": 145,
"city_code": "101090510",
"city_name": "遵化市"
},
{
"_id": 960,
"id": 1207,
"pid": 145,
"city_code": "101090511",
"city_name": "迁安市"
},
{
"_id": 961,
"id": 1208,
"pid": 145,
"city_code": "101090504",
"city_name": "滦县"
},
{
"_id": 962,
"id": 1209,
"pid": 145,
"city_code": "101090505",
"city_name": "滦南县"
},
{
"_id": 963,
"id": 1210,
"pid": 145,
"city_code": "101090506",
"city_name": "乐亭县"
},
{
"_id": 964,
"id": 1211,
"pid": 145,
"city_code": "101090507",
"city_name": "迁西县"
},
{
"_id": 965,
"id": 1212,
"pid": 145,
"city_code": "101090508",
"city_name": "玉田县"
},
{
"_id": 966,
"id": 1213,
"pid": 145,
"city_code": "101090509",
"city_name": "唐海县"
},
{
"_id": 967,
"id": 1216,
"pid": 146,
"city_code": "101090916",
"city_name": "南宫市"
},
{
"_id": 968,
"id": 1217,
"pid": 146,
"city_code": "101090917",
"city_name": "沙河市"
},
{
"_id": 969,
"id": 1218,
"pid": 146,
"city_code": "101090901",
"city_name": "邢台县"
},
{
"_id": 970,
"id": 1219,
"pid": 146,
"city_code": "101090902",
"city_name": "临城县"
},
{
"_id": 971,
"id": 1220,
"pid": 146,
"city_code": "101090904",
"city_name": "内丘县"
},
{
"_id": 972,
"id": 1221,
"pid": 146,
"city_code": "101090905",
"city_name": "柏乡县"
},
{
"_id": 973,
"id": 1222,
"pid": 146,
"city_code": "101090906",
"city_name": "隆尧县"
},
{
"_id": 974,
"id": 1223,
"pid": 146,
"city_code": "101090918",
"city_name": "任县"
},
{
"_id": 975,
"id": 1224,
"pid": 146,
"city_code": "101090907",
"city_name": "南和县"
},
{
"_id": 976,
"id": 1225,
"pid": 146,
"city_code": "101090908",
"city_name": "宁晋县"
},
{
"_id": 977,
"id": 1226,
"pid": 146,
"city_code": "101090909",
"city_name": "巨鹿县"
},
{
"_id": 978,
"id": 1227,
"pid": 146,
"city_code": "101090910",
"city_name": "新河县"
},
{
"_id": 979,
"id": 1228,
"pid": 146,
"city_code": "101090911",
"city_name": "广宗县"
},
{
"_id": 980,
"id": 1229,
"pid": 146,
"city_code": "101090912",
"city_name": "平乡县"
},
{
"_id": 981,
"id": 1230,
"pid": 146,
"city_code": "101090913",
"city_name": "威县"
},
{
"_id": 982,
"id": 1231,
"pid": 146,
"city_code": "101090914",
"city_name": "清河县"
},
{
"_id": 983,
"id": 1232,
"pid": 146,
"city_code": "101090915",
"city_name": "临西县"
},
{
"_id": 984,
"id": 1235,
"pid": 147,
"city_code": "101090302",
"city_name": "宣化区"
},
{
"_id": 985,
"id": 1237,
"pid": 147,
"city_code": "101090302",
"city_name": "宣化县"
},
{
"_id": 986,
"id": 1238,
"pid": 147,
"city_code": "101090303",
"city_name": "张北县"
},
{
"_id": 987,
"id": 1239,
"pid": 147,
"city_code": "101090304",
"city_name": "康保县"
},
{
"_id": 988,
"id": 1240,
"pid": 147,
"city_code": "101090305",
"city_name": "沽源县"
},
{
"_id": 989,
"id": 1241,
"pid": 147,
"city_code": "101090306",
"city_name": "尚义县"
},
{
"_id": 990,
"id": 1242,
"pid": 147,
"city_code": "101090307",
"city_name": "蔚县"
},
{
"_id": 991,
"id": 1243,
"pid": 147,
"city_code": "101090308",
"city_name": "阳原县"
},
{
"_id": 992,
"id": 1244,
"pid": 147,
"city_code": "101090309",
"city_name": "怀安县"
},
{
"_id": 993,
"id": 1245,
"pid": 147,
"city_code": "101090310",
"city_name": "万全县"
},
{
"_id": 994,
"id": 1246,
"pid": 147,
"city_code": "101090311",
"city_name": "怀来县"
},
{
"_id": 995,
"id": 1247,
"pid": 147,
"city_code": "101090312",
"city_name": "涿鹿县"
},
{
"_id": 996,
"id": 1248,
"pid": 147,
"city_code": "101090313",
"city_name": "赤城县"
},
{
"_id": 997,
"id": 1249,
"pid": 147,
"city_code": "101090314",
"city_name": "崇礼县"
},
{
"_id": 998,
"id": 1255,
"pid": 148,
"city_code": "101180108",
"city_name": "上街区"
},
{
"_id": 999,
"id": 1261,
"pid": 148,
"city_code": "101180102",
"city_name": "巩义市"
},
{
"_id": 1000,
"id": 1262,
"pid": 148,
"city_code": "101180103",
"city_name": "荥阳市"
},
{
"_id": 1001,
"id": 1263,
"pid": 148,
"city_code": "101180105",
"city_name": "新密市"
},
{
"_id": 1002,
"id": 1264,
"pid": 148,
"city_code": "101180106",
"city_name": "新郑市"
},
{
"_id": 1003,
"id": 1265,
"pid": 148,
"city_code": "101180104",
"city_name": "登封市"
},
{
"_id": 1004,
"id": 1266,
"pid": 148,
"city_code": "101180107",
"city_name": "中牟县"
},
{
"_id": 1005,
"id": 1272,
"pid": 149,
"city_code": "101180911",
"city_name": "吉利区"
},
{
"_id": 1006,
"id": 1273,
"pid": 149,
"city_code": "101180908",
"city_name": "偃师市"
},
{
"_id": 1007,
"id": 1274,
"pid": 149,
"city_code": "101180903",
"city_name": "孟津县"
},
{
"_id": 1008,
"id": 1275,
"pid": 149,
"city_code": "101180902",
"city_name": "新安县"
},
{
"_id": 1009,
"id": 1276,
"pid": 149,
"city_code": "101180909",
"city_name": "栾川县"
},
{
"_id": 1010,
"id": 1277,
"pid": 149,
"city_code": "101180907",
"city_name": "嵩县"
},
{
"_id": 1011,
"id": 1278,
"pid": 149,
"city_code": "101180910",
"city_name": "汝阳县"
},
{
"_id": 1012,
"id": 1279,
"pid": 149,
"city_code": "101180904",
"city_name": "宜阳县"
},
{
"_id": 1013,
"id": 1280,
"pid": 149,
"city_code": "101180905",
"city_name": "洛宁县"
},
{
"_id": 1014,
"id": 1281,
"pid": 149,
"city_code": "101180906",
"city_name": "伊川县"
},
{
"_id": 1015,
"id": 1287,
"pid": 150,
"city_code": "101180802",
"city_name": "杞县"
},
{
"_id": 1016,
"id": 1288,
"pid": 150,
"city_code": "101180804",
"city_name": "通许县"
},
{
"_id": 1017,
"id": 1289,
"pid": 150,
"city_code": "101180803",
"city_name": "尉氏县"
},
{
"_id": 1018,
"id": 1290,
"pid": 150,
"city_code": "101180801",
"city_name": "开封县"
},
{
"_id": 1019,
"id": 1291,
"pid": 150,
"city_code": "101180805",
"city_name": "兰考县"
},
{
"_id": 1020,
"id": 1296,
"pid": 151,
"city_code": "101180205",
"city_name": "林州市"
},
{
"_id": 1021,
"id": 1297,
"pid": 151,
"city_code": "101180201",
"city_name": "安阳县"
},
{
"_id": 1022,
"id": 1298,
"pid": 151,
"city_code": "101180202",
"city_name": "汤阴县"
},
{
"_id": 1023,
"id": 1299,
"pid": 151,
"city_code": "101180203",
"city_name": "滑县"
},
{
"_id": 1024,
"id": 1300,
"pid": 151,
"city_code": "101180204",
"city_name": "内黄县"
},
{
"_id": 1025,
"id": 1304,
"pid": 152,
"city_code": "101181202",
"city_name": "浚县"
},
{
"_id": 1026,
"id": 1305,
"pid": 152,
"city_code": "101181203",
"city_name": "淇县"
},
{
"_id": 1027,
"id": 1306,
"pid": 153,
"city_code": "101181801",
"city_name": "济源市"
},
{
"_id": 1028,
"id": 1311,
"pid": 154,
"city_code": "101181104",
"city_name": "沁阳市"
},
{
"_id": 1029,
"id": 1312,
"pid": 154,
"city_code": "101181108",
"city_name": "孟州市"
},
{
"_id": 1030,
"id": 1313,
"pid": 154,
"city_code": "101181102",
"city_name": "修武县"
},
{
"_id": 1031,
"id": 1314,
"pid": 154,
"city_code": "101181106",
"city_name": "博爱县"
},
{
"_id": 1032,
"id": 1315,
"pid": 154,
"city_code": "101181103",
"city_name": "武陟县"
},
{
"_id": 1033,
"id": 1316,
"pid": 154,
"city_code": "101181107",
"city_name": "温县"
},
{
"_id": 1034,
"id": 1319,
"pid": 155,
"city_code": "101180711",
"city_name": "邓州市"
},
{
"_id": 1035,
"id": 1320,
"pid": 155,
"city_code": "101180702",
"city_name": "南召县"
},
{
"_id": 1036,
"id": 1321,
"pid": 155,
"city_code": "101180703",
"city_name": "方城县"
},
{
"_id": 1037,
"id": 1322,
"pid": 155,
"city_code": "101180705",
"city_name": "西峡县"
},
{
"_id": 1038,
"id": 1323,
"pid": 155,
"city_code": "101180707",
"city_name": "镇平县"
},
{
"_id": 1039,
"id": 1324,
"pid": 155,
"city_code": "101180706",
"city_name": "内乡县"
},
{
"_id": 1040,
"id": 1325,
"pid": 155,
"city_code": "101180708",
"city_name": "淅川县"
},
{
"_id": 1041,
"id": 1326,
"pid": 155,
"city_code": "101180704",
"city_name": "社旗县"
},
{
"_id": 1042,
"id": 1327,
"pid": 155,
"city_code": "101180710",
"city_name": "唐河县"
},
{
"_id": 1043,
"id": 1328,
"pid": 155,
"city_code": "101180709",
"city_name": "新野县"
},
{
"_id": 1044,
"id": 1329,
"pid": 155,
"city_code": "101180712",
"city_name": "桐柏县"
},
{
"_id": 1045,
"id": 1333,
"pid": 156,
"city_code": "101180508",
"city_name": "石龙区"
},
{
"_id": 1046,
"id": 1334,
"pid": 156,
"city_code": "101180506",
"city_name": "舞钢市"
},
{
"_id": 1047,
"id": 1335,
"pid": 156,
"city_code": "101180504",
"city_name": "汝州市"
},
{
"_id": 1048,
"id": 1336,
"pid": 156,
"city_code": "101180503",
"city_name": "宝丰县"
},
{
"_id": 1049,
"id": 1337,
"pid": 156,
"city_code": "101180505",
"city_name": "叶县"
},
{
"_id": 1050,
"id": 1338,
"pid": 156,
"city_code": "101180507",
"city_name": "鲁山县"
},
{
"_id": 1051,
"id": 1339,
"pid": 156,
"city_code": "101180502",
"city_name": "郏县"
},
{
"_id": 1052,
"id": 1341,
"pid": 157,
"city_code": "101181705",
"city_name": "义马市"
},
{
"_id": 1053,
"id": 1342,
"pid": 157,
"city_code": "101181702",
"city_name": "灵宝市"
},
{
"_id": 1054,
"id": 1343,
"pid": 157,
"city_code": "101181703",
"city_name": "渑池县"
},
{
"_id": 1055,
"id": 1344,
"pid": 157,
"city_code": "101181706",
"city_name": "陕县"
},
{
"_id": 1056,
"id": 1345,
"pid": 157,
"city_code": "101181704",
"city_name": "卢氏县"
},
{
"_id": 1057,
"id": 1347,
"pid": 158,
"city_code": "101181002",
"city_name": "睢阳区"
},
{
"_id": 1058,
"id": 1348,
"pid": 158,
"city_code": "101181009",
"city_name": "永城市"
},
{
"_id": 1059,
"id": 1349,
"pid": 158,
"city_code": "101181004",
"city_name": "民权县"
},
{
"_id": 1060,
"id": 1350,
"pid": 158,
"city_code": "101181003",
"city_name": "睢县"
},
{
"_id": 1061,
"id": 1351,
"pid": 158,
"city_code": "101181007",
"city_name": "宁陵县"
},
{
"_id": 1062,
"id": 1352,
"pid": 158,
"city_code": "101181005",
"city_name": "虞城县"
},
{
"_id": 1063,
"id": 1353,
"pid": 158,
"city_code": "101181006",
"city_name": "柘城县"
},
{
"_id": 1064,
"id": 1354,
"pid": 158,
"city_code": "101181008",
"city_name": "夏邑县"
},
{
"_id": 1065,
"id": 1359,
"pid": 159,
"city_code": "101180305",
"city_name": "卫辉市"
},
{
"_id": 1066,
"id": 1360,
"pid": 159,
"city_code": "101180304",
"city_name": "辉县市"
},
{
"_id": 1067,
"id": 1361,
"pid": 159,
"city_code": "101180301",
"city_name": "新乡县"
},
{
"_id": 1068,
"id": 1362,
"pid": 159,
"city_code": "101180302",
"city_name": "获嘉县"
},
{
"_id": 1069,
"id": 1363,
"pid": 159,
"city_code": "101180303",
"city_name": "原阳县"
},
{
"_id": 1070,
"id": 1364,
"pid": 159,
"city_code": "101180306",
"city_name": "延津县"
},
{
"_id": 1071,
"id": 1365,
"pid": 159,
"city_code": "101180307",
"city_name": "封丘县"
},
{
"_id": 1072,
"id": 1366,
"pid": 159,
"city_code": "101180308",
"city_name": "长垣县"
},
{
"_id": 1073,
"id": 1369,
"pid": 160,
"city_code": "101180603",
"city_name": "罗山县"
},
{
"_id": 1074,
"id": 1370,
"pid": 160,
"city_code": "101180604",
"city_name": "光山县"
},
{
"_id": 1075,
"id": 1371,
"pid": 160,
"city_code": "101180605",
"city_name": "新县"
},
{
"_id": 1076,
"id": 1372,
"pid": 160,
"city_code": "101180609",
"city_name": "商城县"
},
{
"_id": 1077,
"id": 1373,
"pid": 160,
"city_code": "101180608",
"city_name": "固始县"
},
{
"_id": 1078,
"id": 1374,
"pid": 160,
"city_code": "101180607",
"city_name": "潢川县"
},
{
"_id": 1079,
"id": 1375,
"pid": 160,
"city_code": "101180606",
"city_name": "淮滨县"
},
{
"_id": 1080,
"id": 1376,
"pid": 160,
"city_code": "101180602",
"city_name": "息县"
},
{
"_id": 1081,
"id": 1378,
"pid": 161,
"city_code": "101180405",
"city_name": "禹州市"
},
{
"_id": 1082,
"id": 1379,
"pid": 161,
"city_code": "101180404",
"city_name": "长葛市"
},
{
"_id": 1083,
"id": 1380,
"pid": 161,
"city_code": "101180401",
"city_name": "许昌县"
},
{
"_id": 1084,
"id": 1381,
"pid": 161,
"city_code": "101180402",
"city_name": "鄢陵县"
},
{
"_id": 1085,
"id": 1382,
"pid": 161,
"city_code": "101180403",
"city_name": "襄城县"
},
{
"_id": 1086,
"id": 1384,
"pid": 162,
"city_code": "101181407",
"city_name": "项城市"
},
{
"_id": 1087,
"id": 1385,
"pid": 162,
"city_code": "101181402",
"city_name": "扶沟县"
},
{
"_id": 1088,
"id": 1386,
"pid": 162,
"city_code": "101181405",
"city_name": "西华县"
},
{
"_id": 1089,
"id": 1387,
"pid": 162,
"city_code": "101181406",
"city_name": "商水县"
},
{
"_id": 1090,
"id": 1388,
"pid": 162,
"city_code": "101181410",
"city_name": "沈丘县"
},
{
"_id": 1091,
"id": 1389,
"pid": 162,
"city_code": "101181408",
"city_name": "郸城县"
},
{
"_id": 1092,
"id": 1390,
"pid": 162,
"city_code": "101181404",
"city_name": "淮阳县"
},
{
"_id": 1093,
"id": 1391,
"pid": 162,
"city_code": "101181403",
"city_name": "太康县"
},
{
"_id": 1094,
"id": 1392,
"pid": 162,
"city_code": "101181409",
"city_name": "鹿邑县"
},
{
"_id": 1095,
"id": 1394,
"pid": 163,
"city_code": "101181602",
"city_name": "西平县"
},
{
"_id": 1096,
"id": 1395,
"pid": 163,
"city_code": "101181604",
"city_name": "上蔡县"
},
{
"_id": 1097,
"id": 1396,
"pid": 163,
"city_code": "101181607",
"city_name": "平舆县"
},
{
"_id": 1098,
"id": 1397,
"pid": 163,
"city_code": "101181610",
"city_name": "正阳县"
},
{
"_id": 1099,
"id": 1398,
"pid": 163,
"city_code": "101181609",
"city_name": "确山县"
},
{
"_id": 1100,
"id": 1399,
"pid": 163,
"city_code": "101181606",
"city_name": "泌阳县"
},
{
"_id": 1101,
"id": 1400,
"pid": 163,
"city_code": "101181605",
"city_name": "汝南县"
},
{
"_id": 1102,
"id": 1401,
"pid": 163,
"city_code": "101181603",
"city_name": "遂平县"
},
{
"_id": 1103,
"id": 1402,
"pid": 163,
"city_code": "101181608",
"city_name": "新蔡县"
},
{
"_id": 1104,
"id": 1406,
"pid": 164,
"city_code": "101181503",
"city_name": "舞阳县"
},
{
"_id": 1105,
"id": 1407,
"pid": 164,
"city_code": "101181502",
"city_name": "临颍县"
},
{
"_id": 1106,
"id": 1409,
"pid": 165,
"city_code": "101181304",
"city_name": "清丰县"
},
{
"_id": 1107,
"id": 1410,
"pid": 165,
"city_code": "101181303",
"city_name": "南乐县"
},
{
"_id": 1108,
"id": 1411,
"pid": 165,
"city_code": "101181305",
"city_name": "范县"
},
{
"_id": 1109,
"id": 1412,
"pid": 165,
"city_code": "101181302",
"city_name": "台前县"
},
{
"_id": 1110,
"id": 1413,
"pid": 165,
"city_code": "101181301",
"city_name": "濮阳县"
},
{
"_id": 1111,
"id": 1421,
"pid": 166,
"city_code": "101050104",
"city_name": "阿城区"
},
{
"_id": 1112,
"id": 1422,
"pid": 166,
"city_code": "101050103",
"city_name": "呼兰区"
},
{
"_id": 1113,
"id": 1424,
"pid": 166,
"city_code": "101050111",
"city_name": "尚志市"
},
{
"_id": 1114,
"id": 1425,
"pid": 166,
"city_code": "101050102",
"city_name": "双城市"
},
{
"_id": 1115,
"id": 1426,
"pid": 166,
"city_code": "101050112",
"city_name": "五常市"
},
{
"_id": 1116,
"id": 1427,
"pid": 166,
"city_code": "101050109",
"city_name": "方正县"
},
{
"_id": 1117,
"id": 1428,
"pid": 166,
"city_code": "101050105",
"city_name": "宾县"
},
{
"_id": 1118,
"id": 1429,
"pid": 166,
"city_code": "101050106",
"city_name": "依兰县"
},
{
"_id": 1119,
"id": 1430,
"pid": 166,
"city_code": "101050107",
"city_name": "巴彦县"
},
{
"_id": 1120,
"id": 1431,
"pid": 166,
"city_code": "101050108",
"city_name": "通河县"
},
{
"_id": 1121,
"id": 1432,
"pid": 166,
"city_code": "101050113",
"city_name": "木兰县"
},
{
"_id": 1122,
"id": 1433,
"pid": 166,
"city_code": "101050110",
"city_name": "延寿县"
},
{
"_id": 1123,
"id": 1439,
"pid": 167,
"city_code": "101050903",
"city_name": "肇州县"
},
{
"_id": 1124,
"id": 1440,
"pid": 167,
"city_code": "101050904",
"city_name": "肇源县"
},
{
"_id": 1125,
"id": 1441,
"pid": 167,
"city_code": "101050902",
"city_name": "林甸县"
},
{
"_id": 1126,
"id": 1442,
"pid": 167,
"city_code": "101050905",
"city_name": "杜尔伯特"
},
{
"_id": 1127,
"id": 1443,
"pid": 168,
"city_code": "101050704",
"city_name": "呼玛县"
},
{
"_id": 1128,
"id": 1444,
"pid": 168,
"city_code": "101050703",
"city_name": "漠河县"
},
{
"_id": 1129,
"id": 1445,
"pid": 168,
"city_code": "101050702",
"city_name": "塔河县"
},
{
"_id": 1130,
"id": 1448,
"pid": 169,
"city_code": "101051206",
"city_name": "南山区"
},
{
"_id": 1131,
"id": 1452,
"pid": 169,
"city_code": "101051203",
"city_name": "萝北县"
},
{
"_id": 1132,
"id": 1453,
"pid": 169,
"city_code": "101051202",
"city_name": "绥滨县"
},
{
"_id": 1133,
"id": 1455,
"pid": 170,
"city_code": "101050605",
"city_name": "五大连池市"
},
{
"_id": 1134,
"id": 1456,
"pid": 170,
"city_code": "101050606",
"city_name": "北安市"
},
{
"_id": 1135,
"id": 1457,
"pid": 170,
"city_code": "101050602",
"city_name": "嫩江县"
},
{
"_id": 1136,
"id": 1458,
"pid": 170,
"city_code": "101050604",
"city_name": "逊克县"
},
{
"_id": 1137,
"id": 1459,
"pid": 170,
"city_code": "101050603",
"city_name": "孙吴县"
},
{
"_id": 1138,
"id": 1465,
"pid": 171,
"city_code": "101051102",
"city_name": "虎林市"
},
{
"_id": 1139,
"id": 1466,
"pid": 171,
"city_code": "101051103",
"city_name": "密山市"
},
{
"_id": 1140,
"id": 1467,
"pid": 171,
"city_code": "101051104",
"city_name": "鸡东县"
},
{
"_id": 1141,
"id": 1472,
"pid": 172,
"city_code": "101050406",
"city_name": "同江市"
},
{
"_id": 1142,
"id": 1473,
"pid": 172,
"city_code": "101050407",
"city_name": "富锦市"
},
{
"_id": 1143,
"id": 1474,
"pid": 172,
"city_code": "101050405",
"city_name": "桦南县"
},
{
"_id": 1144,
"id": 1475,
"pid": 172,
"city_code": "101050404",
"city_name": "桦川县"
},
{
"_id": 1145,
"id": 1476,
"pid": 172,
"city_code": "101050402",
"city_name": "汤原县"
},
{
"_id": 1146,
"id": 1477,
"pid": 172,
"city_code": "101050403",
"city_name": "抚远县"
},
{
"_id": 1147,
"id": 1482,
"pid": 173,
"city_code": "101050305",
"city_name": "绥芬河市"
},
{
"_id": 1148,
"id": 1483,
"pid": 173,
"city_code": "101050302",
"city_name": "海林市"
},
{
"_id": 1149,
"id": 1484,
"pid": 173,
"city_code": "101050306",
"city_name": "宁安市"
},
{
"_id": 1150,
"id": 1485,
"pid": 173,
"city_code": "101050303",
"city_name": "穆棱市"
},
{
"_id": 1151,
"id": 1486,
"pid": 173,
"city_code": "101050307",
"city_name": "东宁县"
},
{
"_id": 1152,
"id": 1487,
"pid": 173,
"city_code": "101050304",
"city_name": "林口县"
},
{
"_id": 1153,
"id": 1491,
"pid": 174,
"city_code": "101051002",
"city_name": "勃利县"
},
{
"_id": 1154,
"id": 1499,
"pid": 175,
"city_code": "101050202",
"city_name": "讷河市"
},
{
"_id": 1155,
"id": 1500,
"pid": 175,
"city_code": "101050203",
"city_name": "龙江县"
},
{
"_id": 1156,
"id": 1501,
"pid": 175,
"city_code": "101050206",
"city_name": "依安县"
},
{
"_id": 1157,
"id": 1502,
"pid": 175,
"city_code": "101050210",
"city_name": "泰来县"
},
{
"_id": 1158,
"id": 1503,
"pid": 175,
"city_code": "101050204",
"city_name": "甘南县"
},
{
"_id": 1159,
"id": 1504,
"pid": 175,
"city_code": "101050205",
"city_name": "富裕县"
},
{
"_id": 1160,
"id": 1505,
"pid": 175,
"city_code": "101050208",
"city_name": "克山县"
},
{
"_id": 1161,
"id": 1506,
"pid": 175,
"city_code": "101050209",
"city_name": "克东县"
},
{
"_id": 1162,
"id": 1507,
"pid": 175,
"city_code": "101050207",
"city_name": "拜泉县"
},
{
"_id": 1163,
"id": 1512,
"pid": 176,
"city_code": "101051302",
"city_name": "集贤县"
},
{
"_id": 1164,
"id": 1513,
"pid": 176,
"city_code": "101051305",
"city_name": "友谊县"
},
{
"_id": 1165,
"id": 1514,
"pid": 176,
"city_code": "101051303",
"city_name": "宝清县"
},
{
"_id": 1166,
"id": 1515,
"pid": 176,
"city_code": "101051304",
"city_name": "饶河县"
},
{
"_id": 1167,
"id": 1517,
"pid": 177,
"city_code": "101050503",
"city_name": "安达市"
},
{
"_id": 1168,
"id": 1518,
"pid": 177,
"city_code": "101050502",
"city_name": "肇东市"
},
{
"_id": 1169,
"id": 1519,
"pid": 177,
"city_code": "101050504",
"city_name": "海伦市"
},
{
"_id": 1170,
"id": 1520,
"pid": 177,
"city_code": "101050506",
"city_name": "望奎县"
},
{
"_id": 1171,
"id": 1521,
"pid": 177,
"city_code": "101050507",
"city_name": "兰西县"
},
{
"_id": 1172,
"id": 1522,
"pid": 177,
"city_code": "101050508",
"city_name": "青冈县"
},
{
"_id": 1173,
"id": 1523,
"pid": 177,
"city_code": "101050509",
"city_name": "庆安县"
},
{
"_id": 1174,
"id": 1524,
"pid": 177,
"city_code": "101050505",
"city_name": "明水县"
},
{
"_id": 1175,
"id": 1525,
"pid": 177,
"city_code": "101050510",
"city_name": "绥棱县"
},
{
"_id": 1176,
"id": 1526,
"pid": 178,
"city_code": "101050801",
"city_name": "伊春区"
},
{
"_id": 1177,
"id": 1536,
"pid": 178,
"city_code": "101050803",
"city_name": "五营区"
},
{
"_id": 1178,
"id": 1540,
"pid": 178,
"city_code": "101050802",
"city_name": "乌伊岭区"
},
{
"_id": 1179,
"id": 1541,
"pid": 178,
"city_code": "101050804",
"city_name": "铁力市"
},
{
"_id": 1180,
"id": 1542,
"pid": 178,
"city_code": "101050805",
"city_name": "嘉荫县"
},
{
"_id": 1181,
"id": 1550,
"pid": 179,
"city_code": "101200106",
"city_name": "东西湖区"
},
{
"_id": 1182,
"id": 1552,
"pid": 179,
"city_code": "101200102",
"city_name": "蔡甸区"
},
{
"_id": 1183,
"id": 1553,
"pid": 179,
"city_code": "101200105",
"city_name": "江夏区"
},
{
"_id": 1184,
"id": 1554,
"pid": 179,
"city_code": "101200103",
"city_name": "黄陂区"
},
{
"_id": 1185,
"id": 1555,
"pid": 179,
"city_code": "101200104",
"city_name": "新洲区"
},
{
"_id": 1186,
"id": 1560,
"pid": 181,
"city_code": "101200302",
"city_name": "梁子湖区"
},
{
"_id": 1187,
"id": 1562,
"pid": 182,
"city_code": "101200503",
"city_name": "麻城市"
},
{
"_id": 1188,
"id": 1563,
"pid": 182,
"city_code": "101200509",
"city_name": "武穴市"
},
{
"_id": 1189,
"id": 1564,
"pid": 182,
"city_code": "101200510",
"city_name": "团风县"
},
{
"_id": 1190,
"id": 1565,
"pid": 182,
"city_code": "101200502",
"city_name": "红安县"
},
{
"_id": 1191,
"id": 1566,
"pid": 182,
"city_code": "101200504",
"city_name": "罗田县"
},
{
"_id": 1192,
"id": 1567,
"pid": 182,
"city_code": "101200505",
"city_name": "英山县"
},
{
"_id": 1193,
"id": 1568,
"pid": 182,
"city_code": "101200506",
"city_name": "浠水县"
},
{
"_id": 1194,
"id": 1569,
"pid": 182,
"city_code": "101200507",
"city_name": "蕲春县"
},
{
"_id": 1195,
"id": 1570,
"pid": 182,
"city_code": "101200508",
"city_name": "黄梅县"
},
{
"_id": 1196,
"id": 1572,
"pid": 183,
"city_code": "101200606",
"city_name": "西塞山区"
},
{
"_id": 1197,
"id": 1573,
"pid": 183,
"city_code": "101200605",
"city_name": "下陆区"
},
{
"_id": 1198,
"id": 1574,
"pid": 183,
"city_code": "101200604",
"city_name": "铁山区"
},
{
"_id": 1199,
"id": 1575,
"pid": 183,
"city_code": "101200602",
"city_name": "大冶市"
},
{
"_id": 1200,
"id": 1576,
"pid": 183,
"city_code": "101200603",
"city_name": "阳新县"
},
{
"_id": 1201,
"id": 1578,
"pid": 184,
"city_code": "101201404",
"city_name": "掇刀区"
},
{
"_id": 1202,
"id": 1579,
"pid": 184,
"city_code": "101201402",
"city_name": "钟祥市"
},
{
"_id": 1203,
"id": 1580,
"pid": 184,
"city_code": "101201403",
"city_name": "京山县"
},
{
"_id": 1204,
"id": 1581,
"pid": 184,
"city_code": "101201405",
"city_name": "沙洋县"
},
{
"_id": 1205,
"id": 1583,
"pid": 185,
"city_code": "101200801",
"city_name": "荆州区"
},
{
"_id": 1206,
"id": 1584,
"pid": 185,
"city_code": "101200804",
"city_name": "石首市"
},
{
"_id": 1207,
"id": 1585,
"pid": 185,
"city_code": "101200806",
"city_name": "洪湖市"
},
{
"_id": 1208,
"id": 1586,
"pid": 185,
"city_code": "101200807",
"city_name": "松滋市"
},
{
"_id": 1209,
"id": 1587,
"pid": 185,
"city_code": "101200803",
"city_name": "公安县"
},
{
"_id": 1210,
"id": 1588,
"pid": 185,
"city_code": "101200805",
"city_name": "监利县"
},
{
"_id": 1211,
"id": 1589,
"pid": 185,
"city_code": "101200802",
"city_name": "江陵县"
},
{
"_id": 1212,
"id": 1590,
"pid": 186,
"city_code": "101201701",
"city_name": "潜江市"
},
{
"_id": 1213,
"id": 1592,
"pid": 188,
"city_code": "101201109",
"city_name": "张湾区"
},
{
"_id": 1214,
"id": 1593,
"pid": 188,
"city_code": "101201108",
"city_name": "茅箭区"
},
{
"_id": 1215,
"id": 1594,
"pid": 188,
"city_code": "101201107",
"city_name": "丹江口市"
},
{
"_id": 1216,
"id": 1595,
"pid": 188,
"city_code": "101201104",
"city_name": "郧县"
},
{
"_id": 1217,
"id": 1596,
"pid": 188,
"city_code": "101201103",
"city_name": "郧西县"
},
{
"_id": 1218,
"id": 1597,
"pid": 188,
"city_code": "101201105",
"city_name": "竹山县"
},
{
"_id": 1219,
"id": 1598,
"pid": 188,
"city_code": "101201102",
"city_name": "竹溪县"
},
{
"_id": 1220,
"id": 1599,
"pid": 188,
"city_code": "101201106",
"city_name": "房县"
},
{
"_id": 1221,
"id": 1601,
"pid": 189,
"city_code": "101201302",
"city_name": "广水市"
},
{
"_id": 1222,
"id": 1602,
"pid": 190,
"city_code": "101201501",
"city_name": "天门市"
},
{
"_id": 1223,
"id": 1604,
"pid": 191,
"city_code": "101200702",
"city_name": "赤壁市"
},
{
"_id": 1224,
"id": 1605,
"pid": 191,
"city_code": "101200703",
"city_name": "嘉鱼县"
},
{
"_id": 1225,
"id": 1606,
"pid": 191,
"city_code": "101200705",
"city_name": "通城县"
},
{
"_id": 1226,
"id": 1607,
"pid": 191,
"city_code": "101200704",
"city_name": "崇阳县"
},
{
"_id": 1227,
"id": 1608,
"pid": 191,
"city_code": "101200706",
"city_name": "通山县"
},
{
"_id": 1228,
"id": 1611,
"pid": 192,
"city_code": "101200202",
"city_name": "襄州区"
},
{
"_id": 1229,
"id": 1612,
"pid": 192,
"city_code": "101200206",
"city_name": "老河口市"
},
{
"_id": 1230,
"id": 1613,
"pid": 192,
"city_code": "101200208",
"city_name": "枣阳市"
},
{
"_id": 1231,
"id": 1614,
"pid": 192,
"city_code": "101200205",
"city_name": "宜城市"
},
{
"_id": 1232,
"id": 1615,
"pid": 192,
"city_code": "101200204",
"city_name": "南漳县"
},
{
"_id": 1233,
"id": 1616,
"pid": 192,
"city_code": "101200207",
"city_name": "谷城县"
},
{
"_id": 1234,
"id": 1617,
"pid": 192,
"city_code": "101200203",
"city_name": "保康县"
},
{
"_id": 1235,
"id": 1619,
"pid": 193,
"city_code": "101200405",
"city_name": "应城市"
},
{
"_id": 1236,
"id": 1620,
"pid": 193,
"city_code": "101200402",
"city_name": "安陆市"
},
{
"_id": 1237,
"id": 1621,
"pid": 193,
"city_code": "101200406",
"city_name": "汉川市"
},
{
"_id": 1238,
"id": 1622,
"pid": 193,
"city_code": "101200407",
"city_name": "孝昌县"
},
{
"_id": 1239,
"id": 1623,
"pid": 193,
"city_code": "101200404",
"city_name": "大悟县"
},
{
"_id": 1240,
"id": 1624,
"pid": 193,
"city_code": "101200403",
"city_name": "云梦县"
},
{
"_id": 1241,
"id": 1625,
"pid": 194,
"city_code": "101200908",
"city_name": "长阳县"
},
{
"_id": 1242,
"id": 1626,
"pid": 194,
"city_code": "101200906",
"city_name": "五峰县"
},
{
"_id": 1243,
"id": 1631,
"pid": 194,
"city_code": "101200912",
"city_name": "夷陵区"
},
{
"_id": 1244,
"id": 1632,
"pid": 194,
"city_code": "101200909",
"city_name": "宜都市"
},
{
"_id": 1245,
"id": 1633,
"pid": 194,
"city_code": "101200907",
"city_name": "当阳市"
},
{
"_id": 1246,
"id": 1634,
"pid": 194,
"city_code": "101200910",
"city_name": "枝江市"
},
{
"_id": 1247,
"id": 1635,
"pid": 194,
"city_code": "101200902",
"city_name": "远安县"
},
{
"_id": 1248,
"id": 1636,
"pid": 194,
"city_code": "101200904",
"city_name": "兴山县"
},
{
"_id": 1249,
"id": 1637,
"pid": 194,
"city_code": "101200903",
"city_name": "秭归县"
},
{
"_id": 1250,
"id": 1638,
"pid": 195,
"city_code": "101201001",
"city_name": "恩施市"
},
{
"_id": 1251,
"id": 1639,
"pid": 195,
"city_code": "101201002",
"city_name": "利川市"
},
{
"_id": 1252,
"id": 1640,
"pid": 195,
"city_code": "101201003",
"city_name": "建始县"
},
{
"_id": 1253,
"id": 1641,
"pid": 195,
"city_code": "101201008",
"city_name": "巴东县"
},
{
"_id": 1254,
"id": 1642,
"pid": 195,
"city_code": "101201005",
"city_name": "宣恩县"
},
{
"_id": 1255,
"id": 1643,
"pid": 195,
"city_code": "101201004",
"city_name": "咸丰县"
},
{
"_id": 1256,
"id": 1644,
"pid": 195,
"city_code": "101201007",
"city_name": "来凤县"
},
{
"_id": 1257,
"id": 1645,
"pid": 195,
"city_code": "101201006",
"city_name": "鹤峰县"
},
{
"_id": 1258,
"id": 1652,
"pid": 196,
"city_code": "101250103",
"city_name": "浏阳市"
},
{
"_id": 1259,
"id": 1653,
"pid": 196,
"city_code": "101250101",
"city_name": "长沙县"
},
{
"_id": 1260,
"id": 1654,
"pid": 196,
"city_code": "101250105",
"city_name": "望城县"
},
{
"_id": 1261,
"id": 1655,
"pid": 196,
"city_code": "101250102",
"city_name": "宁乡县"
},
{
"_id": 1262,
"id": 1657,
"pid": 197,
"city_code": "101251104",
"city_name": "武陵源区"
},
{
"_id": 1263,
"id": 1658,
"pid": 197,
"city_code": "101251103",
"city_name": "慈利县"
},
{
"_id": 1264,
"id": 1659,
"pid": 197,
"city_code": "101251102",
"city_name": "桑植县"
},
{
"_id": 1265,
"id": 1662,
"pid": 198,
"city_code": "101250608",
"city_name": "津市市"
},
{
"_id": 1266,
"id": 1663,
"pid": 198,
"city_code": "101250602",
"city_name": "安乡县"
},
{
"_id": 1267,
"id": 1664,
"pid": 198,
"city_code": "101250604",
"city_name": "汉寿县"
},
{
"_id": 1268,
"id": 1665,
"pid": 198,
"city_code": "101250605",
"city_name": "澧县"
},
{
"_id": 1269,
"id": 1666,
"pid": 198,
"city_code": "101250606",
"city_name": "临澧县"
},
{
"_id": 1270,
"id": 1667,
"pid": 198,
"city_code": "101250603",
"city_name": "桃源县"
},
{
"_id": 1271,
"id": 1668,
"pid": 198,
"city_code": "101250607",
"city_name": "石门县"
},
{
"_id": 1272,
"id": 1670,
"pid": 199,
"city_code": "101250512",
"city_name": "苏仙区"
},
{
"_id": 1273,
"id": 1671,
"pid": 199,
"city_code": "101250507",
"city_name": "资兴市"
},
{
"_id": 1274,
"id": 1672,
"pid": 199,
"city_code": "101250502",
"city_name": "桂阳县"
},
{
"_id": 1275,
"id": 1673,
"pid": 199,
"city_code": "101250504",
"city_name": "宜章县"
},
{
"_id": 1276,
"id": 1674,
"pid": 199,
"city_code": "101250510",
"city_name": "永兴县"
},
{
"_id": 1277,
"id": 1675,
"pid": 199,
"city_code": "101250503",
"city_name": "嘉禾县"
},
{
"_id": 1278,
"id": 1676,
"pid": 199,
"city_code": "101250505",
"city_name": "临武县"
},
{
"_id": 1279,
"id": 1677,
"pid": 199,
"city_code": "101250508",
"city_name": "汝城县"
},
{
"_id": 1280,
"id": 1678,
"pid": 199,
"city_code": "101250511",
"city_name": "桂东县"
},
{
"_id": 1281,
"id": 1679,
"pid": 199,
"city_code": "101250509",
"city_name": "安仁县"
},
{
"_id": 1282,
"id": 1684,
"pid": 200,
"city_code": "101250409",
"city_name": "南岳区"
},
{
"_id": 1283,
"id": 1685,
"pid": 200,
"city_code": "101250408",
"city_name": "耒阳市"
},
{
"_id": 1284,
"id": 1686,
"pid": 200,
"city_code": "101250406",
"city_name": "常宁市"
},
{
"_id": 1285,
"id": 1687,
"pid": 200,
"city_code": "101250405",
"city_name": "衡阳县"
},
{
"_id": 1286,
"id": 1688,
"pid": 200,
"city_code": "101250407",
"city_name": "衡南县"
},
{
"_id": 1287,
"id": 1689,
"pid": 200,
"city_code": "101250402",
"city_name": "衡山县"
},
{
"_id": 1288,
"id": 1690,
"pid": 200,
"city_code": "101250403",
"city_name": "衡东县"
},
{
"_id": 1289,
"id": 1691,
"pid": 200,
"city_code": "101250404",
"city_name": "祁东县"
},
{
"_id": 1290,
"id": 1692,
"pid": 201,
"city_code": "101251202",
"city_name": "鹤城区"
},
{
"_id": 1291,
"id": 1693,
"pid": 201,
"city_code": "101251205",
"city_name": "靖州县"
},
{
"_id": 1292,
"id": 1694,
"pid": 201,
"city_code": "101251208",
"city_name": "麻阳县"
},
{
"_id": 1293,
"id": 1695,
"pid": 201,
"city_code": "101251207",
"city_name": "通道县"
},
{
"_id": 1294,
"id": 1696,
"pid": 201,
"city_code": "101251209",
"city_name": "新晃县"
},
{
"_id": 1295,
"id": 1697,
"pid": 201,
"city_code": "101251210",
"city_name": "芷江县"
},
{
"_id": 1296,
"id": 1698,
"pid": 201,
"city_code": "101251203",
"city_name": "沅陵县"
},
{
"_id": 1297,
"id": 1699,
"pid": 201,
"city_code": "101251204",
"city_name": "辰溪县"
},
{
"_id": 1298,
"id": 1700,
"pid": 201,
"city_code": "101251211",
"city_name": "溆浦县"
},
{
"_id": 1299,
"id": 1701,
"pid": 201,
"city_code": "101251212",
"city_name": "中方县"
},
{
"_id": 1300,
"id": 1702,
"pid": 201,
"city_code": "101251206",
"city_name": "会同县"
},
{
"_id": 1301,
"id": 1703,
"pid": 201,
"city_code": "101251213",
"city_name": "洪江市"
},
{
"_id": 1302,
"id": 1705,
"pid": 202,
"city_code": "101250803",
"city_name": "冷水江市"
},
{
"_id": 1303,
"id": 1706,
"pid": 202,
"city_code": "101250806",
"city_name": "涟源市"
},
{
"_id": 1304,
"id": 1707,
"pid": 202,
"city_code": "101250802",
"city_name": "双峰县"
},
{
"_id": 1305,
"id": 1708,
"pid": 202,
"city_code": "101250805",
"city_name": "新化县"
},
{
"_id": 1306,
"id": 1709,
"pid": 203,
"city_code": "101250909",
"city_name": "城步县"
},
{
"_id": 1307,
"id": 1713,
"pid": 203,
"city_code": "101250908",
"city_name": "武冈市"
},
{
"_id": 1308,
"id": 1714,
"pid": 203,
"city_code": "101250905",
"city_name": "邵东县"
},
{
"_id": 1309,
"id": 1715,
"pid": 203,
"city_code": "101250904",
"city_name": "新邵县"
},
{
"_id": 1310,
"id": 1716,
"pid": 203,
"city_code": "101250910",
"city_name": "邵阳县"
},
{
"_id": 1311,
"id": 1717,
"pid": 203,
"city_code": "101250902",
"city_name": "隆回县"
},
{
"_id": 1312,
"id": 1718,
"pid": 203,
"city_code": "101250903",
"city_name": "洞口县"
},
{
"_id": 1313,
"id": 1719,
"pid": 203,
"city_code": "101250906",
"city_name": "绥宁县"
},
{
"_id": 1314,
"id": 1720,
"pid": 203,
"city_code": "101250907",
"city_name": "新宁县"
},
{
"_id": 1315,
"id": 1723,
"pid": 204,
"city_code": "101250203",
"city_name": "湘乡市"
},
{
"_id": 1316,
"id": 1724,
"pid": 204,
"city_code": "101250202",
"city_name": "韶山市"
},
{
"_id": 1317,
"id": 1725,
"pid": 204,
"city_code": "101250201",
"city_name": "湘潭县"
},
{
"_id": 1318,
"id": 1726,
"pid": 205,
"city_code": "101251501",
"city_name": "吉首市"
},
{
"_id": 1319,
"id": 1727,
"pid": 205,
"city_code": "101251506",
"city_name": "泸溪县"
},
{
"_id": 1320,
"id": 1728,
"pid": 205,
"city_code": "101251505",
"city_name": "凤凰县"
},
{
"_id": 1321,
"id": 1729,
"pid": 205,
"city_code": "101251508",
"city_name": "花垣县"
},
{
"_id": 1322,
"id": 1730,
"pid": 205,
"city_code": "101251502",
"city_name": "保靖县"
},
{
"_id": 1323,
"id": 1731,
"pid": 205,
"city_code": "101251504",
"city_name": "古丈县"
},
{
"_id": 1324,
"id": 1732,
"pid": 205,
"city_code": "101251503",
"city_name": "永顺县"
},
{
"_id": 1325,
"id": 1733,
"pid": 205,
"city_code": "101251507",
"city_name": "龙山县"
},
{
"_id": 1326,
"id": 1734,
"pid": 206,
"city_code": "101250701",
"city_name": "赫山区"
},
{
"_id": 1327,
"id": 1736,
"pid": 206,
"city_code": "101250705",
"city_name": "沅江市"
},
{
"_id": 1328,
"id": 1737,
"pid": 206,
"city_code": "101250702",
"city_name": "南县"
},
{
"_id": 1329,
"id": 1738,
"pid": 206,
"city_code": "101250703",
"city_name": "桃江县"
},
{
"_id": 1330,
"id": 1739,
"pid": 206,
"city_code": "101250704",
"city_name": "安化县"
},
{
"_id": 1331,
"id": 1740,
"pid": 207,
"city_code": "101251410",
"city_name": "江华县"
},
{
"_id": 1332,
"id": 1743,
"pid": 207,
"city_code": "101251402",
"city_name": "祁阳县"
},
{
"_id": 1333,
"id": 1744,
"pid": 207,
"city_code": "101251403",
"city_name": "东安县"
},
{
"_id": 1334,
"id": 1745,
"pid": 207,
"city_code": "101251404",
"city_name": "双牌县"
},
{
"_id": 1335,
"id": 1746,
"pid": 207,
"city_code": "101251405",
"city_name": "道县"
},
{
"_id": 1336,
"id": 1747,
"pid": 207,
"city_code": "101251407",
"city_name": "江永县"
},
{
"_id": 1337,
"id": 1748,
"pid": 207,
"city_code": "101251406",
"city_name": "宁远县"
},
{
"_id": 1338,
"id": 1749,
"pid": 207,
"city_code": "101251408",
"city_name": "蓝山县"
},
{
"_id": 1339,
"id": 1750,
"pid": 207,
"city_code": "101251409",
"city_name": "新田县"
},
{
"_id": 1340,
"id": 1754,
"pid": 208,
"city_code": "101251004",
"city_name": "汨罗市"
},
{
"_id": 1341,
"id": 1755,
"pid": 208,
"city_code": "101251006",
"city_name": "临湘市"
},
{
"_id": 1342,
"id": 1756,
"pid": 208,
"city_code": "101251001",
"city_name": "岳阳县"
},
{
"_id": 1343,
"id": 1757,
"pid": 208,
"city_code": "101251002",
"city_name": "华容县"
},
{
"_id": 1344,
"id": 1758,
"pid": 208,
"city_code": "101251003",
"city_name": "湘阴县"
},
{
"_id": 1345,
"id": 1759,
"pid": 208,
"city_code": "101251005",
"city_name": "平江县"
},
{
"_id": 1346,
"id": 1764,
"pid": 209,
"city_code": "101250303",
"city_name": "醴陵市"
},
{
"_id": 1347,
"id": 1765,
"pid": 209,
"city_code": "101250304",
"city_name": "株洲县"
},
{
"_id": 1348,
"id": 1766,
"pid": 209,
"city_code": "101250302",
"city_name": "攸县"
},
{
"_id": 1349,
"id": 1767,
"pid": 209,
"city_code": "101250305",
"city_name": "茶陵县"
},
{
"_id": 1350,
"id": 1768,
"pid": 209,
"city_code": "101250306",
"city_name": "炎陵县"
},
{
"_id": 1351,
"id": 1774,
"pid": 210,
"city_code": "101060106",
"city_name": "双阳区"
},
{
"_id": 1352,
"id": 1779,
"pid": 210,
"city_code": "101060103",
"city_name": "德惠市"
},
{
"_id": 1353,
"id": 1780,
"pid": 210,
"city_code": "101060104",
"city_name": "九台市"
},
{
"_id": 1354,
"id": 1781,
"pid": 210,
"city_code": "101060105",
"city_name": "榆树市"
},
{
"_id": 1355,
"id": 1782,
"pid": 210,
"city_code": "101060102",
"city_name": "农安县"
},
{
"_id": 1356,
"id": 1787,
"pid": 211,
"city_code": "101060204",
"city_name": "蛟河市"
},
{
"_id": 1357,
"id": 1788,
"pid": 211,
"city_code": "101060206",
"city_name": "桦甸市"
},
{
"_id": 1358,
"id": 1789,
"pid": 211,
"city_code": "101060202",
"city_name": "舒兰市"
},
{
"_id": 1359,
"id": 1790,
"pid": 211,
"city_code": "101060205",
"city_name": "磐石市"
},
{
"_id": 1360,
"id": 1791,
"pid": 211,
"city_code": "101060203",
"city_name": "永吉县"
},
{
"_id": 1361,
"id": 1793,
"pid": 212,
"city_code": "101060602",
"city_name": "洮南市"
},
{
"_id": 1362,
"id": 1794,
"pid": 212,
"city_code": "101060603",
"city_name": "大安市"
},
{
"_id": 1363,
"id": 1795,
"pid": 212,
"city_code": "101060604",
"city_name": "镇赉县"
},
{
"_id": 1364,
"id": 1796,
"pid": 212,
"city_code": "101060605",
"city_name": "通榆县"
},
{
"_id": 1365,
"id": 1797,
"pid": 213,
"city_code": "101060907",
"city_name": "江源区"
},
{
"_id": 1366,
"id": 1799,
"pid": 213,
"city_code": "101060905",
"city_name": "长白县"
},
{
"_id": 1367,
"id": 1800,
"pid": 213,
"city_code": "101060903",
"city_name": "临江市"
},
{
"_id": 1368,
"id": 1801,
"pid": 213,
"city_code": "101060906",
"city_name": "抚松县"
},
{
"_id": 1369,
"id": 1802,
"pid": 213,
"city_code": "101060902",
"city_name": "靖宇县"
},
{
"_id": 1370,
"id": 1805,
"pid": 214,
"city_code": "101060702",
"city_name": "东丰县"
},
{
"_id": 1371,
"id": 1806,
"pid": 214,
"city_code": "101060703",
"city_name": "东辽县"
},
{
"_id": 1372,
"id": 1809,
"pid": 215,
"city_code": "101060405",
"city_name": "伊通县"
},
{
"_id": 1373,
"id": 1810,
"pid": 215,
"city_code": "101060404",
"city_name": "公主岭市"
},
{
"_id": 1374,
"id": 1811,
"pid": 215,
"city_code": "101060402",
"city_name": "双辽市"
},
{
"_id": 1375,
"id": 1812,
"pid": 215,
"city_code": "101060403",
"city_name": "梨树县"
},
{
"_id": 1376,
"id": 1813,
"pid": 216,
"city_code": "101060803",
"city_name": "前郭尔罗斯"
},
{
"_id": 1377,
"id": 1815,
"pid": 216,
"city_code": "101060804",
"city_name": "长岭县"
},
{
"_id": 1378,
"id": 1816,
"pid": 216,
"city_code": "101060802",
"city_name": "乾安县"
},
{
"_id": 1379,
"id": 1817,
"pid": 216,
"city_code": "101060805",
"city_name": "扶余市"
},
{
"_id": 1380,
"id": 1820,
"pid": 217,
"city_code": "101060502",
"city_name": "梅河口市"
},
{
"_id": 1381,
"id": 1821,
"pid": 217,
"city_code": "101060505",
"city_name": "集安市"
},
{
"_id": 1382,
"id": 1822,
"pid": 217,
"city_code": "101060506",
"city_name": "通化县"
},
{
"_id": 1383,
"id": 1823,
"pid": 217,
"city_code": "101060504",
"city_name": "辉南县"
},
{
"_id": 1384,
"id": 1824,
"pid": 217,
"city_code": "101060503",
"city_name": "柳河县"
},
{
"_id": 1385,
"id": 1825,
"pid": 218,
"city_code": "101060301",
"city_name": "延吉市"
},
{
"_id": 1386,
"id": 1826,
"pid": 218,
"city_code": "101060309",
"city_name": "图们市"
},
{
"_id": 1387,
"id": 1827,
"pid": 218,
"city_code": "101060302",
"city_name": "敦化市"
},
{
"_id": 1388,
"id": 1828,
"pid": 218,
"city_code": "101060308",
"city_name": "珲春市"
},
{
"_id": 1389,
"id": 1829,
"pid": 218,
"city_code": "101060307",
"city_name": "龙井市"
},
{
"_id": 1390,
"id": 1830,
"pid": 218,
"city_code": "101060305",
"city_name": "和龙市"
},
{
"_id": 1391,
"id": 1831,
"pid": 218,
"city_code": "101060303",
"city_name": "安图县"
},
{
"_id": 1392,
"id": 1832,
"pid": 218,
"city_code": "101060304",
"city_name": "汪清县"
},
{
"_id": 1393,
"id": 1841,
"pid": 219,
"city_code": "101190107",
"city_name": "浦口区"
},
{
"_id": 1394,
"id": 1842,
"pid": 219,
"city_code": "101190104",
"city_name": "江宁区"
},
{
"_id": 1395,
"id": 1843,
"pid": 219,
"city_code": "101190105",
"city_name": "六合区"
},
{
"_id": 1396,
"id": 1844,
"pid": 219,
"city_code": "101190102",
"city_name": "溧水区"
},
{
"_id": 1397,
"id": 1845,
"pid": 219,
"city_code": "101190103",
"city_name": "高淳县"
},
{
"_id": 1398,
"id": 1850,
"pid": 220,
"city_code": "101190405",
"city_name": "吴中区"
},
{
"_id": 1399,
"id": 1853,
"pid": 220,
"city_code": "101190404",
"city_name": "昆山市"
},
{
"_id": 1400,
"id": 1854,
"pid": 220,
"city_code": "101190402",
"city_name": "常熟市"
},
{
"_id": 1401,
"id": 1855,
"pid": 220,
"city_code": "101190403",
"city_name": "张家港市"
},
{
"_id": 1402,
"id": 1867,
"pid": 220,
"city_code": "101190407",
"city_name": "吴江区"
},
{
"_id": 1403,
"id": 1868,
"pid": 220,
"city_code": "101190408",
"city_name": "太仓市"
},
{
"_id": 1404,
"id": 1872,
"pid": 221,
"city_code": "101190204",
"city_name": "锡山区"
},
{
"_id": 1405,
"id": 1876,
"pid": 221,
"city_code": "101190202",
"city_name": "江阴市"
},
{
"_id": 1406,
"id": 1877,
"pid": 221,
"city_code": "101190203",
"city_name": "宜兴市"
},
{
"_id": 1407,
"id": 1883,
"pid": 222,
"city_code": "101191104",
"city_name": "武进区"
},
{
"_id": 1408,
"id": 1884,
"pid": 222,
"city_code": "101191102",
"city_name": "溧阳市"
},
{
"_id": 1409,
"id": 1885,
"pid": 222,
"city_code": "101191103",
"city_name": "金坛区"
},
{
"_id": 1410,
"id": 1888,
"pid": 223,
"city_code": "101190908",
"city_name": "楚州区"
},
{
"_id": 1411,
"id": 1889,
"pid": 223,
"city_code": "101190907",
"city_name": "淮阴区"
},
{
"_id": 1412,
"id": 1890,
"pid": 223,
"city_code": "101190905",
"city_name": "涟水县"
},
{
"_id": 1413,
"id": 1891,
"pid": 223,
"city_code": "101190904",
"city_name": "洪泽县"
},
{
"_id": 1414,
"id": 1892,
"pid": 223,
"city_code": "101190903",
"city_name": "盱眙县"
},
{
"_id": 1415,
"id": 1893,
"pid": 223,
"city_code": "101190902",
"city_name": "金湖县"
},
{
"_id": 1416,
"id": 1897,
"pid": 224,
"city_code": "101191003",
"city_name": "赣榆县"
},
{
"_id": 1417,
"id": 1898,
"pid": 224,
"city_code": "101191002",
"city_name": "东海县"
},
{
"_id": 1418,
"id": 1899,
"pid": 224,
"city_code": "101191004",
"city_name": "灌云县"
},
{
"_id": 1419,
"id": 1900,
"pid": 224,
"city_code": "101191005",
"city_name": "灌南县"
},
{
"_id": 1420,
"id": 1904,
"pid": 225,
"city_code": "101190507",
"city_name": "启东市"
},
{
"_id": 1421,
"id": 1905,
"pid": 225,
"city_code": "101190503",
"city_name": "如皋市"
},
{
"_id": 1422,
"id": 1906,
"pid": 225,
"city_code": "101190509",
"city_name": "通州区"
},
{
"_id": 1423,
"id": 1907,
"pid": 225,
"city_code": "101190508",
"city_name": "海门市"
},
{
"_id": 1424,
"id": 1908,
"pid": 225,
"city_code": "101190502",
"city_name": "海安县"
},
{
"_id": 1425,
"id": 1909,
"pid": 225,
"city_code": "101190504",
"city_name": "如东县"
},
{
"_id": 1426,
"id": 1911,
"pid": 226,
"city_code": "101191305",
"city_name": "宿豫区"
},
{
"_id": 1427,
"id": 1912,
"pid": 226,
"city_code": "101191305",
"city_name": "宿豫县"
},
{
"_id": 1428,
"id": 1913,
"pid": 226,
"city_code": "101191302",
"city_name": "沭阳县"
},
{
"_id": 1429,
"id": 1914,
"pid": 226,
"city_code": "101191303",
"city_name": "泗阳县"
},
{
"_id": 1430,
"id": 1915,
"pid": 226,
"city_code": "101191304",
"city_name": "泗洪县"
},
{
"_id": 1431,
"id": 1918,
"pid": 227,
"city_code": "101191202",
"city_name": "兴化市"
},
{
"_id": 1432,
"id": 1919,
"pid": 227,
"city_code": "101191205",
"city_name": "靖江市"
},
{
"_id": 1433,
"id": 1920,
"pid": 227,
"city_code": "101191203",
"city_name": "泰兴市"
},
{
"_id": 1434,
"id": 1921,
"pid": 227,
"city_code": "101191204",
"city_name": "姜堰区"
},
{
"_id": 1435,
"id": 1927,
"pid": 228,
"city_code": "101190807",
"city_name": "新沂市"
},
{
"_id": 1436,
"id": 1928,
"pid": 228,
"city_code": "101190805",
"city_name": "邳州市"
},
{
"_id": 1437,
"id": 1929,
"pid": 228,
"city_code": "101190803",
"city_name": "丰县"
},
{
"_id": 1438,
"id": 1930,
"pid": 228,
"city_code": "101190804",
"city_name": "沛县"
},
{
"_id": 1439,
"id": 1931,
"pid": 228,
"city_code": "101190802",
"city_name": "铜山区"
},
{
"_id": 1440,
"id": 1932,
"pid": 228,
"city_code": "101190806",
"city_name": "睢宁县"
},
{
"_id": 1441,
"id": 1935,
"pid": 229,
"city_code": "101190709",
"city_name": "盐都区"
},
{
"_id": 1442,
"id": 1937,
"pid": 229,
"city_code": "101190707",
"city_name": "东台市"
},
{
"_id": 1443,
"id": 1938,
"pid": 229,
"city_code": "101190708",
"city_name": "大丰区"
},
{
"_id": 1444,
"id": 1939,
"pid": 229,
"city_code": "101190702",
"city_name": "响水县"
},
{
"_id": 1445,
"id": 1940,
"pid": 229,
"city_code": "101190703",
"city_name": "滨海县"
},
{
"_id": 1446,
"id": 1941,
"pid": 229,
"city_code": "101190704",
"city_name": "阜宁县"
},
{
"_id": 1447,
"id": 1942,
"pid": 229,
"city_code": "101190705",
"city_name": "射阳县"
},
{
"_id": 1448,
"id": 1943,
"pid": 229,
"city_code": "101190706",
"city_name": "建湖县"
},
{
"_id": 1449,
"id": 1946,
"pid": 230,
"city_code": "101190606",
"city_name": "邗江区"
},
{
"_id": 1450,
"id": 1947,
"pid": 230,
"city_code": "101190603",
"city_name": "仪征市"
},
{
"_id": 1451,
"id": 1948,
"pid": 230,
"city_code": "101190604",
"city_name": "高邮市"
},
{
"_id": 1452,
"id": 1949,
"pid": 230,
"city_code": "101190605",
"city_name": "江都市"
},
{
"_id": 1453,
"id": 1950,
"pid": 230,
"city_code": "101190602",
"city_name": "宝应县"
},
{
"_id": 1454,
"id": 1953,
"pid": 231,
"city_code": "101190305",
"city_name": "丹徒区"
},
{
"_id": 1455,
"id": 1954,
"pid": 231,
"city_code": "101190302",
"city_name": "丹阳市"
},
{
"_id": 1456,
"id": 1955,
"pid": 231,
"city_code": "101190303",
"city_name": "扬中市"
},
{
"_id": 1457,
"id": 1956,
"pid": 231,
"city_code": "101190304",
"city_name": "句容市"
},
{
"_id": 1458,
"id": 1965,
"pid": 232,
"city_code": "101240103",
"city_name": "南昌县"
},
{
"_id": 1459,
"id": 1966,
"pid": 232,
"city_code": "101240102",
"city_name": "新建县"
},
{
"_id": 1460,
"id": 1967,
"pid": 232,
"city_code": "101240104",
"city_name": "安义县"
},
{
"_id": 1461,
"id": 1968,
"pid": 232,
"city_code": "101240105",
"city_name": "进贤县"
},
{
"_id": 1462,
"id": 1970,
"pid": 233,
"city_code": "101240408",
"city_name": "南城县"
},
{
"_id": 1463,
"id": 1971,
"pid": 233,
"city_code": "101240410",
"city_name": "黎川县"
},
{
"_id": 1464,
"id": 1972,
"pid": 233,
"city_code": "101240409",
"city_name": "南丰县"
},
{
"_id": 1465,
"id": 1973,
"pid": 233,
"city_code": "101240404",
"city_name": "崇仁县"
},
{
"_id": 1466,
"id": 1974,
"pid": 233,
"city_code": "101240403",
"city_name": "乐安县"
},
{
"_id": 1467,
"id": 1975,
"pid": 233,
"city_code": "101240407",
"city_name": "宜黄县"
},
{
"_id": 1468,
"id": 1976,
"pid": 233,
"city_code": "101240405",
"city_name": "金溪县"
},
{
"_id": 1469,
"id": 1977,
"pid": 233,
"city_code": "101240406",
"city_name": "资溪县"
},
{
"_id": 1470,
"id": 1978,
"pid": 233,
"city_code": "101240411",
"city_name": "东乡县"
},
{
"_id": 1471,
"id": 1979,
"pid": 233,
"city_code": "101240402",
"city_name": "广昌县"
},
{
"_id": 1472,
"id": 1981,
"pid": 234,
"city_code": "101240710",
"city_name": "于都县"
},
{
"_id": 1473,
"id": 1982,
"pid": 234,
"city_code": "101240709",
"city_name": "瑞金市"
},
{
"_id": 1474,
"id": 1983,
"pid": 234,
"city_code": "101240704",
"city_name": "南康市"
},
{
"_id": 1475,
"id": 1984,
"pid": 234,
"city_code": "101240718",
"city_name": "赣县"
},
{
"_id": 1476,
"id": 1985,
"pid": 234,
"city_code": "101240706",
"city_name": "信丰县"
},
{
"_id": 1477,
"id": 1986,
"pid": 234,
"city_code": "101240705",
"city_name": "大余县"
},
{
"_id": 1478,
"id": 1987,
"pid": 234,
"city_code": "101240703",
"city_name": "上犹县"
},
{
"_id": 1479,
"id": 1988,
"pid": 234,
"city_code": "101240702",
"city_name": "崇义县"
},
{
"_id": 1480,
"id": 1989,
"pid": 234,
"city_code": "101240712",
"city_name": "安远县"
},
{
"_id": 1481,
"id": 1990,
"pid": 234,
"city_code": "101240714",
"city_name": "龙南县"
},
{
"_id": 1482,
"id": 1991,
"pid": 234,
"city_code": "101240715",
"city_name": "定南县"
},
{
"_id": 1483,
"id": 1992,
"pid": 234,
"city_code": "101240713",
"city_name": "全南县"
},
{
"_id": 1484,
"id": 1993,
"pid": 234,
"city_code": "101240707",
"city_name": "宁都县"
},
{
"_id": 1485,
"id": 1994,
"pid": 234,
"city_code": "101240717",
"city_name": "兴国县"
},
{
"_id": 1486,
"id": 1995,
"pid": 234,
"city_code": "101240711",
"city_name": "会昌县"
},
{
"_id": 1487,
"id": 1996,
"pid": 234,
"city_code": "101240716",
"city_name": "寻乌县"
},
{
"_id": 1488,
"id": 1997,
"pid": 234,
"city_code": "101240708",
"city_name": "石城县"
},
{
"_id": 1489,
"id": 1998,
"pid": 235,
"city_code": "101240612",
"city_name": "安福县"
},
{
"_id": 1490,
"id": 2001,
"pid": 235,
"city_code": "101240608",
"city_name": "井冈山市"
},
{
"_id": 1491,
"id": 2002,
"pid": 235,
"city_code": "101240602",
"city_name": "吉安县"
},
{
"_id": 1492,
"id": 2003,
"pid": 235,
"city_code": "101240603",
"city_name": "吉水县"
},
{
"_id": 1493,
"id": 2004,
"pid": 235,
"city_code": "101240605",
"city_name": "峡江县"
},
{
"_id": 1494,
"id": 2005,
"pid": 235,
"city_code": "101240604",
"city_name": "新干县"
},
{
"_id": 1495,
"id": 2006,
"pid": 235,
"city_code": "101240606",
"city_name": "永丰县"
},
{
"_id": 1496,
"id": 2007,
"pid": 235,
"city_code": "101240611",
"city_name": "泰和县"
},
{
"_id": 1497,
"id": 2008,
"pid": 235,
"city_code": "101240610",
"city_name": "遂川县"
},
{
"_id": 1498,
"id": 2009,
"pid": 235,
"city_code": "101240609",
"city_name": "万安县"
},
{
"_id": 1499,
"id": 2010,
"pid": 235,
"city_code": "101240607",
"city_name": "永新县"
},
{
"_id": 1500,
"id": 2013,
"pid": 236,
"city_code": "101240802",
"city_name": "乐平市"
},
{
"_id": 1501,
"id": 2014,
"pid": 236,
"city_code": "101240803",
"city_name": "浮梁县"
},
{
"_id": 1502,
"id": 2016,
"pid": 237,
"city_code": "101240203",
"city_name": "庐山区"
},
{
"_id": 1503,
"id": 2017,
"pid": 237,
"city_code": "101240202",
"city_name": "瑞昌市"
},
{
"_id": 1504,
"id": 2018,
"pid": 237,
"city_code": "101240201",
"city_name": "九江县"
},
{
"_id": 1505,
"id": 2019,
"pid": 237,
"city_code": "101240204",
"city_name": "武宁县"
},
{
"_id": 1506,
"id": 2020,
"pid": 237,
"city_code": "101240212",
"city_name": "修水县"
},
{
"_id": 1507,
"id": 2021,
"pid": 237,
"city_code": "101240206",
"city_name": "永修县"
},
{
"_id": 1508,
"id": 2022,
"pid": 237,
"city_code": "101240205",
"city_name": "德安县"
},
{
"_id": 1509,
"id": 2023,
"pid": 237,
"city_code": "101240209",
"city_name": "星子县"
},
{
"_id": 1510,
"id": 2024,
"pid": 237,
"city_code": "101240210",
"city_name": "都昌县"
},
{
"_id": 1511,
"id": 2025,
"pid": 237,
"city_code": "101240207",
"city_name": "湖口县"
},
{
"_id": 1512,
"id": 2026,
"pid": 237,
"city_code": "101240208",
"city_name": "彭泽县"
},
{
"_id": 1513,
"id": 2027,
"pid": 238,
"city_code": "101240904",
"city_name": "安源区"
},
{
"_id": 1514,
"id": 2028,
"pid": 238,
"city_code": "101240906",
"city_name": "湘东区"
},
{
"_id": 1515,
"id": 2029,
"pid": 238,
"city_code": "101240902",
"city_name": "莲花县"
},
{
"_id": 1516,
"id": 2030,
"pid": 238,
"city_code": "101240905",
"city_name": "芦溪县"
},
{
"_id": 1517,
"id": 2031,
"pid": 238,
"city_code": "101240903",
"city_name": "上栗县"
},
{
"_id": 1518,
"id": 2033,
"pid": 239,
"city_code": "101240307",
"city_name": "德兴市"
},
{
"_id": 1519,
"id": 2034,
"pid": 239,
"city_code": "101240308",
"city_name": "上饶县"
},
{
"_id": 1520,
"id": 2035,
"pid": 239,
"city_code": "101240313",
"city_name": "广丰县"
},
{
"_id": 1521,
"id": 2036,
"pid": 239,
"city_code": "101240312",
"city_name": "玉山县"
},
{
"_id": 1522,
"id": 2037,
"pid": 239,
"city_code": "101240311",
"city_name": "铅山县"
},
{
"_id": 1523,
"id": 2038,
"pid": 239,
"city_code": "101240310",
"city_name": "横峰县"
},
{
"_id": 1524,
"id": 2039,
"pid": 239,
"city_code": "101240309",
"city_name": "弋阳县"
},
{
"_id": 1525,
"id": 2040,
"pid": 239,
"city_code": "101240305",
"city_name": "余干县"
},
{
"_id": 1526,
"id": 2041,
"pid": 239,
"city_code": "101240302",
"city_name": "鄱阳县"
},
{
"_id": 1527,
"id": 2042,
"pid": 239,
"city_code": "101240306",
"city_name": "万年县"
},
{
"_id": 1528,
"id": 2043,
"pid": 239,
"city_code": "101240303",
"city_name": "婺源县"
},
{
"_id": 1529,
"id": 2045,
"pid": 240,
"city_code": "101241002",
"city_name": "分宜县"
},
{
"_id": 1530,
"id": 2047,
"pid": 241,
"city_code": "101240510",
"city_name": "丰城市"
},
{
"_id": 1531,
"id": 2048,
"pid": 241,
"city_code": "101240509",
"city_name": "樟树市"
},
{
"_id": 1532,
"id": 2049,
"pid": 241,
"city_code": "101240508",
"city_name": "高安市"
},
{
"_id": 1533,
"id": 2050,
"pid": 241,
"city_code": "101240507",
"city_name": "奉新县"
},
{
"_id": 1534,
"id": 2051,
"pid": 241,
"city_code": "101240504",
"city_name": "万载县"
},
{
"_id": 1535,
"id": 2052,
"pid": 241,
"city_code": "101240505",
"city_name": "上高县"
},
{
"_id": 1536,
"id": 2053,
"pid": 241,
"city_code": "101240503",
"city_name": "宜丰县"
},
{
"_id": 1537,
"id": 2054,
"pid": 241,
"city_code": "101240506",
"city_name": "靖安县"
},
{
"_id": 1538,
"id": 2055,
"pid": 241,
"city_code": "101240502",
"city_name": "铜鼓县"
},
{
"_id": 1539,
"id": 2057,
"pid": 242,
"city_code": "101241103",
"city_name": "贵溪市"
},
{
"_id": 1540,
"id": 2058,
"pid": 242,
"city_code": "101241102",
"city_name": "余江县"
},
{
"_id": 1541,
"id": 2064,
"pid": 243,
"city_code": "101070102",
"city_name": "苏家屯区"
},
{
"_id": 1542,
"id": 2067,
"pid": 243,
"city_code": "101070107",
"city_name": "于洪区"
},
{
"_id": 1543,
"id": 2069,
"pid": 243,
"city_code": "101070106",
"city_name": "新民市"
},
{
"_id": 1544,
"id": 2070,
"pid": 243,
"city_code": "101070103",
"city_name": "辽中县"
},
{
"_id": 1545,
"id": 2071,
"pid": 243,
"city_code": "101070104",
"city_name": "康平县"
},
{
"_id": 1546,
"id": 2072,
"pid": 243,
"city_code": "101070105",
"city_name": "法库县"
},
{
"_id": 1547,
"id": 2077,
"pid": 244,
"city_code": "101070205",
"city_name": "旅顺口区"
},
{
"_id": 1548,
"id": 2078,
"pid": 244,
"city_code": "101070203",
"city_name": "金州区"
},
{
"_id": 1549,
"id": 2080,
"pid": 244,
"city_code": "101070202",
"city_name": "瓦房店市"
},
{
"_id": 1550,
"id": 2081,
"pid": 244,
"city_code": "101070204",
"city_name": "普兰店市"
},
{
"_id": 1551,
"id": 2082,
"pid": 244,
"city_code": "101070207",
"city_name": "庄河市"
},
{
"_id": 1552,
"id": 2083,
"pid": 244,
"city_code": "101070206",
"city_name": "长海县"
},
{
"_id": 1553,
"id": 2088,
"pid": 245,
"city_code": "101070303",
"city_name": "岫岩县"
},
{
"_id": 1554,
"id": 2089,
"pid": 245,
"city_code": "101070304",
"city_name": "海城市"
},
{
"_id": 1555,
"id": 2090,
"pid": 245,
"city_code": "101070302",
"city_name": "台安县"
},
{
"_id": 1556,
"id": 2091,
"pid": 246,
"city_code": "101070502",
"city_name": "本溪县"
},
{
"_id": 1557,
"id": 2096,
"pid": 246,
"city_code": "101070504",
"city_name": "桓仁县"
},
{
"_id": 1558,
"id": 2099,
"pid": 247,
"city_code": "101071204",
"city_name": "喀喇沁左翼蒙古族自治县"
},
{
"_id": 1559,
"id": 2100,
"pid": 247,
"city_code": "101071205",
"city_name": "北票市"
},
{
"_id": 1560,
"id": 2101,
"pid": 247,
"city_code": "101071203",
"city_name": "凌源市"
},
{
"_id": 1561,
"id": 2103,
"pid": 247,
"city_code": "101071207",
"city_name": "建平县"
},
{
"_id": 1562,
"id": 2107,
"pid": 248,
"city_code": "101070603",
"city_name": "宽甸县"
},
{
"_id": 1563,
"id": 2108,
"pid": 248,
"city_code": "101070604",
"city_name": "东港市"
},
{
"_id": 1564,
"id": 2109,
"pid": 248,
"city_code": "101070602",
"city_name": "凤城市"
},
{
"_id": 1565,
"id": 2114,
"pid": 249,
"city_code": "101070403",
"city_name": "清原县"
},
{
"_id": 1566,
"id": 2115,
"pid": 249,
"city_code": "101070402",
"city_name": "新宾县"
},
{
"_id": 1567,
"id": 2116,
"pid": 249,
"city_code": "101070401",
"city_name": "抚顺县"
},
{
"_id": 1568,
"id": 2123,
"pid": 250,
"city_code": "101070902",
"city_name": "彰武县"
},
{
"_id": 1569,
"id": 2127,
"pid": 251,
"city_code": "101071404",
"city_name": "兴城市"
},
{
"_id": 1570,
"id": 2128,
"pid": 251,
"city_code": "101071403",
"city_name": "绥中县"
},
{
"_id": 1571,
"id": 2129,
"pid": 251,
"city_code": "101071402",
"city_name": "建昌县"
},
{
"_id": 1572,
"id": 2133,
"pid": 252,
"city_code": "101070702",
"city_name": "凌海市"
},
{
"_id": 1573,
"id": 2134,
"pid": 252,
"city_code": "101070706",
"city_name": "北镇市"
},
{
"_id": 1574,
"id": 2135,
"pid": 252,
"city_code": "101070705",
"city_name": "黑山县"
},
{
"_id": 1575,
"id": 2136,
"pid": 252,
"city_code": "101070704",
"city_name": "义县"
},
{
"_id": 1576,
"id": 2141,
"pid": 253,
"city_code": "101071004",
"city_name": "弓长岭区"
},
{
"_id": 1577,
"id": 2142,
"pid": 253,
"city_code": "101071003",
"city_name": "灯塔市"
},
{
"_id": 1578,
"id": 2143,
"pid": 253,
"city_code": "101071002",
"city_name": "辽阳县"
},
{
"_id": 1579,
"id": 2146,
"pid": 254,
"city_code": "101071302",
"city_name": "大洼县"
},
{
"_id": 1580,
"id": 2147,
"pid": 254,
"city_code": "101071303",
"city_name": "盘山县"
},
{
"_id": 1581,
"id": 2150,
"pid": 255,
"city_code": "101071105",
"city_name": "调兵山市"
},
{
"_id": 1582,
"id": 2151,
"pid": 255,
"city_code": "101071102",
"city_name": "开原市"
},
{
"_id": 1583,
"id": 2152,
"pid": 255,
"city_code": "101071101",
"city_name": "铁岭县"
},
{
"_id": 1584,
"id": 2153,
"pid": 255,
"city_code": "101071104",
"city_name": "西丰县"
},
{
"_id": 1585,
"id": 2154,
"pid": 255,
"city_code": "101071103",
"city_name": "昌图县"
},
{
"_id": 1586,
"id": 2159,
"pid": 256,
"city_code": "101070803",
"city_name": "盖州市"
},
{
"_id": 1587,
"id": 2160,
"pid": 256,
"city_code": "101070802",
"city_name": "大石桥市"
},
{
"_id": 1588,
"id": 2165,
"pid": 257,
"city_code": "101080105",
"city_name": "清水河县"
},
{
"_id": 1589,
"id": 2166,
"pid": 257,
"city_code": "101080102",
"city_name": "土默特左旗"
},
{
"_id": 1590,
"id": 2167,
"pid": 257,
"city_code": "101080103",
"city_name": "托克托县"
},
{
"_id": 1591,
"id": 2168,
"pid": 257,
"city_code": "101080104",
"city_name": "和林格尔县"
},
{
"_id": 1592,
"id": 2169,
"pid": 257,
"city_code": "101080107",
"city_name": "武川县"
},
{
"_id": 1593,
"id": 2170,
"pid": 258,
"city_code": "101081201",
"city_name": "阿拉善左旗"
},
{
"_id": 1594,
"id": 2171,
"pid": 258,
"city_code": "101081202",
"city_name": "阿拉善右旗"
},
{
"_id": 1595,
"id": 2172,
"pid": 258,
"city_code": "101081203",
"city_name": "额济纳旗"
},
{
"_id": 1596,
"id": 2173,
"pid": 259,
"city_code": "101080801",
"city_name": "临河区"
},
{
"_id": 1597,
"id": 2174,
"pid": 259,
"city_code": "101080802",
"city_name": "五原县"
},
{
"_id": 1598,
"id": 2175,
"pid": 259,
"city_code": "101080803",
"city_name": "磴口县"
},
{
"_id": 1599,
"id": 2176,
"pid": 259,
"city_code": "101080804",
"city_name": "乌拉特前旗"
},
{
"_id": 1600,
"id": 2177,
"pid": 259,
"city_code": "101080806",
"city_name": "乌拉特中旗"
},
{
"_id": 1601,
"id": 2178,
"pid": 259,
"city_code": "101080807",
"city_name": "乌拉特后旗"
},
{
"_id": 1602,
"id": 2179,
"pid": 259,
"city_code": "101080810",
"city_name": "杭锦后旗"
},
{
"_id": 1603,
"id": 2184,
"pid": 260,
"city_code": "101080207",
"city_name": "石拐区"
},
{
"_id": 1604,
"id": 2185,
"pid": 260,
"city_code": "101080202",
"city_name": "白云鄂博"
},
{
"_id": 1605,
"id": 2186,
"pid": 260,
"city_code": "101080204",
"city_name": "土默特右旗"
},
{
"_id": 1606,
"id": 2187,
"pid": 260,
"city_code": "101080205",
"city_name": "固阳县"
},
{
"_id": 1607,
"id": 2188,
"pid": 260,
"city_code": "101080206",
"city_name": "达尔罕茂明安联合旗"
},
{
"_id": 1608,
"id": 2192,
"pid": 261,
"city_code": "101080603",
"city_name": "阿鲁科尔沁旗"
},
{
"_id": 1609,
"id": 2193,
"pid": 261,
"city_code": "101080605",
"city_name": "巴林左旗"
},
{
"_id": 1610,
"id": 2194,
"pid": 261,
"city_code": "101080606",
"city_name": "巴林右旗"
},
{
"_id": 1611,
"id": 2195,
"pid": 261,
"city_code": "101080607",
"city_name": "林西县"
},
{
"_id": 1612,
"id": 2196,
"pid": 261,
"city_code": "101080608",
"city_name": "克什克腾旗"
},
{
"_id": 1613,
"id": 2197,
"pid": 261,
"city_code": "101080609",
"city_name": "翁牛特旗"
},
{
"_id": 1614,
"id": 2198,
"pid": 261,
"city_code": "101080611",
"city_name": "喀喇沁旗"
},
{
"_id": 1615,
"id": 2199,
"pid": 261,
"city_code": "101080613",
"city_name": "宁城县"
},
{
"_id": 1616,
"id": 2200,
"pid": 261,
"city_code": "101080614",
"city_name": "敖汉旗"
},
{
"_id": 1617,
"id": 2201,
"pid": 262,
"city_code": "101080713",
"city_name": "东胜区"
},
{
"_id": 1618,
"id": 2202,
"pid": 262,
"city_code": "101080703",
"city_name": "达拉特旗"
},
{
"_id": 1619,
"id": 2203,
"pid": 262,
"city_code": "101080704",
"city_name": "准格尔旗"
},
{
"_id": 1620,
"id": 2204,
"pid": 262,
"city_code": "101080705",
"city_name": "鄂托克前旗"
},
{
"_id": 1621,
"id": 2205,
"pid": 262,
"city_code": "101080708",
"city_name": "鄂托克旗"
},
{
"_id": 1622,
"id": 2206,
"pid": 262,
"city_code": "101080709",
"city_name": "杭锦旗"
},
{
"_id": 1623,
"id": 2207,
"pid": 262,
"city_code": "101080710",
"city_name": "乌审旗"
},
{
"_id": 1624,
"id": 2208,
"pid": 262,
"city_code": "101080711",
"city_name": "伊金霍洛旗"
},
{
"_id": 1625,
"id": 2209,
"pid": 263,
"city_code": "101081001",
"city_name": "海拉尔区"
},
{
"_id": 1626,
"id": 2210,
"pid": 263,
"city_code": "101081004",
"city_name": "莫力达瓦"
},
{
"_id": 1627,
"id": 2211,
"pid": 263,
"city_code": "101081010",
"city_name": "满洲里市"
},
{
"_id": 1628,
"id": 2212,
"pid": 263,
"city_code": "101081011",
"city_name": "牙克石市"
},
{
"_id": 1629,
"id": 2213,
"pid": 263,
"city_code": "101081012",
"city_name": "扎兰屯市"
},
{
"_id": 1630,
"id": 2214,
"pid": 263,
"city_code": "101081014",
"city_name": "额尔古纳市"
},
{
"_id": 1631,
"id": 2215,
"pid": 263,
"city_code": "101081015",
"city_name": "根河市"
},
{
"_id": 1632,
"id": 2216,
"pid": 263,
"city_code": "101081003",
"city_name": "阿荣旗"
},
{
"_id": 1633,
"id": 2217,
"pid": 263,
"city_code": "101081005",
"city_name": "鄂伦春自治旗"
},
{
"_id": 1634,
"id": 2218,
"pid": 263,
"city_code": "101081006",
"city_name": "鄂温克族自治旗"
},
{
"_id": 1635,
"id": 2219,
"pid": 263,
"city_code": "101081007",
"city_name": "陈巴尔虎旗"
},
{
"_id": 1636,
"id": 2220,
"pid": 263,
"city_code": "101081008",
"city_name": "新巴尔虎左旗"
},
{
"_id": 1637,
"id": 2221,
"pid": 263,
"city_code": "101081009",
"city_name": "新巴尔虎右旗"
},
{
"_id": 1638,
"id": 2223,
"pid": 264,
"city_code": "101080512",
"city_name": "霍林郭勒市"
},
{
"_id": 1639,
"id": 2224,
"pid": 264,
"city_code": "101080503",
"city_name": "科尔沁左翼中旗"
},
{
"_id": 1640,
"id": 2225,
"pid": 264,
"city_code": "101080504",
"city_name": "科尔沁左翼后旗"
},
{
"_id": 1641,
"id": 2226,
"pid": 264,
"city_code": "101080506",
"city_name": "开鲁县"
},
{
"_id": 1642,
"id": 2227,
"pid": 264,
"city_code": "101080507",
"city_name": "库伦旗"
},
{
"_id": 1643,
"id": 2228,
"pid": 264,
"city_code": "101080508",
"city_name": "奈曼旗"
},
{
"_id": 1644,
"id": 2229,
"pid": 264,
"city_code": "101080509",
"city_name": "扎鲁特旗"
},
{
"_id": 1645,
"id": 2233,
"pid": 266,
"city_code": "101080403",
"city_name": "化德县"
},
{
"_id": 1646,
"id": 2234,
"pid": 266,
"city_code": "101080401",
"city_name": "集宁区"
},
{
"_id": 1647,
"id": 2235,
"pid": 266,
"city_code": "101080412",
"city_name": "丰镇市"
},
{
"_id": 1648,
"id": 2236,
"pid": 266,
"city_code": "101080402",
"city_name": "卓资县"
},
{
"_id": 1649,
"id": 2237,
"pid": 266,
"city_code": "101080404",
"city_name": "商都县"
},
{
"_id": 1650,
"id": 2238,
"pid": 266,
"city_code": "101080406",
"city_name": "兴和县"
},
{
"_id": 1651,
"id": 2239,
"pid": 266,
"city_code": "101080407",
"city_name": "凉城县"
},
{
"_id": 1652,
"id": 2240,
"pid": 266,
"city_code": "101080408",
"city_name": "察哈尔右翼前旗"
},
{
"_id": 1653,
"id": 2241,
"pid": 266,
"city_code": "101080409",
"city_name": "察哈尔右翼中旗"
},
{
"_id": 1654,
"id": 2242,
"pid": 266,
"city_code": "101080410",
"city_name": "察哈尔右翼后旗"
},
{
"_id": 1655,
"id": 2243,
"pid": 266,
"city_code": "101080411",
"city_name": "四子王旗"
},
{
"_id": 1656,
"id": 2244,
"pid": 267,
"city_code": "101080903",
"city_name": "二连浩特市"
},
{
"_id": 1657,
"id": 2245,
"pid": 267,
"city_code": "101080901",
"city_name": "锡林浩特市"
},
{
"_id": 1658,
"id": 2246,
"pid": 267,
"city_code": "101080904",
"city_name": "阿巴嘎旗"
},
{
"_id": 1659,
"id": 2247,
"pid": 267,
"city_code": "101080906",
"city_name": "苏尼特左旗"
},
{
"_id": 1660,
"id": 2248,
"pid": 267,
"city_code": "101080907",
"city_name": "苏尼特右旗"
},
{
"_id": 1661,
"id": 2249,
"pid": 267,
"city_code": "101080909",
"city_name": "东乌珠穆沁旗"
},
{
"_id": 1662,
"id": 2250,
"pid": 267,
"city_code": "101080910",
"city_name": "西乌珠穆沁旗"
},
{
"_id": 1663,
"id": 2251,
"pid": 267,
"city_code": "101080911",
"city_name": "太仆寺旗"
},
{
"_id": 1664,
"id": 2252,
"pid": 267,
"city_code": "101080912",
"city_name": "镶黄旗"
},
{
"_id": 1665,
"id": 2253,
"pid": 267,
"city_code": "101080913",
"city_name": "正镶白旗"
},
{
"_id": 1666,
"id": 2255,
"pid": 267,
"city_code": "101080915",
"city_name": "多伦县"
},
{
"_id": 1667,
"id": 2256,
"pid": 268,
"city_code": "101081101",
"city_name": "乌兰浩特市"
},
{
"_id": 1668,
"id": 2257,
"pid": 268,
"city_code": "101081102",
"city_name": "阿尔山市"
},
{
"_id": 1669,
"id": 2258,
"pid": 268,
"city_code": "101081109",
"city_name": "科尔沁右翼前旗"
},
{
"_id": 1670,
"id": 2259,
"pid": 268,
"city_code": "101081103",
"city_name": "科尔沁右翼中旗"
},
{
"_id": 1671,
"id": 2260,
"pid": 268,
"city_code": "101081105",
"city_name": "扎赉特旗"
},
{
"_id": 1672,
"id": 2261,
"pid": 268,
"city_code": "101081107",
"city_name": "突泉县"
},
{
"_id": 1673,
"id": 2265,
"pid": 269,
"city_code": "101170103",
"city_name": "灵武市"
},
{
"_id": 1674,
"id": 2266,
"pid": 269,
"city_code": "101170102",
"city_name": "永宁县"
},
{
"_id": 1675,
"id": 2267,
"pid": 269,
"city_code": "101170104",
"city_name": "贺兰县"
},
{
"_id": 1676,
"id": 2270,
"pid": 270,
"city_code": "101170402",
"city_name": "西吉县"
},
{
"_id": 1677,
"id": 2271,
"pid": 270,
"city_code": "101170403",
"city_name": "隆德县"
},
{
"_id": 1678,
"id": 2272,
"pid": 270,
"city_code": "101170404",
"city_name": "泾源县"
},
{
"_id": 1679,
"id": 2273,
"pid": 270,
"city_code": "101170406",
"city_name": "彭阳县"
},
{
"_id": 1680,
"id": 2274,
"pid": 271,
"city_code": "101170202",
"city_name": "惠农区"
},
{
"_id": 1681,
"id": 2275,
"pid": 271,
"city_code": "101170206",
"city_name": "大武口区"
},
{
"_id": 1682,
"id": 2276,
"pid": 271,
"city_code": "101170202",
"city_name": "惠农区"
},
{
"_id": 1683,
"id": 2277,
"pid": 271,
"city_code": "101170204",
"city_name": "陶乐县"
},
{
"_id": 1684,
"id": 2278,
"pid": 271,
"city_code": "101170203",
"city_name": "平罗县"
},
{
"_id": 1685,
"id": 2281,
"pid": 272,
"city_code": "101170306",
"city_name": "青铜峡市"
},
{
"_id": 1686,
"id": 2283,
"pid": 272,
"city_code": "101170303",
"city_name": "盐池县"
},
{
"_id": 1687,
"id": 2284,
"pid": 272,
"city_code": "101170302",
"city_name": "同心县"
},
{
"_id": 1688,
"id": 2286,
"pid": 273,
"city_code": "101170504",
"city_name": "海原县"
},
{
"_id": 1689,
"id": 2287,
"pid": 273,
"city_code": "101170502",
"city_name": "中宁县"
},
{
"_id": 1690,
"id": 2292,
"pid": 274,
"city_code": "101150104",
"city_name": "湟中县"
},
{
"_id": 1691,
"id": 2293,
"pid": 274,
"city_code": "101150103",
"city_name": "湟源县"
},
{
"_id": 1692,
"id": 2294,
"pid": 274,
"city_code": "101150102",
"city_name": "大通县"
},
{
"_id": 1693,
"id": 2295,
"pid": 275,
"city_code": "101150508",
"city_name": "玛沁县"
},
{
"_id": 1694,
"id": 2296,
"pid": 275,
"city_code": "101150502",
"city_name": "班玛县"
},
{
"_id": 1695,
"id": 2297,
"pid": 275,
"city_code": "101150503",
"city_name": "甘德县"
},
{
"_id": 1696,
"id": 2298,
"pid": 275,
"city_code": "101150504",
"city_name": "达日县"
},
{
"_id": 1697,
"id": 2299,
"pid": 275,
"city_code": "101150505",
"city_name": "久治县"
},
{
"_id": 1698,
"id": 2300,
"pid": 275,
"city_code": "101150506",
"city_name": "玛多县"
},
{
"_id": 1699,
"id": 2301,
"pid": 276,
"city_code": "101150804",
"city_name": "海晏县"
},
{
"_id": 1700,
"id": 2302,
"pid": 276,
"city_code": "101150803",
"city_name": "祁连县"
},
{
"_id": 1701,
"id": 2303,
"pid": 276,
"city_code": "101150806",
"city_name": "刚察县"
},
{
"_id": 1702,
"id": 2304,
"pid": 276,
"city_code": "101150802",
"city_name": "门源县"
},
{
"_id": 1703,
"id": 2305,
"pid": 277,
"city_code": "101150208",
"city_name": "平安县"
},
{
"_id": 1704,
"id": 2306,
"pid": 277,
"city_code": "101150202",
"city_name": "乐都县"
},
{
"_id": 1705,
"id": 2307,
"pid": 277,
"city_code": "101150203",
"city_name": "民和县"
},
{
"_id": 1706,
"id": 2308,
"pid": 277,
"city_code": "101150204",
"city_name": "互助县"
},
{
"_id": 1707,
"id": 2309,
"pid": 277,
"city_code": "101150205",
"city_name": "化隆县"
},
{
"_id": 1708,
"id": 2310,
"pid": 277,
"city_code": "101150206",
"city_name": "循化县"
},
{
"_id": 1709,
"id": 2311,
"pid": 278,
"city_code": "101150409",
"city_name": "共和县"
},
{
"_id": 1710,
"id": 2312,
"pid": 278,
"city_code": "101150408",
"city_name": "同德县"
},
{
"_id": 1711,
"id": 2313,
"pid": 278,
"city_code": "101150404",
"city_name": "贵德县"
},
{
"_id": 1712,
"id": 2314,
"pid": 278,
"city_code": "101150406",
"city_name": "兴海县"
},
{
"_id": 1713,
"id": 2315,
"pid": 278,
"city_code": "101150407",
"city_name": "贵南县"
},
{
"_id": 1714,
"id": 2316,
"pid": 279,
"city_code": "101150716",
"city_name": "德令哈市"
},
{
"_id": 1715,
"id": 2317,
"pid": 279,
"city_code": "101150702",
"city_name": "格尔木市"
},
{
"_id": 1716,
"id": 2318,
"pid": 279,
"city_code": "101150709",
"city_name": "乌兰县"
},
{
"_id": 1717,
"id": 2319,
"pid": 279,
"city_code": "101150710",
"city_name": "都兰县"
},
{
"_id": 1718,
"id": 2320,
"pid": 279,
"city_code": "101150708",
"city_name": "天峻县"
},
{
"_id": 1719,
"id": 2321,
"pid": 280,
"city_code": "101150305",
"city_name": "同仁县"
},
{
"_id": 1720,
"id": 2322,
"pid": 280,
"city_code": "101150302",
"city_name": "尖扎县"
},
{
"_id": 1721,
"id": 2323,
"pid": 280,
"city_code": "101150303",
"city_name": "泽库县"
},
{
"_id": 1722,
"id": 2324,
"pid": 280,
"city_code": "101150304",
"city_name": "河南蒙古族自治县"
},
{
"_id": 1723,
"id": 2325,
"pid": 281,
"city_code": "101150601",
"city_name": "玉树县"
},
{
"_id": 1724,
"id": 2326,
"pid": 281,
"city_code": "101150604",
"city_name": "杂多县"
},
{
"_id": 1725,
"id": 2327,
"pid": 281,
"city_code": "101150602",
"city_name": "称多县"
},
{
"_id": 1726,
"id": 2328,
"pid": 281,
"city_code": "101150603",
"city_name": "治多县"
},
{
"_id": 1727,
"id": 2329,
"pid": 281,
"city_code": "101150605",
"city_name": "囊谦县"
},
{
"_id": 1728,
"id": 2330,
"pid": 281,
"city_code": "101150606",
"city_name": "曲麻莱县"
},
{
"_id": 1729,
"id": 2336,
"pid": 282,
"city_code": "101120102",
"city_name": "长清区"
},
{
"_id": 1730,
"id": 2337,
"pid": 282,
"city_code": "101120104",
"city_name": "章丘市"
},
{
"_id": 1731,
"id": 2338,
"pid": 282,
"city_code": "101120105",
"city_name": "平阴县"
},
{
"_id": 1732,
"id": 2339,
"pid": 282,
"city_code": "101120106",
"city_name": "济阳县"
},
{
"_id": 1733,
"id": 2340,
"pid": 282,
"city_code": "101120103",
"city_name": "商河县"
},
{
"_id": 1734,
"id": 2347,
"pid": 283,
"city_code": "101120202",
"city_name": "崂山区"
},
{
"_id": 1735,
"id": 2348,
"pid": 283,
"city_code": "101120205",
"city_name": "胶州市"
},
{
"_id": 1736,
"id": 2349,
"pid": 283,
"city_code": "101120204",
"city_name": "即墨市"
},
{
"_id": 1737,
"id": 2350,
"pid": 283,
"city_code": "101120208",
"city_name": "平度市"
},
{
"_id": 1738,
"id": 2351,
"pid": 283,
"city_code": "101120206",
"city_name": "胶南市"
},
{
"_id": 1739,
"id": 2352,
"pid": 283,
"city_code": "101120207",
"city_name": "莱西市"
},
{
"_id": 1740,
"id": 2354,
"pid": 284,
"city_code": "101121105",
"city_name": "惠民县"
},
{
"_id": 1741,
"id": 2355,
"pid": 284,
"city_code": "101121104",
"city_name": "阳信县"
},
{
"_id": 1742,
"id": 2356,
"pid": 284,
"city_code": "101121103",
"city_name": "无棣县"
},
{
"_id": 1743,
"id": 2357,
"pid": 284,
"city_code": "101121106",
"city_name": "沾化县"
},
{
"_id": 1744,
"id": 2358,
"pid": 284,
"city_code": "101121102",
"city_name": "博兴县"
},
{
"_id": 1745,
"id": 2359,
"pid": 284,
"city_code": "101121107",
"city_name": "邹平县"
},
{
"_id": 1746,
"id": 2361,
"pid": 285,
"city_code": "101120404",
"city_name": "陵县"
},
{
"_id": 1747,
"id": 2362,
"pid": 285,
"city_code": "101120406",
"city_name": "乐陵市"
},
{
"_id": 1748,
"id": 2363,
"pid": 285,
"city_code": "101120411",
"city_name": "禹城市"
},
{
"_id": 1749,
"id": 2364,
"pid": 285,
"city_code": "101120409",
"city_name": "宁津县"
},
{
"_id": 1750,
"id": 2365,
"pid": 285,
"city_code": "101120407",
"city_name": "庆云县"
},
{
"_id": 1751,
"id": 2366,
"pid": 285,
"city_code": "101120403",
"city_name": "临邑县"
},
{
"_id": 1752,
"id": 2367,
"pid": 285,
"city_code": "101120405",
"city_name": "齐河县"
},
{
"_id": 1753,
"id": 2368,
"pid": 285,
"city_code": "101120408",
"city_name": "平原县"
},
{
"_id": 1754,
"id": 2369,
"pid": 285,
"city_code": "101120410",
"city_name": "夏津县"
},
{
"_id": 1755,
"id": 2370,
"pid": 285,
"city_code": "101120402",
"city_name": "武城县"
},
{
"_id": 1756,
"id": 2371,
"pid": 286,
"city_code": "101121201",
"city_name": "东营区"
},
{
"_id": 1757,
"id": 2372,
"pid": 286,
"city_code": "101121202",
"city_name": "河口区"
},
{
"_id": 1758,
"id": 2373,
"pid": 286,
"city_code": "101121203",
"city_name": "垦利县"
},
{
"_id": 1759,
"id": 2374,
"pid": 286,
"city_code": "101121204",
"city_name": "利津县"
},
{
"_id": 1760,
"id": 2375,
"pid": 286,
"city_code": "101121205",
"city_name": "广饶县"
},
{
"_id": 1761,
"id": 2377,
"pid": 287,
"city_code": "101121007",
"city_name": "曹县"
},
{
"_id": 1762,
"id": 2378,
"pid": 287,
"city_code": "101121009",
"city_name": "单县"
},
{
"_id": 1763,
"id": 2379,
"pid": 287,
"city_code": "101121008",
"city_name": "成武县"
},
{
"_id": 1764,
"id": 2380,
"pid": 287,
"city_code": "101121006",
"city_name": "巨野县"
},
{
"_id": 1765,
"id": 2381,
"pid": 287,
"city_code": "101121003",
"city_name": "郓城县"
},
{
"_id": 1766,
"id": 2382,
"pid": 287,
"city_code": "101121002",
"city_name": "鄄城县"
},
{
"_id": 1767,
"id": 2383,
"pid": 287,
"city_code": "101121005",
"city_name": "定陶县"
},
{
"_id": 1768,
"id": 2384,
"pid": 287,
"city_code": "101121004",
"city_name": "东明县"
},
{
"_id": 1769,
"id": 2387,
"pid": 288,
"city_code": "101120710",
"city_name": "曲阜市"
},
{
"_id": 1770,
"id": 2388,
"pid": 288,
"city_code": "101120705",
"city_name": "兖州市"
},
{
"_id": 1771,
"id": 2389,
"pid": 288,
"city_code": "101120711",
"city_name": "邹城市"
},
{
"_id": 1772,
"id": 2390,
"pid": 288,
"city_code": "101120703",
"city_name": "微山县"
},
{
"_id": 1773,
"id": 2391,
"pid": 288,
"city_code": "101120704",
"city_name": "鱼台县"
},
{
"_id": 1774,
"id": 2392,
"pid": 288,
"city_code": "101120706",
"city_name": "金乡县"
},
{
"_id": 1775,
"id": 2393,
"pid": 288,
"city_code": "101120702",
"city_name": "嘉祥县"
},
{
"_id": 1776,
"id": 2394,
"pid": 288,
"city_code": "101120707",
"city_name": "汶上县"
},
{
"_id": 1777,
"id": 2395,
"pid": 288,
"city_code": "101120708",
"city_name": "泗水县"
},
{
"_id": 1778,
"id": 2396,
"pid": 288,
"city_code": "101120709",
"city_name": "梁山县"
},
{
"_id": 1779,
"id": 2400,
"pid": 290,
"city_code": "101121707",
"city_name": "临清市"
},
{
"_id": 1780,
"id": 2401,
"pid": 290,
"city_code": "101121703",
"city_name": "阳谷县"
},
{
"_id": 1781,
"id": 2402,
"pid": 290,
"city_code": "101121709",
"city_name": "莘县"
},
{
"_id": 1782,
"id": 2403,
"pid": 290,
"city_code": "101121705",
"city_name": "茌平县"
},
{
"_id": 1783,
"id": 2404,
"pid": 290,
"city_code": "101121706",
"city_name": "东阿县"
},
{
"_id": 1784,
"id": 2405,
"pid": 290,
"city_code": "101121702",
"city_name": "冠县"
},
{
"_id": 1785,
"id": 2406,
"pid": 290,
"city_code": "101121704",
"city_name": "高唐县"
},
{
"_id": 1786,
"id": 2410,
"pid": 291,
"city_code": "101120903",
"city_name": "沂南县"
},
{
"_id": 1787,
"id": 2411,
"pid": 291,
"city_code": "101120906",
"city_name": "郯城县"
},
{
"_id": 1788,
"id": 2412,
"pid": 291,
"city_code": "101120910",
"city_name": "沂水县"
},
{
"_id": 1789,
"id": 2413,
"pid": 291,
"city_code": "101120904",
"city_name": "兰陵县"
},
{
"_id": 1790,
"id": 2414,
"pid": 291,
"city_code": "101120909",
"city_name": "费县"
},
{
"_id": 1791,
"id": 2415,
"pid": 291,
"city_code": "101120908",
"city_name": "平邑县"
},
{
"_id": 1792,
"id": 2416,
"pid": 291,
"city_code": "101120902",
"city_name": "莒南县"
},
{
"_id": 1793,
"id": 2417,
"pid": 291,
"city_code": "101120907",
"city_name": "蒙阴县"
},
{
"_id": 1794,
"id": 2418,
"pid": 291,
"city_code": "101120905",
"city_name": "临沭县"
},
{
"_id": 1795,
"id": 2421,
"pid": 292,
"city_code": "101121502",
"city_name": "五莲县"
},
{
"_id": 1796,
"id": 2422,
"pid": 292,
"city_code": "101121503",
"city_name": "莒县"
},
{
"_id": 1797,
"id": 2423,
"pid": 293,
"city_code": "101120803",
"city_name": "泰山区"
},
{
"_id": 1798,
"id": 2425,
"pid": 293,
"city_code": "101120802",
"city_name": "新泰市"
},
{
"_id": 1799,
"id": 2426,
"pid": 293,
"city_code": "101120804",
"city_name": "肥城市"
},
{
"_id": 1800,
"id": 2427,
"pid": 293,
"city_code": "101120806",
"city_name": "宁阳县"
},
{
"_id": 1801,
"id": 2428,
"pid": 293,
"city_code": "101120805",
"city_name": "东平县"
},
{
"_id": 1802,
"id": 2429,
"pid": 294,
"city_code": "101121303",
"city_name": "荣成市"
},
{
"_id": 1803,
"id": 2430,
"pid": 294,
"city_code": "101121304",
"city_name": "乳山市"
},
{
"_id": 1804,
"id": 2432,
"pid": 294,
"city_code": "101121302",
"city_name": "文登市"
},
{
"_id": 1805,
"id": 2437,
"pid": 295,
"city_code": "101120602",
"city_name": "青州市"
},
{
"_id": 1806,
"id": 2438,
"pid": 295,
"city_code": "101120609",
"city_name": "诸城市"
},
{
"_id": 1807,
"id": 2439,
"pid": 295,
"city_code": "101120603",
"city_name": "寿光市"
},
{
"_id": 1808,
"id": 2440,
"pid": 295,
"city_code": "101120607",
"city_name": "安丘市"
},
{
"_id": 1809,
"id": 2441,
"pid": 295,
"city_code": "101120608",
"city_name": "高密市"
},
{
"_id": 1810,
"id": 2442,
"pid": 295,
"city_code": "101120606",
"city_name": "昌邑市"
},
{
"_id": 1811,
"id": 2443,
"pid": 295,
"city_code": "101120604",
"city_name": "临朐县"
},
{
"_id": 1812,
"id": 2444,
"pid": 295,
"city_code": "101120605",
"city_name": "昌乐县"
},
{
"_id": 1813,
"id": 2446,
"pid": 296,
"city_code": "101120508",
"city_name": "福山区"
},
{
"_id": 1814,
"id": 2447,
"pid": 296,
"city_code": "101120509",
"city_name": "牟平区"
},
{
"_id": 1815,
"id": 2450,
"pid": 296,
"city_code": "101120505",
"city_name": "龙口市"
},
{
"_id": 1816,
"id": 2451,
"pid": 296,
"city_code": "101120510",
"city_name": "莱阳市"
},
{
"_id": 1817,
"id": 2452,
"pid": 296,
"city_code": "101120502",
"city_name": "莱州市"
},
{
"_id": 1818,
"id": 2453,
"pid": 296,
"city_code": "101120504",
"city_name": "蓬莱市"
},
{
"_id": 1819,
"id": 2454,
"pid": 296,
"city_code": "101120506",
"city_name": "招远市"
},
{
"_id": 1820,
"id": 2455,
"pid": 296,
"city_code": "101120507",
"city_name": "栖霞市"
},
{
"_id": 1821,
"id": 2456,
"pid": 296,
"city_code": "101120511",
"city_name": "海阳市"
},
{
"_id": 1822,
"id": 2457,
"pid": 296,
"city_code": "101120503",
"city_name": "长岛县"
},
{
"_id": 1823,
"id": 2460,
"pid": 297,
"city_code": "101121403",
"city_name": "峄城区"
},
{
"_id": 1824,
"id": 2461,
"pid": 297,
"city_code": "101121404",
"city_name": "台儿庄区"
},
{
"_id": 1825,
"id": 2462,
"pid": 297,
"city_code": "101121402",
"city_name": "薛城区"
},
{
"_id": 1826,
"id": 2463,
"pid": 297,
"city_code": "101121405",
"city_name": "滕州市"
},
{
"_id": 1827,
"id": 2465,
"pid": 298,
"city_code": "101120308",
"city_name": "临淄区"
},
{
"_id": 1828,
"id": 2466,
"pid": 298,
"city_code": "101120302",
"city_name": "淄川区"
},
{
"_id": 1829,
"id": 2467,
"pid": 298,
"city_code": "101120303",
"city_name": "博山区"
},
{
"_id": 1830,
"id": 2468,
"pid": 298,
"city_code": "101120305",
"city_name": "周村区"
},
{
"_id": 1831,
"id": 2469,
"pid": 298,
"city_code": "101120307",
"city_name": "桓台县"
},
{
"_id": 1832,
"id": 2470,
"pid": 298,
"city_code": "101120304",
"city_name": "高青县"
},
{
"_id": 1833,
"id": 2471,
"pid": 298,
"city_code": "101120306",
"city_name": "沂源县"
},
{
"_id": 1834,
"id": 2481,
"pid": 299,
"city_code": "101100102",
"city_name": "清徐县"
},
{
"_id": 1835,
"id": 2482,
"pid": 299,
"city_code": "101100103",
"city_name": "阳曲县"
},
{
"_id": 1836,
"id": 2483,
"pid": 299,
"city_code": "101100104",
"city_name": "娄烦县"
},
{
"_id": 1837,
"id": 2484,
"pid": 299,
"city_code": "101100105",
"city_name": "古交市"
},
{
"_id": 1838,
"id": 2487,
"pid": 300,
"city_code": "101100508",
"city_name": "沁县"
},
{
"_id": 1839,
"id": 2488,
"pid": 300,
"city_code": "101100504",
"city_name": "潞城市"
},
{
"_id": 1840,
"id": 2489,
"pid": 300,
"city_code": "101100501",
"city_name": "长治县"
},
{
"_id": 1841,
"id": 2490,
"pid": 300,
"city_code": "101100505",
"city_name": "襄垣县"
},
{
"_id": 1842,
"id": 2491,
"pid": 300,
"city_code": "101100503",
"city_name": "屯留县"
},
{
"_id": 1843,
"id": 2492,
"pid": 300,
"city_code": "101100506",
"city_name": "平顺县"
},
{
"_id": 1844,
"id": 2493,
"pid": 300,
"city_code": "101100502",
"city_name": "黎城县"
},
{
"_id": 1845,
"id": 2494,
"pid": 300,
"city_code": "101100511",
"city_name": "壶关县"
},
{
"_id": 1846,
"id": 2495,
"pid": 300,
"city_code": "101100509",
"city_name": "长子县"
},
{
"_id": 1847,
"id": 2496,
"pid": 300,
"city_code": "101100507",
"city_name": "武乡县"
},
{
"_id": 1848,
"id": 2497,
"pid": 300,
"city_code": "101100510",
"city_name": "沁源县"
},
{
"_id": 1849,
"id": 2502,
"pid": 301,
"city_code": "101100202",
"city_name": "阳高县"
},
{
"_id": 1850,
"id": 2503,
"pid": 301,
"city_code": "101100204",
"city_name": "天镇县"
},
{
"_id": 1851,
"id": 2504,
"pid": 301,
"city_code": "101100205",
"city_name": "广灵县"
},
{
"_id": 1852,
"id": 2505,
"pid": 301,
"city_code": "101100206",
"city_name": "灵丘县"
},
{
"_id": 1853,
"id": 2506,
"pid": 301,
"city_code": "101100207",
"city_name": "浑源县"
},
{
"_id": 1854,
"id": 2507,
"pid": 301,
"city_code": "101100208",
"city_name": "左云县"
},
{
"_id": 1855,
"id": 2508,
"pid": 301,
"city_code": "101100203",
"city_name": "大同县"
},
{
"_id": 1856,
"id": 2510,
"pid": 302,
"city_code": "101100605",
"city_name": "高平市"
},
{
"_id": 1857,
"id": 2511,
"pid": 302,
"city_code": "101100602",
"city_name": "沁水县"
},
{
"_id": 1858,
"id": 2512,
"pid": 302,
"city_code": "101100603",
"city_name": "阳城县"
},
{
"_id": 1859,
"id": 2513,
"pid": 302,
"city_code": "101100604",
"city_name": "陵川县"
},
{
"_id": 1860,
"id": 2514,
"pid": 302,
"city_code": "101100606",
"city_name": "泽州县"
},
{
"_id": 1861,
"id": 2515,
"pid": 303,
"city_code": "101100402",
"city_name": "榆次区"
},
{
"_id": 1862,
"id": 2516,
"pid": 303,
"city_code": "101100412",
"city_name": "介休市"
},
{
"_id": 1863,
"id": 2517,
"pid": 303,
"city_code": "101100403",
"city_name": "榆社县"
},
{
"_id": 1864,
"id": 2518,
"pid": 303,
"city_code": "101100404",
"city_name": "左权县"
},
{
"_id": 1865,
"id": 2519,
"pid": 303,
"city_code": "101100405",
"city_name": "和顺县"
},
{
"_id": 1866,
"id": 2520,
"pid": 303,
"city_code": "101100406",
"city_name": "昔阳县"
},
{
"_id": 1867,
"id": 2521,
"pid": 303,
"city_code": "101100407",
"city_name": "寿阳县"
},
{
"_id": 1868,
"id": 2522,
"pid": 303,
"city_code": "101100408",
"city_name": "太谷县"
},
{
"_id": 1869,
"id": 2523,
"pid": 303,
"city_code": "101100409",
"city_name": "祁县"
},
{
"_id": 1870,
"id": 2524,
"pid": 303,
"city_code": "101100410",
"city_name": "平遥县"
},
{
"_id": 1871,
"id": 2525,
"pid": 303,
"city_code": "101100411",
"city_name": "灵石县"
},
{
"_id": 1872,
"id": 2527,
"pid": 304,
"city_code": "101100714",
"city_name": "侯马市"
},
{
"_id": 1873,
"id": 2528,
"pid": 304,
"city_code": "101100711",
"city_name": "霍州市"
},
{
"_id": 1874,
"id": 2529,
"pid": 304,
"city_code": "101100702",
"city_name": "曲沃县"
},
{
"_id": 1875,
"id": 2530,
"pid": 304,
"city_code": "101100713",
"city_name": "翼城县"
},
{
"_id": 1876,
"id": 2531,
"pid": 304,
"city_code": "101100707",
"city_name": "襄汾县"
},
{
"_id": 1877,
"id": 2532,
"pid": 304,
"city_code": "101100710",
"city_name": "洪洞县"
},
{
"_id": 1878,
"id": 2533,
"pid": 304,
"city_code": "101100706",
"city_name": "吉县"
},
{
"_id": 1879,
"id": 2534,
"pid": 304,
"city_code": "101100716",
"city_name": "安泽县"
},
{
"_id": 1880,
"id": 2535,
"pid": 304,
"city_code": "101100715",
"city_name": "浮山县"
},
{
"_id": 1881,
"id": 2536,
"pid": 304,
"city_code": "101100717",
"city_name": "古县"
},
{
"_id": 1882,
"id": 2537,
"pid": 304,
"city_code": "101100712",
"city_name": "乡宁县"
},
{
"_id": 1883,
"id": 2538,
"pid": 304,
"city_code": "101100705",
"city_name": "大宁县"
},
{
"_id": 1884,
"id": 2539,
"pid": 304,
"city_code": "101100704",
"city_name": "隰县"
},
{
"_id": 1885,
"id": 2540,
"pid": 304,
"city_code": "101100703",
"city_name": "永和县"
},
{
"_id": 1886,
"id": 2541,
"pid": 304,
"city_code": "101100708",
"city_name": "蒲县"
},
{
"_id": 1887,
"id": 2542,
"pid": 304,
"city_code": "101100709",
"city_name": "汾西县"
},
{
"_id": 1888,
"id": 2543,
"pid": 305,
"city_code": "101101101",
"city_name": "离石市"
},
{
"_id": 1889,
"id": 2544,
"pid": 305,
"city_code": "101101101",
"city_name": "离石区"
},
{
"_id": 1890,
"id": 2545,
"pid": 305,
"city_code": "101101110",
"city_name": "孝义市"
},
{
"_id": 1891,
"id": 2546,
"pid": 305,
"city_code": "101101111",
"city_name": "汾阳市"
},
{
"_id": 1892,
"id": 2547,
"pid": 305,
"city_code": "101101112",
"city_name": "文水县"
},
{
"_id": 1893,
"id": 2548,
"pid": 305,
"city_code": "101101113",
"city_name": "交城县"
},
{
"_id": 1894,
"id": 2549,
"pid": 305,
"city_code": "101101103",
"city_name": "兴县"
},
{
"_id": 1895,
"id": 2550,
"pid": 305,
"city_code": "101101102",
"city_name": "临县"
},
{
"_id": 1896,
"id": 2551,
"pid": 305,
"city_code": "101101105",
"city_name": "柳林县"
},
{
"_id": 1897,
"id": 2552,
"pid": 305,
"city_code": "101101106",
"city_name": "石楼县"
},
{
"_id": 1898,
"id": 2553,
"pid": 305,
"city_code": "101101104",
"city_name": "岚县"
},
{
"_id": 1899,
"id": 2554,
"pid": 305,
"city_code": "101101107",
"city_name": "方山县"
},
{
"_id": 1900,
"id": 2555,
"pid": 305,
"city_code": "101101109",
"city_name": "中阳县"
},
{
"_id": 1901,
"id": 2556,
"pid": 305,
"city_code": "101101108",
"city_name": "交口县"
},
{
"_id": 1902,
"id": 2558,
"pid": 306,
"city_code": "101100902",
"city_name": "平鲁区"
},
{
"_id": 1903,
"id": 2559,
"pid": 306,
"city_code": "101100903",
"city_name": "山阴县"
},
{
"_id": 1904,
"id": 2560,
"pid": 306,
"city_code": "101100905",
"city_name": "应县"
},
{
"_id": 1905,
"id": 2561,
"pid": 306,
"city_code": "101100904",
"city_name": "右玉县"
},
{
"_id": 1906,
"id": 2562,
"pid": 306,
"city_code": "101100906",
"city_name": "怀仁县"
},
{
"_id": 1907,
"id": 2564,
"pid": 307,
"city_code": "101101015",
"city_name": "原平市"
},
{
"_id": 1908,
"id": 2565,
"pid": 307,
"city_code": "101101002",
"city_name": "定襄县"
},
{
"_id": 1909,
"id": 2566,
"pid": 307,
"city_code": "101101003",
"city_name": "五台县"
},
{
"_id": 1910,
"id": 2567,
"pid": 307,
"city_code": "101101008",
"city_name": "代县"
},
{
"_id": 1911,
"id": 2568,
"pid": 307,
"city_code": "101101009",
"city_name": "繁峙县"
},
{
"_id": 1912,
"id": 2569,
"pid": 307,
"city_code": "101101007",
"city_name": "宁武县"
},
{
"_id": 1913,
"id": 2570,
"pid": 307,
"city_code": "101101012",
"city_name": "静乐县"
},
{
"_id": 1914,
"id": 2571,
"pid": 307,
"city_code": "101101006",
"city_name": "神池县"
},
{
"_id": 1915,
"id": 2572,
"pid": 307,
"city_code": "101101014",
"city_name": "五寨县"
},
{
"_id": 1916,
"id": 2573,
"pid": 307,
"city_code": "101101013",
"city_name": "岢岚县"
},
{
"_id": 1917,
"id": 2574,
"pid": 307,
"city_code": "101101004",
"city_name": "河曲县"
},
{
"_id": 1918,
"id": 2575,
"pid": 307,
"city_code": "101101011",
"city_name": "保德县"
},
{
"_id": 1919,
"id": 2576,
"pid": 307,
"city_code": "101101005",
"city_name": "偏关县"
},
{
"_id": 1920,
"id": 2580,
"pid": 308,
"city_code": "101100303",
"city_name": "平定县"
},
{
"_id": 1921,
"id": 2581,
"pid": 308,
"city_code": "101100302",
"city_name": "盂县"
},
{
"_id": 1922,
"id": 2583,
"pid": 309,
"city_code": "101100810",
"city_name": "永济市"
},
{
"_id": 1923,
"id": 2584,
"pid": 309,
"city_code": "101100805",
"city_name": "河津市"
},
{
"_id": 1924,
"id": 2585,
"pid": 309,
"city_code": "101100802",
"city_name": "临猗县"
},
{
"_id": 1925,
"id": 2586,
"pid": 309,
"city_code": "101100804",
"city_name": "万荣县"
},
{
"_id": 1926,
"id": 2587,
"pid": 309,
"city_code": "101100808",
"city_name": "闻喜县"
},
{
"_id": 1927,
"id": 2588,
"pid": 309,
"city_code": "101100803",
"city_name": "稷山县"
},
{
"_id": 1928,
"id": 2589,
"pid": 309,
"city_code": "101100806",
"city_name": "新绛县"
},
{
"_id": 1929,
"id": 2590,
"pid": 309,
"city_code": "101100807",
"city_name": "绛县"
},
{
"_id": 1930,
"id": 2591,
"pid": 309,
"city_code": "101100809",
"city_name": "垣曲县"
},
{
"_id": 1931,
"id": 2592,
"pid": 309,
"city_code": "101100812",
"city_name": "夏县"
},
{
"_id": 1932,
"id": 2593,
"pid": 309,
"city_code": "101100813",
"city_name": "平陆县"
},
{
"_id": 1933,
"id": 2594,
"pid": 309,
"city_code": "101100811",
"city_name": "芮城县"
},
{
"_id": 1934,
"id": 2602,
"pid": 310,
"city_code": "101110103",
"city_name": "临潼区"
},
{
"_id": 1935,
"id": 2603,
"pid": 310,
"city_code": "101110102",
"city_name": "长安区"
},
{
"_id": 1936,
"id": 2604,
"pid": 310,
"city_code": "101110104",
"city_name": "蓝田县"
},
{
"_id": 1937,
"id": 2605,
"pid": 310,
"city_code": "101110105",
"city_name": "周至县"
},
{
"_id": 1938,
"id": 2606,
"pid": 310,
"city_code": "101110106",
"city_name": "户县"
},
{
"_id": 1939,
"id": 2607,
"pid": 310,
"city_code": "101110107",
"city_name": "高陵县"
},
{
"_id": 1940,
"id": 2609,
"pid": 311,
"city_code": "101110704",
"city_name": "汉阴县"
},
{
"_id": 1941,
"id": 2610,
"pid": 311,
"city_code": "101110703",
"city_name": "石泉县"
},
{
"_id": 1942,
"id": 2611,
"pid": 311,
"city_code": "101110710",
"city_name": "宁陕县"
},
{
"_id": 1943,
"id": 2612,
"pid": 311,
"city_code": "101110702",
"city_name": "紫阳县"
},
{
"_id": 1944,
"id": 2613,
"pid": 311,
"city_code": "101110706",
"city_name": "岚皋县"
},
{
"_id": 1945,
"id": 2614,
"pid": 311,
"city_code": "101110707",
"city_name": "平利县"
},
{
"_id": 1946,
"id": 2615,
"pid": 311,
"city_code": "101110709",
"city_name": "镇坪县"
},
{
"_id": 1947,
"id": 2616,
"pid": 311,
"city_code": "101110705",
"city_name": "旬阳县"
},
{
"_id": 1948,
"id": 2617,
"pid": 311,
"city_code": "101110708",
"city_name": "白河县"
},
{
"_id": 1949,
"id": 2618,
"pid": 312,
"city_code": "101110912",
"city_name": "陈仓区"
},
{
"_id": 1950,
"id": 2621,
"pid": 312,
"city_code": "101110906",
"city_name": "凤翔县"
},
{
"_id": 1951,
"id": 2622,
"pid": 312,
"city_code": "101110905",
"city_name": "岐山县"
},
{
"_id": 1952,
"id": 2623,
"pid": 312,
"city_code": "101110907",
"city_name": "扶风县"
},
{
"_id": 1953,
"id": 2624,
"pid": 312,
"city_code": "101110908",
"city_name": "眉县"
},
{
"_id": 1954,
"id": 2625,
"pid": 312,
"city_code": "101110911",
"city_name": "陇县"
},
{
"_id": 1955,
"id": 2626,
"pid": 312,
"city_code": "101110903",
"city_name": "千阳县"
},
{
"_id": 1956,
"id": 2627,
"pid": 312,
"city_code": "101110904",
"city_name": "麟游县"
},
{
"_id": 1957,
"id": 2628,
"pid": 312,
"city_code": "101110910",
"city_name": "凤县"
},
{
"_id": 1958,
"id": 2629,
"pid": 312,
"city_code": "101110909",
"city_name": "太白县"
},
{
"_id": 1959,
"id": 2631,
"pid": 313,
"city_code": "101110810",
"city_name": "南郑县"
},
{
"_id": 1960,
"id": 2632,
"pid": 313,
"city_code": "101110806",
"city_name": "城固县"
},
{
"_id": 1961,
"id": 2633,
"pid": 313,
"city_code": "101110805",
"city_name": "洋县"
},
{
"_id": 1962,
"id": 2634,
"pid": 313,
"city_code": "101110807",
"city_name": "西乡县"
},
{
"_id": 1963,
"id": 2635,
"pid": 313,
"city_code": "101110803",
"city_name": "勉县"
},
{
"_id": 1964,
"id": 2636,
"pid": 313,
"city_code": "101110809",
"city_name": "宁强县"
},
{
"_id": 1965,
"id": 2637,
"pid": 313,
"city_code": "101110802",
"city_name": "略阳县"
},
{
"_id": 1966,
"id": 2638,
"pid": 313,
"city_code": "101110811",
"city_name": "镇巴县"
},
{
"_id": 1967,
"id": 2639,
"pid": 313,
"city_code": "101110804",
"city_name": "留坝县"
},
{
"_id": 1968,
"id": 2640,
"pid": 313,
"city_code": "101110808",
"city_name": "佛坪县"
},
{
"_id": 1969,
"id": 2641,
"pid": 314,
"city_code": "101110604",
"city_name": "商州区"
},
{
"_id": 1970,
"id": 2642,
"pid": 314,
"city_code": "101110602",
"city_name": "洛南县"
},
{
"_id": 1971,
"id": 2643,
"pid": 314,
"city_code": "101110606",
"city_name": "丹凤县"
},
{
"_id": 1972,
"id": 2644,
"pid": 314,
"city_code": "101110607",
"city_name": "商南县"
},
{
"_id": 1973,
"id": 2645,
"pid": 314,
"city_code": "101110608",
"city_name": "山阳县"
},
{
"_id": 1974,
"id": 2646,
"pid": 314,
"city_code": "101110605",
"city_name": "镇安县"
},
{
"_id": 1975,
"id": 2647,
"pid": 314,
"city_code": "101110603",
"city_name": "柞水县"
},
{
"_id": 1976,
"id": 2648,
"pid": 315,
"city_code": "101111004",
"city_name": "耀州区"
},
{
"_id": 1977,
"id": 2651,
"pid": 315,
"city_code": "101111003",
"city_name": "宜君县"
},
{
"_id": 1978,
"id": 2653,
"pid": 316,
"city_code": "101110510",
"city_name": "韩城市"
},
{
"_id": 1979,
"id": 2654,
"pid": 316,
"city_code": "101110511",
"city_name": "华阴市"
},
{
"_id": 1980,
"id": 2655,
"pid": 316,
"city_code": "101110502",
"city_name": "华县"
},
{
"_id": 1981,
"id": 2656,
"pid": 316,
"city_code": "101110503",
"city_name": "潼关县"
},
{
"_id": 1982,
"id": 2657,
"pid": 316,
"city_code": "101110504",
"city_name": "大荔县"
},
{
"_id": 1983,
"id": 2658,
"pid": 316,
"city_code": "101110509",
"city_name": "合阳县"
},
{
"_id": 1984,
"id": 2659,
"pid": 316,
"city_code": "101110508",
"city_name": "澄城县"
},
{
"_id": 1985,
"id": 2660,
"pid": 316,
"city_code": "101110507",
"city_name": "蒲城县"
},
{
"_id": 1986,
"id": 2661,
"pid": 316,
"city_code": "101110505",
"city_name": "白水县"
},
{
"_id": 1987,
"id": 2662,
"pid": 316,
"city_code": "101110506",
"city_name": "富平县"
},
{
"_id": 1988,
"id": 2666,
"pid": 317,
"city_code": "101110211",
"city_name": "兴平市"
},
{
"_id": 1989,
"id": 2667,
"pid": 317,
"city_code": "101110201",
"city_name": "三原县"
},
{
"_id": 1990,
"id": 2668,
"pid": 317,
"city_code": "101110205",
"city_name": "泾阳县"
},
{
"_id": 1991,
"id": 2669,
"pid": 317,
"city_code": "101110207",
"city_name": "乾县"
},
{
"_id": 1992,
"id": 2670,
"pid": 317,
"city_code": "101110202",
"city_name": "礼泉县"
},
{
"_id": 1993,
"id": 2671,
"pid": 317,
"city_code": "101110203",
"city_name": "永寿县"
},
{
"_id": 1994,
"id": 2672,
"pid": 317,
"city_code": "101110208",
"city_name": "彬县"
},
{
"_id": 1995,
"id": 2673,
"pid": 317,
"city_code": "101110209",
"city_name": "长武县"
},
{
"_id": 1996,
"id": 2674,
"pid": 317,
"city_code": "101110210",
"city_name": "旬邑县"
},
{
"_id": 1997,
"id": 2675,
"pid": 317,
"city_code": "101110204",
"city_name": "淳化县"
},
{
"_id": 1998,
"id": 2676,
"pid": 317,
"city_code": "101110206",
"city_name": "武功县"
},
{
"_id": 1999,
"id": 2677,
"pid": 318,
"city_code": "101110312",
"city_name": "吴起县"
},
{
"_id": 2000,
"id": 2679,
"pid": 318,
"city_code": "101110301",
"city_name": "延长县"
},
{
"_id": 2001,
"id": 2680,
"pid": 318,
"city_code": "101110302",
"city_name": "延川县"
},
{
"_id": 2002,
"id": 2681,
"pid": 318,
"city_code": "101110303",
"city_name": "子长县"
},
{
"_id": 2003,
"id": 2682,
"pid": 318,
"city_code": "101110307",
"city_name": "安塞县"
},
{
"_id": 2004,
"id": 2683,
"pid": 318,
"city_code": "101110306",
"city_name": "志丹县"
},
{
"_id": 2005,
"id": 2684,
"pid": 318,
"city_code": "101110308",
"city_name": "甘泉县"
},
{
"_id": 2006,
"id": 2685,
"pid": 318,
"city_code": "101110305",
"city_name": "富县"
},
{
"_id": 2007,
"id": 2686,
"pid": 318,
"city_code": "101110309",
"city_name": "洛川县"
},
{
"_id": 2008,
"id": 2687,
"pid": 318,
"city_code": "101110304",
"city_name": "宜川县"
},
{
"_id": 2009,
"id": 2688,
"pid": 318,
"city_code": "101110311",
"city_name": "黄龙县"
},
{
"_id": 2010,
"id": 2689,
"pid": 318,
"city_code": "101110310",
"city_name": "黄陵县"
},
{
"_id": 2011,
"id": 2690,
"pid": 319,
"city_code": "101110413",
"city_name": "榆阳区"
},
{
"_id": 2012,
"id": 2691,
"pid": 319,
"city_code": "101110403",
"city_name": "神木县"
},
{
"_id": 2013,
"id": 2692,
"pid": 319,
"city_code": "101110402",
"city_name": "府谷县"
},
{
"_id": 2014,
"id": 2693,
"pid": 319,
"city_code": "101110407",
"city_name": "横山县"
},
{
"_id": 2015,
"id": 2694,
"pid": 319,
"city_code": "101110406",
"city_name": "靖边县"
},
{
"_id": 2016,
"id": 2695,
"pid": 319,
"city_code": "101110405",
"city_name": "定边县"
},
{
"_id": 2017,
"id": 2696,
"pid": 319,
"city_code": "101110410",
"city_name": "绥德县"
},
{
"_id": 2018,
"id": 2697,
"pid": 319,
"city_code": "101110408",
"city_name": "米脂县"
},
{
"_id": 2019,
"id": 2698,
"pid": 319,
"city_code": "101110404",
"city_name": "佳县"
},
{
"_id": 2020,
"id": 2699,
"pid": 319,
"city_code": "101110411",
"city_name": "吴堡县"
},
{
"_id": 2021,
"id": 2700,
"pid": 319,
"city_code": "101110412",
"city_name": "清涧县"
},
{
"_id": 2022,
"id": 2701,
"pid": 319,
"city_code": "101110409",
"city_name": "子洲县"
},
{
"_id": 2023,
"id": 2704,
"pid": 24,
"city_code": "101020200",
"city_name": "闵行区"
},
{
"_id": 2024,
"id": 2706,
"pid": 24,
"city_code": "101021300",
"city_name": "浦东新区"
},
{
"_id": 2025,
"id": 2714,
"pid": 24,
"city_code": "101020900",
"city_name": "松江区"
},
{
"_id": 2026,
"id": 2715,
"pid": 24,
"city_code": "101020500",
"city_name": "嘉定区"
},
{
"_id": 2027,
"id": 2716,
"pid": 24,
"city_code": "101020300",
"city_name": "宝山区"
},
{
"_id": 2028,
"id": 2717,
"pid": 24,
"city_code": "101020800",
"city_name": "青浦区"
},
{
"_id": 2029,
"id": 2718,
"pid": 24,
"city_code": "101020700",
"city_name": "金山区"
},
{
"_id": 2030,
"id": 2719,
"pid": 24,
"city_code": "101021000",
"city_name": "奉贤区"
},
{
"_id": 2031,
"id": 2720,
"pid": 24,
"city_code": "101021100",
"city_name": "崇明区"
},
{
"_id": 2032,
"id": 2726,
"pid": 321,
"city_code": "101270102",
"city_name": "龙泉驿区"
},
{
"_id": 2033,
"id": 2727,
"pid": 321,
"city_code": "101270115",
"city_name": "青白江区"
},
{
"_id": 2034,
"id": 2728,
"pid": 321,
"city_code": "101270103",
"city_name": "新都区"
},
{
"_id": 2035,
"id": 2729,
"pid": 321,
"city_code": "101270104",
"city_name": "温江区"
},
{
"_id": 2036,
"id": 2732,
"pid": 321,
"city_code": "101270111",
"city_name": "都江堰市"
},
{
"_id": 2037,
"id": 2733,
"pid": 321,
"city_code": "101270112",
"city_name": "彭州市"
},
{
"_id": 2038,
"id": 2734,
"pid": 321,
"city_code": "101270113",
"city_name": "邛崃市"
},
{
"_id": 2039,
"id": 2735,
"pid": 321,
"city_code": "101270114",
"city_name": "崇州市"
},
{
"_id": 2040,
"id": 2736,
"pid": 321,
"city_code": "101270105",
"city_name": "金堂县"
},
{
"_id": 2041,
"id": 2737,
"pid": 321,
"city_code": "101270106",
"city_name": "双流县"
},
{
"_id": 2042,
"id": 2738,
"pid": 321,
"city_code": "101270107",
"city_name": "郫县"
},
{
"_id": 2043,
"id": 2739,
"pid": 321,
"city_code": "101270108",
"city_name": "大邑县"
},
{
"_id": 2044,
"id": 2740,
"pid": 321,
"city_code": "101270109",
"city_name": "蒲江县"
},
{
"_id": 2045,
"id": 2741,
"pid": 321,
"city_code": "101270110",
"city_name": "新津县"
},
{
"_id": 2046,
"id": 2754,
"pid": 322,
"city_code": "101270408",
"city_name": "江油市"
},
{
"_id": 2047,
"id": 2755,
"pid": 322,
"city_code": "101270403",
"city_name": "盐亭县"
},
{
"_id": 2048,
"id": 2756,
"pid": 322,
"city_code": "101270402",
"city_name": "三台县"
},
{
"_id": 2049,
"id": 2757,
"pid": 322,
"city_code": "101270407",
"city_name": "平武县"
},
{
"_id": 2050,
"id": 2758,
"pid": 322,
"city_code": "101270404",
"city_name": "安县"
},
{
"_id": 2051,
"id": 2759,
"pid": 322,
"city_code": "101270405",
"city_name": "梓潼县"
},
{
"_id": 2052,
"id": 2760,
"pid": 322,
"city_code": "101270406",
"city_name": "北川县"
},
{
"_id": 2053,
"id": 2761,
"pid": 323,
"city_code": "101271910",
"city_name": "马尔康县"
},
{
"_id": 2054,
"id": 2762,
"pid": 323,
"city_code": "101271902",
"city_name": "汶川县"
},
{
"_id": 2055,
"id": 2763,
"pid": 323,
"city_code": "101271903",
"city_name": "理县"
},
{
"_id": 2056,
"id": 2764,
"pid": 323,
"city_code": "101271904",
"city_name": "茂县"
},
{
"_id": 2057,
"id": 2765,
"pid": 323,
"city_code": "101271905",
"city_name": "松潘县"
},
{
"_id": 2058,
"id": 2766,
"pid": 323,
"city_code": "101271906",
"city_name": "九寨沟县"
},
{
"_id": 2059,
"id": 2767,
"pid": 323,
"city_code": "101271907",
"city_name": "金川县"
},
{
"_id": 2060,
"id": 2768,
"pid": 323,
"city_code": "101271908",
"city_name": "小金县"
},
{
"_id": 2061,
"id": 2769,
"pid": 323,
"city_code": "101271909",
"city_name": "黑水县"
},
{
"_id": 2062,
"id": 2770,
"pid": 323,
"city_code": "101271911",
"city_name": "壤塘县"
},
{
"_id": 2063,
"id": 2771,
"pid": 323,
"city_code": "101271901",
"city_name": "阿坝县"
},
{
"_id": 2064,
"id": 2772,
"pid": 323,
"city_code": "101271912",
"city_name": "若尔盖县"
},
{
"_id": 2065,
"id": 2773,
"pid": 323,
"city_code": "101271913",
"city_name": "红原县"
},
{
"_id": 2066,
"id": 2775,
"pid": 324,
"city_code": "101270902",
"city_name": "通江县"
},
{
"_id": 2067,
"id": 2776,
"pid": 324,
"city_code": "101270903",
"city_name": "南江县"
},
{
"_id": 2068,
"id": 2777,
"pid": 324,
"city_code": "101270904",
"city_name": "平昌县"
},
{
"_id": 2069,
"id": 2779,
"pid": 325,
"city_code": "101270606",
"city_name": "万源市"
},
{
"_id": 2070,
"id": 2780,
"pid": 325,
"city_code": "101270608",
"city_name": "达川区"
},
{
"_id": 2071,
"id": 2781,
"pid": 325,
"city_code": "101270602",
"city_name": "宣汉县"
},
{
"_id": 2072,
"id": 2782,
"pid": 325,
"city_code": "101270603",
"city_name": "开江县"
},
{
"_id": 2073,
"id": 2783,
"pid": 325,
"city_code": "101270604",
"city_name": "大竹县"
},
{
"_id": 2074,
"id": 2784,
"pid": 325,
"city_code": "101270605",
"city_name": "渠县"
},
{
"_id": 2075,
"id": 2786,
"pid": 326,
"city_code": "101272003",
"city_name": "广汉市"
},
{
"_id": 2076,
"id": 2787,
"pid": 326,
"city_code": "101272004",
"city_name": "什邡市"
},
{
"_id": 2077,
"id": 2788,
"pid": 326,
"city_code": "101272005",
"city_name": "绵竹市"
},
{
"_id": 2078,
"id": 2789,
"pid": 326,
"city_code": "101272006",
"city_name": "罗江县"
},
{
"_id": 2079,
"id": 2790,
"pid": 326,
"city_code": "101272002",
"city_name": "中江县"
},
{
"_id": 2080,
"id": 2791,
"pid": 327,
"city_code": "101271802",
"city_name": "康定县"
},
{
"_id": 2081,
"id": 2792,
"pid": 327,
"city_code": "101271804",
"city_name": "丹巴县"
},
{
"_id": 2082,
"id": 2793,
"pid": 327,
"city_code": "101271803",
"city_name": "泸定县"
},
{
"_id": 2083,
"id": 2794,
"pid": 327,
"city_code": "101271808",
"city_name": "炉霍县"
},
{
"_id": 2084,
"id": 2795,
"pid": 327,
"city_code": "101271805",
"city_name": "九龙县"
},
{
"_id": 2085,
"id": 2796,
"pid": 327,
"city_code": "101271801",
"city_name": "甘孜县"
},
{
"_id": 2086,
"id": 2797,
"pid": 327,
"city_code": "101271806",
"city_name": "雅江县"
},
{
"_id": 2087,
"id": 2798,
"pid": 327,
"city_code": "101271809",
"city_name": "新龙县"
},
{
"_id": 2088,
"id": 2799,
"pid": 327,
"city_code": "101271807",
"city_name": "道孚县"
},
{
"_id": 2089,
"id": 2800,
"pid": 327,
"city_code": "101271811",
"city_name": "白玉县"
},
{
"_id": 2090,
"id": 2801,
"pid": 327,
"city_code": "101271814",
"city_name": "理塘县"
},
{
"_id": 2091,
"id": 2802,
"pid": 327,
"city_code": "101271810",
"city_name": "德格县"
},
{
"_id": 2092,
"id": 2803,
"pid": 327,
"city_code": "101271816",
"city_name": "乡城县"
},
{
"_id": 2093,
"id": 2804,
"pid": 327,
"city_code": "101271812",
"city_name": "石渠县"
},
{
"_id": 2094,
"id": 2805,
"pid": 327,
"city_code": "101271817",
"city_name": "稻城县"
},
{
"_id": 2095,
"id": 2806,
"pid": 327,
"city_code": "101271813",
"city_name": "色达县"
},
{
"_id": 2096,
"id": 2807,
"pid": 327,
"city_code": "101271815",
"city_name": "巴塘县"
},
{
"_id": 2097,
"id": 2808,
"pid": 327,
"city_code": "101271818",
"city_name": "得荣县"
},
{
"_id": 2098,
"id": 2809,
"pid": 328,
"city_code": "101270801",
"city_name": "广安区"
},
{
"_id": 2099,
"id": 2810,
"pid": 328,
"city_code": "101270805",
"city_name": "华蓥市"
},
{
"_id": 2100,
"id": 2811,
"pid": 328,
"city_code": "101270802",
"city_name": "岳池县"
},
{
"_id": 2101,
"id": 2812,
"pid": 328,
"city_code": "101270803",
"city_name": "武胜县"
},
{
"_id": 2102,
"id": 2813,
"pid": 328,
"city_code": "101270804",
"city_name": "邻水县"
},
{
"_id": 2103,
"id": 2817,
"pid": 329,
"city_code": "101272102",
"city_name": "旺苍县"
},
{
"_id": 2104,
"id": 2818,
"pid": 329,
"city_code": "101272103",
"city_name": "青川县"
},
{
"_id": 2105,
"id": 2819,
"pid": 329,
"city_code": "101272104",
"city_name": "剑阁县"
},
{
"_id": 2106,
"id": 2820,
"pid": 329,
"city_code": "101272105",
"city_name": "苍溪县"
},
{
"_id": 2107,
"id": 2821,
"pid": 330,
"city_code": "101271409",
"city_name": "峨眉山市"
},
{
"_id": 2108,
"id": 2823,
"pid": 330,
"city_code": "101271402",
"city_name": "犍为县"
},
{
"_id": 2109,
"id": 2824,
"pid": 330,
"city_code": "101271403",
"city_name": "井研县"
},
{
"_id": 2110,
"id": 2825,
"pid": 330,
"city_code": "101271404",
"city_name": "夹江县"
},
{
"_id": 2111,
"id": 2826,
"pid": 330,
"city_code": "101271405",
"city_name": "沐川县"
},
{
"_id": 2112,
"id": 2827,
"pid": 330,
"city_code": "101271406",
"city_name": "峨边县"
},
{
"_id": 2113,
"id": 2828,
"pid": 330,
"city_code": "101271407",
"city_name": "马边县"
},
{
"_id": 2114,
"id": 2829,
"pid": 331,
"city_code": "101271610",
"city_name": "西昌市"
},
{
"_id": 2115,
"id": 2830,
"pid": 331,
"city_code": "101271604",
"city_name": "盐源县"
},
{
"_id": 2116,
"id": 2831,
"pid": 331,
"city_code": "101271605",
"city_name": "德昌县"
},
{
"_id": 2117,
"id": 2832,
"pid": 331,
"city_code": "101271606",
"city_name": "会理县"
},
{
"_id": 2118,
"id": 2833,
"pid": 331,
"city_code": "101271607",
"city_name": "会东县"
},
{
"_id": 2119,
"id": 2834,
"pid": 331,
"city_code": "101271608",
"city_name": "宁南县"
},
{
"_id": 2120,
"id": 2835,
"pid": 331,
"city_code": "101271609",
"city_name": "普格县"
},
{
"_id": 2121,
"id": 2836,
"pid": 331,
"city_code": "101271619",
"city_name": "布拖县"
},
{
"_id": 2122,
"id": 2837,
"pid": 331,
"city_code": "101271611",
"city_name": "金阳县"
},
{
"_id": 2123,
"id": 2838,
"pid": 331,
"city_code": "101271612",
"city_name": "昭觉县"
},
{
"_id": 2124,
"id": 2839,
"pid": 331,
"city_code": "101271613",
"city_name": "喜德县"
},
{
"_id": 2125,
"id": 2840,
"pid": 331,
"city_code": "101271614",
"city_name": "冕宁县"
},
{
"_id": 2126,
"id": 2841,
"pid": 331,
"city_code": "101271615",
"city_name": "越西县"
},
{
"_id": 2127,
"id": 2842,
"pid": 331,
"city_code": "101271616",
"city_name": "甘洛县"
},
{
"_id": 2128,
"id": 2843,
"pid": 331,
"city_code": "101271618",
"city_name": "美姑县"
},
{
"_id": 2129,
"id": 2844,
"pid": 331,
"city_code": "101271617",
"city_name": "雷波县"
},
{
"_id": 2130,
"id": 2845,
"pid": 331,
"city_code": "101271603",
"city_name": "木里县"
},
{
"_id": 2131,
"id": 2847,
"pid": 332,
"city_code": "101271502",
"city_name": "仁寿县"
},
{
"_id": 2132,
"id": 2848,
"pid": 332,
"city_code": "101271503",
"city_name": "彭山县"
},
{
"_id": 2133,
"id": 2849,
"pid": 332,
"city_code": "101271504",
"city_name": "洪雅县"
},
{
"_id": 2134,
"id": 2850,
"pid": 332,
"city_code": "101271505",
"city_name": "丹棱县"
},
{
"_id": 2135,
"id": 2851,
"pid": 332,
"city_code": "101271506",
"city_name": "青神县"
},
{
"_id": 2136,
"id": 2852,
"pid": 333,
"city_code": "101270507",
"city_name": "阆中市"
},
{
"_id": 2137,
"id": 2853,
"pid": 333,
"city_code": "101270502",
"city_name": "南部县"
},
{
"_id": 2138,
"id": 2854,
"pid": 333,
"city_code": "101270503",
"city_name": "营山县"
},
{
"_id": 2139,
"id": 2855,
"pid": 333,
"city_code": "101270504",
"city_name": "蓬安县"
},
{
"_id": 2140,
"id": 2856,
"pid": 333,
"city_code": "101270505",
"city_name": "仪陇县"
},
{
"_id": 2141,
"id": 2860,
"pid": 333,
"city_code": "101270506",
"city_name": "西充县"
},
{
"_id": 2142,
"id": 2862,
"pid": 334,
"city_code": "101271202",
"city_name": "东兴区"
},
{
"_id": 2143,
"id": 2863,
"pid": 334,
"city_code": "101271203",
"city_name": "威远县"
},
{
"_id": 2144,
"id": 2864,
"pid": 334,
"city_code": "101271204",
"city_name": "资中县"
},
{
"_id": 2145,
"id": 2865,
"pid": 334,
"city_code": "101271205",
"city_name": "隆昌县"
},
{
"_id": 2146,
"id": 2868,
"pid": 335,
"city_code": "101270202",
"city_name": "仁和区"
},
{
"_id": 2147,
"id": 2869,
"pid": 335,
"city_code": "101270203",
"city_name": "米易县"
},
{
"_id": 2148,
"id": 2870,
"pid": 335,
"city_code": "101270204",
"city_name": "盐边县"
},
{
"_id": 2149,
"id": 2873,
"pid": 336,
"city_code": "101270702",
"city_name": "蓬溪县"
},
{
"_id": 2150,
"id": 2874,
"pid": 336,
"city_code": "101270703",
"city_name": "射洪县"
},
{
"_id": 2151,
"id": 2877,
"pid": 337,
"city_code": "101271702",
"city_name": "名山县"
},
{
"_id": 2152,
"id": 2878,
"pid": 337,
"city_code": "101271703",
"city_name": "荥经县"
},
{
"_id": 2153,
"id": 2879,
"pid": 337,
"city_code": "101271704",
"city_name": "汉源县"
},
{
"_id": 2154,
"id": 2880,
"pid": 337,
"city_code": "101271705",
"city_name": "石棉县"
},
{
"_id": 2155,
"id": 2881,
"pid": 337,
"city_code": "101271706",
"city_name": "天全县"
},
{
"_id": 2156,
"id": 2882,
"pid": 337,
"city_code": "101271707",
"city_name": "芦山县"
},
{
"_id": 2157,
"id": 2883,
"pid": 337,
"city_code": "101271708",
"city_name": "宝兴县"
},
{
"_id": 2158,
"id": 2885,
"pid": 338,
"city_code": "101271103",
"city_name": "宜宾县"
},
{
"_id": 2159,
"id": 2886,
"pid": 338,
"city_code": "101271104",
"city_name": "南溪县"
},
{
"_id": 2160,
"id": 2887,
"pid": 338,
"city_code": "101271105",
"city_name": "江安县"
},
{
"_id": 2161,
"id": 2888,
"pid": 338,
"city_code": "101271106",
"city_name": "长宁县"
},
{
"_id": 2162,
"id": 2889,
"pid": 338,
"city_code": "101271107",
"city_name": "高县"
},
{
"_id": 2163,
"id": 2890,
"pid": 338,
"city_code": "101271108",
"city_name": "珙县"
},
{
"_id": 2164,
"id": 2891,
"pid": 338,
"city_code": "101271109",
"city_name": "筠连县"
},
{
"_id": 2165,
"id": 2892,
"pid": 338,
"city_code": "101271110",
"city_name": "兴文县"
},
{
"_id": 2166,
"id": 2893,
"pid": 338,
"city_code": "101271111",
"city_name": "屏山县"
},
{
"_id": 2167,
"id": 2895,
"pid": 321,
"city_code": "101271304",
"city_name": "简阳市"
},
{
"_id": 2168,
"id": 2896,
"pid": 339,
"city_code": "101271302",
"city_name": "安岳县"
},
{
"_id": 2169,
"id": 2897,
"pid": 339,
"city_code": "101271303",
"city_name": "乐至县"
},
{
"_id": 2170,
"id": 2902,
"pid": 340,
"city_code": "101270303",
"city_name": "荣县"
},
{
"_id": 2171,
"id": 2903,
"pid": 340,
"city_code": "101270302",
"city_name": "富顺县"
},
{
"_id": 2172,
"id": 2905,
"pid": 341,
"city_code": "101271007",
"city_name": "纳溪区"
},
{
"_id": 2173,
"id": 2907,
"pid": 341,
"city_code": "101271003",
"city_name": "泸县"
},
{
"_id": 2174,
"id": 2908,
"pid": 341,
"city_code": "101271004",
"city_name": "合江县"
},
{
"_id": 2175,
"id": 2909,
"pid": 341,
"city_code": "101271005",
"city_name": "叙永县"
},
{
"_id": 2176,
"id": 2910,
"pid": 341,
"city_code": "101271006",
"city_name": "古蔺县"
},
{
"_id": 2177,
"id": 2917,
"pid": 26,
"city_code": "101030400",
"city_name": "东丽区"
},
{
"_id": 2178,
"id": 2918,
"pid": 26,
"city_code": "101031000",
"city_name": "津南区"
},
{
"_id": 2179,
"id": 2919,
"pid": 26,
"city_code": "101030500",
"city_name": "西青区"
},
{
"_id": 2180,
"id": 2920,
"pid": 26,
"city_code": "101030600",
"city_name": "北辰区"
},
{
"_id": 2181,
"id": 2921,
"pid": 26,
"city_code": "101031100",
"city_name": "塘沽区"
},
{
"_id": 2182,
"id": 2922,
"pid": 26,
"city_code": "101030800",
"city_name": "汉沽区"
},
{
"_id": 2183,
"id": 2923,
"pid": 26,
"city_code": "101031200",
"city_name": "大港区"
},
{
"_id": 2184,
"id": 2924,
"pid": 26,
"city_code": "101030200",
"city_name": "武清区"
},
{
"_id": 2185,
"id": 2925,
"pid": 26,
"city_code": "101030300",
"city_name": "宝坻区"
},
{
"_id": 2186,
"id": 2927,
"pid": 26,
"city_code": "101030700",
"city_name": "宁河区"
},
{
"_id": 2187,
"id": 2928,
"pid": 26,
"city_code": "101030900",
"city_name": "静海区"
},
{
"_id": 2188,
"id": 2929,
"pid": 26,
"city_code": "101031400",
"city_name": "蓟州区"
},
{
"_id": 2189,
"id": 2931,
"pid": 343,
"city_code": "101140104",
"city_name": "林周县"
},
{
"_id": 2190,
"id": 2932,
"pid": 343,
"city_code": "101140102",
"city_name": "当雄县"
},
{
"_id": 2191,
"id": 2933,
"pid": 343,
"city_code": "101140103",
"city_name": "尼木县"
},
{
"_id": 2192,
"id": 2934,
"pid": 343,
"city_code": "101140106",
"city_name": "曲水县"
},
{
"_id": 2193,
"id": 2935,
"pid": 343,
"city_code": "101140105",
"city_name": "堆龙德庆县"
},
{
"_id": 2194,
"id": 2936,
"pid": 343,
"city_code": "101140107",
"city_name": "达孜县"
},
{
"_id": 2195,
"id": 2937,
"pid": 343,
"city_code": "101140108",
"city_name": "墨竹工卡县"
},
{
"_id": 2196,
"id": 2938,
"pid": 344,
"city_code": "101140707",
"city_name": "噶尔县"
},
{
"_id": 2197,
"id": 2939,
"pid": 344,
"city_code": "101140705",
"city_name": "普兰县"
},
{
"_id": 2198,
"id": 2940,
"pid": 344,
"city_code": "101140706",
"city_name": "札达县"
},
{
"_id": 2199,
"id": 2941,
"pid": 344,
"city_code": "101140708",
"city_name": "日土县"
},
{
"_id": 2200,
"id": 2942,
"pid": 344,
"city_code": "101140709",
"city_name": "革吉县"
},
{
"_id": 2201,
"id": 2943,
"pid": 344,
"city_code": "101140702",
"city_name": "改则县"
},
{
"_id": 2202,
"id": 2944,
"pid": 344,
"city_code": "101140710",
"city_name": "措勤县"
},
{
"_id": 2203,
"id": 2945,
"pid": 345,
"city_code": "101140501",
"city_name": "昌都县"
},
{
"_id": 2204,
"id": 2946,
"pid": 345,
"city_code": "101140509",
"city_name": "江达县"
},
{
"_id": 2205,
"id": 2947,
"pid": 345,
"city_code": "101140511",
"city_name": "贡觉县"
},
{
"_id": 2206,
"id": 2948,
"pid": 345,
"city_code": "101140503",
"city_name": "类乌齐县"
},
{
"_id": 2207,
"id": 2949,
"pid": 345,
"city_code": "101140502",
"city_name": "丁青县"
},
{
"_id": 2208,
"id": 2950,
"pid": 345,
"city_code": "101140510",
"city_name": "察雅县"
},
{
"_id": 2209,
"id": 2951,
"pid": 345,
"city_code": "101140507",
"city_name": "八宿县"
},
{
"_id": 2210,
"id": 2952,
"pid": 345,
"city_code": "101140505",
"city_name": "左贡县"
},
{
"_id": 2211,
"id": 2953,
"pid": 345,
"city_code": "101140506",
"city_name": "芒康县"
},
{
"_id": 2212,
"id": 2954,
"pid": 345,
"city_code": "101140504",
"city_name": "洛隆县"
},
{
"_id": 2213,
"id": 2955,
"pid": 345,
"city_code": "101140503",
"city_name": "边坝县"
},
{
"_id": 2214,
"id": 2956,
"pid": 346,
"city_code": "101140401",
"city_name": "林芝县"
},
{
"_id": 2215,
"id": 2957,
"pid": 346,
"city_code": "101140405",
"city_name": "工布江达县"
},
{
"_id": 2216,
"id": 2958,
"pid": 346,
"city_code": "101140403",
"city_name": "米林县"
},
{
"_id": 2217,
"id": 2959,
"pid": 346,
"city_code": "101140407",
"city_name": "墨脱县"
},
{
"_id": 2218,
"id": 2960,
"pid": 346,
"city_code": "101140402",
"city_name": "波密县"
},
{
"_id": 2219,
"id": 2961,
"pid": 346,
"city_code": "101140404",
"city_name": "察隅县"
},
{
"_id": 2220,
"id": 2962,
"pid": 346,
"city_code": "101140406",
"city_name": "朗县"
},
{
"_id": 2221,
"id": 2963,
"pid": 347,
"city_code": "101140601",
"city_name": "那曲县"
},
{
"_id": 2222,
"id": 2964,
"pid": 347,
"city_code": "101140603",
"city_name": "嘉黎县"
},
{
"_id": 2223,
"id": 2965,
"pid": 347,
"city_code": "101140607",
"city_name": "比如县"
},
{
"_id": 2224,
"id": 2966,
"pid": 347,
"city_code": "101140607",
"city_name": "聂荣县"
},
{
"_id": 2225,
"id": 2967,
"pid": 347,
"city_code": "101140605",
"city_name": "安多县"
},
{
"_id": 2226,
"id": 2968,
"pid": 347,
"city_code": "101140703",
"city_name": "申扎县"
},
{
"_id": 2227,
"id": 2969,
"pid": 347,
"city_code": "101140606",
"city_name": "索县"
},
{
"_id": 2228,
"id": 2970,
"pid": 347,
"city_code": "101140604",
"city_name": "班戈县"
},
{
"_id": 2229,
"id": 2971,
"pid": 347,
"city_code": "101140608",
"city_name": "巴青县"
},
{
"_id": 2230,
"id": 2972,
"pid": 347,
"city_code": "101140602",
"city_name": "尼玛县"
},
{
"_id": 2231,
"id": 2973,
"pid": 348,
"city_code": "101140201",
"city_name": "日喀则市"
},
{
"_id": 2232,
"id": 2974,
"pid": 348,
"city_code": "101140203",
"city_name": "南木林县"
},
{
"_id": 2233,
"id": 2975,
"pid": 348,
"city_code": "101140206",
"city_name": "江孜县"
},
{
"_id": 2234,
"id": 2976,
"pid": 348,
"city_code": "101140205",
"city_name": "定日县"
},
{
"_id": 2235,
"id": 2977,
"pid": 348,
"city_code": "101140213",
"city_name": "萨迦县"
},
{
"_id": 2236,
"id": 2978,
"pid": 348,
"city_code": "101140202",
"city_name": "拉孜县"
},
{
"_id": 2237,
"id": 2979,
"pid": 348,
"city_code": "101140211",
"city_name": "昂仁县"
},
{
"_id": 2238,
"id": 2980,
"pid": 348,
"city_code": "101140214",
"city_name": "谢通门县"
},
{
"_id": 2239,
"id": 2981,
"pid": 348,
"city_code": "101140217",
"city_name": "白朗县"
},
{
"_id": 2240,
"id": 2982,
"pid": 348,
"city_code": "101140220",
"city_name": "仁布县"
},
{
"_id": 2241,
"id": 2983,
"pid": 348,
"city_code": "101140219",
"city_name": "康马县"
},
{
"_id": 2242,
"id": 2984,
"pid": 348,
"city_code": "101140212",
"city_name": "定结县"
},
{
"_id": 2243,
"id": 2985,
"pid": 348,
"city_code": "101140208",
"city_name": "仲巴县"
},
{
"_id": 2244,
"id": 2986,
"pid": 348,
"city_code": "101140218",
"city_name": "亚东县"
},
{
"_id": 2245,
"id": 2987,
"pid": 348,
"city_code": "101140210",
"city_name": "吉隆县"
},
{
"_id": 2246,
"id": 2988,
"pid": 348,
"city_code": "101140204",
"city_name": "聂拉木县"
},
{
"_id": 2247,
"id": 2989,
"pid": 348,
"city_code": "101140209",
"city_name": "萨嘎县"
},
{
"_id": 2248,
"id": 2990,
"pid": 348,
"city_code": "101140216",
"city_name": "岗巴县"
},
{
"_id": 2249,
"id": 2991,
"pid": 349,
"city_code": "101140309",
"city_name": "乃东县"
},
{
"_id": 2250,
"id": 2992,
"pid": 349,
"city_code": "101140303",
"city_name": "扎囊县"
},
{
"_id": 2251,
"id": 2993,
"pid": 349,
"city_code": "101140302",
"city_name": "贡嘎县"
},
{
"_id": 2252,
"id": 2994,
"pid": 349,
"city_code": "101140310",
"city_name": "桑日县"
},
{
"_id": 2253,
"id": 2995,
"pid": 349,
"city_code": "101140303",
"city_name": "琼结县"
},
{
"_id": 2254,
"id": 2996,
"pid": 349,
"city_code": "101140314",
"city_name": "曲松县"
},
{
"_id": 2255,
"id": 2997,
"pid": 349,
"city_code": "101140312",
"city_name": "措美县"
},
{
"_id": 2256,
"id": 2998,
"pid": 349,
"city_code": "101140311",
"city_name": "洛扎县"
},
{
"_id": 2257,
"id": 2999,
"pid": 349,
"city_code": "101140304",
"city_name": "加查县"
},
{
"_id": 2258,
"id": 3000,
"pid": 349,
"city_code": "101140307",
"city_name": "隆子县"
},
{
"_id": 2259,
"id": 3001,
"pid": 349,
"city_code": "101140306",
"city_name": "错那县"
},
{
"_id": 2260,
"id": 3002,
"pid": 349,
"city_code": "101140305",
"city_name": "浪卡子县"
},
{
"_id": 2261,
"id": 3008,
"pid": 350,
"city_code": "101130105",
"city_name": "达坂城区"
},
{
"_id": 2262,
"id": 3010,
"pid": 350,
"city_code": "101130101",
"city_name": "乌鲁木齐县"
},
{
"_id": 2263,
"id": 3011,
"pid": 351,
"city_code": "101130801",
"city_name": "阿克苏市"
},
{
"_id": 2264,
"id": 3012,
"pid": 351,
"city_code": "101130803",
"city_name": "温宿县"
},
{
"_id": 2265,
"id": 3013,
"pid": 351,
"city_code": "101130807",
"city_name": "库车县"
},
{
"_id": 2266,
"id": 3014,
"pid": 351,
"city_code": "101130806",
"city_name": "沙雅县"
},
{
"_id": 2267,
"id": 3015,
"pid": 351,
"city_code": "101130805",
"city_name": "新和县"
},
{
"_id": 2268,
"id": 3016,
"pid": 351,
"city_code": "101130804",
"city_name": "拜城县"
},
{
"_id": 2269,
"id": 3017,
"pid": 351,
"city_code": "101130802",
"city_name": "乌什县"
},
{
"_id": 2270,
"id": 3018,
"pid": 351,
"city_code": "101130809",
"city_name": "阿瓦提县"
},
{
"_id": 2271,
"id": 3019,
"pid": 351,
"city_code": "101130808",
"city_name": "柯坪县"
},
{
"_id": 2272,
"id": 3020,
"pid": 352,
"city_code": "101130701",
"city_name": "阿拉尔市"
},
{
"_id": 2273,
"id": 3021,
"pid": 353,
"city_code": "101130601",
"city_name": "库尔勒"
},
{
"_id": 2274,
"id": 3022,
"pid": 353,
"city_code": "101130602",
"city_name": "轮台县"
},
{
"_id": 2275,
"id": 3023,
"pid": 353,
"city_code": "101130603",
"city_name": "尉犁县"
},
{
"_id": 2276,
"id": 3024,
"pid": 353,
"city_code": "101130604",
"city_name": "若羌县"
},
{
"_id": 2277,
"id": 3025,
"pid": 353,
"city_code": "101130605",
"city_name": "且末县"
},
{
"_id": 2278,
"id": 3026,
"pid": 353,
"city_code": "101130607",
"city_name": "焉耆县"
},
{
"_id": 2279,
"id": 3027,
"pid": 353,
"city_code": "101130606",
"city_name": "和静县"
},
{
"_id": 2280,
"id": 3028,
"pid": 353,
"city_code": "101130608",
"city_name": "和硕县"
},
{
"_id": 2281,
"id": 3029,
"pid": 353,
"city_code": "101130612",
"city_name": "博湖县"
},
{
"_id": 2282,
"id": 3030,
"pid": 354,
"city_code": "101131601",
"city_name": "博乐市"
},
{
"_id": 2283,
"id": 3031,
"pid": 354,
"city_code": "101131603",
"city_name": "精河县"
},
{
"_id": 2284,
"id": 3032,
"pid": 354,
"city_code": "101131602",
"city_name": "温泉县"
},
{
"_id": 2285,
"id": 3033,
"pid": 355,
"city_code": "101130402",
"city_name": "呼图壁县"
},
{
"_id": 2286,
"id": 3034,
"pid": 355,
"city_code": "101130403",
"city_name": "米泉市"
},
{
"_id": 2287,
"id": 3035,
"pid": 355,
"city_code": "101130401",
"city_name": "昌吉市"
},
{
"_id": 2288,
"id": 3036,
"pid": 355,
"city_code": "101130404",
"city_name": "阜康市"
},
{
"_id": 2289,
"id": 3037,
"pid": 355,
"city_code": "101130407",
"city_name": "玛纳斯县"
},
{
"_id": 2290,
"id": 3038,
"pid": 355,
"city_code": "101130406",
"city_name": "奇台县"
},
{
"_id": 2291,
"id": 3039,
"pid": 355,
"city_code": "101130405",
"city_name": "吉木萨尔县"
},
{
"_id": 2292,
"id": 3040,
"pid": 355,
"city_code": "101130408",
"city_name": "木垒县"
},
{
"_id": 2293,
"id": 3041,
"pid": 356,
"city_code": "101131201",
"city_name": "哈密市"
},
{
"_id": 2294,
"id": 3042,
"pid": 356,
"city_code": "101131204",
"city_name": "伊吾县"
},
{
"_id": 2295,
"id": 3043,
"pid": 356,
"city_code": "101131203",
"city_name": "巴里坤"
},
{
"_id": 2296,
"id": 3044,
"pid": 357,
"city_code": "101131301",
"city_name": "和田市"
},
{
"_id": 2297,
"id": 3045,
"pid": 357,
"city_code": "101131301",
"city_name": "和田县"
},
{
"_id": 2298,
"id": 3046,
"pid": 357,
"city_code": "101131304",
"city_name": "墨玉县"
},
{
"_id": 2299,
"id": 3047,
"pid": 357,
"city_code": "101131302",
"city_name": "皮山县"
},
{
"_id": 2300,
"id": 3048,
"pid": 357,
"city_code": "101131305",
"city_name": "洛浦县"
},
{
"_id": 2301,
"id": 3049,
"pid": 357,
"city_code": "101131303",
"city_name": "策勒县"
},
{
"_id": 2302,
"id": 3050,
"pid": 357,
"city_code": "101131307",
"city_name": "于田县"
},
{
"_id": 2303,
"id": 3051,
"pid": 357,
"city_code": "101131306",
"city_name": "民丰县"
},
{
"_id": 2304,
"id": 3052,
"pid": 358,
"city_code": "101130901",
"city_name": "喀什市"
},
{
"_id": 2305,
"id": 3053,
"pid": 358,
"city_code": "101130911",
"city_name": "疏附县"
},
{
"_id": 2306,
"id": 3054,
"pid": 358,
"city_code": "101130912",
"city_name": "疏勒县"
},
{
"_id": 2307,
"id": 3055,
"pid": 358,
"city_code": "101130902",
"city_name": "英吉沙县"
},
{
"_id": 2308,
"id": 3056,
"pid": 358,
"city_code": "101130907",
"city_name": "泽普县"
},
{
"_id": 2309,
"id": 3057,
"pid": 358,
"city_code": "101130905",
"city_name": "莎车县"
},
{
"_id": 2310,
"id": 3058,
"pid": 358,
"city_code": "101130906",
"city_name": "叶城县"
},
{
"_id": 2311,
"id": 3059,
"pid": 358,
"city_code": "101130904",
"city_name": "麦盖提县"
},
{
"_id": 2312,
"id": 3060,
"pid": 358,
"city_code": "101130909",
"city_name": "岳普湖县"
},
{
"_id": 2313,
"id": 3061,
"pid": 358,
"city_code": "101130910",
"city_name": "伽师县"
},
{
"_id": 2314,
"id": 3062,
"pid": 358,
"city_code": "101130908",
"city_name": "巴楚县"
},
{
"_id": 2315,
"id": 3063,
"pid": 358,
"city_code": "101130903",
"city_name": "塔什库尔干"
},
{
"_id": 2316,
"id": 3064,
"pid": 359,
"city_code": "101130201",
"city_name": "克拉玛依市"
},
{
"_id": 2317,
"id": 3065,
"pid": 360,
"city_code": "101131501",
"city_name": "阿图什市"
},
{
"_id": 2318,
"id": 3066,
"pid": 360,
"city_code": "101131503",
"city_name": "阿克陶县"
},
{
"_id": 2319,
"id": 3067,
"pid": 360,
"city_code": "101131504",
"city_name": "阿合奇县"
},
{
"_id": 2320,
"id": 3068,
"pid": 360,
"city_code": "101131502",
"city_name": "乌恰县"
},
{
"_id": 2321,
"id": 3069,
"pid": 361,
"city_code": "101130301",
"city_name": "石河子市"
},
{
"_id": 2322,
"id": 3071,
"pid": 363,
"city_code": "101130501",
"city_name": "吐鲁番市"
},
{
"_id": 2323,
"id": 3072,
"pid": 363,
"city_code": "101130504",
"city_name": "鄯善县"
},
{
"_id": 2324,
"id": 3073,
"pid": 363,
"city_code": "101130502",
"city_name": "托克逊县"
},
{
"_id": 2325,
"id": 3075,
"pid": 365,
"city_code": "101131401",
"city_name": "阿勒泰"
},
{
"_id": 2326,
"id": 3076,
"pid": 365,
"city_code": "101131104",
"city_name": "和布克赛尔"
},
{
"_id": 2327,
"id": 3077,
"pid": 365,
"city_code": "101131001",
"city_name": "伊宁市"
},
{
"_id": 2328,
"id": 3078,
"pid": 365,
"city_code": "101131406",
"city_name": "布尔津县"
},
{
"_id": 2329,
"id": 3079,
"pid": 365,
"city_code": "101131011",
"city_name": "奎屯市"
},
{
"_id": 2330,
"id": 3080,
"pid": 365,
"city_code": "101131106",
"city_name": "乌苏市"
},
{
"_id": 2331,
"id": 3081,
"pid": 365,
"city_code": "101131103",
"city_name": "额敏县"
},
{
"_id": 2332,
"id": 3082,
"pid": 365,
"city_code": "101131408",
"city_name": "富蕴县"
},
{
"_id": 2333,
"id": 3083,
"pid": 365,
"city_code": "101131004",
"city_name": "伊宁县"
},
{
"_id": 2334,
"id": 3084,
"pid": 365,
"city_code": "101131407",
"city_name": "福海县"
},
{
"_id": 2335,
"id": 3085,
"pid": 365,
"city_code": "101131009",
"city_name": "霍城县"
},
{
"_id": 2336,
"id": 3086,
"pid": 365,
"city_code": "101131107",
"city_name": "沙湾县"
},
{
"_id": 2337,
"id": 3087,
"pid": 365,
"city_code": "101131005",
"city_name": "巩留县"
},
{
"_id": 2338,
"id": 3088,
"pid": 365,
"city_code": "101131402",
"city_name": "哈巴河县"
},
{
"_id": 2339,
"id": 3089,
"pid": 365,
"city_code": "101131105",
"city_name": "托里县"
},
{
"_id": 2340,
"id": 3090,
"pid": 365,
"city_code": "101131409",
"city_name": "青河县"
},
{
"_id": 2341,
"id": 3091,
"pid": 365,
"city_code": "101131006",
"city_name": "新源县"
},
{
"_id": 2342,
"id": 3092,
"pid": 365,
"city_code": "101131102",
"city_name": "裕民县"
},
{
"_id": 2343,
"id": 3094,
"pid": 365,
"city_code": "101131405",
"city_name": "吉木乃县"
},
{
"_id": 2344,
"id": 3095,
"pid": 365,
"city_code": "101131007",
"city_name": "昭苏县"
},
{
"_id": 2345,
"id": 3096,
"pid": 365,
"city_code": "101131008",
"city_name": "特克斯县"
},
{
"_id": 2346,
"id": 3097,
"pid": 365,
"city_code": "101131003",
"city_name": "尼勒克县"
},
{
"_id": 2347,
"id": 3098,
"pid": 365,
"city_code": "101131002",
"city_name": "察布查尔"
},
{
"_id": 2348,
"id": 3103,
"pid": 366,
"city_code": "101290103",
"city_name": "东川区"
},
{
"_id": 2349,
"id": 3104,
"pid": 366,
"city_code": "101290112",
"city_name": "安宁市"
},
{
"_id": 2350,
"id": 3105,
"pid": 366,
"city_code": "101290108",
"city_name": "呈贡县"
},
{
"_id": 2351,
"id": 3106,
"pid": 366,
"city_code": "101290105",
"city_name": "晋宁县"
},
{
"_id": 2352,
"id": 3107,
"pid": 366,
"city_code": "101290109",
"city_name": "富民县"
},
{
"_id": 2353,
"id": 3108,
"pid": 366,
"city_code": "101290106",
"city_name": "宜良县"
},
{
"_id": 2354,
"id": 3109,
"pid": 366,
"city_code": "101290110",
"city_name": "嵩明县"
},
{
"_id": 2355,
"id": 3110,
"pid": 366,
"city_code": "101290107",
"city_name": "石林县"
},
{
"_id": 2356,
"id": 3111,
"pid": 366,
"city_code": "101290111",
"city_name": "禄劝县"
},
{
"_id": 2357,
"id": 3112,
"pid": 366,
"city_code": "101290104",
"city_name": "寻甸县"
},
{
"_id": 2358,
"id": 3113,
"pid": 367,
"city_code": "101291204",
"city_name": "兰坪县"
},
{
"_id": 2359,
"id": 3114,
"pid": 367,
"city_code": "101291205",
"city_name": "泸水县"
},
{
"_id": 2360,
"id": 3115,
"pid": 367,
"city_code": "101291203",
"city_name": "福贡县"
},
{
"_id": 2361,
"id": 3116,
"pid": 367,
"city_code": "101291207",
"city_name": "贡山县"
},
{
"_id": 2362,
"id": 3117,
"pid": 368,
"city_code": "101290912",
"city_name": "宁洱县"
},
{
"_id": 2363,
"id": 3118,
"pid": 368,
"city_code": "101290901",
"city_name": "思茅区"
},
{
"_id": 2364,
"id": 3119,
"pid": 368,
"city_code": "101290906",
"city_name": "墨江县"
},
{
"_id": 2365,
"id": 3120,
"pid": 368,
"city_code": "101290903",
"city_name": "景东县"
},
{
"_id": 2366,
"id": 3121,
"pid": 368,
"city_code": "101290902",
"city_name": "景谷县"
},
{
"_id": 2367,
"id": 3122,
"pid": 368,
"city_code": "101290911",
"city_name": "镇沅县"
},
{
"_id": 2368,
"id": 3123,
"pid": 368,
"city_code": "101290907",
"city_name": "江城县"
},
{
"_id": 2369,
"id": 3124,
"pid": 368,
"city_code": "101290908",
"city_name": "孟连县"
},
{
"_id": 2370,
"id": 3125,
"pid": 368,
"city_code": "101290904",
"city_name": "澜沧县"
},
{
"_id": 2371,
"id": 3126,
"pid": 368,
"city_code": "101290909",
"city_name": "西盟县"
},
{
"_id": 2372,
"id": 3128,
"pid": 369,
"city_code": "101291404",
"city_name": "宁蒗县"
},
{
"_id": 2373,
"id": 3130,
"pid": 369,
"city_code": "101291402",
"city_name": "永胜县"
},
{
"_id": 2374,
"id": 3131,
"pid": 369,
"city_code": "101291403",
"city_name": "华坪县"
},
{
"_id": 2375,
"id": 3133,
"pid": 370,
"city_code": "101290504",
"city_name": "施甸县"
},
{
"_id": 2376,
"id": 3134,
"pid": 370,
"city_code": "101290506",
"city_name": "腾冲县"
},
{
"_id": 2377,
"id": 3135,
"pid": 370,
"city_code": "101290503",
"city_name": "龙陵县"
},
{
"_id": 2378,
"id": 3136,
"pid": 370,
"city_code": "101290505",
"city_name": "昌宁县"
},
{
"_id": 2379,
"id": 3137,
"pid": 371,
"city_code": "101290801",
"city_name": "楚雄市"
},
{
"_id": 2380,
"id": 3138,
"pid": 371,
"city_code": "101290809",
"city_name": "双柏县"
},
{
"_id": 2381,
"id": 3139,
"pid": 371,
"city_code": "101290805",
"city_name": "牟定县"
},
{
"_id": 2382,
"id": 3140,
"pid": 371,
"city_code": "101290806",
"city_name": "南华县"
},
{
"_id": 2383,
"id": 3141,
"pid": 371,
"city_code": "101290804",
"city_name": "姚安县"
},
{
"_id": 2384,
"id": 3142,
"pid": 371,
"city_code": "101290802",
"city_name": "大姚县"
},
{
"_id": 2385,
"id": 3143,
"pid": 371,
"city_code": "101290810",
"city_name": "永仁县"
},
{
"_id": 2386,
"id": 3144,
"pid": 371,
"city_code": "101290803",
"city_name": "元谋县"
},
{
"_id": 2387,
"id": 3145,
"pid": 371,
"city_code": "101290807",
"city_name": "武定县"
},
{
"_id": 2388,
"id": 3146,
"pid": 371,
"city_code": "101290808",
"city_name": "禄丰县"
},
{
"_id": 2389,
"id": 3147,
"pid": 372,
"city_code": "101290201",
"city_name": "大理市"
},
{
"_id": 2390,
"id": 3148,
"pid": 372,
"city_code": "101290207",
"city_name": "祥云县"
},
{
"_id": 2391,
"id": 3149,
"pid": 372,
"city_code": "101290205",
"city_name": "宾川县"
},
{
"_id": 2392,
"id": 3150,
"pid": 372,
"city_code": "101290206",
"city_name": "弥渡县"
},
{
"_id": 2393,
"id": 3151,
"pid": 372,
"city_code": "101290204",
"city_name": "永平县"
},
{
"_id": 2394,
"id": 3152,
"pid": 372,
"city_code": "101290202",
"city_name": "云龙县"
},
{
"_id": 2395,
"id": 3153,
"pid": 372,
"city_code": "101290210",
"city_name": "洱源县"
},
{
"_id": 2396,
"id": 3154,
"pid": 372,
"city_code": "101290209",
"city_name": "剑川县"
},
{
"_id": 2397,
"id": 3155,
"pid": 372,
"city_code": "101290211",
"city_name": "鹤庆县"
},
{
"_id": 2398,
"id": 3156,
"pid": 372,
"city_code": "101290203",
"city_name": "漾濞县"
},
{
"_id": 2399,
"id": 3157,
"pid": 372,
"city_code": "101290212",
"city_name": "南涧县"
},
{
"_id": 2400,
"id": 3158,
"pid": 372,
"city_code": "101290208",
"city_name": "巍山县"
},
{
"_id": 2401,
"id": 3159,
"pid": 373,
"city_code": "101291508",
"city_name": "潞西市"
},
{
"_id": 2402,
"id": 3160,
"pid": 373,
"city_code": "101291506",
"city_name": "瑞丽市"
},
{
"_id": 2403,
"id": 3161,
"pid": 373,
"city_code": "101291507",
"city_name": "梁河县"
},
{
"_id": 2404,
"id": 3162,
"pid": 373,
"city_code": "101291504",
"city_name": "盈江县"
},
{
"_id": 2405,
"id": 3163,
"pid": 373,
"city_code": "101291503",
"city_name": "陇川县"
},
{
"_id": 2406,
"id": 3164,
"pid": 374,
"city_code": "101291301",
"city_name": "香格里拉县"
},
{
"_id": 2407,
"id": 3165,
"pid": 374,
"city_code": "101291302",
"city_name": "德钦县"
},
{
"_id": 2408,
"id": 3166,
"pid": 374,
"city_code": "101291303",
"city_name": "维西县"
},
{
"_id": 2409,
"id": 3167,
"pid": 375,
"city_code": "101290311",
"city_name": "泸西县"
},
{
"_id": 2410,
"id": 3168,
"pid": 375,
"city_code": "101290309",
"city_name": "蒙自市"
},
{
"_id": 2411,
"id": 3169,
"pid": 375,
"city_code": "101290308",
"city_name": "个旧市"
},
{
"_id": 2412,
"id": 3170,
"pid": 375,
"city_code": "101290307",
"city_name": "开远市"
},
{
"_id": 2413,
"id": 3171,
"pid": 375,
"city_code": "101290306",
"city_name": "绿春县"
},
{
"_id": 2414,
"id": 3172,
"pid": 375,
"city_code": "101290303",
"city_name": "建水县"
},
{
"_id": 2415,
"id": 3173,
"pid": 375,
"city_code": "101290302",
"city_name": "石屏县"
},
{
"_id": 2416,
"id": 3174,
"pid": 375,
"city_code": "101290304",
"city_name": "弥勒县"
},
{
"_id": 2417,
"id": 3175,
"pid": 375,
"city_code": "101290305",
"city_name": "元阳县"
},
{
"_id": 2418,
"id": 3176,
"pid": 375,
"city_code": "101290301",
"city_name": "红河县"
},
{
"_id": 2419,
"id": 3177,
"pid": 375,
"city_code": "101290312",
"city_name": "金平县"
},
{
"_id": 2420,
"id": 3178,
"pid": 375,
"city_code": "101290313",
"city_name": "河口县"
},
{
"_id": 2421,
"id": 3179,
"pid": 375,
"city_code": "101290310",
"city_name": "屏边县"
},
{
"_id": 2422,
"id": 3181,
"pid": 376,
"city_code": "101291105",
"city_name": "凤庆县"
},
{
"_id": 2423,
"id": 3182,
"pid": 376,
"city_code": "101291107",
"city_name": "云县"
},
{
"_id": 2424,
"id": 3183,
"pid": 376,
"city_code": "101291106",
"city_name": "永德县"
},
{
"_id": 2425,
"id": 3184,
"pid": 376,
"city_code": "101291108",
"city_name": "镇康县"
},
{
"_id": 2426,
"id": 3185,
"pid": 376,
"city_code": "101291104",
"city_name": "双江县"
},
{
"_id": 2427,
"id": 3186,
"pid": 376,
"city_code": "101291103",
"city_name": "耿马县"
},
{
"_id": 2428,
"id": 3187,
"pid": 376,
"city_code": "101291102",
"city_name": "沧源县"
},
{
"_id": 2429,
"id": 3189,
"pid": 377,
"city_code": "101290409",
"city_name": "宣威市"
},
{
"_id": 2430,
"id": 3190,
"pid": 377,
"city_code": "101290405",
"city_name": "马龙县"
},
{
"_id": 2431,
"id": 3191,
"pid": 377,
"city_code": "101290403",
"city_name": "陆良县"
},
{
"_id": 2432,
"id": 3192,
"pid": 377,
"city_code": "101290406",
"city_name": "师宗县"
},
{
"_id": 2433,
"id": 3193,
"pid": 377,
"city_code": "101290407",
"city_name": "罗平县"
},
{
"_id": 2434,
"id": 3194,
"pid": 377,
"city_code": "101290404",
"city_name": "富源县"
},
{
"_id": 2435,
"id": 3195,
"pid": 377,
"city_code": "101290408",
"city_name": "会泽县"
},
{
"_id": 2436,
"id": 3196,
"pid": 377,
"city_code": "101290402",
"city_name": "沾益县"
},
{
"_id": 2437,
"id": 3197,
"pid": 378,
"city_code": "101290601",
"city_name": "文山县"
},
{
"_id": 2438,
"id": 3198,
"pid": 378,
"city_code": "101290605",
"city_name": "砚山县"
},
{
"_id": 2439,
"id": 3199,
"pid": 378,
"city_code": "101290602",
"city_name": "西畴县"
},
{
"_id": 2440,
"id": 3200,
"pid": 378,
"city_code": "101290604",
"city_name": "麻栗坡县"
},
{
"_id": 2441,
"id": 3201,
"pid": 378,
"city_code": "101290603",
"city_name": "马关县"
},
{
"_id": 2442,
"id": 3202,
"pid": 378,
"city_code": "101290606",
"city_name": "丘北县"
},
{
"_id": 2443,
"id": 3203,
"pid": 378,
"city_code": "101290607",
"city_name": "广南县"
},
{
"_id": 2444,
"id": 3204,
"pid": 378,
"city_code": "101290608",
"city_name": "富宁县"
},
{
"_id": 2445,
"id": 3205,
"pid": 379,
"city_code": "101291601",
"city_name": "景洪市"
},
{
"_id": 2446,
"id": 3206,
"pid": 379,
"city_code": "101291603",
"city_name": "勐海县"
},
{
"_id": 2447,
"id": 3207,
"pid": 379,
"city_code": "101291605",
"city_name": "勐腊县"
},
{
"_id": 2448,
"id": 3209,
"pid": 380,
"city_code": "101290703",
"city_name": "江川县"
},
{
"_id": 2449,
"id": 3210,
"pid": 380,
"city_code": "101290702",
"city_name": "澄江县"
},
{
"_id": 2450,
"id": 3211,
"pid": 380,
"city_code": "101290704",
"city_name": "通海县"
},
{
"_id": 2451,
"id": 3212,
"pid": 380,
"city_code": "101290705",
"city_name": "华宁县"
},
{
"_id": 2452,
"id": 3213,
"pid": 380,
"city_code": "101290707",
"city_name": "易门县"
},
{
"_id": 2453,
"id": 3214,
"pid": 380,
"city_code": "101290708",
"city_name": "峨山县"
},
{
"_id": 2454,
"id": 3215,
"pid": 380,
"city_code": "101290706",
"city_name": "新平县"
},
{
"_id": 2455,
"id": 3216,
"pid": 380,
"city_code": "101290709",
"city_name": "元江县"
},
{
"_id": 2456,
"id": 3218,
"pid": 381,
"city_code": "101291002",
"city_name": "鲁甸县"
},
{
"_id": 2457,
"id": 3219,
"pid": 381,
"city_code": "101291006",
"city_name": "巧家县"
},
{
"_id": 2458,
"id": 3220,
"pid": 381,
"city_code": "101291009",
"city_name": "盐津县"
},
{
"_id": 2459,
"id": 3221,
"pid": 381,
"city_code": "101291010",
"city_name": "大关县"
},
{
"_id": 2460,
"id": 3222,
"pid": 381,
"city_code": "101291008",
"city_name": "永善县"
},
{
"_id": 2461,
"id": 3223,
"pid": 381,
"city_code": "101291007",
"city_name": "绥江县"
},
{
"_id": 2462,
"id": 3224,
"pid": 381,
"city_code": "101291004",
"city_name": "镇雄县"
},
{
"_id": 2463,
"id": 3225,
"pid": 381,
"city_code": "101291003",
"city_name": "彝良县"
},
{
"_id": 2464,
"id": 3226,
"pid": 381,
"city_code": "101291005",
"city_name": "威信县"
},
{
"_id": 2465,
"id": 3227,
"pid": 381,
"city_code": "101291011",
"city_name": "水富县"
},
{
"_id": 2466,
"id": 3234,
"pid": 382,
"city_code": "101210102",
"city_name": "萧山区"
},
{
"_id": 2467,
"id": 3235,
"pid": 382,
"city_code": "101210106",
"city_name": "余杭区"
},
{
"_id": 2468,
"id": 3237,
"pid": 382,
"city_code": "101210105",
"city_name": "建德市"
},
{
"_id": 2469,
"id": 3238,
"pid": 382,
"city_code": "101210108",
"city_name": "富阳区"
},
{
"_id": 2470,
"id": 3239,
"pid": 382,
"city_code": "101210107",
"city_name": "临安市"
},
{
"_id": 2471,
"id": 3240,
"pid": 382,
"city_code": "101210103",
"city_name": "桐庐县"
},
{
"_id": 2472,
"id": 3241,
"pid": 382,
"city_code": "101210104",
"city_name": "淳安县"
},
{
"_id": 2473,
"id": 3244,
"pid": 383,
"city_code": "101210204",
"city_name": "德清县"
},
{
"_id": 2474,
"id": 3245,
"pid": 383,
"city_code": "101210202",
"city_name": "长兴县"
},
{
"_id": 2475,
"id": 3246,
"pid": 383,
"city_code": "101210203",
"city_name": "安吉县"
},
{
"_id": 2476,
"id": 3249,
"pid": 384,
"city_code": "101210303",
"city_name": "海宁市"
},
{
"_id": 2477,
"id": 3250,
"pid": 384,
"city_code": "101210302",
"city_name": "嘉善县"
},
{
"_id": 2478,
"id": 3251,
"pid": 384,
"city_code": "101210305",
"city_name": "平湖市"
},
{
"_id": 2479,
"id": 3252,
"pid": 384,
"city_code": "101210304",
"city_name": "桐乡市"
},
{
"_id": 2480,
"id": 3253,
"pid": 384,
"city_code": "101210306",
"city_name": "海盐县"
},
{
"_id": 2481,
"id": 3256,
"pid": 385,
"city_code": "101210903",
"city_name": "兰溪市"
},
{
"_id": 2482,
"id": 3257,
"pid": 385,
"city_code": "101210904",
"city_name": "义乌市"
},
{
"_id": 2483,
"id": 3264,
"pid": 385,
"city_code": "101210905",
"city_name": "东阳市"
},
{
"_id": 2484,
"id": 3265,
"pid": 385,
"city_code": "101210907",
"city_name": "永康市"
},
{
"_id": 2485,
"id": 3266,
"pid": 385,
"city_code": "101210906",
"city_name": "武义县"
},
{
"_id": 2486,
"id": 3267,
"pid": 385,
"city_code": "101210902",
"city_name": "浦江县"
},
{
"_id": 2487,
"id": 3268,
"pid": 385,
"city_code": "101210908",
"city_name": "磐安县"
},
{
"_id": 2488,
"id": 3270,
"pid": 386,
"city_code": "101210803",
"city_name": "龙泉市"
},
{
"_id": 2489,
"id": 3271,
"pid": 386,
"city_code": "101210805",
"city_name": "青田县"
},
{
"_id": 2490,
"id": 3272,
"pid": 386,
"city_code": "101210804",
"city_name": "缙云县"
},
{
"_id": 2491,
"id": 3273,
"pid": 386,
"city_code": "101210802",
"city_name": "遂昌县"
},
{
"_id": 2492,
"id": 3274,
"pid": 386,
"city_code": "101210808",
"city_name": "松阳县"
},
{
"_id": 2493,
"id": 3275,
"pid": 386,
"city_code": "101210806",
"city_name": "云和县"
},
{
"_id": 2494,
"id": 3276,
"pid": 386,
"city_code": "101210807",
"city_name": "庆元县"
},
{
"_id": 2495,
"id": 3277,
"pid": 386,
"city_code": "101210809",
"city_name": "景宁县"
},
{
"_id": 2496,
"id": 3281,
"pid": 387,
"city_code": "101210412",
"city_name": "镇海区"
},
{
"_id": 2497,
"id": 3282,
"pid": 387,
"city_code": "101210410",
"city_name": "北仑区"
},
{
"_id": 2498,
"id": 3283,
"pid": 387,
"city_code": "101210411",
"city_name": "鄞州区"
},
{
"_id": 2499,
"id": 3284,
"pid": 387,
"city_code": "101210404",
"city_name": "余姚市"
},
{
"_id": 2500,
"id": 3285,
"pid": 387,
"city_code": "101210403",
"city_name": "慈溪市"
},
{
"_id": 2501,
"id": 3286,
"pid": 387,
"city_code": "101210405",
"city_name": "奉化区"
},
{
"_id": 2502,
"id": 3287,
"pid": 387,
"city_code": "101210406",
"city_name": "象山县"
},
{
"_id": 2503,
"id": 3288,
"pid": 387,
"city_code": "101210408",
"city_name": "宁海县"
},
{
"_id": 2504,
"id": 3290,
"pid": 388,
"city_code": "101210503",
"city_name": "上虞区"
},
{
"_id": 2505,
"id": 3291,
"pid": 388,
"city_code": "101210505",
"city_name": "嵊州市"
},
{
"_id": 2506,
"id": 3292,
"pid": 388,
"city_code": "101210501",
"city_name": "绍兴县"
},
{
"_id": 2507,
"id": 3293,
"pid": 388,
"city_code": "101210504",
"city_name": "新昌县"
},
{
"_id": 2508,
"id": 3294,
"pid": 388,
"city_code": "101210502",
"city_name": "诸暨市"
},
{
"_id": 2509,
"id": 3295,
"pid": 389,
"city_code": "101210611",
"city_name": "椒江区"
},
{
"_id": 2510,
"id": 3296,
"pid": 389,
"city_code": "101210612",
"city_name": "黄岩区"
},
{
"_id": 2511,
"id": 3297,
"pid": 389,
"city_code": "101210613",
"city_name": "路桥区"
},
{
"_id": 2512,
"id": 3298,
"pid": 389,
"city_code": "101210607",
"city_name": "温岭市"
},
{
"_id": 2513,
"id": 3299,
"pid": 389,
"city_code": "101210610",
"city_name": "临海市"
},
{
"_id": 2514,
"id": 3300,
"pid": 389,
"city_code": "101210603",
"city_name": "玉环县"
},
{
"_id": 2515,
"id": 3301,
"pid": 389,
"city_code": "101210604",
"city_name": "三门县"
},
{
"_id": 2516,
"id": 3302,
"pid": 389,
"city_code": "101210605",
"city_name": "天台县"
},
{
"_id": 2517,
"id": 3303,
"pid": 389,
"city_code": "101210606",
"city_name": "仙居县"
},
{
"_id": 2518,
"id": 3307,
"pid": 390,
"city_code": "101210705",
"city_name": "瑞安市"
},
{
"_id": 2519,
"id": 3308,
"pid": 390,
"city_code": "101210707",
"city_name": "乐清市"
},
{
"_id": 2520,
"id": 3309,
"pid": 390,
"city_code": "101210706",
"city_name": "洞头区"
},
{
"_id": 2521,
"id": 3310,
"pid": 390,
"city_code": "101210708",
"city_name": "永嘉县"
},
{
"_id": 2522,
"id": 3311,
"pid": 390,
"city_code": "101210704",
"city_name": "平阳县"
},
{
"_id": 2523,
"id": 3312,
"pid": 390,
"city_code": "101210709",
"city_name": "苍南县"
},
{
"_id": 2524,
"id": 3313,
"pid": 390,
"city_code": "101210703",
"city_name": "文成县"
},
{
"_id": 2525,
"id": 3314,
"pid": 390,
"city_code": "101210702",
"city_name": "泰顺县"
},
{
"_id": 2526,
"id": 3315,
"pid": 391,
"city_code": "101211106",
"city_name": "定海区"
},
{
"_id": 2527,
"id": 3316,
"pid": 391,
"city_code": "101211105",
"city_name": "普陀区"
},
{
"_id": 2528,
"id": 3317,
"pid": 391,
"city_code": "101211104",
"city_name": "岱山县"
},
{
"_id": 2529,
"id": 3318,
"pid": 391,
"city_code": "101211102",
"city_name": "嵊泗县"
},
{
"_id": 2530,
"id": 3319,
"pid": 392,
"city_code": "101211006",
"city_name": "衢江区"
},
{
"_id": 2531,
"id": 3320,
"pid": 392,
"city_code": "101211005",
"city_name": "江山市"
},
{
"_id": 2532,
"id": 3321,
"pid": 392,
"city_code": "101211002",
"city_name": "常山县"
},
{
"_id": 2533,
"id": 3322,
"pid": 392,
"city_code": "101211003",
"city_name": "开化县"
},
{
"_id": 2534,
"id": 3323,
"pid": 392,
"city_code": "101211004",
"city_name": "龙游县"
},
{
"_id": 2535,
"id": 3324,
"pid": 31,
"city_code": "101040300",
"city_name": "合川区"
},
{
"_id": 2536,
"id": 3325,
"pid": 31,
"city_code": "101040500",
"city_name": "江津区"
},
{
"_id": 2537,
"id": 3326,
"pid": 31,
"city_code": "101040400",
"city_name": "南川区"
},
{
"_id": 2538,
"id": 3327,
"pid": 31,
"city_code": "101040200",
"city_name": "永川区"
},
{
"_id": 2539,
"id": 3329,
"pid": 31,
"city_code": "101040700",
"city_name": "渝北区"
},
{
"_id": 2540,
"id": 3330,
"pid": 31,
"city_code": "101040600",
"city_name": "万盛区"
},
{
"_id": 2541,
"id": 3332,
"pid": 31,
"city_code": "101041300",
"city_name": "万州区"
},
{
"_id": 2542,
"id": 3333,
"pid": 31,
"city_code": "101040800",
"city_name": "北碚区"
},
{
"_id": 2543,
"id": 3334,
"pid": 31,
"city_code": "101043700",
"city_name": "沙坪坝区"
},
{
"_id": 2544,
"id": 3335,
"pid": 31,
"city_code": "101040900",
"city_name": "巴南区"
},
{
"_id": 2545,
"id": 3336,
"pid": 31,
"city_code": "101041400",
"city_name": "涪陵区"
},
{
"_id": 2546,
"id": 3340,
"pid": 31,
"city_code": "101041100",
"city_name": "黔江区"
},
{
"_id": 2547,
"id": 3341,
"pid": 31,
"city_code": "101041000",
"city_name": "长寿区"
},
{
"_id": 2548,
"id": 3343,
"pid": 31,
"city_code": "101043300",
"city_name": "綦江区"
},
{
"_id": 2549,
"id": 3344,
"pid": 31,
"city_code": "101042100",
"city_name": "潼南区"
},
{
"_id": 2550,
"id": 3345,
"pid": 31,
"city_code": "101042800",
"city_name": "铜梁区"
},
{
"_id": 2551,
"id": 3346,
"pid": 31,
"city_code": "101042600",
"city_name": "大足县"
},
{
"_id": 2552,
"id": 3347,
"pid": 31,
"city_code": "101042700",
"city_name": "荣昌区"
},
{
"_id": 2553,
"id": 3348,
"pid": 31,
"city_code": "101042900",
"city_name": "璧山区"
},
{
"_id": 2554,
"id": 3349,
"pid": 31,
"city_code": "101042200",
"city_name": "垫江县"
},
{
"_id": 2555,
"id": 3350,
"pid": 31,
"city_code": "101043100",
"city_name": "武隆县"
},
{
"_id": 2556,
"id": 3351,
"pid": 31,
"city_code": "101043000",
"city_name": "丰都县"
},
{
"_id": 2557,
"id": 3352,
"pid": 31,
"city_code": "101041600",
"city_name": "城口县"
},
{
"_id": 2558,
"id": 3353,
"pid": 31,
"city_code": "101042300",
"city_name": "梁平县"
},
{
"_id": 2559,
"id": 3354,
"pid": 31,
"city_code": "101041500",
"city_name": "开县"
},
{
"_id": 2560,
"id": 3355,
"pid": 31,
"city_code": "101041800",
"city_name": "巫溪县"
},
{
"_id": 2561,
"id": 3356,
"pid": 31,
"city_code": "101042000",
"city_name": "巫山县"
},
{
"_id": 2562,
"id": 3357,
"pid": 31,
"city_code": "101041900",
"city_name": "奉节县"
},
{
"_id": 2563,
"id": 3358,
"pid": 31,
"city_code": "101041700",
"city_name": "云阳县"
},
{
"_id": 2564,
"id": 3359,
"pid": 31,
"city_code": "101042400",
"city_name": "忠县"
},
{
"_id": 2565,
"id": 3360,
"pid": 31,
"city_code": "101042500",
"city_name": "石柱县"
},
{
"_id": 2566,
"id": 3361,
"pid": 31,
"city_code": "101043200",
"city_name": "彭水县"
},
{
"_id": 2567,
"id": 3362,
"pid": 31,
"city_code": "101043400",
"city_name": "酉阳县"
},
{
"_id": 2568,
"id": 3363,
"pid": 31,
"city_code": "101043600",
"city_name": "秀山县"
},
{
"_id": 2569,
"id": 3368,
"pid": 32,
"city_code": "101320102",
"city_name": "九龙城区"
},
{
"_id": 2570,
"id": 3383,
"pid": 34,
"city_code": "101340101",
"city_name": "台北"
},
{
"_id": 2571,
"id": 3384,
"pid": 34,
"city_code": "101340201",
"city_name": "高雄"
},
{
"_id": 2572,
"id": 3385,
"pid": 34,
"city_code": "CHTW0006",
"city_name": "基隆"
},
{
"_id": 2573,
"id": 3386,
"pid": 34,
"city_code": "101340401",
"city_name": "台中"
},
{
"_id": 2574,
"id": 3387,
"pid": 34,
"city_code": "101340301",
"city_name": "台南"
},
{
"_id": 2575,
"id": 3388,
"pid": 34,
"city_code": "101340103",
"city_name": "新竹"
},
{
"_id": 2576,
"id": 3389,
"pid": 34,
"city_code": "101340901",
"city_name": "嘉义"
},
{
"_id": 2577,
"id": 3390,
"pid": 34,
"city_code": "101340701",
"city_name": "宜兰县"
},
{
"_id": 2578,
"id": 3391,
"pid": 34,
"city_code": "101340102",
"city_name": "桃园县"
},
{
"_id": 2579,
"id": 3392,
"pid": 34,
"city_code": "CHTW0016",
"city_name": "苗栗县"
},
{
"_id": 2580,
"id": 3393,
"pid": 34,
"city_code": "CHTW0017",
"city_name": "彰化县"
},
{
"_id": 2581,
"id": 3394,
"pid": 34,
"city_code": "101340404",
"city_name": "南投县"
},
{
"_id": 2582,
"id": 3395,
"pid": 34,
"city_code": "101340406",
"city_name": "云林县"
},
{
"_id": 2583,
"id": 3396,
"pid": 34,
"city_code": "101340205",
"city_name": "屏东县"
},
{
"_id": 2584,
"id": 3397,
"pid": 34,
"city_code": "101341101",
"city_name": "台东县"
},
{
"_id": 2585,
"id": 3398,
"pid": 34,
"city_code": "101340405",
"city_name": "花莲县"
},
{
"_id": 2586,
"id": 3400,
"pid": 2,
"city_code": "101220101",
"city_name": "合肥"
},
{
"_id": 2587,
"id": 3405,
"pid": 3400,
"city_code": "101220102",
"city_name": "长丰县"
},
{
"_id": 2588,
"id": 3406,
"pid": 3400,
"city_code": "101220103",
"city_name": "肥东县"
},
{
"_id": 2589,
"id": 3407,
"pid": 3400,
"city_code": "101220104",
"city_name": "肥西县"
},
{
"_id": 2590,
"id": 3259,
"pid": 168,
"city_code": "101050708",
"city_name": "加格达奇区"
},
{
"_id": 2591,
"id": 3261,
"pid": 168,
"city_code": "101050706",
"city_name": "新林区"
},
{
"_id": 2592,
"id": 3262,
"pid": 168,
"city_code": "101050705",
"city_name": "呼中区"
},
{
"_id": 2593,
"id": 1856,
"pid": 365,
"city_code": "101131101",
"city_name": "塔城市"
},
{
"_id": 2594,
"id": 3657,
"pid": 28,
"city_code": "",
"city_name": "北屯"
},
{
"_id": 2595,
"id": 3661,
"pid": 8,
"city_code": "",
"city_name": "三沙"
}
]
"""
def print_c():
    """Deserialise the module-level JSON string ``a`` (the city table) and return it."""
    decoded = json.loads(a)
    return decoded
| true
| true
|
f71a872cde99c049e202b8d9270f4ff266420483
| 3,749
|
gyp
|
Python
|
sync/sync.gyp
|
nagineni/chromium-crosswalk
|
5725642f1c67d0f97e8613ec1c3e8107ab53fdf8
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 231
|
2015-01-08T09:04:44.000Z
|
2021-12-30T03:03:10.000Z
|
sync/sync.gyp
|
j4ckfrost/android_external_chromium_org
|
a1a3dad8b08d1fcf6b6b36c267158ed63217c780
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2017-02-14T21:55:58.000Z
|
2017-02-14T21:55:58.000Z
|
sync/sync.gyp
|
j4ckfrost/android_external_chromium_org
|
a1a3dad8b08d1fcf6b6b36c267158ed63217c780
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 268
|
2015-01-21T05:53:28.000Z
|
2022-03-25T22:09:01.000Z
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'includes': [
'sync_android.gypi',
'sync_tests.gypi',
],
'conditions': [
# Notes:
# 1) In static mode, the public 'sync' target has a target type of 'none',
# and is composed of the static library targets 'sync_api', 'sync_core',
# 'sync_internal_api', 'sync_notifier', and 'sync_proto'.
# 2) In component mode, we build the public 'sync' target into a single DLL,
# which includes the contents of sync_api.gypi, sync_core.gypi,
# sync_internal_api.gypi, sync_notifier.gypi, and sync_proto.gypi.
# 3) All external targets that depend on anything in sync/ must simply
# declare a dependency on 'sync.gyp:sync'
['component=="static_library"', {
'targets': [
# The public sync static library target.
{
'target_name': 'sync',
'type': 'none',
'dependencies': [
'sync_api',
'sync_core',
'sync_internal_api',
'sync_notifier',
'sync_proto',
],
'export_dependent_settings': [
'sync_notifier',
'sync_proto',
],
},
# The sync external API library.
{
'target_name': 'sync_api',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_api.gypi',
],
'dependencies': [
'sync_internal_api',
'sync_proto',
],
},
# The core sync library.
{
'target_name': 'sync_core',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_core.gypi',
],
'dependencies': [
'sync_notifier',
'sync_proto',
],
'export_dependent_settings': [
'sync_notifier',
'sync_proto',
],
},
# The sync internal API library.
{
'target_name': 'sync_internal_api',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_internal_api.gypi',
],
'dependencies': [
'sync_core',
'sync_notifier',
'sync_proto',
],
'export_dependent_settings': [
'sync_core',
'sync_proto',
],
},
# The sync notifications library.
{
'target_name': 'sync_notifier',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_notifier.gypi',
],
},
# The sync protocol buffer library.
{
'target_name': 'sync_proto',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_proto.gypi',
],
},
],
},
{ # component != static_library
'targets': [
# The public sync shared library target.
{
'target_name': 'sync',
'type': 'shared_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_api.gypi',
'sync_core.gypi',
'sync_internal_api.gypi',
'sync_notifier.gypi',
'sync_proto.gypi',
],
},
],
}],
],
}
| 27.77037
| 80
| 0.497999
|
{
'variables': {
'chromium_code': 1,
},
'includes': [
'sync_android.gypi',
'sync_tests.gypi',
],
'conditions': [
['component=="static_library"', {
'targets': [
{
'target_name': 'sync',
'type': 'none',
'dependencies': [
'sync_api',
'sync_core',
'sync_internal_api',
'sync_notifier',
'sync_proto',
],
'export_dependent_settings': [
'sync_notifier',
'sync_proto',
],
},
{
'target_name': 'sync_api',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_api.gypi',
],
'dependencies': [
'sync_internal_api',
'sync_proto',
],
},
{
'target_name': 'sync_core',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_core.gypi',
],
'dependencies': [
'sync_notifier',
'sync_proto',
],
'export_dependent_settings': [
'sync_notifier',
'sync_proto',
],
},
{
'target_name': 'sync_internal_api',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_internal_api.gypi',
],
'dependencies': [
'sync_core',
'sync_notifier',
'sync_proto',
],
'export_dependent_settings': [
'sync_core',
'sync_proto',
],
},
{
'target_name': 'sync_notifier',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_notifier.gypi',
],
},
{
'target_name': 'sync_proto',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_proto.gypi',
],
},
],
},
{
'targets': [
{
'target_name': 'sync',
'type': 'shared_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_api.gypi',
'sync_core.gypi',
'sync_internal_api.gypi',
'sync_notifier.gypi',
'sync_proto.gypi',
],
},
],
}],
],
}
| true
| true
|
f71a87d374e64809b39fcfe0bfb79d5e0281482c
| 641
|
py
|
Python
|
dkey/__init__.py
|
NOhs/dkey
|
5305e0028a858d7ec7cdf4889783650b026ad4f5
|
[
"MIT"
] | 2
|
2019-01-18T19:53:07.000Z
|
2019-02-01T12:30:45.000Z
|
dkey/__init__.py
|
NOhs/dkey
|
5305e0028a858d7ec7cdf4889783650b026ad4f5
|
[
"MIT"
] | 15
|
2019-01-14T18:11:20.000Z
|
2019-03-30T14:22:35.000Z
|
dkey/__init__.py
|
NOhs/dkey
|
5305e0028a858d7ec7cdf4889783650b026ad4f5
|
[
"MIT"
] | 1
|
2019-02-01T21:51:15.000Z
|
2019-02-01T21:51:15.000Z
|
"""Module containing tools to deprecate the use of selected keys in a given dictionary.
This module provides:
deprecate_keys
==============
Class to wrap a dict to deprecate some keys in it.
dkey
====
Function to generate deprecated keys.
__version__
===========
A string indicating which version of dkey is currently used.
version_info
============
A tuple containing the currently used version.
"""
from ._dkey import deprecate_keys as deprecate_keys
from ._dkey import dkey as dkey
from pbr.version import VersionInfo
_v = VersionInfo('mgen').semantic_version()
__version__ = _v.release_string()
version_info = _v.version_tuple()
| 21.366667
| 87
| 0.74415
|
from ._dkey import deprecate_keys as deprecate_keys
from ._dkey import dkey as dkey
from pbr.version import VersionInfo
_v = VersionInfo('mgen').semantic_version()
__version__ = _v.release_string()
version_info = _v.version_tuple()
| true
| true
|
f71a87fef420481a4397ce77f7a2bd37c708dd91
| 6,159
|
py
|
Python
|
pydec/math/kd_tree.py
|
hirani/pydec
|
0574d1148952510b0e59b1f5cb1d9a673193be7f
|
[
"BSD-3-Clause"
] | 49
|
2016-07-03T14:40:48.000Z
|
2022-03-08T01:33:03.000Z
|
pydec/math/kd_tree.py
|
hirani/pydec
|
0574d1148952510b0e59b1f5cb1d9a673193be7f
|
[
"BSD-3-Clause"
] | 4
|
2016-09-16T18:51:06.000Z
|
2020-06-20T03:53:24.000Z
|
pydec/math/kd_tree.py
|
hirani/pydec
|
0574d1148952510b0e59b1f5cb1d9a673193be7f
|
[
"BSD-3-Clause"
] | 15
|
2015-09-26T20:06:57.000Z
|
2021-06-21T17:01:02.000Z
|
__all__ = ['kd_tree']
from math import sqrt
from heapq import heappush,heappop
class kd_tree:
    """kD-tree spatial data structure over points with attached values.

    Examples
    --------
    >>> points = [[0,0],[1,0],[0,1],[1,1]]
    >>> values = ['A','B','C','D']
    >>> kd = kd_tree(points, values)
    >>> kd
    kd_tree< 4 points in 2-dimensions >
    >>> kd.nearest([2,0])
    'B'
    >>> kd.nearest_n([2,0],2)
    ['B', 'D']
    >>> kd.in_sphere([0.1,0.2], 1.1)
    ['A', 'C', 'B']
    """

    class node:
        """A single tree node: a point, its value and the splitting axis."""

        def point_distance(self, point):
            """Euclidean distance between *point* and this node's point."""
            return sqrt(sum([(a - b)**2 for (a, b) in zip(point, self.point)]))

        def separator_distance(self, point):
            """Signed distance from *point* to this node's splitting plane."""
            return point[self.axis] - self.point[self.axis]

    def __repr__(self):
        # Bug fix: this method was previously indented inside the inner
        # ``node`` class, where ``num_points`` and ``k`` do not exist, so
        # the docstring example ``>>> kd`` could never work.
        return "kd_tree< %s points in %s-dimensions >" % (self.num_points, self.k)

    def __init__(self, points, values=None):
        """kD-Tree spatial data structure

        Parameters
        ----------
        points : array-like
            An N-by-K array of N point coordinates in K dimensions

        Optional Parameters
        -------------------
        values : array-like
            A sequence of N elements associated with the points.
            By default, the integers [0,1,...N-1] are used.

        Raises
        ------
        ValueError
            If ``points`` is empty, points have mixed dimensions, or
            ``points`` and ``values`` differ in length.
        """
        lengths = [len(p) for p in points]
        if not lengths:
            # Previously min()/max() raised an opaque ValueError here.
            raise ValueError('points must not be empty')
        min_dim, max_dim = min(lengths), max(lengths)
        if min_dim != max_dim:
            raise ValueError('points must all have the same dimension')
        if values is None:
            values = range(len(points))
        if len(points) != len(values):
            raise ValueError('points and values must have the same lengths')

        self.k = min_dim
        self.num_points = len(points)
        # Bug fix: zip() returns a one-shot iterator in Python 3 that is
        # always truthy, so ``if not pv_pairs`` in __build never fired and
        # the iterator could be consumed only once; materialize it.
        self.root = self.__build(list(zip(points, values)), depth=0)

    def __build(self, pv_pairs, depth):
        # ``pv_pairs`` is a list of (point, value) tuples for this subtree.
        if not pv_pairs:
            return None

        axis = depth % self.k  # cycle through the axes with depth
        pv_pairs = sorted(pv_pairs, key=lambda pv: pv[0][axis])
        mid = len(pv_pairs) // 2  # median along the current axis is the root

        node = self.node()
        node.axis = axis
        node.point, node.value = pv_pairs[mid]
        node.left_child = self.__build(pv_pairs[:mid], depth + 1)
        node.right_child = self.__build(pv_pairs[mid + 1:], depth + 1)
        return node

    def nearest(self, point, max_dist=float('inf')):
        """Return the value associated with the point nearest to *point*.

        Points farther than ``max_dist`` are ignored.  Returns None if no
        point lies within ``max_dist`` or the tree is empty.
        """
        found = self.nearest_n(point, n=1, max_dist=max_dist)
        if found:
            return found[0]
        return None

    def in_sphere(self, point, radius, max_points=None):
        """Return the values of all points within *radius* of *point*.

        Results are ordered nearest first; ``max_points`` optionally caps
        the number of values returned.
        """
        if max_points is None:
            max_points = float('inf')
        return self.nearest_n(point, n=max_points, max_dist=radius)

    def nearest_n(self, point, n, max_dist=float('inf')):
        """Return the values of the (up to) *n* nearest points to *point*.

        Points farther than ``max_dist`` are ignored; fewer than *n*
        values are returned when the tree holds fewer qualifying points.
        """
        heap = []
        self.__nearest_n(point, n, max_dist, self.root, heap)
        heap.sort()
        # Entries are (-distance, id, node); reversing yields nearest first.
        return [entry[2].value for entry in reversed(heap)]

    def __nearest_n(self, point, n, max_dist, current, heap):
        if current is None:
            return max_dist

        pt_dist = current.point_distance(point)       # distance to node's point
        sep_dist = current.separator_distance(point)  # signed distance to split plane

        if pt_dist < max_dist:
            # Bug fix: include id(current) so equal distances never fall
            # through to comparing node objects (unorderable -> TypeError).
            heappush(heap, (-pt_dist, id(current), current))
            if len(heap) > n:
                heappop(heap)
        if len(heap) == n:
            # Heap is full: tighten the search radius to the worst kept point.
            max_dist = min(-heap[0][0], max_dist)

        # Descend into the near side of the splitting plane first.
        if sep_dist < 0:
            max_dist = self.__nearest_n(point, n, max_dist, current.left_child, heap)
        else:
            max_dist = self.__nearest_n(point, n, max_dist, current.right_child, heap)

        if abs(sep_dist) < max_dist:
            # The far side may still contain closer points: explore it too.
            if sep_dist < 0:
                return self.__nearest_n(point, n, max_dist, current.right_child, heap)
            return self.__nearest_n(point, n, max_dist, current.left_child, heap)
        return max_dist
##def inorder(x):
## if x is not None:
## return inorder(x.left_child) + [x.value] + inorder(x.right_child)
## else:
## return []
| 30.339901
| 102
| 0.539211
|
__all__ = ['kd_tree']
from math import sqrt
from heapq import heappush,heappop
class kd_tree:
class node:
def point_distance(self,point):
return sqrt(sum([ (a - b)**2 for (a,b) in zip(point,self.point)]))
def separator_distance(self,point):
return point[self.axis] - self.point[self.axis]
def __repr__(self):
output = ""
return "kd_tree< %s points in %s-dimensions >"% (self.num_points,self.k)
def __init__(self, points, values=None):
lengths = [len(p) for p in points]
min_dim,max_dim = min(lengths),max(lengths)
if min_dim != max_dim:
raise ValueError('points must all have the same dimension')
if values is None:
values = range(len(points))
if len(points) != len(values):
raise ValueError('points and values must have the same lengths')
self.k = min_dim
self.num_points = len(points)
self.root = self.__build(zip(points,values),depth=0)
def __build(self, pv_pairs, depth):
if not pv_pairs:
return None
axis = depth % self.k
pv_pairs = sorted(pv_pairs, key=lambda x: x[0][axis])
mid = len(pv_pairs) // 2
node = self.node()
node.axis = axis
node.point = pv_pairs[mid][0]
node.value = pv_pairs[mid][1]
node.left_child = self.__build(pv_pairs[:mid], depth+1)
node.right_child = self.__build(pv_pairs[mid+1:], depth+1)
return node
def nearest(self, point, max_dist=float('inf')):
x = self.nearest_n(point,n=1,max_dist=max_dist)
if len(x) == 0:
return None
else:
return x[0]
def in_sphere(self, point, radius, max_points=None):
if max_points is None:
max_points = float('inf')
return self.nearest_n(point, n=max_points, max_dist=radius)
def nearest_n(self, point, n, max_dist=float('inf')):
heap = []
self.__nearest_n(point, n, max_dist, self.root, heap)
heap.sort()
return [ node.value for (neg_dist,node) in reversed(heap) ]
def __nearest_n(self,point,n,max_dist,current,heap):
if current is None:
return max_dist
pt_dist = current.point_distance(point)
sep_dist = current.separator_distance(point) #signed distance to this node's separating plane
if pt_dist < max_dist:
heappush(heap,(-pt_dist,current))
if len(heap) > n:
heappop(heap)
if len(heap) == n:
max_dist = min(-heap[0][0],max_dist)
if sep_dist < 0:
max_dist = self.__nearest_n(point,n,max_dist,current.left_child,heap)
else:
max_dist = self.__nearest_n(point,n,max_dist,current.right_child,heap)
if abs(sep_dist) < max_dist:
if sep_dist < 0:
return self.__nearest_n(point,n,max_dist,current.right_child,heap)
else:
return self.__nearest_n(point,n,max_dist,current.left_child,heap)
else:
return max_dist
| true
| true
|
f71a88666c1fd19fd83b8b1279071950abcd31d2
| 3,505
|
py
|
Python
|
model/config.py
|
yhl111/PCNN
|
2e0967aec962d55df1eb7d149a44b91c6c751a1a
|
[
"Apache-2.0"
] | 99
|
2018-05-19T03:59:47.000Z
|
2022-03-17T07:25:10.000Z
|
model/config.py
|
yhl111/PCNN
|
2e0967aec962d55df1eb7d149a44b91c6c751a1a
|
[
"Apache-2.0"
] | 10
|
2018-05-21T13:16:42.000Z
|
2022-03-26T06:06:51.000Z
|
model/config.py
|
yhl111/PCNN
|
2e0967aec962d55df1eb7d149a44b91c6c751a1a
|
[
"Apache-2.0"
] | 38
|
2018-05-19T10:20:57.000Z
|
2022-01-25T12:37:08.000Z
|
import os
import numpy as np
from .general_utils import get_logger
from .data_utils import load_vocab, get_processing_word
class Config():
    """Hyper-parameters and file locations for the PCNN model.

    Class attributes hold the static configuration; instance state
    (vocabularies, embeddings, str->id mappings) is filled in by
    :meth:`load`.
    """

    def __init__(self, load=True):
        """Create the output directory and logger, optionally load vocabs.

        Args:
            load: (bool) if True, load embeddings into np array via
                :meth:`load`, else defer until ``load()`` is called.
        """
        # directory for training outputs
        if not os.path.exists(self.dir_output):
            os.makedirs(self.dir_output)

        # create instance of logger
        self.logger = get_logger(self.path_log)

        # load if requested (default)
        if load:
            self.load()

    def load(self):
        """Load vocabulary, processing functions and embeddings.

        Supposes that build_data.py has been run successfully and that
        the corresponding files have been created (vocab and trimmed
        vectors).
        """
        # 1. vocabulary
        self.vocab_words = load_vocab(self.filename_words)
        self.vocab_relations = load_vocab(self.filename_relation)
        self.nwords = len(self.vocab_words)
        self.nrelations = len(self.vocab_relations)

        # 2. get processing functions that map str -> id
        self.processing_word = get_processing_word(self.vocab_words, UNK="<UNK>")
        self.processing_relation = get_processing_word(self.vocab_relations, UNK='NA')

        # 3. get pre-trained embeddings (None when use_pretrained is False)
        self.embeddings = (np.load(self.filename_embeddings)['vec']
                           if self.use_pretrained else None)

    # general config
    dir_output = "./results/test/"
    graph_output = "./graph"
    dir_model = dir_output + "model.weights/"  # directory to save models
    path_log = dir_output + "log.txt"
    restore_model = "./results/test/model.weights/early_best.ckpt"

    # embeddings
    dim_word = 50
    dim_pos = 5
    dim = dim_word + 2*dim_pos

    # position range in sentence
    nposition = 500

    # convolution
    window_size = 3
    feature_maps = 230

    # dataset files (origin = raw, processed = output of build_data.py)
    filename_train_origin = "./data/origin_data/train.txt"
    filename_train = "./data/processed_data/train.txt"
    filename_train_wrong = "./data/processed_data/wrong_parse_train.txt"
    filename_dev = "./data/processed_data/test.txt"
    filename_test_origin = "./data/origin_data/test.txt"
    filename_test = "./data/processed_data/test.txt"
    filename_test_wrong = "./data/processed_data/wrong_parse_test.txt"

    # vocab (created from dataset with build_data.py)
    filename_words = "./data/processed_data/words.txt"
    filename_embeddings = "./data/processed_data/vectors.npz"
    filename_relation_origin = "./data/origin_data/relation2id.txt"
    filename_relation = "./data/processed_data/relation.txt"

    # word vectors file
    filename_wordvectors = "./data/origin_data/vec.txt"
    use_pretrained = True

    MIL = False      # if True, using multi-instances learning
    shuffle = False  # if True, shuffle train dataset
    # Bug fix: ``max_iter`` was previously declared twice with the same
    # value; a single declaration is kept.
    max_iter = None  # if not None, max number of examples in Dataset

    # training
    train_word_embeddings = False
    train_pos_embeddings = True
    nepochs = 15
    dropout = 0.5
    batch_size = 50
    lr_method = "adadelta"
    lr = 0.001
    lr_decay = 0.9
    clip = -1  # if negative, no clipping
    nepoch_no_imprv = 3
    early_stop = True
    max_train_step = 100000
| 31.294643
| 87
| 0.653067
|
import os
import numpy as np
from .general_utils import get_logger
from .data_utils import load_vocab, get_processing_word
class Config():
def __init__(self, load=True):
if not os.path.exists(self.dir_output):
os.makedirs(self.dir_output)
self.logger = get_logger(self.path_log)
if load:
self.load()
def load(self):
self.vocab_words = load_vocab(self.filename_words)
self.vocab_relations = load_vocab(self.filename_relation)
self.nwords = len(self.vocab_words)
self.nrelations = len(self.vocab_relations)
self.processing_word = get_processing_word(self.vocab_words, UNK = "<UNK>")
self.processing_relation = get_processing_word(self.vocab_relations, UNK='NA')
self.embeddings = (np.load(self.filename_embeddings)['vec']
if self.use_pretrained else None)
dir_output = "./results/test/"
graph_output = "./graph"
dir_model = dir_output + "model.weights/"
path_log = dir_output + "log.txt"
restore_model = "./results/test/model.weights/early_best.ckpt"
dim_word = 50
dim_pos = 5
dim = dim_word + 2*dim_pos
nposition = 500
window_size = 3
feature_maps = 230
filename_train_origin = "./data/origin_data/train.txt"
filename_train = "./data/processed_data/train.txt"
filename_train_wrong = "./data/processed_data/wrong_parse_train.txt"
filename_dev = "./data/processed_data/test.txt"
filename_test_origin = "./data/origin_data/test.txt"
filename_test = "./data/processed_data/test.txt"
filename_test_wrong = "./data/processed_data/wrong_parse_test.txt"
max_iter = None
filename_words = "./data/processed_data/words.txt"
filename_embeddings = "./data/processed_data/vectors.npz"
filename_relation_origin = "./data/origin_data/relation2id.txt"
filename_relation = "./data/processed_data/relation.txt"
filename_wordvectors = "./data/origin_data/vec.txt"
use_pretrained = True
MIL = False
shuffle = False
max_iter = None
train_word_embeddings = False
train_pos_embeddings = True
nepochs = 15
dropout = 0.5
batch_size = 50
lr_method = "adadelta"
lr = 0.001
lr_decay = 0.9
clip = -1
nepoch_no_imprv = 3
early_stop = True
max_train_step = 100000
| true
| true
|
f71a89aa8b0e3fea02389bae72a1e0206e098bc4
| 4,846
|
py
|
Python
|
tests/algorithms/test_tracking_smoothing.py
|
thompson318/scikit-surgerycore
|
22867073a5a3e87def68b4a76e70fe54d085be32
|
[
"BSD-3-Clause"
] | 3
|
2020-09-26T18:19:49.000Z
|
2021-09-19T08:43:00.000Z
|
tests/algorithms/test_tracking_smoothing.py
|
thompson318/scikit-surgerycore
|
22867073a5a3e87def68b4a76e70fe54d085be32
|
[
"BSD-3-Clause"
] | 45
|
2020-04-27T09:12:28.000Z
|
2020-04-27T09:50:49.000Z
|
tests/algorithms/test_tracking_smoothing.py
|
SciKit-Surgery/scikit-surgerycore
|
22867073a5a3e87def68b4a76e70fe54d085be32
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Tests for BARD pointer module"""
import math
import numpy as np
import pytest
import sksurgerycore.algorithms.tracking_smoothing as reg
def test_rvec_to_quaterion():
    """A 90 degree rotation about the x axis converts to the expected quaternion."""
    rvec = np.array([math.pi / 2.0, 0.0, 0.0])
    quaternion = reg._rvec_to_quaternion(rvec)  # pylint: disable=protected-access
    half_angle = math.pi / 4.0
    expected = (math.cos(half_angle), math.sin(half_angle), 0.0, 0.0)
    for got, wanted in zip(quaternion, expected):
        assert got == wanted
def test_quaterion_to_matrix():
    """The quaternion of a 90 degree rotation about y yields the right matrix."""
    half_angle = math.pi / 4.0
    quaternion = np.array([math.cos(half_angle), 0.0,
                           math.sin(half_angle), 0.0])
    rot_mat = reg.quaternion_to_matrix(quaternion)
    expected = np.array([[0.0, 0.0, 1.0],
                         [0.0, 1.0, 0.0],
                         [-1.0, 0.0, 0.0]])
    assert np.allclose(rot_mat, expected, rtol=1e-05, atol=1e-10)
def test_rolling_mean_no_buffer():
    """Constructing a rolling mean with a zero-sized buffer is an error."""
    with pytest.raises(ValueError):
        reg.RollingMean(vector_size=3, buffer_size=0)
def test_rolling_mean_returns_nan():
    """An empty rolling-mean buffer yields NaN for every component.

    Bug fix: the original asserted ``np.isnan(...).all`` without calling
    it, so the assert tested the truthiness of a bound method and could
    never fail.
    """
    mean_buffer = reg.RollingMean(vector_size=3, buffer_size=5)
    assert np.isnan(mean_buffer.getmean()).all()
def test_rolling_mean_single_value():
    """After a single push the rolling mean equals that vector."""
    vector = [5.4, 1.2, 3.4]
    mean_buffer = reg.RollingMean(vector_size=3, buffer_size=5)
    mean_buffer.pop(vector)
    assert np.allclose(vector, mean_buffer.getmean(), rtol=1e-05, atol=1e-10)
def test_rolling_mean_four_values():
    """The mean tracks only the last ``buffer_size`` (3) vectors pushed."""
    samples = ([5.4, 1.2, 3.4],
               [7.4, -1.2, -1.4],
               [-2.6, 4.2, 2.6],
               [9.0, 3.3, 3.6])
    mean_buffer = reg.RollingMean(vector_size=3, buffer_size=3)
    for vector in samples[:3]:
        mean_buffer.pop(vector)
    # Mean of the first three samples.
    assert np.allclose([3.4, 1.4, 1.533333], mean_buffer.getmean(), rtol=1e-05,
                       atol=1e-6)
    # Pushing a fourth sample evicts the oldest one.
    mean_buffer.pop(samples[3])
    assert np.allclose([4.6, 2.1, 1.6], mean_buffer.getmean(), rtol=1e-05,
                       atol=1e-10)
def test_rolling_rotation_no_buffer():
    """Constructing a rolling rotation mean with a zero-sized buffer is an error."""
    with pytest.raises(ValueError):
        reg.RollingMeanRotation(buffer_size=0)
def test_rolling_rot_returns_nan():
    """An empty rolling rotation buffer yields NaN components.

    Bug fix: the original asserted ``np.isnan(...).all`` without calling
    it, so the assert tested the truthiness of a bound method and could
    never fail.
    """
    mean_buffer = reg.RollingMeanRotation(buffer_size=5)
    assert np.isnan(mean_buffer.getmean()).all()
def test_rolling_rot_single_value():
    """A single rotation vector comes back as its quaternion."""
    half_angle = math.pi / 4.0
    expected = np.array([math.cos(half_angle), 0.0,
                         -math.sin(half_angle), 0.0])
    mean_buffer = reg.RollingMeanRotation(buffer_size=5)
    mean_buffer.pop(np.array([0.0, -math.pi / 2.0, 0.0]))
    assert np.allclose(expected, mean_buffer.getmean(),
                       rtol=1e-05, atol=1e-10)
def test_r_rot_sgl_value_sgl_buff():
    """With buffer_size 1 the mean is exactly the last rotation pushed."""
    half_angle = math.pi / 4.0
    expected = np.array([math.cos(half_angle), 0.0, 0.0,
                         -math.sin(half_angle)])
    mean_buffer = reg.RollingMeanRotation(buffer_size=1)
    mean_buffer.pop(np.array([0.0, 0.0, -math.pi / 2.0]))
    assert np.allclose(expected, mean_buffer.getmean(),
                       rtol=1e-05, atol=1e-10)
def test_rolling_rot_four_values():
    """Rolling rotation mean over a window of 3, including a NaN sample.

    Bug fix: the ``np.NaN`` alias was removed in NumPy 2.0; ``np.nan``
    is the supported spelling on all NumPy versions.
    """
    rvec0 = [0.0, 0.0, 0.0]
    rvec1 = [np.nan, np.nan, np.nan]
    rvec2 = [0.0, 0.0, -math.pi/2.0]
    rvec3 = [0.0, math.pi/3.0, 0.0]

    expected_answer0 = reg._rvec_to_quaternion([0.0, 0.0, -math.pi/4.0])  # pylint: disable=protected-access
    # The next one is more of a regression test; the value has not been
    # independently calculated.
    expected_answer1 = [-0.87602709, 0.0, -0.27843404, 0.39376519]

    mean_buffer = reg.RollingMeanRotation(buffer_size=3)
    mean_buffer.pop(rvec0)
    mean_buffer.pop(rvec1)
    mean_buffer.pop(rvec2)
    assert np.allclose(expected_answer0, mean_buffer.getmean(), rtol=1e-05,
                       atol=1e-6)

    mean_buffer.pop(rvec3)
    assert np.allclose(expected_answer1, mean_buffer.getmean(), rtol=1e-05,
                       atol=1e-10)
| 27.691429
| 107
| 0.636401
|
import math
import numpy as np
import pytest
import sksurgerycore.algorithms.tracking_smoothing as reg
def test_rvec_to_quaterion():
rvec = np.array([math.pi/2.0, 0.0, 0.0])
quaternion = reg._rvec_to_quaternion(rvec)
assert quaternion[0] == math.cos(math.pi/4.0)
assert quaternion[1] == 1.0 * math.sin(math.pi/4.0)
assert quaternion[2] == 0.0
assert quaternion[3] == 0.0
def test_quaterion_to_matrix():
quaternion = np.array([math.cos(math.pi/4.0), 0.0,
1.0 * math.sin(math.pi/4.0), 0.0])
rot_mat = reg.quaternion_to_matrix(quaternion)
rot_mat1 = np.eye(3, dtype=np.float64)
rot_mat1[0, 0] = 0.0
rot_mat1[0, 2] = 1.0
rot_mat1[2, 0] = -1.0
rot_mat1[2, 2] = 0.0
assert np.allclose(rot_mat, rot_mat1, rtol=1e-05, atol=1e-10)
def test_rolling_mean_no_buffer():
with pytest.raises(ValueError):
_ = reg.RollingMean(vector_size=3, buffer_size=0)
def test_rolling_mean_returns_nan():
mean_buffer = reg.RollingMean(vector_size=3, buffer_size=5)
assert np.isnan(mean_buffer.getmean()).all
def test_rolling_mean_single_value():
vector = [5.4, 1.2, 3.4]
mean_buffer = reg.RollingMean(vector_size=3, buffer_size=5)
mean_buffer.pop(vector)
assert np.allclose(vector, mean_buffer.getmean(), rtol=1e-05, atol=1e-10)
def test_rolling_mean_four_values():
vector0 = [5.4, 1.2, 3.4]
vector1 = [7.4, -1.2, -1.4]
vector2 = [-2.6, 4.2, 2.6]
vector3 = [9.0, 3.3, 3.6]
expected_answer0 = [3.4, 1.4, 1.533333]
expected_answer1 = [4.6, 2.1, 1.6]
mean_buffer = reg.RollingMean(vector_size=3, buffer_size=3)
mean_buffer.pop(vector0)
mean_buffer.pop(vector1)
mean_buffer.pop(vector2)
assert np.allclose(expected_answer0, mean_buffer.getmean(), rtol=1e-05,
atol=1e-6)
mean_buffer.pop(vector3)
assert np.allclose(expected_answer1, mean_buffer.getmean(), rtol=1e-05,
atol=1e-10)
def test_rolling_rotation_no_buffer():
with pytest.raises(ValueError):
_ = reg.RollingMeanRotation(buffer_size=0)
def test_rolling_rot_returns_nan():
mean_buffer = reg.RollingMeanRotation(buffer_size=5)
assert np.isnan(mean_buffer.getmean()).all
def test_rolling_rot_single_value():
rvec = np.array([0.0, -math.pi/2.0, 0.0])
expected_quaternion = np.array([math.cos(math.pi/4.0), 0.0,
-1.0 * math.sin(math.pi/4.0), 0.0])
mean_buffer = reg.RollingMeanRotation(buffer_size=5)
mean_buffer.pop(rvec)
assert np.allclose(expected_quaternion, mean_buffer.getmean(),
rtol=1e-05, atol=1e-10)
def test_r_rot_sgl_value_sgl_buff():
rvec = np.array([0.0, 0.0, -math.pi/2.0])
expected_quaternion = np.array([math.cos(math.pi/4.0), 0.0, 0.0,
-1.0 * math.sin(math.pi/4.0)])
mean_buffer = reg.RollingMeanRotation(buffer_size=1)
mean_buffer.pop(rvec)
assert np.allclose(expected_quaternion, mean_buffer.getmean(),
rtol=1e-05, atol=1e-10)
def test_rolling_rot_four_values():
rvec0 = [0.0, 0.0, 0.0]
rvec1 = [np.NaN, np.NaN, np.NaN]
rvec2 = [0.0, 0.0, -math.pi/2.0]
rvec3 = [0.0, math.pi/3.0, 0.0]
expected_answer0 = reg._rvec_to_quaternion([0.0, 0.0, -math.pi/4.0])
#calculated this answer.
expected_answer1 = [-0.87602709, 0.0, -0.27843404, 0.39376519]
mean_buffer = reg.RollingMeanRotation(buffer_size=3)
mean_buffer.pop(rvec0)
mean_buffer.pop(rvec1)
mean_buffer.pop(rvec2)
assert np.allclose(expected_answer0, mean_buffer.getmean(), rtol=1e-05,
atol=1e-6)
mean_buffer.pop(rvec3)
assert np.allclose(expected_answer1, mean_buffer.getmean(), rtol=1e-05,
atol=1e-10)
| true
| true
|
f71a8a1a3d48005a0ee4af6cf7d83fd52dcee595
| 1,223
|
py
|
Python
|
web/playlists/migrations/0001__initial.py
|
vtalks/vtalks.net
|
80fb19ff9684e0854c6abe5f0eef73e80ec326a6
|
[
"Apache-2.0"
] | 1
|
2017-11-28T03:17:23.000Z
|
2017-11-28T03:17:23.000Z
|
web/playlists/migrations/0001__initial.py
|
vtalks/vtalks.net
|
80fb19ff9684e0854c6abe5f0eef73e80ec326a6
|
[
"Apache-2.0"
] | 56
|
2018-01-14T18:03:03.000Z
|
2018-06-25T17:59:02.000Z
|
web/playlists/migrations/0001__initial.py
|
vtalks/vtalks.net
|
80fb19ff9684e0854c6abe5f0eef73e80ec326a6
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.0.2 on 2018-02-23 08:56
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial migration: creates the ``Playlists`` model.

    Auto-generated by Django 2.0.2 (see the header comment); avoid
    editing by hand.
    """

    # First migration of the app: no predecessors.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            # NOTE(review): the model name is plural ('Playlists'); Django
            # convention is a singular model name — confirm before renaming,
            # since a rename requires a follow-up migration.
            name='Playlists',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(default=None, max_length=100, unique=True)),
                ('title', models.CharField(default=None, max_length=200)),
                ('slug', models.SlugField(default=None, max_length=200, unique=True)),
                ('description', models.TextField(blank=True)),
                ('created', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date created')),
                ('updated', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date updated')),
            ],
            options={
                'verbose_name': 'Playlists',
                'verbose_name_plural': 'Playlists',
                'ordering': ['-created'],
                'get_latest_by': ['-created'],
            },
        ),
    ]
| 35.970588
| 114
| 0.578087
|
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Playlists',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('code', models.CharField(default=None, max_length=100, unique=True)),
('title', models.CharField(default=None, max_length=200)),
('slug', models.SlugField(default=None, max_length=200, unique=True)),
('description', models.TextField(blank=True)),
('created', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date created')),
('updated', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date updated')),
],
options={
'verbose_name': 'Playlists',
'verbose_name_plural': 'Playlists',
'ordering': ['-created'],
'get_latest_by': ['-created'],
},
),
]
| true
| true
|
f71a8adea750a9a9bae32bcd140b26a2c336c2a6
| 3,692
|
py
|
Python
|
trees-and-graphs/minimal-tree.py
|
georgeRenard/CrackingTheCodingInterview
|
ba9866e8e7a8c9942464d76b13af08ea6b15f3f9
|
[
"MIT"
] | null | null | null |
trees-and-graphs/minimal-tree.py
|
georgeRenard/CrackingTheCodingInterview
|
ba9866e8e7a8c9942464d76b13af08ea6b15f3f9
|
[
"MIT"
] | null | null | null |
trees-and-graphs/minimal-tree.py
|
georgeRenard/CrackingTheCodingInterview
|
ba9866e8e7a8c9942464d76b13af08ea6b15f3f9
|
[
"MIT"
] | null | null | null |
import sys
def problem():
    """Problem statement.

    Minimal Tree: Given a sorted (increasing order) array with unique
    integer elements, write an algorithm to create a binary search tree
    with minimal height.
    """
class BST:
    """Binary search tree, buildable with minimal height from a sorted array."""

    def __init__(self):
        self.count = 0    # node count (not maintained by the builder)
        self.root = None  # BSTNode or None

    @staticmethod
    def build_from_sorted_array(arr):
        """Build a minimal-height BST from a sorted (ascending) array."""
        bst = BST()
        bst.root = BST.__build_from_sorted_array(arr)
        return bst

    @staticmethod
    def __build_from_sorted_array(arr):
        # The median becomes the subtree root so both halves stay balanced.
        if not arr:
            return None
        if len(arr) == 1:
            return BST.BSTNode(arr[0])
        median_index = len(arr) // 2
        root = BST.BSTNode(arr[median_index])
        root.left = BST.__build_from_sorted_array(arr[:median_index])
        root.right = BST.__build_from_sorted_array(arr[median_index + 1:])
        return root

    def is_BST(self):
        """Return True if the tree satisfies the BST invariant everywhere.

        Bug fix: the original compared each node only with its immediate
        children, accepting trees where a deeper descendant violates the
        ordering; this version checks full subtree bounds.
        """
        if self.root is None:
            raise Exception("You have got yourself an empty tree")
        return self.__is_BST(self.root, float("-inf"), float("inf"))

    def __is_BST(self, current, low, high):
        # Every value in the subtree rooted at ``current`` must lie in the
        # inclusive range [low, high] (duplicates allowed, as before).
        if current is None:
            return True
        if not low <= current.value <= high:
            return False
        return (self.__is_BST(current.left, low, current.value) and
                self.__is_BST(current.right, current.value, high))

    def __update_height(self, current):
        """Recompute heights bottom-up; return the height of ``current``.

        Bug fix: the height of a node is 1 + max(child heights), not
        1 + left + right (which counts nodes), and the old unbalance
        check compared stale child heights and raised an exception.
        """
        if current is None:
            return 0
        left = self.__update_height(current.left)
        right = self.__update_height(current.right)
        current.height = 1 + max(left, right)
        return current.height

    def is_balanced(self):
        """Return True if every node's subtree heights differ by at most 1.

        Bug fix: the original raised for unbalanced trees instead of
        returning False, inspected only the root, and crashed when the
        root had a missing child.
        """
        if self.root is None:
            raise Exception("An empty tree can never be balanced")
        self.__update_height(self.root)
        return self.__is_balanced(self.root)

    def __is_balanced(self, current):
        # Recursively check the balance factor at every node.
        if current is None:
            return True
        left = current.left.height if current.left else 0
        right = current.right.height if current.right else 0
        if abs(left - right) > 1:
            return False
        return self.__is_balanced(current.left) and self.__is_balanced(current.right)

    def dump(self):
        """Print an indented, depth-first rendering of the tree."""
        if self.root is None:
            raise Exception("Cannot dump an empty tree")
        self.__dump(self.root)

    def __dump(self, current, indent=0):
        if current is None:
            return
        print("{0}{1}".format(" " * indent, current.value))
        self.__dump(current.left, indent + 2)
        self.__dump(current.right, indent + 2)

    def __repr__(self):
        return self.root.__repr__()

    class BSTNode:
        """A single node: value plus left/right children and cached height."""

        def __init__(self, value):
            self.value = value
            self.left = None
            self.right = None
            self.height = 0  # recomputed by BST.is_balanced()
if __name__ == '__main__':
args = sys.argv[1:]
arr = [1, 2, 4, 12, 35, 41, 72, 102, 562]
tree = BST.build_from_sorted_array(arr)
tree.dump()
print("The tree {0} BST".format("is" if tree.is_BST() else "is not"))
print("The tree {0} balanced".format("is" if tree.is_balanced() else "is not"))
| 22.512195
| 170
| 0.581798
|
import sys
def problem():
pass
class BST:
def __init__(self):
self.count = 0
self.root = None
@staticmethod
def build_from_sorted_array(arr):
root = BST.__build_from_sorted_array(arr)
bst = BST()
bst.root = root
return bst
@staticmethod
def __build_from_sorted_array(arr):
size = len(arr)
if size == 1:
return BST.BSTNode(arr[0])
if size == 0:
return
median_index = size // 2
left = arr[0: median_index]
right = arr[median_index + 1 : ]
root_val = arr[median_index]
root = BST.BSTNode(root_val)
left_subtree_root = BST.__build_from_sorted_array(left)
right_subtree_root = BST.__build_from_sorted_array(right)
root.left = left_subtree_root
root.right = right_subtree_root
return root
def is_BST(self):
if self.root is None:
raise Exception("You have got yourself an empty tree")
return self.__is_BST(self.root)
def __is_BST(self, current):
if current is None:
return True
res = True
if current.left is not None:
res = res and current.value >= current.left.value
if current.right is not None:
res = res and current.value <= current.right.value
res = res and self.__is_BST(current.left)
res = res and self.__is_BST(current.right)
return res
def __update_height(self, current):
if current is None:
return 0
left = 0 if current.left is None else current.left.height
right = 0 if current.right is None else current.right.height
if abs(left - right) > 1:
raise Exception("The tree is unbalanced")
current.height = 1 + self.__update_height(current.left) + self.__update_height(current.right)
return current.height
def is_balanced(self):
if self.root is None:
raise Exception("An empty tree can never be balanced")
self.__update_height(self.root)
return self.__is_balanced(self.root)
def __is_balanced(self, current):
return abs(self.root.left.height - self.root.right.height) <= 1
def dump(self):
if self.root is None:
raise Exception("Cannot dump an empty tree")
self.__dump(self.root)
def __dump(self, current, indent = 0):
if current is None:
return
print("{0}{1}".format(" " * indent, current.value))
self.__dump(current.left, indent + 2)
self.__dump(current.right, indent + 2)
def __repr__(self):
return self.root.__repr__()
class BSTNode:
def __init__(self, value):
self.value = value
self.left = None
self.right = None
self.height = 0
if __name__ == '__main__':
args = sys.argv[1:]
arr = [1, 2, 4, 12, 35, 41, 72, 102, 562]
tree = BST.build_from_sorted_array(arr)
tree.dump()
print("The tree {0} BST".format("is" if tree.is_BST() else "is not"))
print("The tree {0} balanced".format("is" if tree.is_balanced() else "is not"))
| true
| true
|
f71a8bd251c72ed72e4f3105b3a56a3e0dee9bf4
| 14,903
|
py
|
Python
|
xero_python/accounting/models/repeating_invoice.py
|
sidtrengove/xero-python
|
52f1ec2232def4c8e773e8e5fd6f766c059517b2
|
[
"MIT"
] | 1
|
2020-06-05T15:03:15.000Z
|
2020-06-05T15:03:15.000Z
|
xero_python/accounting/models/repeating_invoice.py
|
sidtrengove/xero-python
|
52f1ec2232def4c8e773e8e5fd6f766c059517b2
|
[
"MIT"
] | null | null | null |
xero_python/accounting/models/repeating_invoice.py
|
sidtrengove/xero-python
|
52f1ec2232def4c8e773e8e5fd6f766c059517b2
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Accounting API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: 2.1.6
Contact: api@xero.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
from xero_python.models import BaseModel
class RepeatingInvoice(BaseModel):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
"type": "str",
"contact": "Contact",
"schedule": "Schedule",
"line_items": "list[LineItem]",
"line_amount_types": "LineAmountTypes",
"reference": "str",
"branding_theme_id": "str",
"currency_code": "CurrencyCode",
"status": "str",
"sub_total": "float",
"total_tax": "float",
"total": "float",
"repeating_invoice_id": "str",
"id": "str",
"has_attachments": "bool",
"attachments": "list[Attachment]",
}
attribute_map = {
"type": "Type",
"contact": "Contact",
"schedule": "Schedule",
"line_items": "LineItems",
"line_amount_types": "LineAmountTypes",
"reference": "Reference",
"branding_theme_id": "BrandingThemeID",
"currency_code": "CurrencyCode",
"status": "Status",
"sub_total": "SubTotal",
"total_tax": "TotalTax",
"total": "Total",
"repeating_invoice_id": "RepeatingInvoiceID",
"id": "ID",
"has_attachments": "HasAttachments",
"attachments": "Attachments",
}
def __init__(
self,
type=None,
contact=None,
schedule=None,
line_items=None,
line_amount_types=None,
reference=None,
branding_theme_id=None,
currency_code=None,
status=None,
sub_total=None,
total_tax=None,
total=None,
repeating_invoice_id=None,
id=None,
has_attachments=False,
attachments=None,
): # noqa: E501
"""RepeatingInvoice - a model defined in OpenAPI""" # noqa: E501
self._type = None
self._contact = None
self._schedule = None
self._line_items = None
self._line_amount_types = None
self._reference = None
self._branding_theme_id = None
self._currency_code = None
self._status = None
self._sub_total = None
self._total_tax = None
self._total = None
self._repeating_invoice_id = None
self._id = None
self._has_attachments = None
self._attachments = None
self.discriminator = None
if type is not None:
self.type = type
if contact is not None:
self.contact = contact
if schedule is not None:
self.schedule = schedule
if line_items is not None:
self.line_items = line_items
if line_amount_types is not None:
self.line_amount_types = line_amount_types
if reference is not None:
self.reference = reference
if branding_theme_id is not None:
self.branding_theme_id = branding_theme_id
if currency_code is not None:
self.currency_code = currency_code
if status is not None:
self.status = status
if sub_total is not None:
self.sub_total = sub_total
if total_tax is not None:
self.total_tax = total_tax
if total is not None:
self.total = total
if repeating_invoice_id is not None:
self.repeating_invoice_id = repeating_invoice_id
if id is not None:
self.id = id
if has_attachments is not None:
self.has_attachments = has_attachments
if attachments is not None:
self.attachments = attachments
@property
def type(self):
"""Gets the type of this RepeatingInvoice. # noqa: E501
See Invoice Types # noqa: E501
:return: The type of this RepeatingInvoice. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this RepeatingInvoice.
See Invoice Types # noqa: E501
:param type: The type of this RepeatingInvoice. # noqa: E501
:type: str
"""
allowed_values = ["ACCPAY", "ACCREC"] # noqa: E501
if type not in allowed_values:
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}".format( # noqa: E501
type, allowed_values
)
)
self._type = type
@property
def contact(self):
"""Gets the contact of this RepeatingInvoice. # noqa: E501
:return: The contact of this RepeatingInvoice. # noqa: E501
:rtype: Contact
"""
return self._contact
@contact.setter
def contact(self, contact):
"""Sets the contact of this RepeatingInvoice.
:param contact: The contact of this RepeatingInvoice. # noqa: E501
:type: Contact
"""
self._contact = contact
@property
def schedule(self):
"""Gets the schedule of this RepeatingInvoice. # noqa: E501
:return: The schedule of this RepeatingInvoice. # noqa: E501
:rtype: Schedule
"""
return self._schedule
@schedule.setter
def schedule(self, schedule):
"""Sets the schedule of this RepeatingInvoice.
:param schedule: The schedule of this RepeatingInvoice. # noqa: E501
:type: Schedule
"""
self._schedule = schedule
@property
def line_items(self):
"""Gets the line_items of this RepeatingInvoice. # noqa: E501
See LineItems # noqa: E501
:return: The line_items of this RepeatingInvoice. # noqa: E501
:rtype: list[LineItem]
"""
return self._line_items
@line_items.setter
def line_items(self, line_items):
"""Sets the line_items of this RepeatingInvoice.
See LineItems # noqa: E501
:param line_items: The line_items of this RepeatingInvoice. # noqa: E501
:type: list[LineItem]
"""
self._line_items = line_items
@property
def line_amount_types(self):
"""Gets the line_amount_types of this RepeatingInvoice. # noqa: E501
:return: The line_amount_types of this RepeatingInvoice. # noqa: E501
:rtype: LineAmountTypes
"""
return self._line_amount_types
@line_amount_types.setter
def line_amount_types(self, line_amount_types):
"""Sets the line_amount_types of this RepeatingInvoice.
:param line_amount_types: The line_amount_types of this RepeatingInvoice. # noqa: E501
:type: LineAmountTypes
"""
self._line_amount_types = line_amount_types
@property
def reference(self):
"""Gets the reference of this RepeatingInvoice. # noqa: E501
ACCREC only – additional reference number # noqa: E501
:return: The reference of this RepeatingInvoice. # noqa: E501
:rtype: str
"""
return self._reference
@reference.setter
def reference(self, reference):
"""Sets the reference of this RepeatingInvoice.
ACCREC only – additional reference number # noqa: E501
:param reference: The reference of this RepeatingInvoice. # noqa: E501
:type: str
"""
self._reference = reference
@property
def branding_theme_id(self):
"""Gets the branding_theme_id of this RepeatingInvoice. # noqa: E501
See BrandingThemes # noqa: E501
:return: The branding_theme_id of this RepeatingInvoice. # noqa: E501
:rtype: str
"""
return self._branding_theme_id
@branding_theme_id.setter
def branding_theme_id(self, branding_theme_id):
"""Sets the branding_theme_id of this RepeatingInvoice.
See BrandingThemes # noqa: E501
:param branding_theme_id: The branding_theme_id of this RepeatingInvoice. # noqa: E501
:type: str
"""
self._branding_theme_id = branding_theme_id
@property
def currency_code(self):
"""Gets the currency_code of this RepeatingInvoice. # noqa: E501
:return: The currency_code of this RepeatingInvoice. # noqa: E501
:rtype: CurrencyCode
"""
return self._currency_code
@currency_code.setter
def currency_code(self, currency_code):
"""Sets the currency_code of this RepeatingInvoice.
:param currency_code: The currency_code of this RepeatingInvoice. # noqa: E501
:type: CurrencyCode
"""
self._currency_code = currency_code
@property
def status(self):
"""Gets the status of this RepeatingInvoice. # noqa: E501
One of the following - DRAFT or AUTHORISED – See Invoice Status Codes # noqa: E501
:return: The status of this RepeatingInvoice. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this RepeatingInvoice.
One of the following - DRAFT or AUTHORISED – See Invoice Status Codes # noqa: E501
:param status: The status of this RepeatingInvoice. # noqa: E501
:type: str
"""
allowed_values = ["DRAFT", "AUTHORISED", "DELETED"] # noqa: E501
if status not in allowed_values:
raise ValueError(
"Invalid value for `status` ({0}), must be one of {1}".format( # noqa: E501
status, allowed_values
)
)
self._status = status
@property
def sub_total(self):
"""Gets the sub_total of this RepeatingInvoice. # noqa: E501
Total of invoice excluding taxes # noqa: E501
:return: The sub_total of this RepeatingInvoice. # noqa: E501
:rtype: float
"""
return self._sub_total
@sub_total.setter
def sub_total(self, sub_total):
"""Sets the sub_total of this RepeatingInvoice.
Total of invoice excluding taxes # noqa: E501
:param sub_total: The sub_total of this RepeatingInvoice. # noqa: E501
:type: float
"""
self._sub_total = sub_total
@property
def total_tax(self):
"""Gets the total_tax of this RepeatingInvoice. # noqa: E501
Total tax on invoice # noqa: E501
:return: The total_tax of this RepeatingInvoice. # noqa: E501
:rtype: float
"""
return self._total_tax
@total_tax.setter
def total_tax(self, total_tax):
"""Sets the total_tax of this RepeatingInvoice.
Total tax on invoice # noqa: E501
:param total_tax: The total_tax of this RepeatingInvoice. # noqa: E501
:type: float
"""
self._total_tax = total_tax
@property
def total(self):
"""Gets the total of this RepeatingInvoice. # noqa: E501
Total of Invoice tax inclusive (i.e. SubTotal + TotalTax) # noqa: E501
:return: The total of this RepeatingInvoice. # noqa: E501
:rtype: float
"""
return self._total
@total.setter
def total(self, total):
"""Sets the total of this RepeatingInvoice.
Total of Invoice tax inclusive (i.e. SubTotal + TotalTax) # noqa: E501
:param total: The total of this RepeatingInvoice. # noqa: E501
:type: float
"""
self._total = total
@property
def repeating_invoice_id(self):
"""Gets the repeating_invoice_id of this RepeatingInvoice. # noqa: E501
Xero generated unique identifier for repeating invoice template # noqa: E501
:return: The repeating_invoice_id of this RepeatingInvoice. # noqa: E501
:rtype: str
"""
return self._repeating_invoice_id
@repeating_invoice_id.setter
def repeating_invoice_id(self, repeating_invoice_id):
"""Sets the repeating_invoice_id of this RepeatingInvoice.
Xero generated unique identifier for repeating invoice template # noqa: E501
:param repeating_invoice_id: The repeating_invoice_id of this RepeatingInvoice. # noqa: E501
:type: str
"""
self._repeating_invoice_id = repeating_invoice_id
@property
def id(self):
"""Gets the id of this RepeatingInvoice. # noqa: E501
Xero generated unique identifier for repeating invoice template # noqa: E501
:return: The id of this RepeatingInvoice. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this RepeatingInvoice.
Xero generated unique identifier for repeating invoice template # noqa: E501
:param id: The id of this RepeatingInvoice. # noqa: E501
:type: str
"""
self._id = id
@property
def has_attachments(self):
"""Gets the has_attachments of this RepeatingInvoice. # noqa: E501
boolean to indicate if an invoice has an attachment # noqa: E501
:return: The has_attachments of this RepeatingInvoice. # noqa: E501
:rtype: bool
"""
return self._has_attachments
@has_attachments.setter
def has_attachments(self, has_attachments):
"""Sets the has_attachments of this RepeatingInvoice.
boolean to indicate if an invoice has an attachment # noqa: E501
:param has_attachments: The has_attachments of this RepeatingInvoice. # noqa: E501
:type: bool
"""
self._has_attachments = has_attachments
@property
def attachments(self):
"""Gets the attachments of this RepeatingInvoice. # noqa: E501
Displays array of attachments from the API # noqa: E501
:return: The attachments of this RepeatingInvoice. # noqa: E501
:rtype: list[Attachment]
"""
return self._attachments
@attachments.setter
def attachments(self, attachments):
"""Sets the attachments of this RepeatingInvoice.
Displays array of attachments from the API # noqa: E501
:param attachments: The attachments of this RepeatingInvoice. # noqa: E501
:type: list[Attachment]
"""
self._attachments = attachments
| 28.881783
| 124
| 0.609072
|
import re
from xero_python.models import BaseModel
class RepeatingInvoice(BaseModel):
openapi_types = {
"type": "str",
"contact": "Contact",
"schedule": "Schedule",
"line_items": "list[LineItem]",
"line_amount_types": "LineAmountTypes",
"reference": "str",
"branding_theme_id": "str",
"currency_code": "CurrencyCode",
"status": "str",
"sub_total": "float",
"total_tax": "float",
"total": "float",
"repeating_invoice_id": "str",
"id": "str",
"has_attachments": "bool",
"attachments": "list[Attachment]",
}
attribute_map = {
"type": "Type",
"contact": "Contact",
"schedule": "Schedule",
"line_items": "LineItems",
"line_amount_types": "LineAmountTypes",
"reference": "Reference",
"branding_theme_id": "BrandingThemeID",
"currency_code": "CurrencyCode",
"status": "Status",
"sub_total": "SubTotal",
"total_tax": "TotalTax",
"total": "Total",
"repeating_invoice_id": "RepeatingInvoiceID",
"id": "ID",
"has_attachments": "HasAttachments",
"attachments": "Attachments",
}
def __init__(
self,
type=None,
contact=None,
schedule=None,
line_items=None,
line_amount_types=None,
reference=None,
branding_theme_id=None,
currency_code=None,
status=None,
sub_total=None,
total_tax=None,
total=None,
repeating_invoice_id=None,
id=None,
has_attachments=False,
attachments=None,
):
self._type = None
self._contact = None
self._schedule = None
self._line_items = None
self._line_amount_types = None
self._reference = None
self._branding_theme_id = None
self._currency_code = None
self._status = None
self._sub_total = None
self._total_tax = None
self._total = None
self._repeating_invoice_id = None
self._id = None
self._has_attachments = None
self._attachments = None
self.discriminator = None
if type is not None:
self.type = type
if contact is not None:
self.contact = contact
if schedule is not None:
self.schedule = schedule
if line_items is not None:
self.line_items = line_items
if line_amount_types is not None:
self.line_amount_types = line_amount_types
if reference is not None:
self.reference = reference
if branding_theme_id is not None:
self.branding_theme_id = branding_theme_id
if currency_code is not None:
self.currency_code = currency_code
if status is not None:
self.status = status
if sub_total is not None:
self.sub_total = sub_total
if total_tax is not None:
self.total_tax = total_tax
if total is not None:
self.total = total
if repeating_invoice_id is not None:
self.repeating_invoice_id = repeating_invoice_id
if id is not None:
self.id = id
if has_attachments is not None:
self.has_attachments = has_attachments
if attachments is not None:
self.attachments = attachments
@property
def type(self):
return self._type
@type.setter
def type(self, type):
allowed_values = ["ACCPAY", "ACCREC"]
if type not in allowed_values:
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}".format(
type, allowed_values
)
)
self._type = type
@property
def contact(self):
return self._contact
@contact.setter
def contact(self, contact):
self._contact = contact
@property
def schedule(self):
return self._schedule
@schedule.setter
def schedule(self, schedule):
self._schedule = schedule
@property
def line_items(self):
return self._line_items
@line_items.setter
def line_items(self, line_items):
self._line_items = line_items
@property
def line_amount_types(self):
return self._line_amount_types
@line_amount_types.setter
def line_amount_types(self, line_amount_types):
self._line_amount_types = line_amount_types
@property
def reference(self):
return self._reference
@reference.setter
def reference(self, reference):
self._reference = reference
@property
def branding_theme_id(self):
return self._branding_theme_id
@branding_theme_id.setter
def branding_theme_id(self, branding_theme_id):
self._branding_theme_id = branding_theme_id
@property
def currency_code(self):
return self._currency_code
@currency_code.setter
def currency_code(self, currency_code):
self._currency_code = currency_code
@property
def status(self):
return self._status
@status.setter
def status(self, status):
allowed_values = ["DRAFT", "AUTHORISED", "DELETED"]
if status not in allowed_values:
raise ValueError(
"Invalid value for `status` ({0}), must be one of {1}".format(
status, allowed_values
)
)
self._status = status
@property
def sub_total(self):
return self._sub_total
@sub_total.setter
def sub_total(self, sub_total):
self._sub_total = sub_total
@property
def total_tax(self):
return self._total_tax
@total_tax.setter
def total_tax(self, total_tax):
self._total_tax = total_tax
@property
def total(self):
return self._total
@total.setter
def total(self, total):
self._total = total
@property
def repeating_invoice_id(self):
return self._repeating_invoice_id
@repeating_invoice_id.setter
def repeating_invoice_id(self, repeating_invoice_id):
self._repeating_invoice_id = repeating_invoice_id
@property
def id(self):
return self._id
@id.setter
def id(self, id):
self._id = id
@property
def has_attachments(self):
return self._has_attachments
@has_attachments.setter
def has_attachments(self, has_attachments):
self._has_attachments = has_attachments
@property
def attachments(self):
return self._attachments
@attachments.setter
def attachments(self, attachments):
self._attachments = attachments
| true
| true
|
f71a8c220df84bc1e3780600d32aac91ad2146a3
| 2,355
|
py
|
Python
|
scripts/dca.py
|
Fu-Om/bitbank-dca
|
17b24bc09bc1980b90f63113909bb8d62c8ff885
|
[
"MIT"
] | null | null | null |
scripts/dca.py
|
Fu-Om/bitbank-dca
|
17b24bc09bc1980b90f63113909bb8d62c8ff885
|
[
"MIT"
] | null | null | null |
scripts/dca.py
|
Fu-Om/bitbank-dca
|
17b24bc09bc1980b90f63113909bb8d62c8ff885
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import python_bitbankcc
from math import floor
from datetime import datetime
import pathlib
import csv
from settings import BITBANK_API_KEY, BITBANK_API_SECRET
class BitBankPubAPI:
def __init__(self):
self.pub = python_bitbankcc.public()
def get_ticker(self, pair):
try:
value = self.pub.get_ticker(pair)
return value
except Exception as e:
print(e)
return None
class BitBankPrvAPI:
def __init__(self):
api_key = BITBANK_API_KEY
api_secret = BITBANK_API_SECRET
self.prv = python_bitbankcc.private(api_key, api_secret)
def get_asset(self):
try:
value = self.prv.get_asset()
return value
except Exception as e:
print(e)
return None
def buy_order(self, order_price, amount):
try:
value = self.prv.order('btc_jpy', order_price, amount, 'buy', 'limit')
return value
except Exception as e:
print(e)
return None
def main():
unit = 5000 # unit of rounding order
dca_amount = 3000 # jpy to buy for each day
log_file_path = pathlib.Path.home() / 'Devel/bitbank-dca/log.csv' # log file path
pub_set = BitBankPubAPI()
prv_set = BitBankPrvAPI()
ticker = pub_set.get_ticker('btc_jpy')
last_price = int(ticker['last'])
if last_price % unit == 0:
order_price = last_price-2000
else:
order_price = unit * (last_price // unit)
# find amount closest to dca amount on the 4th decimal
amount = dca_amount / order_price
amount = floor(amount * 10 ** 4 + 0.5) / 10 ** 4
t = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
if log_file_path.exists():
with open(log_file_path, 'a', newline='') as f:
writer = csv.writer(f)
writer.writerow([t, str(order_price), str(amount), str(last_price)])
else:
log_file_path.touch()
with open(log_file_path, 'w+', newline='') as f:
writer = csv.writer(f)
writer.writerow(['time', 'order_price', 'amount', 'current_price'])
writer.writerow([t, str(order_price), str(amount), str(last_price)])
prv_set.buy_order(order_price=str(order_price), amount=str(amount))
if __name__ == '__main__':
main()
| 28.719512
| 86
| 0.61104
|
import python_bitbankcc
from math import floor
from datetime import datetime
import pathlib
import csv
from settings import BITBANK_API_KEY, BITBANK_API_SECRET
class BitBankPubAPI:
def __init__(self):
self.pub = python_bitbankcc.public()
def get_ticker(self, pair):
try:
value = self.pub.get_ticker(pair)
return value
except Exception as e:
print(e)
return None
class BitBankPrvAPI:
def __init__(self):
api_key = BITBANK_API_KEY
api_secret = BITBANK_API_SECRET
self.prv = python_bitbankcc.private(api_key, api_secret)
def get_asset(self):
try:
value = self.prv.get_asset()
return value
except Exception as e:
print(e)
return None
def buy_order(self, order_price, amount):
try:
value = self.prv.order('btc_jpy', order_price, amount, 'buy', 'limit')
return value
except Exception as e:
print(e)
return None
def main():
unit = 5000
dca_amount = 3000
log_file_path = pathlib.Path.home() / 'Devel/bitbank-dca/log.csv'
pub_set = BitBankPubAPI()
prv_set = BitBankPrvAPI()
ticker = pub_set.get_ticker('btc_jpy')
last_price = int(ticker['last'])
if last_price % unit == 0:
order_price = last_price-2000
else:
order_price = unit * (last_price // unit)
amount = dca_amount / order_price
amount = floor(amount * 10 ** 4 + 0.5) / 10 ** 4
t = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
if log_file_path.exists():
with open(log_file_path, 'a', newline='') as f:
writer = csv.writer(f)
writer.writerow([t, str(order_price), str(amount), str(last_price)])
else:
log_file_path.touch()
with open(log_file_path, 'w+', newline='') as f:
writer = csv.writer(f)
writer.writerow(['time', 'order_price', 'amount', 'current_price'])
writer.writerow([t, str(order_price), str(amount), str(last_price)])
prv_set.buy_order(order_price=str(order_price), amount=str(amount))
if __name__ == '__main__':
main()
| true
| true
|
f71a8cc95351a2ae4dc1ead3b589c5eaa31f5bc7
| 10,419
|
py
|
Python
|
asanakoy/dataset.py
|
chritter/kaggle_carvana_segmentation
|
14165feadfb500c842616ecb93d9f350dd5bcf87
|
[
"MIT"
] | 447
|
2017-11-18T03:13:45.000Z
|
2022-02-18T10:31:55.000Z
|
asanakoy/dataset.py
|
chritter/kaggle_carvana_segmentation
|
14165feadfb500c842616ecb93d9f350dd5bcf87
|
[
"MIT"
] | 9
|
2018-07-03T13:44:10.000Z
|
2022-03-11T23:17:29.000Z
|
asanakoy/dataset.py
|
jayden-chua/image-mask
|
ce2c6a32bf13df582e7b57e506d58518258be292
|
[
"MIT"
] | 119
|
2017-11-18T07:24:02.000Z
|
2021-11-15T12:24:08.000Z
|
import os
from os.path import isfile, join
from PIL import Image
import pandas as pd
import torch
from torch.utils.data.dataset import Dataset
from torchvision import transforms
import numpy as np
import matplotlib.pyplot as plt
import shutil
from sklearn.model_selection import KFold
from sklearn.model_selection import StratifiedKFold
from pathlib2 import Path
import config
def get_stratified_by_area_folds(car_ids, n_splits, fold_id, random_state, min_bin_size=7):
"""
Sample stratified folds accounting on the area of the car masks
:param car_ids: string ids of the cars (order matters!)
:param n_splits:
:param fold_id:
:param random_state:
:param min_bin_size: min number of samples in each of the area classes(bins)
:return:
"""
df = pd.read_hdf(join(config.input_data_dir, 'areas_df.hdf5'))
assert len(car_ids) == len(df)
df = df.loc[car_ids]
freq, bins = np.histogram(df['sum'], bins=35)
new_bins = [bins[0]]
new_bin_hs = []
pos = 0
cur_bin_right = 0
cur_bin_h = 0
while pos < len(freq):
cur_bin_right = max(cur_bin_right, bins[pos + 1])
cur_bin_h += freq[pos]
if cur_bin_h >= min_bin_size or pos == len(freq) - 1:
new_bin_hs.append(cur_bin_h)
cur_bin_h = 0
new_bins.append(cur_bin_right)
pos += 1
if new_bin_hs[-1] < min_bin_size:
new_bin_hs[-2] += new_bin_hs[-1]
del new_bin_hs[-1]
new_bins[-2] = new_bins[-1]
del new_bins[-1]
new_bins[0] -= 1
assert max(new_bins) == max(bins)
assert freq.sum() == np.sum(new_bin_hs) == len(df)
print 'Num area bins:', len(new_bin_hs)
assert len(new_bin_hs) >= min_bin_size, len(new_bin_hs)
df['area_class'] = pd.cut(df['sum'], new_bins, labels=False)
kf = StratifiedKFold(n_splits=n_splits, shuffle=True, random_state=random_state)
folds = list(kf.split(X=np.arange(len(df)), y=df['area_class'].values))[fold_id]
car_indices = {'train': folds[0], 'val': folds[1]}
return car_indices
class CARVANA(Dataset):
"""
CARVANA dataset that contains car images as .jpg. Each car has 16 images
taken in different angles and a unique id: id_01.jpg, id_02.jpg, ..., id_16.jpg
The labels are provided as a .gif image that contains the manually cutout mask
for each training image
"""
def __init__(self, root, subset="train", image_size=512,
transform=None, is_hq=True, seed=1993, v=1, n_folds=10, fold_id=0, group='all', return_image_id=False):
"""
:param root: it has to be a path to the folder that contains the dataset folders
:param train: boolean true if you want the train set false for the test one
:param transform: transform the images and labels
"""
assert v in [1, 2], 'Unknown folds version: {}'.format(v)
assert 0 <= fold_id < n_folds, fold_id
assert group in range(1, 9) + ['all']
print 'CARVANA::folds version={}'.format(v)
self.is_hq = is_hq
self.group = group
self.return_image_id = return_image_id
if group == 'all':
num_views = 16
else:
num_views = 2
print 'Group: {}; num_views:{}'.format(group, num_views)
self.root = os.path.abspath(os.path.expanduser(root))
self.transform = transform
self.subset = subset
self.data_path, self.labels_path = [], []
self.rs = np.random.RandomState(seed)
if self.subset in ['train', 'val']:
suff = ''
if image_size == 512:
suff = '_{}'.format(image_size)
images_dir = self.root + '/train' + ('_hq' if is_hq else '') + suff
print 'Reading images from ', images_dir
self.data_path = self.get_paths(images_dir, group)
self.labels_path = self.get_paths(self.root + '/train_masks' + suff, group)
assert len(self.data_path) / num_views == 5088 / num_views
assert len(self.data_path) % num_views == 0
num_cars = len(self.data_path) / num_views
if v == 1:
# random k folds
kf = KFold(n_splits=n_folds, shuffle=True, random_state=self.rs)
folds = list(kf.split(np.arange(num_cars)))[fold_id]
car_indices = {'train': folds[0], 'val': folds[1]}
else:
print 'Stratified folds based on mask area...'
car_ids = np.unique(map(lambda x: os.path.basename(x[:-len('_01.jpg')]), self.data_path))
assert os.path.basename(self.data_path[128 + 1]) == car_ids[128 / num_views] + '_{:02}.jpg'.format(2)
car_indices = get_stratified_by_area_folds(car_ids,
n_splits=n_folds,
fold_id=fold_id,
random_state=self.rs)
# TRAIN_FRAC = 0.9
# num_train = int(TRAIN_FRAC * (len(self.data_path) / 16)) * 16
# car_ids = self.rs.permutation(len(self.data_path) / 16)
indices = dict()
for split_name in ['train', 'val']:
img_indices = []
for car_id in car_indices[split_name]:
img_indices.extend(range(car_id * num_views, (car_id + 1) * num_views))
indices[split_name] = img_indices
self.data_path = self.data_path[indices[self.subset]]
self.labels_path = self.labels_path[indices[self.subset]]
print 'Dataset::{}: fold {}/{}'.format(self.subset, fold_id, n_folds)
elif self.subset == "test":
self.data_path = self.get_test_paths(is_hq, group)
self.labels_path = None
else:
raise RuntimeError('Invalid subset ' + self.subset + ', it must be one of:'
' \'train\', \'val\' or \'test\'')
def reload_all_test_paths(self):
if self.subset != 'test':
raise ValueError('Only possible for test!')
self.data_path = self.get_test_paths(self.is_hq, self.group)
@staticmethod
def get_paths(dir_path, group):
    """
    Return all matching image paths under ``dir_path``, sorted.

    :param dir_path: directory scanned (non-recursively) for files
    :param group: 'all' to keep every file, or an int in 1..8 selecting the
        two opposite views ``group`` and ``group + 8`` of each car (plus
        their masks)
    :return: np.ndarray of sorted file paths
    """
    # list(...) keeps this membership test valid on both Python 2 and 3;
    # bare `range(1, 9) + ['all']` raises TypeError under Python 3.
    assert group in list(range(1, 9)) + ['all']
    images_dir = [join(dir_path, f) for f in os.listdir(dir_path) if
                  isfile(join(dir_path, f))]
    if group in range(1, 9):
        # Each car is photographed from 16 angles; a single group keeps the
        # two opposite views `group` and `group + 8` (images and GIF masks).
        endings = ('_{:02d}.jpg'.format(group), '_{:02d}_mask.gif'.format(group),
                   '_{:02d}.jpg'.format(group + 8), '_{:02d}_mask.gif'.format(group + 8))
        # str.endswith accepts a tuple of suffixes: one call per path
        # replaces the nested loop-with-break of the original.
        final_paths = [path for path in images_dir if path.endswith(endings)]
    else:
        final_paths = images_dir
    final_paths.sort()
    return np.asarray(final_paths)
@staticmethod
def get_test_paths(is_hq, group='all'):
    """Collect the sorted test image paths (optionally the HQ set)."""
    suffix = '_hq' if is_hq else ''
    test_dir = join(config.input_data_dir, 'test') + suffix
    return CARVANA.get_paths(test_dir, group)
def __getitem__(self, index):
    """
    Load one sample.

    :param index: position within the current split
    :return: ``(img, target)`` for train/val; for the test split just the
        transformed image (no mask). When ``return_image_id`` is set, the
        image file stem is appended to the returned tuple.
    """
    # Load the image and (for train/val only) its ground-truth mask.
    img = Image.open(self.data_path[index])
    target = Image.open(self.labels_path[index]) if not self.subset == 'test' else None
    if target is not None and target.mode == 'P':
        # Palette GIF masks hold values 0/1; convert to 8-bit greyscale so
        # the values become ints from 0 to 255 before any transform runs.
        target = target.convert('L')
    # Apply the transform(s). NOTE(review): `transform` is assumed to
    # accept an (img, target) pair for train/val and a single image
    # otherwise, and to rescale masks into [0, 1] -- confirm against callers.
    if self.transform is not None:
        if target is not None:
            img, target = self.transform(img, target)
        else:
            img = self.transform(img)
    if target is not None:
        assert target.max() == 1.0, 'Wrong scaling for target mask (max val = {})'.format(target.max())
        # Binarize: intermediate (e.g. interpolated) values are pushed to 0.
        target[(target > 0) & (target < 1.0)] = 0
        assert ((target > 0) & (target < 1.0)).sum() == 0
        if not self.return_image_id:
            return img, target
        else:
            return img, target, Path(self.data_path[index]).stem
    else:
        return img, Path(self.data_path[index]).stem
def __len__(self):
    """Return the number of images available in the selected subset."""
    num_samples = len(self.data_path)
    return num_samples
class CarvanaPlus(Dataset):
    """
    Thin wrapper around :class:`CARVANA` restricted to the 'train' subset.

    All construction arguments are forwarded to ``CARVANA`` unchanged;
    indexing and length delegate to the wrapped dataset.
    """

    def __init__(self, root, subset="train", image_size=512,
                 transform=None, is_hq=True, seed=1993, v=1, n_folds=10, fold_id=0,
                 group='all', return_image_id=False):
        if subset not in ['train']:
            # The original message claimed only 'test' was unavailable, but
            # 'val' is rejected here as well -- say what is actually allowed.
            raise ValueError('Only the \'train\' subset is available')
        self.carvana = CARVANA(root, subset, image_size,
                               transform, is_hq, seed, v, n_folds,
                               fold_id, group, return_image_id)

    def __getitem__(self, index):
        return self.carvana[index]

    def __len__(self):
        return len(self.carvana)
def im_show(img_list):
    """
    Plot up to 9 images next to each other in a single row of subplots.

    :param img_list: images convertible by ``transforms.ToPILImage``
    :return: None (displays the figure)
    :raises ValueError: if 10 or more images are passed -- the single-row
        subplot code ``1 / N / idx`` only has one digit per field
    """
    to_PIL = transforms.ToPILImage()
    if len(img_list) >= 10:
        # ValueError subclasses Exception, so existing callers that caught
        # the old generic Exception keep working.
        raise ValueError("len(img_list) must be smaller than 10")
    for idx, img in enumerate(img_list):
        img = np.array(to_PIL(img))
        # Encode (rows=1, cols=len(img_list), index) as the 3-digit code.
        plt.subplot(100 + 10 * len(img_list) + (idx + 1))
        fig = plt.imshow(img)
        # Hide the tick axes: these are images, not charts.
        fig.axes.get_xaxis().set_visible(False)
        fig.axes.get_yaxis().set_visible(False)
    plt.show()
def save_checkpoint(state, is_best, filepath='checkpoint.pth.tar'):
    """Serialize ``state`` to ``filepath``; when ``is_best``, also copy it
    to ``model_best.pth.tar`` in the same directory."""
    torch.save(state, filepath)
    if not is_best:
        return
    best_path = os.path.join(os.path.dirname(filepath), 'model_best.pth.tar')
    shutil.copyfile(filepath, best_path)
| 38.732342
| 120
| 0.578079
|
import os
from os.path import isfile, join
from PIL import Image
import pandas as pd
import torch
from torch.utils.data.dataset import Dataset
from torchvision import transforms
import numpy as np
import matplotlib.pyplot as plt
import shutil
from sklearn.model_selection import KFold
from sklearn.model_selection import StratifiedKFold
from pathlib2 import Path
import config
def get_stratified_by_area_folds(car_ids, n_splits, fold_id, random_state, min_bin_size=7):
    """
    Build train/val folds of cars stratified by total mask area.

    Precomputed mask areas (``areas_df.hdf5``) are histogrammed; adjacent
    histogram bins are merged left-to-right until every merged bin holds at
    least ``min_bin_size`` cars, and the merged-bin index becomes the
    stratification class for ``StratifiedKFold``.

    :param car_ids: string ids of the cars; order must match the dataframe
    :param n_splits: number of folds
    :param fold_id: which fold to return
    :param random_state: RandomState used to shuffle the folds
    :param min_bin_size: minimum number of cars per merged area bin
    :return: dict with 'train' and 'val' arrays of car indices
    """
    df = pd.read_hdf(join(config.input_data_dir, 'areas_df.hdf5'))
    assert len(car_ids) == len(df)
    df = df.loc[car_ids]
    freq, bins = np.histogram(df['sum'], bins=35)
    # Merge adjacent histogram bins until each merged bin contains at least
    # `min_bin_size` cars (the final bin takes whatever is left).
    new_bins = [bins[0]]
    new_bin_hs = []
    pos = 0
    cur_bin_right = 0
    cur_bin_h = 0
    while pos < len(freq):
        cur_bin_right = max(cur_bin_right, bins[pos + 1])
        cur_bin_h += freq[pos]
        if cur_bin_h >= min_bin_size or pos == len(freq) - 1:
            new_bin_hs.append(cur_bin_h)
            cur_bin_h = 0
            new_bins.append(cur_bin_right)
        pos += 1
    # If the trailing bin is still too small, fold it into its neighbour.
    if new_bin_hs[-1] < min_bin_size:
        new_bin_hs[-2] += new_bin_hs[-1]
        del new_bin_hs[-1]
        new_bins[-2] = new_bins[-1]
        del new_bins[-1]
    # Widen the leftmost edge so the minimum area lands inside the first
    # bin (pd.cut intervals are open on the left by default).
    new_bins[0] -= 1
    assert max(new_bins) == max(bins)
    assert freq.sum() == np.sum(new_bin_hs) == len(df)
    print 'Num area bins:', len(new_bin_hs)
    assert len(new_bin_hs) >= min_bin_size, len(new_bin_hs)
    df['area_class'] = pd.cut(df['sum'], new_bins, labels=False)
    kf = StratifiedKFold(n_splits=n_splits, shuffle=True, random_state=random_state)
    folds = list(kf.split(X=np.arange(len(df)), y=df['area_class'].values))[fold_id]
    car_indices = {'train': folds[0], 'val': folds[1]}
    return car_indices
class CARVANA(Dataset):
    """
    CARVANA dataset that contains car images as .jpg. Each car has 16 images
    taken in different angles and a unique id: id_01.jpg, id_02.jpg, ..., id_16.jpg
    The labels are provided as a .gif image that contains the manually cutout mask
    for each training image
    """
    def __init__(self, root, subset="train", image_size=512,
                 transform=None, is_hq=True, seed=1993, v=1, n_folds=10, fold_id=0, group='all', return_image_id=False):
        """
        :param root: it has to be a path to the folder that contains the dataset folders
        :param subset: one of 'train', 'val' or 'test'
        :param transform: transform the images and labels
        """
        assert v in [1, 2], 'Unknown folds version: {}'.format(v)
        assert 0 <= fold_id < n_folds, fold_id
        assert group in range(1, 9) + ['all']
        print 'CARVANA::folds version={}'.format(v)
        self.is_hq = is_hq
        self.group = group
        self.return_image_id = return_image_id
        # 'all' keeps every one of the 16 views per car; a single group
        # keeps only the two opposite views `group` and `group + 8`.
        if group == 'all':
            num_views = 16
        else:
            num_views = 2
        print 'Group: {}; num_views:{}'.format(group, num_views)
        self.root = os.path.abspath(os.path.expanduser(root))
        self.transform = transform
        self.subset = subset
        self.data_path, self.labels_path = [], []
        self.rs = np.random.RandomState(seed)
        if self.subset in ['train', 'val']:
            suff = ''
            if image_size == 512:
                suff = '_{}'.format(image_size)
            images_dir = self.root + '/train' + ('_hq' if is_hq else '') + suff
            print 'Reading images from ', images_dir
            self.data_path = self.get_paths(images_dir, group)
            self.labels_path = self.get_paths(self.root + '/train_masks' + suff, group)
            # 5088 images == 318 cars x 16 views in the full training set.
            assert len(self.data_path) / num_views == 5088 / num_views
            assert len(self.data_path) % num_views == 0
            num_cars = len(self.data_path) / num_views
            if v == 1:
                # v1: random k folds over cars
                kf = KFold(n_splits=n_folds, shuffle=True, random_state=self.rs)
                folds = list(kf.split(np.arange(num_cars)))[fold_id]
                car_indices = {'train': folds[0], 'val': folds[1]}
            else:
                # v2: folds stratified by total mask area per car.
                print 'Stratified folds based on mask area...'
                # NOTE: Python 2 semantics -- map() returns a list here and
                # 128 / num_views is integer division.
                car_ids = np.unique(map(lambda x: os.path.basename(x[:-len('_01.jpg')]), self.data_path))
                assert os.path.basename(self.data_path[128 + 1]) == car_ids[128 / num_views] + '_{:02}.jpg'.format(2)
                car_indices = get_stratified_by_area_folds(car_ids,
                                                           n_splits=n_folds,
                                                           fold_id=fold_id,
                                                           random_state=self.rs)
            # Expand car-level fold indices into per-image indices.
            indices = dict()
            for split_name in ['train', 'val']:
                img_indices = []
                for car_id in car_indices[split_name]:
                    img_indices.extend(range(car_id * num_views, (car_id + 1) * num_views))
                indices[split_name] = img_indices
            self.data_path = self.data_path[indices[self.subset]]
            self.labels_path = self.labels_path[indices[self.subset]]
            print 'Dataset::{}: fold {}/{}'.format(self.subset, fold_id, n_folds)
        elif self.subset == "test":
            self.data_path = self.get_test_paths(is_hq, group)
            self.labels_path = None
        else:
            raise RuntimeError('Invalid subset ' + self.subset + ', it must be one of:'
                               ' \'train\', \'val\' or \'test\'')
    def reload_all_test_paths(self):
        """Re-scan the test directory and refresh ``self.data_path``."""
        if self.subset != 'test':
            raise ValueError('Only possible for test!')
        self.data_path = self.get_test_paths(self.is_hq, self.group)
    @staticmethod
    def get_paths(dir_path, group):
        """
        returns all the sorted image paths.
        :param dir_path:
        :return: array with all the paths to the images
        """
        assert group in range(1, 9) + ['all']
        images_dir = [join(dir_path, f) for f in os.listdir(dir_path) if
                      isfile(join(dir_path, f))]
        final_paths = []
        if group in range(1, 9):
            # Keep only the two opposite views of each car and their masks.
            endings = ['_{:02d}.jpg'.format(group), '_{:02d}_mask.gif'.format(group),
                       '_{:02d}.jpg'.format(group + 8), '_{:02d}_mask.gif'.format(group + 8)]
            for path in images_dir:
                for ending in endings:
                    if path.endswith(ending):
                        final_paths.append(path)
                        break
        else:
            final_paths = images_dir
        final_paths.sort()
        return np.asarray(final_paths)
    @staticmethod
    def get_test_paths(is_hq, group='all'):
        """Collect the sorted test image paths (optionally the HQ set)."""
        return CARVANA.get_paths(join(config.input_data_dir, 'test') +
                                 ('_hq' if is_hq else ''), group)
    def __getitem__(self, index):
        """
        :param index:
        :return: tuple (img, target) with the input data and its label
        """
        # Load the image and (for train/val only) its ground-truth mask.
        img = Image.open(self.data_path[index])
        target = Image.open(self.labels_path[index]) if not self.subset == 'test' else None
        if target is not None and target.mode == 'P':
            # Palette GIF masks hold values 0/1; convert to 8-bit greyscale
            # (ints 0..255) before any transform runs.
            target = target.convert('L')
        # Apply the transform(s) to image (and mask, when present).
        if self.transform is not None:
            if target is not None:
                img, target = self.transform(img, target)
            else:
                img = self.transform(img)
        if target is not None:
            assert target.max() == 1.0, 'Wrong scaling for target mask (max val = {})'.format(target.max())
            # Binarize: intermediate (interpolated) values are pushed to 0.
            target[(target > 0) & (target < 1.0)] = 0
            assert ((target > 0) & (target < 1.0)).sum() == 0
            if not self.return_image_id:
                return img, target
            else:
                return img, target, Path(self.data_path[index]).stem
        else:
            return img, Path(self.data_path[index]).stem
    def __len__(self):
        return len(self.data_path)
class CarvanaPlus(Dataset):
    """
    Thin wrapper around :class:`CARVANA` that only exposes the 'train'
    subset. Construction arguments are forwarded unchanged; indexing and
    length delegate to the wrapped dataset.
    """
    def __init__(self, root, subset="train", image_size=512,
                 transform=None, is_hq=True, seed=1993, v=1, n_folds=10, fold_id=0,
                 group='all', return_image_id=False):
        # Note: every subset other than 'train' (including 'val') is
        # rejected here, despite what the message says.
        if subset not in ['train']:
            raise ValueError('No test split available')
        self.carvana = CARVANA(root, subset, image_size,
                               transform, is_hq, seed, v, n_folds,
                               fold_id, group, return_image_id)
    def __getitem__(self, index):
        return self.carvana[index]
    def __len__(self):
        return len(self.carvana)
def im_show(img_list):
    """
    Render all images of ``img_list`` next to each other in one figure.

    :param img_list: images convertible by ``transforms.ToPILImage``;
        must contain fewer than 10 entries
    :return: None (displays the figure)
    """
    to_PIL = transforms.ToPILImage()
    n_images = len(img_list)
    if n_images >= 10:
        raise Exception("len(img_list) must be smaller than 10")
    for position, image in enumerate(img_list, start=1):
        pixels = np.array(to_PIL(image))
        plt.subplot(100 + 10 * n_images + position)
        shown = plt.imshow(pixels)
        shown.axes.get_xaxis().set_visible(False)
        shown.axes.get_yaxis().set_visible(False)
    plt.show()
def save_checkpoint(state, is_best, filepath='checkpoint.pth.tar'):
    """Persist ``state`` with ``torch.save``; when ``is_best`` also copy
    the checkpoint to ``model_best.pth.tar`` next to ``filepath``."""
    torch.save(state, filepath)
    if is_best:
        shutil.copyfile(filepath,
                        os.path.join(os.path.dirname(filepath), 'model_best.pth.tar'))
| false
| true
|
f71a8d0e28e916e21d6205427d7bc48512999cec
| 15,536
|
py
|
Python
|
tensorflow_probability/python/distributions/deterministic.py
|
bourov/probability
|
1e4053a0938b4773c3425bcbb07b3f1e5d50c7e2
|
[
"Apache-2.0"
] | 2
|
2020-12-17T20:43:24.000Z
|
2021-06-11T22:09:16.000Z
|
tensorflow_probability/python/distributions/deterministic.py
|
bourov/probability
|
1e4053a0938b4773c3425bcbb07b3f1e5d50c7e2
|
[
"Apache-2.0"
] | 2
|
2021-08-25T16:14:51.000Z
|
2022-02-10T04:47:11.000Z
|
tensorflow_probability/python/distributions/deterministic.py
|
bourov/probability
|
1e4053a0938b4773c3425bcbb07b3f1e5d50c7e2
|
[
"Apache-2.0"
] | 1
|
2020-12-19T13:05:15.000Z
|
2020-12-19T13:05:15.000Z
|
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The Deterministic distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
# Dependency imports
import six
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.distributions import distribution
from tensorflow_probability.python.distributions import kullback_leibler
from tensorflow_probability.python.internal import assert_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import reparameterization
from tensorflow_probability.python.internal import tensor_util
from tensorflow_probability.python.internal import tensorshape_util
__all__ = [
'Deterministic',
'VectorDeterministic',
]
@six.add_metaclass(abc.ABCMeta)
class _BaseDeterministic(distribution.Distribution):
  """Base class for Deterministic distributions."""

  def __init__(self,
               loc,
               atol=None,
               rtol=None,
               is_vector=False,
               validate_args=False,
               allow_nan_stats=True,
               parameters=None,
               name='_BaseDeterministic'):
    """Initialize a batch of `_BaseDeterministic` distributions.

    The `atol` and `rtol` parameters allow for some slack in `pmf`, `cdf`
    computations, e.g. due to floating-point error.

    ```
    pmf(x; loc)
      = 1, if Abs(x - loc) <= atol + rtol * Abs(loc),
      = 0, otherwise.
    ```

    Args:
      loc: Numeric `Tensor`. The point (or batch of points) on which this
        distribution is supported.
      atol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
        shape. The absolute tolerance for comparing closeness to `loc`.
        Default is `0`.
      rtol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
        shape. The relative tolerance for comparing closeness to `loc`.
        Default is `0`.
      is_vector: Python `bool`. If `True`, this is for `VectorDeterministic`,
        else `Deterministic`.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value '`NaN`' to indicate the
        result is undefined. When `False`, an exception is raised if one or
        more of the statistic's batch members are undefined.
      parameters: Dict of locals to facilitate copy construction.
      name: Python `str` name prefixed to Ops created by this class.

    Raises:
      ValueError: If `loc` is a scalar.
    """
    with tf.name_scope(name) as name:
      dtype = dtype_util.common_dtype([loc, atol, rtol], dtype_hint=tf.float32)
      self._loc = tensor_util.convert_nonref_to_tensor(
          loc, dtype_hint=dtype, name='loc')
      self._atol = tensor_util.convert_nonref_to_tensor(
          0 if atol is None else atol, dtype=dtype, name='atol')
      self._rtol = tensor_util.convert_nonref_to_tensor(
          0 if rtol is None else rtol, dtype=dtype, name='rtol')
      self._is_vector = is_vector

      super(_BaseDeterministic, self).__init__(
          dtype=self._loc.dtype,
          reparameterization_type=(
              reparameterization.FULLY_REPARAMETERIZED
              if dtype_util.is_floating(self._loc.dtype)
              else reparameterization.NOT_REPARAMETERIZED),
          validate_args=validate_args,
          allow_nan_stats=allow_nan_stats,
          parameters=parameters,
          name=name)

  def _slack(self, loc):
    # Avoid using the large broadcast with self.loc if possible.
    # (When the caller never supplied `rtol`, the band is just `atol` and
    # the `rtol * |loc|` term is skipped entirely.)
    if self.parameters['rtol'] is None:
      return self.atol
    else:
      return self.atol + self.rtol * tf.abs(loc)

  @property
  def loc(self):
    """Point (or batch of points) at which this distribution is supported."""
    return self._loc

  @property
  def atol(self):
    """Absolute tolerance for comparing points to `self.loc`."""
    return self._atol

  @property
  def rtol(self):
    """Relative tolerance for comparing points to `self.loc`."""
    return self._rtol

  def _entropy(self):
    # A point mass carries no uncertainty, hence zero entropy.
    return tf.zeros(self.batch_shape_tensor(), dtype=self.dtype)

  def _mean(self):
    return tf.identity(self.loc)

  def _variance(self):
    # All mass sits exactly at `loc`: zero spread.
    return tf.zeros_like(self.loc)

  def _mode(self):
    # mode == mean == loc for a point mass.
    return self.mean()

  def _sample_n(self, n, seed=None):
    del seed  # unused
    # Every draw equals `loc`; just broadcast to [n] + batch + event shape.
    loc = tf.convert_to_tensor(self.loc)
    return tf.broadcast_to(
        loc,
        tf.concat([[n], self._batch_shape_tensor(loc=loc),
                   self._event_shape_tensor(loc=loc)],
                  axis=0))

  def _default_event_space_bijector(self):
    # Returns None: the support is a single point, not a continuum, so no
    # default reparameterizing bijector is provided.
    return

  def _parameter_control_dependencies(self, is_init):
    assertions = []

    # In init, we can always build shape and dtype checks because
    # we assume shape doesn't change for Variable backed args.
    if is_init and self._is_vector:
      msg = 'Argument `loc` must be at least rank 1.'
      if tensorshape_util.rank(self.loc.shape) is not None:
        if tensorshape_util.rank(self.loc.shape) < 1:
          raise ValueError(msg)
      elif self.validate_args:
        assertions.append(
            assert_util.assert_rank_at_least(self.loc, 1, message=msg))

    if not self.validate_args:
      assert not assertions  # Should never happen
      return []

    if is_init != tensor_util.is_ref(self.atol):
      assertions.append(
          assert_util.assert_non_negative(
              self.atol, message='Argument "atol" must be non-negative'))
    if is_init != tensor_util.is_ref(self.rtol):
      assertions.append(
          assert_util.assert_non_negative(
              self.rtol, message='Argument "rtol" must be non-negative'))
    return assertions
class Deterministic(_BaseDeterministic):
  """Scalar `Deterministic` distribution on the real line.

  The scalar `Deterministic` distribution is parameterized by a [batch] point
  `loc` on the real line.  The distribution is supported at this point only,
  and corresponds to a random variable that is constant, equal to `loc`.

  See [Degenerate rv](https://en.wikipedia.org/wiki/Degenerate_distribution).

  #### Mathematical Details

  The probability mass function (pmf) and cumulative distribution function (cdf)
  are

  ```none
  pmf(x; loc) = 1, if x == loc, else 0
  cdf(x; loc) = 1, if x >= loc, else 0
  ```

  #### Examples

  ```python
  # Initialize a single Deterministic supported at zero.
  constant = tfp.distributions.Deterministic(0.)
  constant.prob(0.)
  ==> 1.
  constant.prob(2.)
  ==> 0.

  # Initialize a [2, 2] batch of scalar constants.
  loc = [[0., 1.], [2., 3.]]
  x = [[0., 1.1], [1.99, 3.]]
  constant = tfp.distributions.Deterministic(loc)
  constant.prob(x)
  ==> [[1., 0.], [0., 1.]]
  ```

  """

  def __init__(self,
               loc,
               atol=None,
               rtol=None,
               validate_args=False,
               allow_nan_stats=True,
               name='Deterministic'):
    """Initialize a scalar `Deterministic` distribution.

    The `atol` and `rtol` parameters allow for some slack in `pmf`, `cdf`
    computations, e.g. due to floating-point error.

    ```
    pmf(x; loc)
      = 1, if Abs(x - loc) <= atol + rtol * Abs(loc),
      = 0, otherwise.
    ```

    Args:
      loc: Numeric `Tensor` of shape `[B1, ..., Bb]`, with `b >= 0`.
        The point (or batch of points) on which this distribution is supported.
      atol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
        shape. The absolute tolerance for comparing closeness to `loc`.
        Default is `0`.
      rtol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
        shape. The relative tolerance for comparing closeness to `loc`.
        Default is `0`.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value '`NaN`' to indicate the
        result is undefined. When `False`, an exception is raised if one or
        more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    """
    parameters = dict(locals())
    super(Deterministic, self).__init__(
        loc,
        atol=atol,
        rtol=rtol,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        name=name)

  @classmethod
  def _params_event_ndims(cls):
    # All parameters are per-scalar-event (event rank 0).
    return dict(loc=0, atol=0, rtol=0)

  def _batch_shape_tensor(self, loc=None):
    # Batch shape is the broadcast of the loc, atol and rtol shapes.
    return tf.broadcast_dynamic_shape(
        tf.shape(self.loc if loc is None else loc),
        tf.broadcast_dynamic_shape(tf.shape(self.atol), tf.shape(self.rtol)))

  def _batch_shape(self):
    return tf.broadcast_static_shape(
        self.loc.shape,
        tf.broadcast_static_shape(self.atol.shape, self.rtol.shape))

  def _event_shape_tensor(self, loc=None):
    del loc
    # Scalar distribution: empty event shape.
    return tf.constant([], dtype=tf.int32)

  def _event_shape(self):
    return tf.TensorShape([])

  def _prob(self, x):
    loc = tf.convert_to_tensor(self.loc)
    # Enforces dtype of probability to be float, when self.dtype is not.
    prob_dtype = self.dtype if dtype_util.is_floating(
        self.dtype) else tf.float32
    return tf.cast(tf.abs(x - loc) <= self._slack(loc), dtype=prob_dtype)

  def _cdf(self, x):
    loc = tf.identity(self.loc)
    # Step function: jumps from 0 to 1 at `loc` (minus the tolerance band).
    return tf.cast(x >= loc - self._slack(loc), dtype=self.dtype)
class VectorDeterministic(_BaseDeterministic):
  """Vector `Deterministic` distribution on `R^k`.

  The `VectorDeterministic` distribution is parameterized by a [batch] point
  `loc in R^k`.  The distribution is supported at this point only,
  and corresponds to a random variable that is constant, equal to `loc`.

  See [Degenerate rv](https://en.wikipedia.org/wiki/Degenerate_distribution).

  #### Mathematical Details

  The probability mass function (pmf) is

  ```none
  pmf(x; loc)
    = 1, if All[Abs(x - loc) <= atol + rtol * Abs(loc)],
    = 0, otherwise.
  ```

  #### Examples

  ```python
  tfd = tfp.distributions

  # Initialize a single VectorDeterministic supported at [0., 2.] in R^2.
  constant = tfd.Deterministic([0., 2.])
  constant.prob([0., 2.])
  ==> 1.
  constant.prob([0., 3.])
  ==> 0.

  # Initialize a [3] batch of constants on R^2.
  loc = [[0., 1.], [2., 3.], [4., 5.]]
  constant = tfd.VectorDeterministic(loc)
  constant.prob([[0., 1.], [1.9, 3.], [3.99, 5.]])
  ==> [1., 0., 0.]
  ```

  """

  def __init__(self,
               loc,
               atol=None,
               rtol=None,
               validate_args=False,
               allow_nan_stats=True,
               name='VectorDeterministic'):
    """Initialize a `VectorDeterministic` distribution on `R^k`, for `k >= 0`.

    Note that there is only one point in `R^0`, the 'point' `[]`.  So if `k = 0`
    then `self.prob([]) == 1`.

    The `atol` and `rtol` parameters allow for some slack in `pmf`
    computations, e.g. due to floating-point error.

    ```
    pmf(x; loc)
      = 1, if All[Abs(x - loc) <= atol + rtol * Abs(loc)],
      = 0, otherwise
    ```

    Args:
      loc: Numeric `Tensor` of shape `[B1, ..., Bb, k]`, with `b >= 0`, `k >= 0`
        The point (or batch of points) on which this distribution is supported.
      atol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
        shape. The absolute tolerance for comparing closeness to `loc`.
        Default is `0`.
      rtol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
        shape. The relative tolerance for comparing closeness to `loc`.
        Default is `0`.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value '`NaN`' to indicate the
        result is undefined. When `False`, an exception is raised if one or
        more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    """
    parameters = dict(locals())
    super(VectorDeterministic, self).__init__(
        loc,
        atol=atol,
        rtol=rtol,
        is_vector=True,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        name=name)

  @classmethod
  def _params_event_ndims(cls):
    # Each parameter carries one event dimension (the `k` vector axis).
    return dict(loc=1, atol=1, rtol=1)

  def _batch_shape_tensor(self, loc=None):
    # Broadcast of loc/atol/rtol shapes, minus the trailing event dim.
    return tf.broadcast_dynamic_shape(
        tf.shape(self.loc if loc is None else loc),
        tf.broadcast_dynamic_shape(tf.shape(self.atol),
                                   tf.shape(self.rtol)))[:-1]

  def _batch_shape(self):
    return tf.broadcast_static_shape(
        self.loc.shape,
        tf.broadcast_static_shape(self.atol.shape, self.rtol.shape))[:-1]

  def _event_shape_tensor(self, loc=None):
    return tf.shape(self.loc if loc is None else loc)[-1:]

  def _event_shape(self):
    return self.loc.shape[-1:]

  def _prob(self, x):
    loc = tf.convert_to_tensor(self.loc)
    # Probability 1 iff every component of `x` lies in the tolerance band.
    return tf.cast(
        tf.reduce_all(tf.abs(x - loc) <= self._slack(loc), axis=-1),
        dtype=self.dtype)

  def _sample_control_dependencies(self, x):
    """Runtime checks that samples `x` live in the event space `R^k`."""
    assertions = []
    if not self.validate_args:
      return assertions
    assertions.append(assert_util.assert_rank_at_least(x, 1))
    assertions.append(assert_util.assert_equal(
        self.event_shape_tensor(), tf.gather(tf.shape(x), tf.rank(x) - 1),
        message=('Argument `x` not defined in the same space '
                 'R**k as this distribution')))
    return assertions
@kullback_leibler.RegisterKL(_BaseDeterministic, distribution.Distribution)
def _kl_deterministic_distribution(a, b, name=None):
  """Calculate the batched KL divergence `KL(a || b)` with `a` Deterministic.

  Args:
    a: instance of a Deterministic distribution object.
    b: instance of a Distribution distribution object.
    name: (optional) Name to use for created operations. Default is
      'kl_deterministic_distribution'.

  Returns:
    Batchwise `KL(a || b)`.
  """
  with tf.name_scope(name or 'kl_deterministic_distribution'):
    # `a` places all of its mass at `a.loc` and has zero entropy, so the
    # divergence reduces to the negative log-density of `b` at that point.
    return -b.log_prob(a.loc)
| 34.524444
| 80
| 0.655381
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.distributions import distribution
from tensorflow_probability.python.distributions import kullback_leibler
from tensorflow_probability.python.internal import assert_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import reparameterization
from tensorflow_probability.python.internal import tensor_util
from tensorflow_probability.python.internal import tensorshape_util
__all__ = [
'Deterministic',
'VectorDeterministic',
]
@six.add_metaclass(abc.ABCMeta)
class _BaseDeterministic(distribution.Distribution):
  """Shared implementation for the scalar/vector Deterministic distributions."""

  def __init__(self,
               loc,
               atol=None,
               rtol=None,
               is_vector=False,
               validate_args=False,
               allow_nan_stats=True,
               parameters=None,
               name='_BaseDeterministic'):
    """Initialize a batch of point-mass distributions supported at `loc`.

    `atol`/`rtol` give the absolute/relative slack used when comparing
    points to `loc` (both default to 0); `is_vector` selects the
    `VectorDeterministic` semantics.
    """
    with tf.name_scope(name) as name:
      dtype = dtype_util.common_dtype([loc, atol, rtol], dtype_hint=tf.float32)
      self._loc = tensor_util.convert_nonref_to_tensor(
          loc, dtype_hint=dtype, name='loc')
      self._atol = tensor_util.convert_nonref_to_tensor(
          0 if atol is None else atol, dtype=dtype, name='atol')
      self._rtol = tensor_util.convert_nonref_to_tensor(
          0 if rtol is None else rtol, dtype=dtype, name='rtol')
      self._is_vector = is_vector

      super(_BaseDeterministic, self).__init__(
          dtype=self._loc.dtype,
          reparameterization_type=(
              reparameterization.FULLY_REPARAMETERIZED
              if dtype_util.is_floating(self._loc.dtype)
              else reparameterization.NOT_REPARAMETERIZED),
          validate_args=validate_args,
          allow_nan_stats=allow_nan_stats,
          parameters=parameters,
          name=name)

  def _slack(self, loc):
    # Tolerance band around `loc`: atol + rtol * |loc|; skip the |loc|
    # broadcast entirely when the caller never supplied an rtol.
    if self.parameters['rtol'] is None:
      return self.atol
    else:
      return self.atol + self.rtol * tf.abs(loc)

  @property
  def loc(self):
    """Point (or batch of points) at which this distribution is supported."""
    return self._loc

  @property
  def atol(self):
    """Absolute tolerance for comparing points to `self.loc`."""
    return self._atol

  @property
  def rtol(self):
    """Relative tolerance for comparing points to `self.loc`."""
    return self._rtol

  def _entropy(self):
    # A point mass carries no uncertainty, hence zero entropy.
    return tf.zeros(self.batch_shape_tensor(), dtype=self.dtype)

  def _mean(self):
    return tf.identity(self.loc)

  def _variance(self):
    # All mass sits exactly at `loc`: zero spread.
    return tf.zeros_like(self.loc)

  def _mode(self):
    # mode == mean == loc for a point mass.
    return self.mean()

  def _sample_n(self, n, seed=None):
    del seed  # unused: samples are deterministic
    # Every draw equals `loc`; just broadcast to [n] + batch + event shape.
    loc = tf.convert_to_tensor(self.loc)
    return tf.broadcast_to(
        loc,
        tf.concat([[n], self._batch_shape_tensor(loc=loc),
                   self._event_shape_tensor(loc=loc)],
                  axis=0))

  def _default_event_space_bijector(self):
    # Returns None: the support is a single point, not a continuum.
    return

  def _parameter_control_dependencies(self, is_init):
    assertions = []
    # Shape checks can always run at init time; `atol`/`rtol` sign checks
    # run on whichever side of init matches their ref/non-ref status.
    if is_init and self._is_vector:
      msg = 'Argument `loc` must be at least rank 1.'
      if tensorshape_util.rank(self.loc.shape) is not None:
        if tensorshape_util.rank(self.loc.shape) < 1:
          raise ValueError(msg)
      elif self.validate_args:
        assertions.append(
            assert_util.assert_rank_at_least(self.loc, 1, message=msg))
    if not self.validate_args:
      assert not assertions  # Should never happen
      return []
    if is_init != tensor_util.is_ref(self.atol):
      assertions.append(
          assert_util.assert_non_negative(
              self.atol, message='Argument "atol" must be non-negative'))
    if is_init != tensor_util.is_ref(self.rtol):
      assertions.append(
          assert_util.assert_non_negative(
              self.rtol, message='Argument "rtol" must be non-negative'))
    return assertions
class Deterministic(_BaseDeterministic):
  """Scalar point-mass distribution supported at `loc` on the real line."""

  def __init__(self,
               loc,
               atol=None,
               rtol=None,
               validate_args=False,
               allow_nan_stats=True,
               name='Deterministic'):
    """Initialize a scalar `Deterministic` distribution.

    `atol`/`rtol` set the absolute/relative slack used when comparing
    points to `loc` in `prob`/`cdf`; both default to 0.
    """
    parameters = dict(locals())
    super(Deterministic, self).__init__(
        loc,
        atol=atol,
        rtol=rtol,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        name=name)

  @classmethod
  def _params_event_ndims(cls):
    # All parameters are per-scalar-event (event rank 0).
    return dict(loc=0, atol=0, rtol=0)

  def _batch_shape_tensor(self, loc=None):
    # Batch shape is the broadcast of the loc, atol and rtol shapes.
    return tf.broadcast_dynamic_shape(
        tf.shape(self.loc if loc is None else loc),
        tf.broadcast_dynamic_shape(tf.shape(self.atol), tf.shape(self.rtol)))

  def _batch_shape(self):
    return tf.broadcast_static_shape(
        self.loc.shape,
        tf.broadcast_static_shape(self.atol.shape, self.rtol.shape))

  def _event_shape_tensor(self, loc=None):
    del loc
    # Scalar distribution: empty event shape.
    return tf.constant([], dtype=tf.int32)

  def _event_shape(self):
    return tf.TensorShape([])

  def _prob(self, x):
    loc = tf.convert_to_tensor(self.loc)
    # Enforces dtype of probability to be float, when self.dtype is not.
    prob_dtype = self.dtype if dtype_util.is_floating(
        self.dtype) else tf.float32
    return tf.cast(tf.abs(x - loc) <= self._slack(loc), dtype=prob_dtype)

  def _cdf(self, x):
    loc = tf.identity(self.loc)
    # Step function: jumps from 0 to 1 at `loc` (minus the tolerance band).
    return tf.cast(x >= loc - self._slack(loc), dtype=self.dtype)
class VectorDeterministic(_BaseDeterministic):
  """Vector point-mass distribution supported at `loc` in `R^k`."""

  def __init__(self,
               loc,
               atol=None,
               rtol=None,
               validate_args=False,
               allow_nan_stats=True,
               name='VectorDeterministic'):
    """Initialize a `VectorDeterministic` on `R^k`.

    `k` is the size of the last dimension of `loc`; `atol`/`rtol` set the
    per-component slack used in `prob` (both default to 0).
    """
    parameters = dict(locals())
    super(VectorDeterministic, self).__init__(
        loc,
        atol=atol,
        rtol=rtol,
        is_vector=True,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        name=name)

  @classmethod
  def _params_event_ndims(cls):
    # Each parameter carries one event dimension (the `k` vector axis).
    return dict(loc=1, atol=1, rtol=1)

  def _batch_shape_tensor(self, loc=None):
    # Broadcast of loc/atol/rtol shapes, minus the trailing event dim.
    return tf.broadcast_dynamic_shape(
        tf.shape(self.loc if loc is None else loc),
        tf.broadcast_dynamic_shape(tf.shape(self.atol),
                                   tf.shape(self.rtol)))[:-1]

  def _batch_shape(self):
    return tf.broadcast_static_shape(
        self.loc.shape,
        tf.broadcast_static_shape(self.atol.shape, self.rtol.shape))[:-1]

  def _event_shape_tensor(self, loc=None):
    return tf.shape(self.loc if loc is None else loc)[-1:]

  def _event_shape(self):
    return self.loc.shape[-1:]

  def _prob(self, x):
    loc = tf.convert_to_tensor(self.loc)
    # Probability 1 iff every component of `x` lies in the tolerance band.
    return tf.cast(
        tf.reduce_all(tf.abs(x - loc) <= self._slack(loc), axis=-1),
        dtype=self.dtype)

  def _sample_control_dependencies(self, x):
    """Runtime checks that samples `x` live in the event space `R^k`."""
    assertions = []
    if not self.validate_args:
      return assertions
    assertions.append(assert_util.assert_rank_at_least(x, 1))
    assertions.append(assert_util.assert_equal(
        self.event_shape_tensor(), tf.gather(tf.shape(x), tf.rank(x) - 1),
        message=('Argument `x` not defined in the same space '
                 'R**k as this distribution')))
    return assertions
@kullback_leibler.RegisterKL(_BaseDeterministic, distribution.Distribution)
def _kl_deterministic_distribution(a, b, name=None):
  """Batched `KL(a || b)` with `a` a point mass: reduces to `-b.log_prob(a.loc)`
  because `a` has all its mass at `a.loc` and zero entropy."""
  with tf.name_scope(name or 'kl_deterministic_distribution'):
    return -b.log_prob(a.loc)
| true
| true
|
f71a8d25f1b81ca9d952d8f9624d010c487bd0bf
| 185
|
py
|
Python
|
tests/test_app/apps.py
|
JiriKr/django-migrate-sql
|
b848acb14679ce8bf472d91e52c85afcce2c5db2
|
[
"ISC"
] | 13
|
2016-01-05T12:21:11.000Z
|
2021-08-30T05:41:39.000Z
|
tests/test_app/apps.py
|
JiriKr/django-migrate-sql
|
b848acb14679ce8bf472d91e52c85afcce2c5db2
|
[
"ISC"
] | 10
|
2015-12-27T14:40:31.000Z
|
2020-04-01T11:40:36.000Z
|
tests/test_app/apps.py
|
JiriKr/django-migrate-sql
|
b848acb14679ce8bf472d91e52c85afcce2c5db2
|
[
"ISC"
] | 3
|
2017-10-29T11:26:27.000Z
|
2019-01-03T17:16:54.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class TestAppConfig(AppConfig):
    """Django application configuration for the `test_app` package."""
    name = 'test_app'
    verbose_name = 'Test App'
| 18.5
| 39
| 0.718919
|
from __future__ import unicode_literals
from django.apps import AppConfig
class TestAppConfig(AppConfig):
name = 'test_app'
verbose_name = 'Test App'
| true
| true
|
f71a8db5b96c9e6d722390a922326bcdd0e4974b
| 534
|
py
|
Python
|
src/quom/tokenizer/token.py
|
Viatorus/Quom
|
5f2aa90a86a8eed5689670748967ab8d4de2d9c1
|
[
"MIT"
] | 90
|
2018-11-27T21:49:32.000Z
|
2022-03-13T08:48:51.000Z
|
src/quom/tokenizer/token.py
|
Viatorus/Quom
|
5f2aa90a86a8eed5689670748967ab8d4de2d9c1
|
[
"MIT"
] | 12
|
2018-12-04T22:18:36.000Z
|
2021-08-15T11:41:15.000Z
|
src/quom/tokenizer/token.py
|
Viatorus/Quom
|
5f2aa90a86a8eed5689670748967ab8d4de2d9c1
|
[
"MIT"
] | 2
|
2021-06-11T14:11:07.000Z
|
2021-08-15T06:07:28.000Z
|
from .iterator import Span, RawIterator
class Token:
    """A span of source text bounded by two iterator positions."""

    def __init__(self, start, end):
        # Copy both iterators so later advances by the caller cannot
        # move this token's boundaries.
        self.start = start.copy()
        self.end = end.copy()

    @property
    def raw(self):
        """The unprocessed text of the span."""
        raw_span = Span(RawIterator(self.start), RawIterator(self.end))
        return str(raw_span)

    def __str__(self):
        span = Span(self.start, self.end)
        return str(span)
class EmptyToken(Token):
    """A zero-length token spanning two empty raw iterators."""

    def __init__(self):
        # BUG FIX: removed a stray debug `print(self)` that was left in the
        # constructor and wrote to stdout on every instantiation.
        super().__init__(RawIterator(''), RawIterator(''))
class StartToken(Token):
    # Marker token for the beginning of a token stream; no extra behavior.
    pass
class EndToken(Token):
    # Marker token for the end of a token stream; no extra behavior.
    pass
| 17.8
| 72
| 0.627341
|
from .iterator import Span, RawIterator
class Token:
def __init__(self, start, end):
self.start = start.copy()
self.end = end.copy()
@property
def raw(self):
return str(Span(RawIterator(self.start), RawIterator(self.end)))
def __str__(self):
return str(Span(self.start, self.end))
class EmptyToken(Token):
def __init__(self):
super().__init__(RawIterator(''), RawIterator(''))
print(self)
class StartToken(Token):
pass
class EndToken(Token):
pass
| true
| true
|
f71a8e2fb8856f94587904ef39bc7d65a4aae1c7
| 3,477
|
py
|
Python
|
src/transpiler/cppy/CodeGeneration.py
|
ArmindoFlores/cppy
|
5ce0832e79bbdb56b11cd03490ee1d6d09a454a0
|
[
"MIT"
] | 5
|
2021-12-24T00:11:22.000Z
|
2022-01-06T23:53:10.000Z
|
src/transpiler/cppy/CodeGeneration.py
|
ArmindoFlores/cppy
|
5ce0832e79bbdb56b11cd03490ee1d6d09a454a0
|
[
"MIT"
] | null | null | null |
src/transpiler/cppy/CodeGeneration.py
|
ArmindoFlores/cppy
|
5ce0832e79bbdb56b11cd03490ee1d6d09a454a0
|
[
"MIT"
] | null | null | null |
from . import PythonExpressions
class CodeBlock:
    """Abstract base for all code-generating nodes.

    Subclasses must override get_code(scope) to return the generated
    C++ source as a string.
    """

    def get_code(self, scope):
        # BUG FIX: previously returned the `NotImplemented` sentinel, which
        # is meant for binary-operator dispatch; an abstract method should
        # raise NotImplementedError so misuse fails loudly.
        raise NotImplementedError
class CBAssign(CodeBlock):
    """Emits a scoped variable assignment: SCOPE.set_var(name, path, value)."""

    def __init__(self, var, value):
        self._var = var
        self._value = value

    def get_code(self, scope):
        target_name = self._var.get_members()[0]
        scope_path = scope.get_scope_path()
        value_code = self._value.get_code(scope)
        return 'SCOPE.set_var("%s", "%s", %s);' % (
            target_name, scope_path, value_code)
class CBName(CodeBlock):
    """Emits a bare name expression as a standalone statement."""

    def __init__(self, var):
        self._var = var

    def get_code(self, scope):
        return "%s;" % self._var.get_code(scope)
class Scope(CodeBlock):
    """A lexical scope: holds declared variables and child code blocks, and
    can emit itself as a C++ function returning a cppy::PyObjectPtr.
    """

    def __init__(self, name, parent_ctx=None):
        self.name = name                # scope/function name
        self._parent_ctx = parent_ctx   # enclosing Scope, or None for root
        self._variables = {}            # locally declared variables
        self._code_blocks = []          # child CodeBlock nodes, in order

    def add_cb(self, cb):
        """Append a code block to this scope's body."""
        self._code_blocks.append(cb)

    def get_scope_path(self):
        """Dotted path from the root scope down to this one."""
        if self._parent_ctx is None:
            return self.name
        return self._parent_ctx.get_scope_path() + "." + self.name

    def get_code(self, scope):
        var_decl_code = ""
        # BUG FIX: the parent's get_code() was called without its required
        # `scope` argument (TypeError at runtime); forward the caller's scope.
        total_code = var_decl_code + (("\n" + self._parent_ctx.get_code(scope)) if self._parent_ctx is not None else "")
        # Indent every generated line of every child block by one tab.
        total_code += "\n".join("\n".join(("\t" + line for line in cb.get_code(self).splitlines())) for cb in self._code_blocks)
        total_code = f"\ncppy::PyObjectPtr {self.name}()\n\x7b\n{total_code}\n\treturn cppy::helpers::new_none();\n\x7d"
        return total_code

    def has_var(self, name):
        """True if `name` is visible in this scope or any ancestor."""
        if name in self._variables:
            return True
        if self._parent_ctx is not None:
            # BUG FIX: was `self._parent_ctx.hasvar(name)` — that method does
            # not exist and raised AttributeError on any nested lookup.
            return self._parent_ctx.has_var(name)
        return False

    def has_local_var(self, name):
        """True if `name` is declared directly in this scope."""
        return name in self._variables

    def get_var(self, name):
        """Look up `name` here, then in ancestors; None if not found."""
        if name in self._variables:
            return self._variables[name]
        if self._parent_ctx is not None:
            # BUG FIX: was `self._parent_ctx.getvar(name)` (nonexistent).
            return self._parent_ctx.get_var(name)
        return None

    def add_var(self, name, var):
        """Declare `name` locally; returns False if it already exists here."""
        if self.has_local_var(name):
            return False
        self._variables[name] = var
        return True
class CBIf(CodeBlock):
    """Emits an if / else-if / else chain, wrapping each condition in
    cppy::helpers::cbool to coerce it to a C++ bool.
    """

    def __init__(self, if_condition, if_body, elifs_conditions, elifs_bodies, else_body):
        self._if_cond = if_condition
        self._if_body = if_body
        self._elifs_conds = elifs_conditions
        self._elifs_bodies = elifs_bodies
        self._else_body = else_body

    def _emit_body(self, body, scope):
        # Indent every generated line of every block in `body` by one tab.
        indented = []
        for cb in body:
            indented.append(
                "\n".join("\t" + ln for ln in cb.get_code(scope).splitlines()))
        return "\n".join(indented)

    def get_code(self, scope):
        parts = []
        parts.append(
            "if (cppy::helpers::cbool(%s)) {\n" % self._if_cond.get_code(scope))
        parts.append(self._emit_body(self._if_body, scope))
        parts.append("\n}\n")
        for i in range(len(self._elifs_conds)):
            parts.append(
                "else if (cppy::helpers::cbool(%s)) {\n"
                % self._elifs_conds[i].get_code(scope))
            parts.append(self._emit_body(self._elifs_bodies[i], scope))
            parts.append("\n}\n")
        if self._else_body is not None:
            parts.append("else {\n")
            parts.append(self._emit_body(self._else_body, scope))
            parts.append("\n}\n")
        return "".join(parts)
| 36.989362
| 135
| 0.581823
|
from . import PythonExpressions
class CodeBlock:
def get_code(self, scope):
return NotImplemented
class CBAssign(CodeBlock):
def __init__(self, var, value):
self._var = var
self._value = value
def get_code(self, scope):
return f"SCOPE.set_var(\"{self._var.get_members()[0]}\", \"{scope.get_scope_path()}\", {self._value.get_code(scope)});"
class CBName(CodeBlock):
def __init__(self, var):
self._var = var
def get_code(self, scope):
return self._var.get_code(scope) + ";"
class Scope(CodeBlock):
def __init__(self, name, parent_ctx=None):
self.name = name
self._parent_ctx = parent_ctx
self._variables = {}
self._code_blocks = []
def add_cb(self, cb):
self._code_blocks.append(cb)
def get_scope_path(self):
if self._parent_ctx is None:
return self.name
return self._parent_ctx.get_scope_path() + "." + self.name
def get_code(self, scope):
var_decl_code = ""
total_code = var_decl_code + (("\n" + self._parent_ctx.get_code()) if self._parent_ctx is not None else "")
total_code += "\n".join("\n".join(("\t" + line for line in cb.get_code(self).splitlines())) for cb in self._code_blocks)
total_code = f"\ncppy::PyObjectPtr {self.name}()\n\x7b\n{total_code}\n\treturn cppy::helpers::new_none();\n\x7d"
return total_code
def has_var(self, name):
if name in self._variables:
return True
if self._parent_ctx is not None:
return self._parent_ctx.hasvar(name)
return False
def has_local_var(self, name):
return name in self._variables
def get_var(self, name):
if name in self._variables:
return self._variables[name]
if self._parent_ctx is not None:
return self._parent_ctx.getvar(name)
return None
def add_var(self, name, var):
if self.has_local_var(name):
return False
self._variables[name] = var
return True
class CBIf(CodeBlock):
def __init__(self, if_condition, if_body, elifs_conditions, elifs_bodies, else_body):
self._if_cond = if_condition
self._if_body = if_body
self._elifs_conds = elifs_conditions
self._elifs_bodies = elifs_bodies
self._else_body = else_body
def get_code(self, scope):
if_text = f"if (cppy::helpers::cbool({self._if_cond.get_code(scope)})) " + "{\n"
if_text += "\n".join("\n".join(("\t" + line for line in cb.get_code(scope).splitlines())) for cb in self._if_body)
if_text += "\n}\n"
for i in range(len(self._elifs_conds)):
if_text += f"else if (cppy::helpers::cbool({self._elifs_conds[i].get_code(scope)})) " + "{\n"
if_text += "\n".join("\n".join(("\t" + line for line in cb.get_code(scope).splitlines())) for cb in self._elifs_bodies[i])
if_text += "\n}\n"
if self._else_body is not None:
if_text += "else {\n"
if_text += "\n".join("\n".join(("\t" + line for line in cb.get_code(scope).splitlines())) for cb in self._else_body)
if_text += "\n}\n"
return if_text
| true
| true
|
f71a8fd9a5f02c00e4c48dcb982f21552d529470
| 4,796
|
py
|
Python
|
yt/frontends/athena/io.py
|
danielgrassinger/yt_new_frontend
|
5f91d2fb8721c4c5da0af543a6256ed979cd9fc9
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
yt/frontends/athena/io.py
|
danielgrassinger/yt_new_frontend
|
5f91d2fb8721c4c5da0af543a6256ed979cd9fc9
|
[
"BSD-3-Clause-Clear"
] | 1
|
2016-04-05T22:30:14.000Z
|
2016-04-05T22:30:14.000Z
|
yt/frontends/athena/io.py
|
danielgrassinger/yt_new_frontend
|
5f91d2fb8721c4c5da0af543a6256ed979cd9fc9
|
[
"BSD-3-Clause-Clear"
] | 1
|
2020-12-05T05:51:09.000Z
|
2020-12-05T05:51:09.000Z
|
"""
The data-file handling functions
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, yt Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from yt.utilities.io_handler import \
BaseIOHandler
import numpy as np
from yt.funcs import mylog, defaultdict
from .data_structures import chk23
float_size = {"float":np.dtype(">f4").itemsize,
"double":np.dtype(">f8").itemsize}
axis_list = ["_x","_y","_z"]
class IOHandlerAthena(BaseIOHandler):
    """IO handler that reads grid field data from Athena (legacy VTK) dumps."""
    _dataset_type = "athena"
    _offset_string = 'data:offsets=0'
    _data_string = 'data:datatype=0'
    _read_table_offset = None
    def _field_dict(self,fhandle):
        # Identity mapping over the file's field-type keys.
        keys = fhandle['field_types'].keys()
        val = fhandle['field_types'].keys()
        return dict(zip(keys,val))
    def _read_field_names(self,grid):
        # Field names come from the index, not the data file; nothing to do.
        pass
    def _read_chunk_data(self,chunk,fields):
        """Read `fields` for every grid in `chunk`.

        Returns {grid_id: {field: float64 ndarray}} with arrays shaped to the
        grid's active dimensions.
        """
        data = {}
        if len(chunk.objs) == 0: return data
        for grid in chunk.objs:
            if grid.filename is None:
                continue
            f = open(grid.filename, "rb")
            data[grid.id] = {}
            grid_dims = grid.ActiveDimensions
            read_dims = grid.read_dims.astype("int64")
            grid_ncells = np.prod(read_dims)
            grid0_ncells = np.prod(grid.index.grids[0].read_dims)
            read_table_offset = get_read_table_offset(f)
            for field in fields:
                ftype, offsetr, dtype = grid.index._field_map[field]
                # Rescale the recorded offset when this grid's cell count
                # differs from grid 0's — presumably the field map offsets
                # were computed for grid 0's size; TODO confirm.
                if grid_ncells != grid0_ncells:
                    offset = offsetr + ((grid_ncells-grid0_ncells) * (offsetr//grid0_ncells))
                if grid_ncells == grid0_ncells:
                    offset = offsetr
                offset = int(offset) # Casting to be certain.
                # Byte offset of this grid's z-slab within the data table.
                file_offset = grid.file_offset[2]*read_dims[0]*read_dims[1]*float_size[dtype]
                xread = slice(grid.file_offset[0],grid.file_offset[0]+grid_dims[0])
                yread = slice(grid.file_offset[1],grid.file_offset[1]+grid_dims[1])
                f.seek(read_table_offset+offset+file_offset)
                # Athena files are big-endian.
                if dtype == 'float':
                    dt = '>f4'
                elif dtype == 'double':
                    dt = '>f8'
                if ftype == 'scalar':
                    f.seek(read_table_offset+offset+file_offset)
                    v = np.fromfile(f, dtype=dt,
                                    count=grid_ncells).reshape(read_dims,order='F')
                if ftype == 'vector':
                    # Vector components are interleaved (x,y,z,x,y,z,...);
                    # select this field's component by its "_x"/"_y"/"_z" suffix.
                    vec_offset = axis_list.index(field[-1][-2:])
                    f.seek(read_table_offset+offset+3*file_offset)
                    v = np.fromfile(f, dtype=dt, count=3*grid_ncells)
                    v = v[vec_offset::3].reshape(read_dims,order='F')
                if grid.ds.field_ordering == 1:
                    data[grid.id][field] = v[xread,yread,:].T.astype("float64")
                else:
                    data[grid.id][field] = v[xread,yread,:].astype("float64")
            f.close()
        return data
    def _read_data_slice(self, grid, field, axis, coord):
        """Return a single 2D slice of `field` along `axis` at index `coord`."""
        sl = [slice(None), slice(None), slice(None)]
        sl[axis] = slice(coord, coord + 1)
        if grid.ds.field_ordering == 1:
            sl.reverse()
        return self._read_data_set(grid, field)[sl]
    def _read_fluid_selection(self, chunks, selector, fields, size):
        """Fill flat float64 arrays for `fields` over cells chosen by `selector`."""
        chunks = list(chunks)
        # Only native "athena" fields can be read directly from disk.
        if any((ftype != "athena" for ftype, fname in fields)):
            raise NotImplementedError
        rv = {}
        for field in fields:
            rv[field] = np.empty(size, dtype="float64")
        ng = sum(len(c.objs) for c in chunks)
        mylog.debug("Reading %s cells of %s fields in %s grids",
                    size, [f2 for f1, f2 in fields], ng)
        ind = 0
        for chunk in chunks:
            data = self._read_chunk_data(chunk, fields)
            for g in chunk.objs:
                for field in fields:
                    ftype, fname = field
                    # Pop to release each grid's array as soon as it is copied.
                    ds = data[g.id].pop(field)
                    nd = g.select(selector, ds, rv[field], ind) # caches
                    ind += nd
                data.pop(g.id)
        return rv
def get_read_table_offset(f):
    """Scan the header of an open Athena file for the CELL_DATA or POINT_DATA
    marker and return the file offset where the data table begins.

    Raises IOError if the marker is never found. (Previously this looped
    forever at EOF, since readline() keeps returning b'' there.)
    """
    line = f.readline()
    while line:
        splitup = line.strip().split()
        chkc = chk23('CELL_DATA')
        chkp = chk23('POINT_DATA')
        if chkc in splitup or chkp in splitup:
            # Skip the header line that follows the marker before recording
            # the offset of the data table.
            f.readline()
            return f.tell()
        line = f.readline()
    raise IOError("CELL_DATA/POINT_DATA marker not found in file header")
| 37.76378
| 93
| 0.533987
|
from yt.utilities.io_handler import \
BaseIOHandler
import numpy as np
from yt.funcs import mylog, defaultdict
from .data_structures import chk23
float_size = {"float":np.dtype(">f4").itemsize,
"double":np.dtype(">f8").itemsize}
axis_list = ["_x","_y","_z"]
class IOHandlerAthena(BaseIOHandler):
_dataset_type = "athena"
_offset_string = 'data:offsets=0'
_data_string = 'data:datatype=0'
_read_table_offset = None
def _field_dict(self,fhandle):
keys = fhandle['field_types'].keys()
val = fhandle['field_types'].keys()
return dict(zip(keys,val))
def _read_field_names(self,grid):
pass
def _read_chunk_data(self,chunk,fields):
data = {}
if len(chunk.objs) == 0: return data
for grid in chunk.objs:
if grid.filename is None:
continue
f = open(grid.filename, "rb")
data[grid.id] = {}
grid_dims = grid.ActiveDimensions
read_dims = grid.read_dims.astype("int64")
grid_ncells = np.prod(read_dims)
grid0_ncells = np.prod(grid.index.grids[0].read_dims)
read_table_offset = get_read_table_offset(f)
for field in fields:
ftype, offsetr, dtype = grid.index._field_map[field]
if grid_ncells != grid0_ncells:
offset = offsetr + ((grid_ncells-grid0_ncells) * (offsetr//grid0_ncells))
if grid_ncells == grid0_ncells:
offset = offsetr
offset = int(offset)
file_offset = grid.file_offset[2]*read_dims[0]*read_dims[1]*float_size[dtype]
xread = slice(grid.file_offset[0],grid.file_offset[0]+grid_dims[0])
yread = slice(grid.file_offset[1],grid.file_offset[1]+grid_dims[1])
f.seek(read_table_offset+offset+file_offset)
if dtype == 'float':
dt = '>f4'
elif dtype == 'double':
dt = '>f8'
if ftype == 'scalar':
f.seek(read_table_offset+offset+file_offset)
v = np.fromfile(f, dtype=dt,
count=grid_ncells).reshape(read_dims,order='F')
if ftype == 'vector':
vec_offset = axis_list.index(field[-1][-2:])
f.seek(read_table_offset+offset+3*file_offset)
v = np.fromfile(f, dtype=dt, count=3*grid_ncells)
v = v[vec_offset::3].reshape(read_dims,order='F')
if grid.ds.field_ordering == 1:
data[grid.id][field] = v[xread,yread,:].T.astype("float64")
else:
data[grid.id][field] = v[xread,yread,:].astype("float64")
f.close()
return data
def _read_data_slice(self, grid, field, axis, coord):
sl = [slice(None), slice(None), slice(None)]
sl[axis] = slice(coord, coord + 1)
if grid.ds.field_ordering == 1:
sl.reverse()
return self._read_data_set(grid, field)[sl]
def _read_fluid_selection(self, chunks, selector, fields, size):
chunks = list(chunks)
if any((ftype != "athena" for ftype, fname in fields)):
raise NotImplementedError
rv = {}
for field in fields:
rv[field] = np.empty(size, dtype="float64")
ng = sum(len(c.objs) for c in chunks)
mylog.debug("Reading %s cells of %s fields in %s grids",
size, [f2 for f1, f2 in fields], ng)
ind = 0
for chunk in chunks:
data = self._read_chunk_data(chunk, fields)
for g in chunk.objs:
for field in fields:
ftype, fname = field
ds = data[g.id].pop(field)
nd = g.select(selector, ds, rv[field], ind)
ind += nd
data.pop(g.id)
return rv
def get_read_table_offset(f):
line = f.readline()
while True:
splitup = line.strip().split()
chkc = chk23('CELL_DATA')
chkp = chk23('POINT_DATA')
if chkc in splitup or chkp in splitup:
f.readline()
read_table_offset = f.tell()
break
line = f.readline()
return read_table_offset
| true
| true
|
f71a90002a6262037bfee9acd3d8a0d96e934ba0
| 3,017
|
py
|
Python
|
src/configs/adult/adult_mlp_weighted.py
|
nbingo/sMOOth
|
aacdc5d24b931e534e984681923ec74f1103ca2f
|
[
"MIT"
] | null | null | null |
src/configs/adult/adult_mlp_weighted.py
|
nbingo/sMOOth
|
aacdc5d24b931e534e984681923ec74f1103ca2f
|
[
"MIT"
] | null | null | null |
src/configs/adult/adult_mlp_weighted.py
|
nbingo/sMOOth
|
aacdc5d24b931e534e984681923ec74f1103ca2f
|
[
"MIT"
] | null | null | null |
"""
An example config file to train a ImageNet classifier with detectron2.
Model and dataloader both come from torchvision.
This shows how to use detectron2 as a general engine for any new models and tasks.
To run, use the following command:
python tools/lazyconfig_train_net.py --config-file configs/Misc/torchvision_imagenet_R_50.py \
--num-gpus 8 dataloader.train.dataset.root=/path/to/imagenet/
"""
import yaml
import torch
from omegaconf import OmegaConf
from fvcore.common.param_scheduler import CosineParamScheduler
from detectron2.solver import WarmupParamScheduler
from detectron2.solver.build import get_default_optimizer_params
from detectron2.config import LazyConfig, LazyCall as L
from detectron2.evaluation import DatasetEvaluators
from src.configs.common.utils import build_data_loader
from src.models.adult_mlp import IncomeClassifier
from src.loaders.adult_loader import FeatDataset
from src.metrics.evaluators import ClassificationAcc, BinaryEqualizedOddsViolation
from src.metrics.losses import cross_entropy_loss, equalized_odds_violation, MultiObjectiveLoss
from src.harnesses.harnesses import MultiProcessHarness, SimpleHarness
dataloader = OmegaConf.create()
dataloader.train = L(build_data_loader)(
dataset=L(FeatDataset)(
subset='train',
income_const=yaml.load(open('/lfs/local/0/nomir/sMOOth/data/Adult/income.yml'), Loader=yaml.FullLoader)
),
batch_size=256,
num_workers=4,
training=True,
)
dataloader.test = L(build_data_loader)(
dataset=L(FeatDataset)(
subset='val',
income_const=yaml.load(open('/lfs/local/0/nomir/sMOOth/data/Adult/income.yml'), Loader=yaml.FullLoader)
),
batch_size=256,
num_workers=4,
training=False,
)
# Can also be list of DatasetEvaluators
dataloader.evaluator = L(DatasetEvaluators)(evaluators=(ClassificationAcc(), BinaryEqualizedOddsViolation()))
train = LazyConfig.load("/lfs/local/0/nomir/sMOOth/src/configs/common/train.py").train
train.init_checkpoint = None
# max_iter = number epochs * (train dataset size / batch size)
train.max_iter = 50 * 30162 // 256
train.eval_period = 30162 // 256
train.loss_fn = L(MultiObjectiveLoss)(losses=[cross_entropy_loss, equalized_odds_violation])
train.loss_tradeoff = torch.Tensor([0.5, 0.5])
# Arguments for multiprocess training
train.harness = SimpleHarness
train.num_workers = 1
train.gpus = [0] # TODO: Eventually want this to be a commandline arg
train.process_over_key = 'model.loss_fn'
train.process_over_vals = [cross_entropy_loss]
model = L(IncomeClassifier)(
in_dim=105,
hidden_dim=105,
num_hidden_blocks=2,
drop_prob=0.2,
out_dim=2,
loss_fn=train.loss_fn,
device=train.device,
)
optimizer = L(torch.optim.Adam)(
params=L(get_default_optimizer_params)(),
lr=1e-3,
weight_decay=1e-4,
)
lr_multiplier = L(WarmupParamScheduler)(
scheduler=L(CosineParamScheduler)(
start_value=0.1,
end_value=1e-4,
),
warmup_length=1 / 100,
warmup_factor=0.1,
)
| 32.793478
| 111
| 0.764667
|
import yaml
import torch
from omegaconf import OmegaConf
from fvcore.common.param_scheduler import CosineParamScheduler
from detectron2.solver import WarmupParamScheduler
from detectron2.solver.build import get_default_optimizer_params
from detectron2.config import LazyConfig, LazyCall as L
from detectron2.evaluation import DatasetEvaluators
from src.configs.common.utils import build_data_loader
from src.models.adult_mlp import IncomeClassifier
from src.loaders.adult_loader import FeatDataset
from src.metrics.evaluators import ClassificationAcc, BinaryEqualizedOddsViolation
from src.metrics.losses import cross_entropy_loss, equalized_odds_violation, MultiObjectiveLoss
from src.harnesses.harnesses import MultiProcessHarness, SimpleHarness
dataloader = OmegaConf.create()
dataloader.train = L(build_data_loader)(
dataset=L(FeatDataset)(
subset='train',
income_const=yaml.load(open('/lfs/local/0/nomir/sMOOth/data/Adult/income.yml'), Loader=yaml.FullLoader)
),
batch_size=256,
num_workers=4,
training=True,
)
dataloader.test = L(build_data_loader)(
dataset=L(FeatDataset)(
subset='val',
income_const=yaml.load(open('/lfs/local/0/nomir/sMOOth/data/Adult/income.yml'), Loader=yaml.FullLoader)
),
batch_size=256,
num_workers=4,
training=False,
)
dataloader.evaluator = L(DatasetEvaluators)(evaluators=(ClassificationAcc(), BinaryEqualizedOddsViolation()))
train = LazyConfig.load("/lfs/local/0/nomir/sMOOth/src/configs/common/train.py").train
train.init_checkpoint = None
train.max_iter = 50 * 30162 // 256
train.eval_period = 30162 // 256
train.loss_fn = L(MultiObjectiveLoss)(losses=[cross_entropy_loss, equalized_odds_violation])
train.loss_tradeoff = torch.Tensor([0.5, 0.5])
train.harness = SimpleHarness
train.num_workers = 1
train.gpus = [0]
train.process_over_key = 'model.loss_fn'
train.process_over_vals = [cross_entropy_loss]
model = L(IncomeClassifier)(
in_dim=105,
hidden_dim=105,
num_hidden_blocks=2,
drop_prob=0.2,
out_dim=2,
loss_fn=train.loss_fn,
device=train.device,
)
optimizer = L(torch.optim.Adam)(
params=L(get_default_optimizer_params)(),
lr=1e-3,
weight_decay=1e-4,
)
lr_multiplier = L(WarmupParamScheduler)(
scheduler=L(CosineParamScheduler)(
start_value=0.1,
end_value=1e-4,
),
warmup_length=1 / 100,
warmup_factor=0.1,
)
| true
| true
|
f71a90ab75738523d69c347c11d6351be429b483
| 2,311
|
py
|
Python
|
python/message_queues/pika_route.py
|
edgells/dev_coms
|
a7e50c32bcb45c6b6781e6d0514fda6ddf8aef02
|
[
"MIT"
] | null | null | null |
python/message_queues/pika_route.py
|
edgells/dev_coms
|
a7e50c32bcb45c6b6781e6d0514fda6ddf8aef02
|
[
"MIT"
] | null | null | null |
python/message_queues/pika_route.py
|
edgells/dev_coms
|
a7e50c32bcb45c6b6781e6d0514fda6ddf8aef02
|
[
"MIT"
] | null | null | null |
import random
import threading
import pika
"""
总结:
"""
def send():
    """Publish 100 rounds of one message per routing key ('info', 'error',
    'warn') to the 'direct_logs' direct exchange, then close the channel
    and the connection.
    """
    # Removed a dead `tag = random.choice(...)` assignment that was
    # immediately shadowed by the loop variable below.
    rb_conn = pika.BlockingConnection(pika.ConnectionParameters(host='192.168.101.129',
                                                                port=5672,
                                                                virtual_host='/',
                                                                credentials=pika.PlainCredentials(username='admin',
                                                                                                  password='admin')),
                                      )
    ch = rb_conn.channel()
    # Declaring is idempotent; make sure the exchange exists before publishing.
    ch.exchange_declare(exchange='direct_logs', exchange_type='direct')
    msg = b"hello world"
    for n in range(100):
        for tag in ['info', 'error', 'warn']:
            ch.basic_publish(exchange="direct_logs",
                             routing_key=tag,
                             body=msg)
    ch.close()
    # BUG FIX: the connection itself was previously leaked; close it too.
    rb_conn.close()
    print('send over')
def recv():
    """Consume from an exclusive temporary queue bound to the 'info', 'error'
    and 'warn' routing keys on the 'direct_logs' exchange; prints each
    delivery. Blocks forever in start_consuming().
    """
    rb_conn = pika.BlockingConnection(pika.ConnectionParameters(host='192.168.101.129',
                                                                port=5672,
                                                                virtual_host='/',
                                                                credentials=pika.PlainCredentials(username='admin',
                                                                                                  password='admin')),
                                      )
    ch = rb_conn.channel()
    ch.exchange_declare('direct_logs', exchange_type='direct')
    def callback(ch, method, p, msg):
        # Print thread id, routing key and raw body for each delivery.
        print(threading.get_ident(), '---', method.routing_key, '---', msg)
    # Exclusive, server-named queue: deleted when this consumer disconnects.
    queue = ch.queue_declare(queue='', exclusive=True)
    queue_name = queue.method.queue
    for tag in ['info', 'error', 'warn']:
        ch.queue_bind(exchange='direct_logs', queue=queue_name, routing_key=tag)
    ch.basic_consume(
        queue=queue_name,
        on_message_callback=callback,
        auto_ack=True
    )
    ch.start_consuming()
if __name__ == '__main__':
    # Start the consumer in a background thread, then publish from the main
    # thread. NOTE(review): recv() consumes forever, so join() never returns
    # and the process must be interrupted manually — confirm this is intended.
    rv = threading.Thread(target=recv)
    rv.start()
    send()
    rv.join()
| 32.549296
| 117
| 0.450887
|
import random
import threading
import pika
def send():
tag = random.choice(['info', 'error', 'warn'])
rb_conn = pika.BlockingConnection(pika.ConnectionParameters(host='192.168.101.129',
port=5672,
virtual_host='/',
credentials=pika.PlainCredentials(username='admin',
password='admin')),
)
ch = rb_conn.channel()
ch.exchange_declare(exchange='direct_logs', exchange_type='direct')
msg = b"hello world"
for n in range(100):
for tag in ['info', 'error', 'warn']:
ch.basic_publish(exchange="direct_logs",
routing_key=tag,
body=msg)
ch.close()
print('send over')
def recv():
rb_conn = pika.BlockingConnection(pika.ConnectionParameters(host='192.168.101.129',
port=5672,
virtual_host='/',
credentials=pika.PlainCredentials(username='admin',
password='admin')),
)
ch = rb_conn.channel()
ch.exchange_declare('direct_logs', exchange_type='direct')
def callback(ch, method, p, msg):
print(threading.get_ident(), '---', method.routing_key, '---', msg)
queue = ch.queue_declare(queue='', exclusive=True)
queue_name = queue.method.queue
for tag in ['info', 'error', 'warn']:
ch.queue_bind(exchange='direct_logs', queue=queue_name, routing_key=tag)
ch.basic_consume(
queue=queue_name,
on_message_callback=callback,
auto_ack=True
)
ch.start_consuming()
if __name__ == '__main__':
rv = threading.Thread(target=recv)
rv.start()
send()
rv.join()
| true
| true
|
f71a90c7288736f03f64c09624abaf7fafd6201a
| 2,080
|
py
|
Python
|
hippybot/plugins/plusplusbot.py
|
1stvamp/hippybot
|
931fb1accae295da3ae94184ef138aeedd5a726e
|
[
"BSD-2-Clause-FreeBSD"
] | 33
|
2015-03-03T08:41:56.000Z
|
2022-02-16T12:05:30.000Z
|
hippybot/plugins/plusplusbot.py
|
1stvamp/hippybot
|
931fb1accae295da3ae94184ef138aeedd5a726e
|
[
"BSD-2-Clause-FreeBSD"
] | 9
|
2015-01-09T00:29:33.000Z
|
2016-06-21T13:09:54.000Z
|
hippybot/plugins/plusplusbot.py
|
1stvamp/hippybot
|
931fb1accae295da3ae94184ef138aeedd5a726e
|
[
"BSD-2-Clause-FreeBSD"
] | 18
|
2015-01-07T22:40:45.000Z
|
2018-04-04T18:58:50.000Z
|
import os
import os.path
import re
import sqlite3dbm
from threading import RLock
from hippybot.hipchat import HipChatApi
from hippybot.decorators import botcmd, contentcmd
CONFIG_DIR = os.path.expanduser("~/.techbot")
DB = os.path.expanduser("~/.techbot/score.db")
class Plugin(object):
    """Plugin to handle knewton replacement of ++ bot in partychatapp.

    Tracks per-room "karma" scores in an sqlite3dbm shelve keyed by room JID.
    """
    global_commands = ['scores']

    def __init__(self, config=None):
        # BUG FIX: the class previously defined __init__ twice; the second
        # definition silently replaced the first, so the `config` argument
        # the framework passes raised TypeError. Merged into one __init__
        # with `config` optional (it was unused either way).
        self.rlock = RLock()
        self.db = self.get_db()

    def get_db(self):
        """Open (creating if necessary) the score database."""
        self.create_dir()
        db = sqlite3dbm.sshelve.open(DB)
        return db

    def create_dir(self):
        """Ensure the config directory for the database exists."""
        if not os.path.exists(CONFIG_DIR):
            os.mkdir(CONFIG_DIR)

    @contentcmd
    def change_score(self, mess, **kwargs):
        """Scan a message for trailing '++'/'--' (bare, or inside parens) and
        apply the score changes; returns the announcement lines, if any."""
        message = mess.getBody()
        if message:
            room = unicode(mess.getFrom()).split("/")[0]
            user = unicode(mess.getFrom()).split("/")[1]
            results = []
            if message.find('++') > -1 or message.find('--') > -1:
                self.bot.log.info("plusplusbot: %s" % mess)
                if message.endswith("++") or message.endswith("--"):
                    results.extend(self.process_message(message, room, user))
                # Parenthesised targets allow multi-word victims: "(a b)++".
                for m in re.findall(r"\((.*?)\)", message):
                    if m.endswith("++") or m.endswith("--"):
                        results.extend(self.process_message(m, room, user))
                if len(results) > 0:
                    return "\n".join(results)

    def process_message(self, message, room, user):
        """Apply one increment/decrement for `message` ("victim++"/"victim--")
        and return the single-line announcement."""
        victim = message[:-2]
        excl = "woot!"
        plus = 1
        if message.endswith('--'):
            excl = "ouch!"
            plus = -1
        with self.rlock:
            scores = self.db.get(room, {})
            score = scores.setdefault(victim, 0)
            score += plus
            scores[victim] = score
            # Re-assign so the shelve backend persists the mutation.
            self.db[room] = scores
        return ["[%s] %s [%s now at %s]" % (user, victim, excl, score)]

    @botcmd
    def scores(self, mess, args, **kwargs):
        """
        Prints all scores from this room
        Format: @NickName scores
        """
        self.bot.log.info("score: %s" % mess)
        room = unicode(mess.getFrom()).split("/")[0]
        ret = []
        with self.rlock:
            scores = self.db.get(room, {})
            for key in scores:
                ret.append("%s: %s" % (key, scores[key]))
        return '\n'.join(ret)
| 25.679012
| 66
| 0.639904
|
import os
import os.path
import re
import sqlite3dbm
from threading import RLock
from hippybot.hipchat import HipChatApi
from hippybot.decorators import botcmd, contentcmd
CONFIG_DIR = os.path.expanduser("~/.techbot")
DB = os.path.expanduser("~/.techbot/score.db")
class Plugin(object):
global_commands = ['scores']
def __init__(self, config):
pass
def __init__(self):
self.rlock = RLock()
self.db = self.get_db()
def get_db(self):
self.create_dir()
db = sqlite3dbm.sshelve.open(DB)
return db
def create_dir(self):
if not os.path.exists(CONFIG_DIR):
os.mkdir(CONFIG_DIR)
@contentcmd
def change_score(self, mess, **kwargs):
message = mess.getBody()
if message:
room = unicode(mess.getFrom()).split("/")[0]
user = unicode(mess.getFrom()).split("/")[1]
results = []
if message.find('++') > -1 or message.find('--') > -1:
self.bot.log.info("plusplusbot: %s" % mess)
if message.endswith("++") or message.endswith("--"):
results.extend(self.process_message(message, room, user))
for m in re.findall("\((.*?)\)", message):
if m.endswith("++") or m.endswith("--"):
results.extend(self.process_message(m, room, user))
if len(results) > 0:
return "\n".join(results)
def process_message(self, message, room, user):
results = []
victim = message[:-2]
excl = "woot!"
plus = 1
if message.endswith('--'):
excl = "ouch!"
plus = -1
with self.rlock:
scores = self.db.get(room, {})
score = scores.setdefault(victim, 0)
score += plus
scores[victim] = score
self.db[room] = scores
return ["[%s] %s [%s now at %s]" % (user, victim, excl, score)]
@botcmd
def scores(self, mess, args, **kwargs):
self.bot.log.info("score: %s" % mess)
room = unicode(mess.getFrom()).split("/")[0]
ret = []
with self.rlock:
scores = self.db.get(room, {})
for key in scores:
ret.append("%s: %s" %(key, scores[key]))
return '\n'.join(ret)
| true
| true
|
f71a90cbe34b2055cfe4879a68c1824ee28a3a13
| 8,902
|
py
|
Python
|
logchecker/__init__.py
|
Lifars/log-checker
|
462d3a0c66b5fa5a964689ce594cb70833960862
|
[
"MIT"
] | 6
|
2021-01-13T05:32:14.000Z
|
2022-02-18T01:35:09.000Z
|
logchecker/__init__.py
|
Lifars/log-checker
|
462d3a0c66b5fa5a964689ce594cb70833960862
|
[
"MIT"
] | null | null | null |
logchecker/__init__.py
|
Lifars/log-checker
|
462d3a0c66b5fa5a964689ce594cb70833960862
|
[
"MIT"
] | 1
|
2021-09-27T12:56:21.000Z
|
2021-09-27T12:56:21.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Logchecker tool for scanning log files against YETI Threat Intelligence Repository.
By LIFARS
This code is licensed under MIT license (see LICENSE for details)
"""
__version__ = "0.8"
__author__ = "LIFARS LLC"
__copyright__ = "Copyright (c) 2020,2021 LIFARS LLC"
__credits__ = ["LIFARS LLC"]
__license__ = "MIT"
__maintainer__ = "LIFARS LLC"
__status__ = "Production"
import argparse
import collections
import configparser
import csv
import json
import os
import re
import sys
import Evtx.Evtx as evtx
import pyeti
Config = collections.namedtuple("Config", ["url", "key", "output"])
def is_valid_file(parser, arg):
    """Argparse type-check: return `arg` if it names an existing path,
    otherwise abort argument parsing via parser.error (raises SystemExit)."""
    if os.path.exists(arg):
        return arg
    parser.error("The file %s does not exist!" % arg)
def main():
    """CLI entry point: parse arguments, resolve YETI connection settings
    (config file overrides --url/--key/--csv/--json), then scan the log file.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-c",
        "--config",
        help="Config file path. Config file should contain url of YETI database,"
        " authorization key and output format. If it is present, it overrides"
        " --url, --key and --csv/--json options.",
        type=argparse.FileType("r"),
    )
    parser.add_argument(
        "-f",
        "--file",
        help="[REQUIRED] Log file path.",
        type=lambda x: is_valid_file(parser, x),
        required=True,
    )
    parser.add_argument(
        "-o",
        "--output",
        help="Output file path. If file does not exist, creates new file."
        "If not specified, output is printed to STDOUT.",
        type=argparse.FileType("w+"),
    )
    parser.add_argument(
        "-a",
        "--address",
        default=False,
        action="store_true",
        help="Search only for ip addresses. If none of the address, "
        "domain or hash flag is specified, it search for all mentioned.",
    )
    parser.add_argument(
        "-d",
        "--domain",
        default=False,
        action="store_true",
        help="Search only for domains. If none of the address, "
        "domain or hash flag is specified, it search for all mentioned.",
    )
    parser.add_argument(
        "-H",
        "--hash",
        default=False,
        action="store_true",
        help="Search only for hashes. If none of the address, "
        "domain or hash flag is specified, it search for all mentioned.",
    )
    parser.add_argument(
        "-A",
        "--all",
        default=False,
        action="store_true",
        help="Show all values in logs. By default it shows only values "
        "which have record in database.",
    )
    # --csv and --json are mutually exclusive output formats.
    group = parser.add_mutually_exclusive_group()
    group.add_argument(
        "-C",
        "--csv",
        default=False,
        action="store_true",
        help="Output in CSV format. This is default option.",
    )
    group.add_argument(
        "-j",
        "--json",
        default=False,
        action="store_true",
        help="Output in JSON format. By default output is in CSV format.",
    )
    parser.add_argument("-u", "--url", help="URL of YETI instance.", type=str)
    parser.add_argument("-k", "--key", help="API key for YETI.", type=str)
    args = parser.parse_args()
    # A YETI URL must come from either the config file or --url.
    if not (args.config or args.url):
        parser.error(
            "Missing URL of YETI. Use --url URL or add config file using --config CONFIG"
        )
    # NOTE: these locals shadow the csv/json modules inside main() only.
    url = args.url
    key = args.key
    csv = args.csv
    json = args.json
    if args.config:
        # Config file wins over the individual command-line options.
        url, key, outf = parse_config_file(args.config)
        if outf.lower() == "json":
            json = True
            csv = False
        elif outf.lower() == "csv":
            json = False
            csv = True
        else:
            print("Unsupported output format. Using default", file=sys.stderr)
            json = False
            csv = True
    check_log_file(
        args.file,
        url,
        key,
        output=args.output,
        address=args.address,
        domain=args.domain,
        hash=args.hash,
        all=args.all,
        csv=csv,
        json=json,
    )
def parse_config_file(file):
    """Read YETI settings from the DEFAULT section of an INI config file.

    Expects ``url``, ``api_key`` and ``output_format`` keys and returns
    them bundled in a ``Config`` namedtuple.
    """
    parser = configparser.ConfigParser()
    parser.read_file(file)
    return Config(
        parser.get("DEFAULT", "url"),
        parser.get("DEFAULT", "api_key"),
        parser.get("DEFAULT", "output_format"),
    )
def check_log_file(file, url, key, **kwargs):
    """Extract IoCs from a log file and look each one up in a YETI instance.

    Args:
        file: path to the log file; ``.evtx`` files are parsed as Windows
            event logs, anything else as plain text.
        url: base URL of the YETI API.
        key: YETI API key.
        **kwargs: ``address``/``domain``/``hash`` restrict which IoC kinds
            are extracted; ``all`` also reports values unknown to YETI;
            ``output`` is a writable file object (default STDOUT);
            ``json`` selects JSON output (otherwise CSV);
            ``ret=True`` returns the result list instead of writing it.

    Returns:
        The result list when ``ret=True``, otherwise ``None``.
    """
    _, file_extension = os.path.splitext(file)
    print("reading file", file=sys.stderr)
    if file_extension == ".evtx":
        log = __read_evtx_file(file)
    else:
        log = __read_text_file(file)
    print("parsing file", file=sys.stderr)
    # Fix: forward the extraction flags. They were previously dropped here
    # (parse_log_file(log) with no kwargs), so the -a/-d/-H command line
    # options had no effect on extraction.
    values = parse_log_file(
        log,
        address=kwargs.get("address", False),
        domain=kwargs.get("domain", False),
        hash=kwargs.get("hash", False),
    )
    print("looking in database", file=sys.stderr)
    results = []
    a = kwargs.get("all", False)
    api = pyeti.YetiApi(url, api_key=key)
    for val, logs in values.items():
        result = {"value": val}
        yeti = api.observable_search(value=val)
        if yeti:
            result["tags"] = yeti[0].get("tags", [])
            result["created"] = yeti[0].get("created", "")
            result["sources"] = yeti[0].get("sources", [])
        else:
            result["tags"] = []
            result["created"] = ""
            result["sources"] = []
        result["original_log"] = logs
        # Unknown values are reported only when --all was requested.
        if yeti or a:
            results.append(result)
    print("writing results", file=sys.stderr)
    ret = kwargs.get("ret", False)
    if ret:
        return results
    output = kwargs.get("output", None)
    if not output:
        output = sys.stdout
    j = kwargs.get("json", False)
    if j:
        json.dump(results, output, indent=4, sort_keys=True)
    else:
        # CSV output: one row per (value, original log line) pair.
        fields = ["value", "tags", "created", "sources", "original_log"]
        results = __flatten(map(__unpack_logs, map(__csv_row, results)))
        writer = csv.DictWriter(output, fieldnames=fields, quoting=csv.QUOTE_ALL)
        writer.writeheader()
        writer.writerows(results)
    # Only close the stream if the caller supplied one (never sys.stdout).
    outfh = kwargs.get("output", None)
    if outfh:
        outfh.close()
    print("finished", file=sys.stderr)
def parse_log_file(log, **kwargs):
    """Extract IoC candidates (IPv4/IPv6 addresses, domains, hashes) from logs.

    Args:
        log: iterable of log-line strings.
        **kwargs: optional boolean flags ``address``, ``domain`` and
            ``hash``. If none is set, all three kinds are extracted.

    Returns:
        Dict mapping each extracted value (lower-cased, except IPv4
        addresses which contain no letters) to the list of lines in which
        it appeared.
    """
    # Raw strings throughout: the patterns contain backslash escapes such
    # as \. which are invalid escape sequences in plain string literals.
    addr_pattern = re.compile(r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}")
    ipv6_pattern = re.compile(
        r"(?:[0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|"
        r"fe80:(?::[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]+|"
        r"::(?:ffff(?::0{1,4})?:)?"
        r"(?:(?:25[0-5]|(?:2[0-4]|1?[0-9])?[0-9])\.){3}"
        r"(?:25[0-5]|(?:2[0-4]|1?[0-9])?[0-9])|"
        r"(?:[0-9a-fA-F]{1,4}:){1,4}:"
        r"(?:(?:25[0-5]|(?:2[0-4]|1?[0-9])?[0-9])\.){3}"
        r"(?:25[0-5]|(?:2[0-4]|1?[0-9])?[0-9])|"
        r":(?:(?::[0-9a-fA-F]{1,4}){1,7}|:)|"
        r"[0-9a-fA-F]{1,4}:(?:(?::[0-9a-fA-F]{1,4}){1,6})|"
        r"(?:[0-9a-fA-F]{1,4}:){1,2}(?::[0-9a-fA-F]{1,4}){1,5}|"
        r"(?:[0-9a-fA-F]{1,4}:){1,3}(?::[0-9a-fA-F]{1,4}){1,4}|"
        r"(?:[0-9a-fA-F]{1,4}:){1,4}(?::[0-9a-fA-F]{1,4}){1,3}|"
        r"(?:[0-9a-fA-F]{1,4}:){1,5}(?::[0-9a-fA-F]{1,4}){1,2}|"
        r"(?:[0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|"
        r"(?:[0-9a-fA-F]{1,4}:){1,7}:"
    )
    # IGNORECASE fix: matches are lower-cased below, but the old pattern
    # only matched lowercase, silently missing upper-case domains (common
    # in Windows event logs).
    domain_pattern = re.compile(
        r"(?:[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?\.)+[a-z]{2,6}", re.IGNORECASE
    )
    # SHA-256, SHA-1 or MD5 hex digests (longest alternative first).
    hash_pattern = re.compile(r"[0-9a-fA-F]{64}|[0-9a-fA-F]{40}|[0-9a-fA-F]{32}")
    a = kwargs.get("address", False)
    d = kwargs.get("domain", False)
    h = kwargs.get("hash", False)
    flags = a or d or h  # with no flag set, extract everything
    values = {}
    for line in log:
        if (not flags) or a:
            for match in addr_pattern.findall(line):
                values.setdefault(match, []).append(line)
            for match in ipv6_pattern.findall(line):
                values.setdefault(match.lower(), []).append(line)
        if (not flags) or d:
            for match in domain_pattern.findall(line):
                values.setdefault(match.lower(), []).append(line)
        if (not flags) or h:
            for match in hash_pattern.findall(line):
                values.setdefault(match.lower(), []).append(line)
    # Drop the XML-namespace host that appears in every Windows event record.
    values.pop("schemas.microsoft.com", None)
    return values
def __read_evtx_file(file):
    """Return the records of a Windows .evtx file as a list of XML strings."""
    with evtx.Evtx(file) as log_file:
        return [evtx.Record.xml(record) for record in log_file.records()]
def __read_text_file(file):
    """Return the lines of a plain-text log file, newline characters stripped."""
    with open(file) as handle:
        return handle.read().splitlines()
def __dict_to_string(d):
    """Render a dict as space-separated ``key:value`` pairs."""
    pairs = ("{}:{}".format(k, v) for k, v in d.items())
    return " ".join(pairs)
def __list_to_string(li):
    """Join the given strings with single spaces."""
    separator = " "
    return separator.join(li)
def __csv_row(d):
    """Flatten the tags/sources lists of a result dict into plain strings.

    Mutates and returns *d* so it can be used with map().
    """
    tag_strings = [__dict_to_string(tag) for tag in d["tags"]]
    d["tags"] = __list_to_string(tag_strings)
    d["sources"] = __list_to_string(d["sources"])
    return d
def __unpack_logs(d):
    """Expand one result dict into a list of copies, one per original log line."""
    return [dict(d, original_log=line) for line in d["original_log"]]
def __flatten(li):
    """Concatenate an iterable of lists into a single flat list."""
    flat = []
    for sub in li:
        flat.extend(sub)
    return flat
if __name__ == "__main__":
main()
| 28.902597
| 89
| 0.552123
|
__version__ = "0.8"
__author__ = "LIFARS LLC"
__copyright__ = "Copyright (c) 2020,2021 LIFARS LLC"
__credits__ = ["LIFARS LLC"]
__license__ = "MIT"
__maintainer__ = "LIFARS LLC"
__status__ = "Production"
import argparse
import collections
import configparser
import csv
import json
import os
import re
import sys
import Evtx.Evtx as evtx
import pyeti
Config = collections.namedtuple("Config", ["url", "key", "output"])
def is_valid_file(parser, arg):
    """Argparse type callback: return *arg* if it names an existing path.

    Aborts argument parsing via parser.error() (which raises SystemExit)
    when the path does not exist.
    """
    if os.path.exists(arg):
        return arg
    parser.error("The file %s does not exist!" % arg)
def main():
    """Command line entry point: parse arguments and run the log check.

    Builds the CLI, resolves YETI connection settings (an optional config
    file overrides --url/--key and the output-format flags), then hands
    off to check_log_file().
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-c",
        "--config",
        help="Config file path. Config file should contain url of YETI database,"
        " authorization key and output format. If it is present, it overrides"
        " --url, --key and --csv/--json options.",
        type=argparse.FileType("r"),
    )
    parser.add_argument(
        "-f",
        "--file",
        help="[REQUIRED] Log file path.",
        type=lambda x: is_valid_file(parser, x),
        required=True,
    )
    parser.add_argument(
        "-o",
        "--output",
        help="Output file path. If file does not exist, creates new file."
        "If not specified, output is printed to STDOUT.",
        type=argparse.FileType("w+"),
    )
    parser.add_argument(
        "-a",
        "--address",
        default=False,
        action="store_true",
        help="Search only for ip addresses. If none of the address, "
        "domain or hash flag is specified, it search for all mentioned.",
    )
    parser.add_argument(
        "-d",
        "--domain",
        default=False,
        action="store_true",
        help="Search only for domains. If none of the address, "
        "domain or hash flag is specified, it search for all mentioned.",
    )
    parser.add_argument(
        "-H",
        "--hash",
        default=False,
        action="store_true",
        help="Search only for hashes. If none of the address, "
        "domain or hash flag is specified, it search for all mentioned.",
    )
    parser.add_argument(
        "-A",
        "--all",
        default=False,
        action="store_true",
        help="Show all values in logs. By default it shows only values "
        "which have record in database.",
    )
    # CSV and JSON output are mutually exclusive; CSV is the default.
    group = parser.add_mutually_exclusive_group()
    group.add_argument(
        "-C",
        "--csv",
        default=False,
        action="store_true",
        help="Output in CSV format. This is default option.",
    )
    group.add_argument(
        "-j",
        "--json",
        default=False,
        action="store_true",
        help="Output in JSON format. By default output is in CSV format.",
    )
    parser.add_argument("-u", "--url", help="URL of YETI instance.", type=str)
    parser.add_argument("-k", "--key", help="API key for YETI.", type=str)
    args = parser.parse_args()
    # A YETI endpoint must come from either --url or the config file.
    if not (args.config or args.url):
        parser.error(
            "Missing URL of YETI. Use --url URL or add config file using --config CONFIG"
        )
    # NOTE: these locals shadow the csv/json modules inside main() only.
    url = args.url
    key = args.key
    csv = args.csv
    json = args.json
    # Config file settings take precedence over the command line flags.
    if args.config:
        url, key, outf = parse_config_file(args.config)
        if outf.lower() == "json":
            json = True
            csv = False
        elif outf.lower() == "csv":
            json = False
            csv = True
        else:
            print("Unsupported output format. Using default", file=sys.stderr)
            json = False
            csv = True
    check_log_file(
        args.file,
        url,
        key,
        output=args.output,
        address=args.address,
        domain=args.domain,
        hash=args.hash,
        all=args.all,
        csv=csv,
        json=json,
    )
def parse_config_file(file):
config = configparser.ConfigParser()
config.read_file(file)
url = config.get("DEFAULT", "url")
key = config.get("DEFAULT", "api_key")
output = config.get("DEFAULT", "output_format")
return Config(url, key, output)
def check_log_file(file, url, key, **kwargs):
_, file_extension = os.path.splitext(file)
print("reading file", file=sys.stderr)
if file_extension == ".evtx":
log = __read_evtx_file(file)
else:
log = __read_text_file(file)
print("parsing file", file=sys.stderr)
values = parse_log_file(log)
print("looking in database", file=sys.stderr)
results = []
a = kwargs.get("all", False)
api = pyeti.YetiApi(url, api_key=key)
for val, logs in values.items():
result = {"value": val}
yeti = api.observable_search(value=val)
if yeti:
result["tags"] = yeti[0].get("tags", [])
result["created"] = yeti[0].get("created", "")
result["sources"] = yeti[0].get("sources", [])
else:
result["tags"] = []
result["created"] = ""
result["sources"] = []
result["original_log"] = logs
if yeti or a:
results.append(result)
print("writing results", file=sys.stderr)
ret = kwargs.get("ret", False)
if ret:
return results
output = kwargs.get("output", None)
if not output:
output = sys.stdout
j = kwargs.get("json", False)
if j:
json.dump(results, output, indent=4, sort_keys=True)
else:
fields = ["value", "tags", "created", "sources", "original_log"]
results = __flatten(map(__unpack_logs, map(__csv_row, results)))
writer = csv.DictWriter(output, fieldnames=fields, quoting=csv.QUOTE_ALL)
writer.writeheader()
writer.writerows(results)
outfh = kwargs.get("output", None)
if outfh:
outfh.close()
print("finished", file=sys.stderr)
def parse_log_file(log, **kwargs):
addr_pattern = re.compile("(?:[0-9]{1,3}\.){3}[0-9]{1,3}")
ipv6_pattern = re.compile(
"(?:[0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|"
"fe80:(?::[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]+|"
"::(?:ffff(?::0{1,4})?:)?"
"(?:(?:25[0-5]|(?:2[0-4]|1?[0-9])?[0-9])\.){3}"
"(?:25[0-5]|(?:2[0-4]|1?[0-9])?[0-9])|"
"(?:[0-9a-fA-F]{1,4}:){1,4}:"
"(?:(?:25[0-5]|(?:2[0-4]|1?[0-9])?[0-9])\.){3}"
"(?:25[0-5]|(?:2[0-4]|1?[0-9])?[0-9])|"
":(?:(?::[0-9a-fA-F]{1,4}){1,7}|:)|"
"[0-9a-fA-F]{1,4}:(?:(?::[0-9a-fA-F]{1,4}){1,6})|"
"(?:[0-9a-fA-F]{1,4}:){1,2}(?::[0-9a-fA-F]{1,4}){1,5}|"
"(?:[0-9a-fA-F]{1,4}:){1,3}(?::[0-9a-fA-F]{1,4}){1,4}|"
"(?:[0-9a-fA-F]{1,4}:){1,4}(?::[0-9a-fA-F]{1,4}){1,3}|"
"(?:[0-9a-fA-F]{1,4}:){1,5}(?::[0-9a-fA-F]{1,4}){1,2}|"
"(?:[0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|"
"(?:[0-9a-fA-F]{1,4}:){1,7}:"
)
domain_pattern = re.compile("(?:[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?\.)+[a-z]{2,6}")
hash_pattern = re.compile("[0-9a-fA-F]{64}|[0-9a-fA-F]{40}|[0-9a-fA-F]{32}")
a = kwargs.get("address", False)
d = kwargs.get("domain", False)
h = kwargs.get("hash", False)
flags = a or d or h
values = {}
for line in log:
if (not flags) or a:
addr = addr_pattern.findall(line)
for match in addr:
values.setdefault(match, []).append(line)
addr = ipv6_pattern.findall(line)
for match in addr:
values.setdefault(match.lower(), []).append(line)
if (not flags) or d:
dom = domain_pattern.findall(line)
for match in dom:
values.setdefault(match.lower(), []).append(line)
if (not flags) or h:
ha = hash_pattern.findall(line)
for match in ha:
values.setdefault(match.lower(), []).append(line)
values.pop("schemas.microsoft.com", None)
return values
def __read_evtx_file(file):
with evtx.Evtx(file) as f:
log = list(map(evtx.Record.xml, f.records()))
return log
def __read_text_file(file):
with open(file) as f:
log = f.read().splitlines()
return log
def __dict_to_string(d):
return " ".join(["{}:{}".format(key, val) for key, val in d.items()])
def __list_to_string(li):
return " ".join(li)
def __csv_row(d):
d["tags"] = __list_to_string([__dict_to_string(tag) for tag in d["tags"]])
d["sources"] = __list_to_string(d["sources"])
return d
def __unpack_logs(d):
result = []
for log in d["original_log"]:
new = d.copy()
new["original_log"] = log
result.append(new)
return result
def __flatten(li):
return [item for sublist in li for item in sublist]
if __name__ == "__main__":
main()
| true
| true
|
f71a912403bfab59958931030305960d9f1ae9a4
| 1,594
|
py
|
Python
|
python/perspective/perspective/core/plugin.py
|
JKGu/perspective
|
7b319b7896e58d5860b72bd8756997976f9a7722
|
[
"Apache-2.0"
] | 1
|
2020-05-12T10:41:12.000Z
|
2020-05-12T10:41:12.000Z
|
python/perspective/perspective/core/plugin.py
|
JKGu/perspective
|
7b319b7896e58d5860b72bd8756997976f9a7722
|
[
"Apache-2.0"
] | null | null | null |
python/perspective/perspective/core/plugin.py
|
JKGu/perspective
|
7b319b7896e58d5860b72bd8756997976f9a7722
|
[
"Apache-2.0"
] | null | null | null |
################################################################################
#
# Copyright (c) 2019, the Perspective Authors.
#
# This file is part of the Perspective library, distributed under the terms of
# the Apache License 2.0. The full license can be found in the LICENSE file.
#
from enum import Enum
class Plugin(Enum):
    '''The plugins (grids/charts) available in Perspective. Pass these into
    the `plugin` arg in `PerspectiveWidget` or `PerspectiveViewer`.

    Members sharing a value (e.g. GRID/HYPERGRID, OHLC/OHLC_D3) are enum
    aliases and compare identical.

    Examples:
        >>> widget = PerspectiveWidget(data, plugin=Plugin.TREEMAP)
    '''
    HYPERGRID = 'hypergrid'  # hypergrid
    GRID = 'hypergrid'  # hypergrid (alias of HYPERGRID)

    YBAR = 'y_bar'  # highcharts
    XBAR = 'x_bar'  # highcharts
    YLINE = 'y_line'  # highcharts
    YAREA = 'y_area'  # highcharts
    YSCATTER = 'y_scatter'  # highcharts
    XYLINE = 'xy_line'  # highcharts
    XYSCATTER = 'xy_scatter'  # highcharts
    TREEMAP = 'treemap'  # highcharts
    SUNBURST = 'sunburst'  # highcharts
    HEATMAP = 'heatmap'  # highcharts

    YBAR_D3 = 'd3_y_bar'  # d3fc
    XBAR_D3 = 'd3_x_bar'  # d3fc
    YLINE_D3 = 'd3_y_line'  # d3fc
    YAREA_D3 = 'd3_y_area'  # d3fc
    YSCATTER_D3 = 'd3_y_scatter'  # d3fc
    XYSCATTER_D3 = 'd3_xy_scatter'  # d3fc
    TREEMAP_D3 = 'd3_treemap'  # d3fc
    SUNBURST_D3 = 'd3_sunburst'  # d3fc
    HEATMAP_D3 = 'd3_heatmap'  # d3fc

    CANDLESTICK = 'd3_candlestick'  # d3fc
    CANDLESTICK_D3 = 'd3_candlestick'  # d3fc (alias of CANDLESTICK)
    OHLC = 'd3_ohlc'  # d3fc
    OHLC_D3 = 'd3_ohlc'  # d3fc (alias of OHLC)

    @staticmethod
    def options():
        """Return the list of plugin string values (aliases excluded,
        since iterating an Enum skips alias members)."""
        # List comprehension instead of list(generator) — same result,
        # clearer and cheaper (flake8-comprehensions C400).
        return [plugin.value for plugin in Plugin]
| 31.254902
| 80
| 0.617942
| true
| true
|
|
f71a913f37c249d4a0288dfa1a5ae20fc0e63d6e
| 275
|
py
|
Python
|
BasicPythonPrograms/pythonExe16.py
|
Pushkar745/PythonProgramming
|
ea60e97b70d46fb63ef203913c8b3f9570232dd3
|
[
"Apache-2.0"
] | null | null | null |
BasicPythonPrograms/pythonExe16.py
|
Pushkar745/PythonProgramming
|
ea60e97b70d46fb63ef203913c8b3f9570232dd3
|
[
"Apache-2.0"
] | null | null | null |
BasicPythonPrograms/pythonExe16.py
|
Pushkar745/PythonProgramming
|
ea60e97b70d46fb63ef203913c8b3f9570232dd3
|
[
"Apache-2.0"
] | null | null | null |
# Explicit helper function.
def digitSum(n):
    """Return the sum of the decimal digits of integer n.

    The sign is ignored: the original iterated str(n), so a negative
    input crashed on int('-') with ValueError.
    """
    return sum(int(digit) for digit in str(abs(n)))
# Sample input values.
List=[367,111,562,945,6726,873]
# Apply digitSum to the odd elements only (i & 1 tests the low bit).
newList=[digitSum(i) for i in List if i & 1]
print(newList)
| 25
| 46
| 0.665455
|
def digitSum(n):
dsum=0
for ele in str(n):
dsum+=int (ele)
return dsum
List=[367,111,562,945,6726,873]
newList=[digitSum(i) for i in List if i & 1]
print(newList)
| true
| true
|
f71a9211a58e7ed7bd817495b5f5893f861323b7
| 19,870
|
py
|
Python
|
examples/resnet34_imagenet/resnet34.py
|
FujitsuResearch/automatic_pruning
|
b3bb525b736ca3e465cb6fb87f134748424a0fe5
|
[
"BSD-3-Clause-Clear"
] | 2
|
2022-01-25T12:28:21.000Z
|
2022-01-25T12:29:05.000Z
|
examples/resnet34_imagenet/resnet34.py
|
FujitsuResearch/automatic_pruning
|
b3bb525b736ca3e465cb6fb87f134748424a0fe5
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
examples/resnet34_imagenet/resnet34.py
|
FujitsuResearch/automatic_pruning
|
b3bb525b736ca3e465cb6fb87f134748424a0fe5
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
# resnet34.py COPYRIGHT Fujitsu Limited 2022
import torch.nn as nn
import torch.nn.functional as F
def zero_padding(x1, x2):
    """Equalize the channel counts of x1 and x2 by zero-padding the thinner one.

    The channel difference is split across the front and back of the channel
    dimension; when it is odd, the extra zero channel goes at the back.
    Returns the (possibly padded) pair (x1, x2).
    """
    ch1 = x1.size()[1]
    ch2 = x2.size()[1]
    diff = abs(ch1 - ch2)
    front = diff // 2
    back = diff - front  # equals front, or front + 1 when diff is odd
    if ch1 < ch2:
        x1 = F.pad(x1, (0, 0, 0, 0, front, back), "constant", 0)
    elif ch1 > ch2:
        x2 = F.pad(x2, (0, 0, 0, 0, front, back), "constant", 0)
    return x1, x2
def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
    """Bias-free 3x3 convolution; padding equals the dilation so the
    spatial size is preserved at stride 1."""
    conv_kwargs = dict(
        kernel_size=3,
        stride=stride,
        padding=dilation,
        dilation=dilation,
        groups=groups,
        bias=False,
    )
    return nn.Conv2d(in_planes, out_planes, **conv_kwargs)
def conv1x1(in_planes, out_planes, stride=1):
    """Bias-free 1x1 convolution, typically used for shortcut projections."""
    return nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=1,
        stride=stride,
        bias=False,
    )
class BasicBlock(nn.Module):
    """ResNet basic block (two 3x3 convs + residual add) with explicit,
    possibly pruned, channel widths.

    Unlike torchvision's BasicBlock, the conv widths come from
    n_in_channels/n_channels1/n_channels2 rather than inplanes/planes
    (which are kept only for signature compatibility), so each conv can
    be pruned independently; the residual add zero-pads whichever side
    is thinner (see zero_padding).
    """
    # Output-channel multiplier of the block (1 for basic blocks).
    expansion = 1
    def __init__(
        self,
        inplanes,
        planes,
        stride=1,
        downsample=None,
        groups=1,
        base_width=64,
        dilation=1,
        norm_layer=None,
        n_in_channels=None,
        n_channels1=None,
        n_channels2=None,
    ):
        """Build the block.

        Args:
            inplanes, planes: nominal (unpruned) widths; not used for the
                convs here — the explicit n_* channel counts are.
            stride: stride of the first conv (and of the downsample path).
            downsample: optional module applied to the shortcut input.
            groups, base_width, dilation: must stay at their defaults;
                other values raise, as in torchvision's BasicBlock.
            norm_layer: normalization constructor (default BatchNorm2d).
            n_in_channels: input width of conv1.
            n_channels1: output width of conv1 / input width of conv2.
            n_channels2: output width of conv2.
        """
        super(BasicBlock, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        if groups != 1 or base_width != 64:
            raise ValueError("BasicBlock only supports groups=1 and base_width=64")
        if dilation > 1:
            raise NotImplementedError("Dilation > 1 not supported in BasicBlock")
        # Both self.conv1 and self.downsample downsample the input when stride != 1.
        self.conv1 = conv3x3(n_in_channels, n_channels1, stride)
        self.bn1 = norm_layer(n_channels1)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(n_channels1, n_channels2)
        self.bn2 = norm_layer(n_channels2)
        # Shortcut projection built by the caller (may be None for identity).
        self.downsample = downsample
        self.stride = stride
    def forward(self, x):
        """conv-bn-relu, conv-bn, then zero-pad-matched residual add and relu."""
        identity = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        if self.downsample is not None:
            identity = self.downsample(x)
        # Channel widths may differ after pruning; pad the thinner tensor.
        out, identity = zero_padding(out, identity)
        out += identity
        out = self.relu(out)
        return out
class ResNet34(nn.Module):
def __init__(
self,
block=BasicBlock,
layers=[3, 4, 6, 3],
num_classes=1000,
zero_init_residual=False,
groups=1,
width_per_group=64,
replace_stride_with_dilation=None,
norm_layer=None,
ch_conv1=64,
ch_l10_1=64,
ch_l10_2=64,
ch_l11_1=64,
ch_l11_2=64,
ch_l12_1=64,
ch_l12_2=64,
ch_l20_1=128,
ch_l20_2=128,
ch_l20_ds=128,
ch_l21_1=128,
ch_l21_2=128,
ch_l22_1=128,
ch_l22_2=128,
ch_l23_1=128,
ch_l23_2=128,
ch_l30_1=256,
ch_l30_2=256,
ch_l30_ds=256,
ch_l31_1=256,
ch_l31_2=256,
ch_l32_1=256,
ch_l32_2=256,
ch_l33_1=256,
ch_l33_2=256,
ch_l34_1=256,
ch_l34_2=256,
ch_l35_1=256,
ch_l35_2=256,
ch_l40_1=512,
ch_l40_2=512,
ch_l40_ds=512,
ch_l41_1=512,
ch_l41_2=512,
ch_l42_1=512,
ch_l42_2=512,
):
super(ResNet34, self).__init__()
if norm_layer is None:
norm_layer = nn.BatchNorm2d
self._norm_layer = norm_layer
self.inplanes = 64
self.dilation = 1
if replace_stride_with_dilation is None:
# each element in the tuple indicates if we should replace
# the 2x2 stride with a dilated convolution instead
replace_stride_with_dilation = [False, False, False]
if len(replace_stride_with_dilation) != 3:
raise ValueError(
"replace_stride_with_dilation should be None "
"or a 3-element tuple, got {}".format(replace_stride_with_dilation)
)
self.groups = groups
self.base_width = width_per_group
self.conv1 = nn.Conv2d(3, ch_conv1, kernel_size=7, stride=2, padding=3, bias=False)
self.bn1 = norm_layer(ch_conv1)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
in_ch_l11 = max(ch_conv1, ch_l10_2)
in_ch_l12 = max(in_ch_l11, ch_l11_2)
self.layer1 = self._make_layer_3(block=block, planes=64, blocks=layers[0],
n_in_channels0=ch_conv1,
n_channels00=ch_l10_1,
n_channels01=ch_l10_2,
n_channels_ds=None,
n_in_channels1=in_ch_l11,
n_channels10=ch_l11_1,
n_channels11=ch_l11_2,
n_in_channels2=in_ch_l12,
n_channels20=ch_l12_1,
n_channels21=ch_l12_2,
)
in_ch_l20 = max(in_ch_l12, ch_l12_2)
in_ch_l21 = max(ch_l20_ds, ch_l20_2)
in_ch_l22 = max(in_ch_l21, ch_l21_2)
in_ch_l23 = max(in_ch_l22, ch_l22_2)
self.layer2 = self._make_layer_4(block, 128, layers[1], stride=2,
dilate=replace_stride_with_dilation[0],
n_in_channels0=in_ch_l20,
n_channels00=ch_l20_1,
n_channels01=ch_l20_2,
n_channels_ds=ch_l20_ds,
n_in_channels1=in_ch_l21,
n_channels10=ch_l21_1,
n_channels11=ch_l21_2,
n_in_channels2=in_ch_l22,
n_channels20=ch_l22_1,
n_channels21=ch_l22_2,
n_in_channels3=in_ch_l23,
n_channels30=ch_l23_1,
n_channels31=ch_l23_2,
)
in_ch_l30 = max(in_ch_l23, ch_l23_2)
in_ch_l31 = max(ch_l30_ds, ch_l30_2)
in_ch_l32 = max(in_ch_l31, ch_l31_2)
in_ch_l33 = max(in_ch_l32, ch_l32_2)
in_ch_l34 = max(in_ch_l33, ch_l33_2)
in_ch_l35 = max(in_ch_l34, ch_l34_2)
self.layer3 = self._make_layer_6(block, 256, layers[2], stride=2,
dilate=replace_stride_with_dilation[1],
n_in_channels0=in_ch_l30,
n_channels00=ch_l30_1,
n_channels01=ch_l30_2,
n_channels_ds=ch_l30_ds,
n_in_channels1=in_ch_l31,
n_channels10=ch_l31_1,
n_channels11=ch_l31_2,
n_in_channels2=in_ch_l32,
n_channels20=ch_l32_1,
n_channels21=ch_l32_2,
n_in_channels3=in_ch_l33,
n_channels30=ch_l33_1,
n_channels31=ch_l33_2,
n_in_channels4=in_ch_l34,
n_channels40=ch_l34_1,
n_channels41=ch_l34_2,
n_in_channels5=in_ch_l35,
n_channels50=ch_l35_1,
n_channels51=ch_l35_2,
)
in_ch_l40 = max(in_ch_l35, ch_l35_2)
in_ch_l41 = max(ch_l40_ds, ch_l40_2)
in_ch_l42 = max(in_ch_l41, ch_l41_2)
self.layer4 = self._make_layer_3(block, 512, layers[3], stride=2,
dilate=replace_stride_with_dilation[2],
n_in_channels0=in_ch_l40,
n_channels00=ch_l40_1,
n_channels01=ch_l40_2,
n_channels_ds=ch_l40_ds,
n_in_channels1=in_ch_l41,
n_channels10=ch_l41_1,
n_channels11=ch_l41_2,
n_in_channels2=in_ch_l42,
n_channels20=ch_l42_1,
n_channels21=ch_l42_2,
)
in_ch_fc = max(in_ch_l42, ch_l42_2)
self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
self.fc = nn.Linear(in_ch_fc, num_classes)
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu")
elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
# Zero-initialize the last BN in each residual branch,
# so that the residual branch starts with zeros, and each residual block behaves like an identity.
# This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677
if zero_init_residual:
for m in self.modules():
if isinstance(m, Bottleneck):
nn.init.constant_(m.bn3.weight, 0)
elif isinstance(m, BasicBlock):
nn.init.constant_(m.bn2.weight, 0)
def _make_layer_3(self, block, planes, blocks, stride=1, dilate=False,
n_in_channels0=None,
n_channels00=None, n_channels01=None,
n_channels_ds=None,
n_in_channels1=None,
n_channels10=None, n_channels11=None,
n_in_channels2=None,
n_channels20=None, n_channels21=None,
):
norm_layer = self._norm_layer
downsample = None
previous_dilation = self.dilation
if dilate:
self.dilation *= stride
stride = 1
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential( conv1x1(n_in_channels0, n_channels_ds, stride), norm_layer(n_channels_ds) )
self.inplanes = planes * block.expansion
layers = []
# layer_0
layers.append(
block(
self.inplanes,
planes,
stride,
downsample,
self.groups,
self.base_width,
previous_dilation,
norm_layer,
n_in_channels=n_in_channels0,
n_channels1=n_channels00,
n_channels2=n_channels01,
)
)
# layer_1
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels1,
n_channels1=n_channels10,
n_channels2=n_channels11,
)
)
# layer_2
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels2,
n_channels1=n_channels20,
n_channels2=n_channels21,
)
)
return nn.Sequential(*layers)
def _make_layer_4(self, block, planes, blocks, stride=1, dilate=False,
n_in_channels0=None,
n_channels00=None, n_channels01=None,
n_channels_ds=None,
n_in_channels1=None,
n_channels10=None, n_channels11=None,
n_in_channels2=None,
n_channels20=None, n_channels21=None,
n_in_channels3=None,
n_channels30=None, n_channels31=None,
):
norm_layer = self._norm_layer
downsample = None
previous_dilation = self.dilation
if dilate:
self.dilation *= stride
stride = 1
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential( conv1x1(n_in_channels0, n_channels_ds, stride), norm_layer(n_channels_ds) )
self.inplanes = planes * block.expansion
layers = []
# layer_0
layers.append(
block(
self.inplanes,
planes,
stride,
downsample,
self.groups,
self.base_width,
previous_dilation,
norm_layer,
n_in_channels=n_in_channels0,
n_channels1=n_channels00,
n_channels2=n_channels01,
)
)
# layer_1
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels1,
n_channels1=n_channels10,
n_channels2=n_channels11,
)
)
# layer_2
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels2,
n_channels1=n_channels20,
n_channels2=n_channels21,
)
)
# layer_3
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels3,
n_channels1=n_channels30,
n_channels2=n_channels31,
)
)
return nn.Sequential(*layers)
def _make_layer_6(self, block, planes, blocks, stride=1, dilate=False,
n_in_channels0=None,
n_channels00=None, n_channels01=None,
n_channels_ds=None,
n_in_channels1=None,
n_channels10=None, n_channels11=None,
n_in_channels2=None,
n_channels20=None, n_channels21=None,
n_in_channels3=None,
n_channels30=None, n_channels31=None,
n_in_channels4=None,
n_channels40=None, n_channels41=None,
n_in_channels5=None,
n_channels50=None, n_channels51=None,
):
norm_layer = self._norm_layer
downsample = None
previous_dilation = self.dilation
if dilate:
self.dilation *= stride
stride = 1
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential( conv1x1(n_in_channels0, n_channels_ds, stride), norm_layer(n_channels_ds) )
self.inplanes = planes * block.expansion
layers = []
# layer_0
layers.append(
block(
self.inplanes,
planes,
stride,
downsample,
self.groups,
self.base_width,
previous_dilation,
norm_layer,
n_in_channels=n_in_channels0,
n_channels1=n_channels00,
n_channels2=n_channels01,
)
)
# layer_1
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels1,
n_channels1=n_channels10,
n_channels2=n_channels11,
)
)
# layer_2
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels2,
n_channels1=n_channels20,
n_channels2=n_channels21,
)
)
# layer_3
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels3,
n_channels1=n_channels30,
n_channels2=n_channels31,
)
)
# layer_4
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels4,
n_channels1=n_channels40,
n_channels2=n_channels41,
)
)
# layer_5
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels5,
n_channels1=n_channels50,
n_channels2=n_channels51,
)
)
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
x = x.reshape(x.size(0), -1)
x = self.fc(x)
return x
| 35.230496
| 115
| 0.48767
|
import torch.nn as nn
import torch.nn.functional as F
def zero_padding(x1, x2):
num_ch1 = x1.size()[1]
num_ch2 = x2.size()[1]
ch_diff = num_ch1 - num_ch2
if num_ch1 < num_ch2:
ch_diff = -1 * ch_diff
if ch_diff%2 ==0:
x1 = F.pad(x1[:, :, :, :], (0, 0, 0, 0, ch_diff//2, ch_diff//2), "constant", 0)
else:
x1 = F.pad(x1[:, :, :, :], (0, 0, 0, 0, ch_diff//2, (ch_diff//2)+1), "constant", 0)
elif num_ch1 > num_ch2:
if ch_diff%2 ==0:
x2 = F.pad(x2[:, :, :, :], (0, 0, 0, 0, ch_diff//2, ch_diff//2), "constant", 0)
else:
x2 = F.pad(x2[:, :, :, :], (0, 0, 0, 0, ch_diff//2, (ch_diff//2)+1), "constant", 0)
return x1, x2
def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
return nn.Conv2d(
in_planes,
out_planes,
kernel_size=3,
stride=stride,
padding=dilation,
groups=groups,
bias=False,
dilation=dilation,
)
def conv1x1(in_planes, out_planes, stride=1):
return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(
self,
inplanes,
planes,
stride=1,
downsample=None,
groups=1,
base_width=64,
dilation=1,
norm_layer=None,
n_in_channels=None,
n_channels1=None,
n_channels2=None,
):
super(BasicBlock, self).__init__()
if norm_layer is None:
norm_layer = nn.BatchNorm2d
if groups != 1 or base_width != 64:
raise ValueError("BasicBlock only supports groups=1 and base_width=64")
if dilation > 1:
raise NotImplementedError("Dilation > 1 not supported in BasicBlock")
self.conv1 = conv3x3(n_in_channels, n_channels1, stride)
self.bn1 = norm_layer(n_channels1)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(n_channels1, n_channels2)
self.bn2 = norm_layer(n_channels2)
self.downsample = downsample
self.stride = stride
def forward(self, x):
identity = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
identity = self.downsample(x)
out, identity = zero_padding(out, identity)
out += identity
out = self.relu(out)
return out
class ResNet34(nn.Module):
def __init__(
self,
block=BasicBlock,
layers=[3, 4, 6, 3],
num_classes=1000,
zero_init_residual=False,
groups=1,
width_per_group=64,
replace_stride_with_dilation=None,
norm_layer=None,
ch_conv1=64,
ch_l10_1=64,
ch_l10_2=64,
ch_l11_1=64,
ch_l11_2=64,
ch_l12_1=64,
ch_l12_2=64,
ch_l20_1=128,
ch_l20_2=128,
ch_l20_ds=128,
ch_l21_1=128,
ch_l21_2=128,
ch_l22_1=128,
ch_l22_2=128,
ch_l23_1=128,
ch_l23_2=128,
ch_l30_1=256,
ch_l30_2=256,
ch_l30_ds=256,
ch_l31_1=256,
ch_l31_2=256,
ch_l32_1=256,
ch_l32_2=256,
ch_l33_1=256,
ch_l33_2=256,
ch_l34_1=256,
ch_l34_2=256,
ch_l35_1=256,
ch_l35_2=256,
ch_l40_1=512,
ch_l40_2=512,
ch_l40_ds=512,
ch_l41_1=512,
ch_l41_2=512,
ch_l42_1=512,
ch_l42_2=512,
):
super(ResNet34, self).__init__()
if norm_layer is None:
norm_layer = nn.BatchNorm2d
self._norm_layer = norm_layer
self.inplanes = 64
self.dilation = 1
if replace_stride_with_dilation is None:
replace_stride_with_dilation = [False, False, False]
if len(replace_stride_with_dilation) != 3:
raise ValueError(
"replace_stride_with_dilation should be None "
"or a 3-element tuple, got {}".format(replace_stride_with_dilation)
)
self.groups = groups
self.base_width = width_per_group
self.conv1 = nn.Conv2d(3, ch_conv1, kernel_size=7, stride=2, padding=3, bias=False)
self.bn1 = norm_layer(ch_conv1)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
in_ch_l11 = max(ch_conv1, ch_l10_2)
in_ch_l12 = max(in_ch_l11, ch_l11_2)
self.layer1 = self._make_layer_3(block=block, planes=64, blocks=layers[0],
n_in_channels0=ch_conv1,
n_channels00=ch_l10_1,
n_channels01=ch_l10_2,
n_channels_ds=None,
n_in_channels1=in_ch_l11,
n_channels10=ch_l11_1,
n_channels11=ch_l11_2,
n_in_channels2=in_ch_l12,
n_channels20=ch_l12_1,
n_channels21=ch_l12_2,
)
in_ch_l20 = max(in_ch_l12, ch_l12_2)
in_ch_l21 = max(ch_l20_ds, ch_l20_2)
in_ch_l22 = max(in_ch_l21, ch_l21_2)
in_ch_l23 = max(in_ch_l22, ch_l22_2)
self.layer2 = self._make_layer_4(block, 128, layers[1], stride=2,
dilate=replace_stride_with_dilation[0],
n_in_channels0=in_ch_l20,
n_channels00=ch_l20_1,
n_channels01=ch_l20_2,
n_channels_ds=ch_l20_ds,
n_in_channels1=in_ch_l21,
n_channels10=ch_l21_1,
n_channels11=ch_l21_2,
n_in_channels2=in_ch_l22,
n_channels20=ch_l22_1,
n_channels21=ch_l22_2,
n_in_channels3=in_ch_l23,
n_channels30=ch_l23_1,
n_channels31=ch_l23_2,
)
in_ch_l30 = max(in_ch_l23, ch_l23_2)
in_ch_l31 = max(ch_l30_ds, ch_l30_2)
in_ch_l32 = max(in_ch_l31, ch_l31_2)
in_ch_l33 = max(in_ch_l32, ch_l32_2)
in_ch_l34 = max(in_ch_l33, ch_l33_2)
in_ch_l35 = max(in_ch_l34, ch_l34_2)
self.layer3 = self._make_layer_6(block, 256, layers[2], stride=2,
dilate=replace_stride_with_dilation[1],
n_in_channels0=in_ch_l30,
n_channels00=ch_l30_1,
n_channels01=ch_l30_2,
n_channels_ds=ch_l30_ds,
n_in_channels1=in_ch_l31,
n_channels10=ch_l31_1,
n_channels11=ch_l31_2,
n_in_channels2=in_ch_l32,
n_channels20=ch_l32_1,
n_channels21=ch_l32_2,
n_in_channels3=in_ch_l33,
n_channels30=ch_l33_1,
n_channels31=ch_l33_2,
n_in_channels4=in_ch_l34,
n_channels40=ch_l34_1,
n_channels41=ch_l34_2,
n_in_channels5=in_ch_l35,
n_channels50=ch_l35_1,
n_channels51=ch_l35_2,
)
in_ch_l40 = max(in_ch_l35, ch_l35_2)
in_ch_l41 = max(ch_l40_ds, ch_l40_2)
in_ch_l42 = max(in_ch_l41, ch_l41_2)
self.layer4 = self._make_layer_3(block, 512, layers[3], stride=2,
dilate=replace_stride_with_dilation[2],
n_in_channels0=in_ch_l40,
n_channels00=ch_l40_1,
n_channels01=ch_l40_2,
n_channels_ds=ch_l40_ds,
n_in_channels1=in_ch_l41,
n_channels10=ch_l41_1,
n_channels11=ch_l41_2,
n_in_channels2=in_ch_l42,
n_channels20=ch_l42_1,
n_channels21=ch_l42_2,
)
in_ch_fc = max(in_ch_l42, ch_l42_2)
self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
self.fc = nn.Linear(in_ch_fc, num_classes)
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu")
elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
if zero_init_residual:
for m in self.modules():
if isinstance(m, Bottleneck):
nn.init.constant_(m.bn3.weight, 0)
elif isinstance(m, BasicBlock):
nn.init.constant_(m.bn2.weight, 0)
def _make_layer_3(self, block, planes, blocks, stride=1, dilate=False,
n_in_channels0=None,
n_channels00=None, n_channels01=None,
n_channels_ds=None,
n_in_channels1=None,
n_channels10=None, n_channels11=None,
n_in_channels2=None,
n_channels20=None, n_channels21=None,
):
norm_layer = self._norm_layer
downsample = None
previous_dilation = self.dilation
if dilate:
self.dilation *= stride
stride = 1
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential( conv1x1(n_in_channels0, n_channels_ds, stride), norm_layer(n_channels_ds) )
self.inplanes = planes * block.expansion
layers = []
layers.append(
block(
self.inplanes,
planes,
stride,
downsample,
self.groups,
self.base_width,
previous_dilation,
norm_layer,
n_in_channels=n_in_channels0,
n_channels1=n_channels00,
n_channels2=n_channels01,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels1,
n_channels1=n_channels10,
n_channels2=n_channels11,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels2,
n_channels1=n_channels20,
n_channels2=n_channels21,
)
)
return nn.Sequential(*layers)
def _make_layer_4(self, block, planes, blocks, stride=1, dilate=False,
n_in_channels0=None,
n_channels00=None, n_channels01=None,
n_channels_ds=None,
n_in_channels1=None,
n_channels10=None, n_channels11=None,
n_in_channels2=None,
n_channels20=None, n_channels21=None,
n_in_channels3=None,
n_channels30=None, n_channels31=None,
):
norm_layer = self._norm_layer
downsample = None
previous_dilation = self.dilation
if dilate:
self.dilation *= stride
stride = 1
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential( conv1x1(n_in_channels0, n_channels_ds, stride), norm_layer(n_channels_ds) )
self.inplanes = planes * block.expansion
layers = []
layers.append(
block(
self.inplanes,
planes,
stride,
downsample,
self.groups,
self.base_width,
previous_dilation,
norm_layer,
n_in_channels=n_in_channels0,
n_channels1=n_channels00,
n_channels2=n_channels01,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels1,
n_channels1=n_channels10,
n_channels2=n_channels11,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels2,
n_channels1=n_channels20,
n_channels2=n_channels21,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels3,
n_channels1=n_channels30,
n_channels2=n_channels31,
)
)
return nn.Sequential(*layers)
def _make_layer_6(self, block, planes, blocks, stride=1, dilate=False,
n_in_channels0=None,
n_channels00=None, n_channels01=None,
n_channels_ds=None,
n_in_channels1=None,
n_channels10=None, n_channels11=None,
n_in_channels2=None,
n_channels20=None, n_channels21=None,
n_in_channels3=None,
n_channels30=None, n_channels31=None,
n_in_channels4=None,
n_channels40=None, n_channels41=None,
n_in_channels5=None,
n_channels50=None, n_channels51=None,
):
norm_layer = self._norm_layer
downsample = None
previous_dilation = self.dilation
if dilate:
self.dilation *= stride
stride = 1
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential( conv1x1(n_in_channels0, n_channels_ds, stride), norm_layer(n_channels_ds) )
self.inplanes = planes * block.expansion
layers = []
layers.append(
block(
self.inplanes,
planes,
stride,
downsample,
self.groups,
self.base_width,
previous_dilation,
norm_layer,
n_in_channels=n_in_channels0,
n_channels1=n_channels00,
n_channels2=n_channels01,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels1,
n_channels1=n_channels10,
n_channels2=n_channels11,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels2,
n_channels1=n_channels20,
n_channels2=n_channels21,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels3,
n_channels1=n_channels30,
n_channels2=n_channels31,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels4,
n_channels1=n_channels40,
n_channels2=n_channels41,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels5,
n_channels1=n_channels50,
n_channels2=n_channels51,
)
)
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
x = x.reshape(x.size(0), -1)
x = self.fc(x)
return x
| true
| true
|
f71a921e657b6f695c22749f6d1c6b756adc0c9a
| 5,837
|
py
|
Python
|
allegation/tests/services/test_download_allegations.py
|
invinst/CPDB
|
c2d8ae8888b13d956cc1068742f18d45736d4121
|
[
"Apache-2.0"
] | 16
|
2016-05-20T09:03:32.000Z
|
2020-09-13T14:23:06.000Z
|
allegation/tests/services/test_download_allegations.py
|
invinst/CPDB
|
c2d8ae8888b13d956cc1068742f18d45736d4121
|
[
"Apache-2.0"
] | 2
|
2016-05-24T01:44:14.000Z
|
2016-06-17T22:19:45.000Z
|
allegation/tests/services/test_download_allegations.py
|
invinst/CPDB
|
c2d8ae8888b13d956cc1068742f18d45736d4121
|
[
"Apache-2.0"
] | 2
|
2016-10-10T16:14:19.000Z
|
2020-10-26T00:17:02.000Z
|
from mock import patch, MagicMock, call
from allegation.factories import (
DownloadFactory, OfficerAllegationFactory, AllegationFactory, ComplainingWitnessFactory, OfficerFactory)
from allegation.services.download_allegations import AllegationsDownload
from api.models import Setting
from common.tests.core import SimpleTestCase
from share.factories import SettingFactory
class AllegationsDownloadTestCase(SimpleTestCase):
    """Unit tests for AllegationsDownload's Excel export.

    Every test patches ``xlsxwriter.Workbook`` (and ``os``) inside the
    download module so no real workbook file is created; assertions are made
    against the calls recorded on mock worksheets.  Statement order matters:
    factory saves must happen before ``AllegationsDownload`` is constructed,
    and all workbook interaction must occur inside the ``patch`` contexts.
    """

    @patch('allegation.services.download_allegations.xlsxwriter.Workbook')
    def test_write_disclaimer(self, mock_workbook):
        """Each line of the configured disclaimer lands in its own cell (A1, A2, ...)."""
        setting = Setting.objects.first() or SettingFactory()
        download = DownloadFactory()
        line_1 = 'line_1'
        line_2 = 'line_2'
        setting.export_excel_disclaimer = '{line_1}\n{line_2}'.format(line_1=line_1, line_2=line_2)
        setting.save()
        mock_worksheet = MagicMock()
        mock_workbook().add_worksheet.return_value = mock_worksheet
        # Patch ``os`` so init_workbook cannot touch the filesystem.
        with patch('allegation.services.download_allegations.os'):
            allegation_download = AllegationsDownload(download.id)
            allegation_download.init_workbook()
            allegation_download.write_disclaimer()
        expected_calls = [
            call.write('A1', line_1),
            call.write('A2', line_2)
        ]
        mock_worksheet.assert_has_calls(expected_calls)

    @patch('allegation.services.download_allegations.xlsxwriter.Workbook')
    def test_investigator_name_rank_in_allegation_sheet(self, mock_workbook):
        """Allegation sheet contains investigator name/rank columns and their values."""
        officer_allegation_1 = OfficerAllegationFactory()
        investigator = officer_allegation_1.allegation.investigator
        allegation_download = AllegationsDownload(DownloadFactory().id)
        allegation_download.officer_allegations = [officer_allegation_1]
        allegation_download.update_crids()
        # Stub out header writing so call_args can be inspected directly.
        allegation_download.write_headers = MagicMock()
        mock_worksheet = MagicMock()
        with patch('allegation.services.download_allegations.os'):
            allegation_download.init_workbook()
            allegation_download.write_allegations_columns(mock_worksheet)
            (sheet, columns), _ = allegation_download.write_headers.call_args
            sheet.should.equal(mock_worksheet)
            # Strict-superset check: the header row includes both columns.
            (set(columns) > set(['InvestigatorName', 'InvestigatorRank'])).should.be.true
            allegation_download.write_allegations_data(mock_worksheet)
            # Columns 21/22 hold InvestigatorName / InvestigatorRank.
            mock_worksheet.write.assert_any_call(1, 21, officer_allegation_1.allegation.investigator.name)
            mock_worksheet.write.assert_any_call(1, 22, investigator.current_rank)

    @patch('allegation.services.download_allegations.xlsxwriter.Workbook')
    def test_complaining_witness_sheet(self, mock_workbook):
        """Complaining-witness sheet carries CRID plus witness demographics."""
        allegation = AllegationFactory()
        witness = ComplainingWitnessFactory(allegation=allegation, crid=allegation.crid)
        officer_allegation = OfficerAllegationFactory(allegation=allegation)
        allegation_download = AllegationsDownload(DownloadFactory().id)
        allegation_download.officer_allegations = [officer_allegation]
        allegation_download.update_crids()
        allegation_download.write_headers = MagicMock()
        with patch('allegation.services.download_allegations.os'):
            allegation_download.init_workbook()
            mock_worksheet = MagicMock()
            allegation_download.workbook.add_worksheet = MagicMock(return_value=mock_worksheet)
            allegation_download.write_complaint_witnesses()
            (sheet, columns), _ = allegation_download.write_headers.call_args
            sheet.should.equal(mock_worksheet)
            columns.should.equal(['CRID', 'Gender', 'Race', 'Age'])
            # CRID is exported as a string; demographics are raw values.
            mock_worksheet.write.assert_any_call(1, 0, str(allegation.crid))
            mock_worksheet.write.assert_any_call(1, 1, witness.gender)
            mock_worksheet.write.assert_any_call(1, 2, witness.race)
            mock_worksheet.write.assert_any_call(1, 3, witness.age)

    @patch('allegation.services.download_allegations.xlsxwriter.Workbook')
    def test_officer_sheet(self, mock_workbook):
        """Officer-profile sheet exports the full officer column set in order."""
        allegation = AllegationFactory()
        officer = OfficerFactory()
        officer_allegation = OfficerAllegationFactory(allegation=allegation, officer=officer)
        allegation_download = AllegationsDownload(DownloadFactory().id)
        allegation_download.officer_allegations = [officer_allegation]
        allegation_download.update_crids()
        allegation_download.write_headers = MagicMock()
        with patch('allegation.services.download_allegations.os'):
            allegation_download.init_workbook()
            mock_worksheet = MagicMock()
            allegation_download.workbook.add_worksheet = MagicMock(return_value=mock_worksheet)
            allegation_download.write_officer_profile()
            (sheet, columns), _ = allegation_download.write_headers.call_args
            sheet.should.equal(mock_worksheet)
            columns.should.equal([
                'OfficerID', 'OfficerFirst', 'OfficerLast', 'Gender', 'Race',
                'ApptDate', 'Unit', 'Rank', 'Star', 'Age'])
            # One write per column, in the header order above.
            mock_worksheet.write.assert_any_call(1, 0, officer.id)
            mock_worksheet.write.assert_any_call(1, 1, officer.officer_first)
            mock_worksheet.write.assert_any_call(1, 2, officer.officer_last)
            mock_worksheet.write.assert_any_call(1, 3, officer.gender)
            mock_worksheet.write.assert_any_call(1, 4, officer.race)
            mock_worksheet.write.assert_any_call(1, 5, officer.appt_date)
            mock_worksheet.write.assert_any_call(1, 6, officer.unit.unit_name)
            mock_worksheet.write.assert_any_call(1, 7, officer.rank)
            mock_worksheet.write.assert_any_call(1, 8, officer.star)
            mock_worksheet.write.assert_any_call(1, 9, officer.age)
| 50.318966
| 108
| 0.714922
|
from mock import patch, MagicMock, call
from allegation.factories import (
DownloadFactory, OfficerAllegationFactory, AllegationFactory, ComplainingWitnessFactory, OfficerFactory)
from allegation.services.download_allegations import AllegationsDownload
from api.models import Setting
from common.tests.core import SimpleTestCase
from share.factories import SettingFactory
class AllegationsDownloadTestCase(SimpleTestCase):
@patch('allegation.services.download_allegations.xlsxwriter.Workbook')
def test_write_disclaimer(self, mock_workbook):
setting = Setting.objects.first() or SettingFactory()
download = DownloadFactory()
line_1 = 'line_1'
line_2 = 'line_2'
setting.export_excel_disclaimer = '{line_1}\n{line_2}'.format(line_1=line_1, line_2=line_2)
setting.save()
mock_worksheet = MagicMock()
mock_workbook().add_worksheet.return_value = mock_worksheet
with patch('allegation.services.download_allegations.os'):
allegation_download = AllegationsDownload(download.id)
allegation_download.init_workbook()
allegation_download.write_disclaimer()
expected_calls = [
call.write('A1', line_1),
call.write('A2', line_2)
]
mock_worksheet.assert_has_calls(expected_calls)
@patch('allegation.services.download_allegations.xlsxwriter.Workbook')
def test_investigator_name_rank_in_allegation_sheet(self, mock_workbook):
officer_allegation_1 = OfficerAllegationFactory()
investigator = officer_allegation_1.allegation.investigator
allegation_download = AllegationsDownload(DownloadFactory().id)
allegation_download.officer_allegations = [officer_allegation_1]
allegation_download.update_crids()
allegation_download.write_headers = MagicMock()
mock_worksheet = MagicMock()
with patch('allegation.services.download_allegations.os'):
allegation_download.init_workbook()
allegation_download.write_allegations_columns(mock_worksheet)
(sheet, columns), _ = allegation_download.write_headers.call_args
sheet.should.equal(mock_worksheet)
(set(columns) > set(['InvestigatorName', 'InvestigatorRank'])).should.be.true
allegation_download.write_allegations_data(mock_worksheet)
mock_worksheet.write.assert_any_call(1, 21, officer_allegation_1.allegation.investigator.name)
mock_worksheet.write.assert_any_call(1, 22, investigator.current_rank)
@patch('allegation.services.download_allegations.xlsxwriter.Workbook')
def test_complaining_witness_sheet(self, mock_workbook):
allegation = AllegationFactory()
witness = ComplainingWitnessFactory(allegation=allegation, crid=allegation.crid)
officer_allegation = OfficerAllegationFactory(allegation=allegation)
allegation_download = AllegationsDownload(DownloadFactory().id)
allegation_download.officer_allegations = [officer_allegation]
allegation_download.update_crids()
allegation_download.write_headers = MagicMock()
with patch('allegation.services.download_allegations.os'):
allegation_download.init_workbook()
mock_worksheet = MagicMock()
allegation_download.workbook.add_worksheet = MagicMock(return_value=mock_worksheet)
allegation_download.write_complaint_witnesses()
(sheet, columns), _ = allegation_download.write_headers.call_args
sheet.should.equal(mock_worksheet)
columns.should.equal(['CRID', 'Gender', 'Race', 'Age'])
mock_worksheet.write.assert_any_call(1, 0, str(allegation.crid))
mock_worksheet.write.assert_any_call(1, 1, witness.gender)
mock_worksheet.write.assert_any_call(1, 2, witness.race)
mock_worksheet.write.assert_any_call(1, 3, witness.age)
@patch('allegation.services.download_allegations.xlsxwriter.Workbook')
def test_officer_sheet(self, mock_workbook):
allegation = AllegationFactory()
officer = OfficerFactory()
officer_allegation = OfficerAllegationFactory(allegation=allegation, officer=officer)
allegation_download = AllegationsDownload(DownloadFactory().id)
allegation_download.officer_allegations = [officer_allegation]
allegation_download.update_crids()
allegation_download.write_headers = MagicMock()
with patch('allegation.services.download_allegations.os'):
allegation_download.init_workbook()
mock_worksheet = MagicMock()
allegation_download.workbook.add_worksheet = MagicMock(return_value=mock_worksheet)
allegation_download.write_officer_profile()
(sheet, columns), _ = allegation_download.write_headers.call_args
sheet.should.equal(mock_worksheet)
columns.should.equal([
'OfficerID', 'OfficerFirst', 'OfficerLast', 'Gender', 'Race',
'ApptDate', 'Unit', 'Rank', 'Star', 'Age'])
mock_worksheet.write.assert_any_call(1, 0, officer.id)
mock_worksheet.write.assert_any_call(1, 1, officer.officer_first)
mock_worksheet.write.assert_any_call(1, 2, officer.officer_last)
mock_worksheet.write.assert_any_call(1, 3, officer.gender)
mock_worksheet.write.assert_any_call(1, 4, officer.race)
mock_worksheet.write.assert_any_call(1, 5, officer.appt_date)
mock_worksheet.write.assert_any_call(1, 6, officer.unit.unit_name)
mock_worksheet.write.assert_any_call(1, 7, officer.rank)
mock_worksheet.write.assert_any_call(1, 8, officer.star)
mock_worksheet.write.assert_any_call(1, 9, officer.age)
| true
| true
|
f71a9246d59e712669453737c400d746d8277d54
| 1,433
|
py
|
Python
|
stream_alert/rule_processor/main.py
|
ashmere/streamalert
|
5a03d3d272a8e4e4b1ee71567fad1d7e185bb903
|
[
"Apache-2.0"
] | 1
|
2018-11-18T12:13:44.000Z
|
2018-11-18T12:13:44.000Z
|
stream_alert/rule_processor/main.py
|
GSA/streamalert
|
57d78157c76c19b9a0fe5bd6deae541cda928914
|
[
"Apache-2.0"
] | 110
|
2019-02-13T05:32:07.000Z
|
2021-07-29T05:42:01.000Z
|
stream_alert/rule_processor/main.py
|
ashmere/streamalert
|
5a03d3d272a8e4e4b1ee71567fad1d7e185bb903
|
[
"Apache-2.0"
] | 1
|
2019-11-01T01:03:47.000Z
|
2019-11-01T01:03:47.000Z
|
"""
Copyright 2017-present, Airbnb Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import importlib
import os
from stream_alert.rule_processor.handler import StreamAlert
modules_to_import = set()

# Walk the matcher/rule trees and collect dotted module paths so every rule
# and matcher module can be dynamically imported before the handler runs.
for folder in ('matchers', 'rules'):
    for root, _dirs, files in os.walk(folder):
        package_path = root.replace('/', '.')
        for file_name in files:
            # Skip hidden files, package markers and compiled byte-code.
            if file_name.startswith(('.', '__init__')) or file_name.endswith('.pyc'):
                continue
            module_base = os.path.splitext(file_name)[0]
            if package_path and module_base:
                modules_to_import.add('{}.{}'.format(package_path, module_base))

# Dynamically import every collected module.
for module_name in modules_to_import:
    importlib.import_module(module_name)
def handler(event, context):
    """AWS Lambda entry point: hand the incoming event to the rule processor.

    StreamAlert is constructed with the Lambda ``context`` and processes
    ``event``; nothing is returned to the caller.
    """
    processor = StreamAlert(context)
    processor.run(event)
| 35.825
| 89
| 0.728542
|
import importlib
import os
from stream_alert.rule_processor.handler import StreamAlert
modules_to_import = set()
for folder in ('matchers', 'rules'):
for root, dirs, files in os.walk(folder):
filtered_files = [rule_file for rule_file in files if not (rule_file.startswith((
'.', '__init__')) or rule_file.endswith('.pyc'))]
package_path = root.replace('/', '.')
for import_file in filtered_files:
import_module = os.path.splitext(import_file)[0]
if package_path and import_module:
modules_to_import.add('{}.{}'.format(package_path, import_module))
for module_name in modules_to_import:
importlib.import_module(module_name)
def handler(event, context):
StreamAlert(context).run(event)
| true
| true
|
f71a9251405f51578902104c3076923ed80a68f2
| 666
|
py
|
Python
|
eth/chains/mainnet/constants.py
|
shreyasnbhat/py-evm
|
cd31d83185e102a7cb2f11e2f67923b069ee9cef
|
[
"MIT"
] | 1
|
2018-12-09T11:56:53.000Z
|
2018-12-09T11:56:53.000Z
|
eth/chains/mainnet/constants.py
|
shreyasnbhat/py-evm
|
cd31d83185e102a7cb2f11e2f67923b069ee9cef
|
[
"MIT"
] | null | null | null |
eth/chains/mainnet/constants.py
|
shreyasnbhat/py-evm
|
cd31d83185e102a7cb2f11e2f67923b069ee9cef
|
[
"MIT"
] | 2
|
2019-09-05T01:31:56.000Z
|
2019-09-17T09:09:16.000Z
|
from eth_typing import BlockNumber

# Chain identifier for Ethereum mainnet, per EIP-155 replay protection:
# https://github.com/ethereum/EIPs/blob/master/EIPS/eip-155.md
MAINNET_CHAIN_ID = 1

# Fork Blocks listed in ascending order
# (each constant is the mainnet block number at which the fork activates).
#
# Homestead Block
#
HOMESTEAD_MAINNET_BLOCK = BlockNumber(1150000)
#
# DAO Block
#
DAO_FORK_MAINNET_BLOCK = BlockNumber(1920000)
# Extra-data marker used around the DAO fork block.
DAO_FORK_MAINNET_EXTRA_DATA = b'dao-hard-fork'
#
# Tangerine Whistle Block
#
TANGERINE_WHISTLE_MAINNET_BLOCK = BlockNumber(2463000)
#
# Spurious Dragon Block
#
SPURIOUS_DRAGON_MAINNET_BLOCK = BlockNumber(2675000)
#
# Byzantium Block
#
BYZANTIUM_MAINNET_BLOCK = BlockNumber(4370000)
#
# Constantinople Block
#
CONSTANTINOPLE_MAINNET_BLOCK = BlockNumber(7080000)
| 14.8
| 62
| 0.78979
|
from eth_typing import BlockNumber
MAINNET_CHAIN_ID = 1
HOMESTEAD_MAINNET_BLOCK = BlockNumber(1150000)
DAO_FORK_MAINNET_BLOCK = BlockNumber(1920000)
DAO_FORK_MAINNET_EXTRA_DATA = b'dao-hard-fork'
TANGERINE_WHISTLE_MAINNET_BLOCK = BlockNumber(2463000)
SPURIOUS_DRAGON_MAINNET_BLOCK = BlockNumber(2675000)
BYZANTIUM_MAINNET_BLOCK = BlockNumber(4370000)
CONSTANTINOPLE_MAINNET_BLOCK = BlockNumber(7080000)
| true
| true
|
f71a929b94aaa07c53b09d5b18de47578263ba83
| 6,430
|
py
|
Python
|
conf.py
|
isabella232/grr-doc
|
2b0e28dc8d456dd0301aa14d45bf53d36de02781
|
[
"Apache-2.0"
] | null | null | null |
conf.py
|
isabella232/grr-doc
|
2b0e28dc8d456dd0301aa14d45bf53d36de02781
|
[
"Apache-2.0"
] | 1
|
2021-06-27T17:20:11.000Z
|
2021-06-27T17:20:11.000Z
|
conf.py
|
isabella232/grr-doc
|
2b0e28dc8d456dd0301aa14d45bf53d36de02781
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# GRR documentation build configuration file, created by
# sphinx-quickstart on Wed Nov 22 17:54:03 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.mathjax',
'recommonmark',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'GRR'
copyright = u'2021, GRR team'
author = u'GRR team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u''
# The full version, including alpha/beta/rc tags.
release = u''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
# html_sidebars = {
# '**': [
# 'relations.html', # needs 'show_related': True theme option to display
# 'searchbox.html',
# ]
# }
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'GRRdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'GRR.tex', u'GRR Documentation',
u'GRR team', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'grr', u'GRR Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'GRR', u'GRR Documentation',
author, 'GRR', 'One line description of project.',
'Miscellaneous'),
]
# Configure sphinx to convert markdown links (recommonmark is broken at the
# moment).
from docutils import nodes, transforms
class ProcessLink(transforms.Transform):
    """Post-parse transform that rewrites intra-doc ``*.md`` link targets to
    ``*.html`` and substitutes version placeholders in text nodes.

    Registered from ``setup()`` as a workaround for recommonmark's broken
    markdown link conversion (see the comment above the import).
    """

    # High priority value so this transform runs late in the pipeline.
    default_priority = 1000

    # Placeholder -> concrete version string, substituted in every Text node.
    text_replacements = {
        "__GRR_VERSION__": "3.4.3.1",
        "__GRR_DEB_VERSION__": "3.4.3-1"
    }

    def find_replace(self, node):
        """Apply the link rewrite and placeholder substitution to one node."""
        # Markdown cross-links must point at the rendered HTML page.
        if isinstance(node, nodes.reference) and "refuri" in node:
            r = node["refuri"]
            if r.endswith(".md"):
                r = r[:-3] + ".html"
                node["refuri"] = r
        if isinstance(node, nodes.Text):
            for k, v in self.text_replacements.items():
                if k in node.astext():
                    # Text is immutable; swap in a replacement node.
                    # NOTE(review): if two different placeholders occur in the
                    # same Text node, the second iteration still operates on
                    # the detached original node -- confirm this cannot happen
                    # in practice.
                    repl = nodes.Text(node.replace(k, v))
                    node.parent.replace(node, repl)
        return node

    def traverse(self, node):
        """Depth-first walk of the doctree rooted at ``node``, applying
        find_replace to every node."""
        self.find_replace(node)
        for c in node.children:
            self.traverse(c)

    def apply(self):
        # Entry point called by docutils; walk the whole document.
        self.current_level = 0
        self.traverse(self.document)
from recommonmark.transform import AutoStructify
def setup(app):
    """Sphinx extension hook: enable recommonmark auto-structify and register
    the ProcessLink link/version transform."""
    recommonmark_settings = {
        'enable_auto_toc_tree': True,
        'auto_toc_tree_section': 'Table of contents',
    }
    app.add_config_value('recommonmark_config', recommonmark_settings, True)
    app.add_transform(AutoStructify)
    app.add_transform(ProcessLink)
| 29.768519
| 81
| 0.657387
|
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.mathjax',
'recommonmark',
]
templates_path = ['_templates']
master_doc = 'index'
project = u'GRR'
copyright = u'2021, GRR team'
author = u'GRR team'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u''
# The full version, including alpha/beta/rc tags.
release = u''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
# html_sidebars = {
# '**': [
# 'relations.html', # needs 'show_related': True theme option to display
# 'searchbox.html',
# ]
# }
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'GRRdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htmaster_dobp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'GRR.tex', u'GRR Documentation',
u'GRR team', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'grr', u'GRR Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'GRR', u'GRR Documentation',
author, 'GRR', 'One line description of project.',
'Miscellaneous'),
]
# Configure sphinx to convert markdown links (recommonmark is broken at the
# moment).
from docutils import nodes, transforms
class ProcessLink(transforms.Transform):
default_priority = 1000
text_replacements = {
"__GRR_VERSION__": "3.4.3.1",
"__GRR_DEB_VERSION__": "3.4.3-1"
}
def find_replace(self, node):
if isinstance(node, nodes.reference) and "refuri" in node:
r = node["refuri"]
if r.endswith(".md"):
r = r[:-3] + ".html"
node["refuri"] = r
if isinstance(node, nodes.Text):
for k, v in self.text_replacements.items():
if k in node.astext():
repl = nodes.Text(node.replace(k, v))
node.parent.replace(node, repl)
return node
def traverse(self, node):
self.find_replace(node)
for c in node.children:
self.traverse(c)
def apply(self):
self.current_level = 0
self.traverse(self.document)
from recommonmark.transform import AutoStructify
def setup(app):
app.add_config_value('recommonmark_config', {
'enable_auto_toc_tree': True,
'auto_toc_tree_section': 'Table of contents',
}, True)
app.add_transform(AutoStructify)
app.add_transform(ProcessLink)
| true
| true
|
f71a931bbfeddaef6760880c9e0d84b9e3ce6a96
| 3,111
|
py
|
Python
|
resend_kafka_message/logic/client/kafka_client.py
|
Tungnt24/reprocess-kafka-message
|
50a6495675630866b0a800a2b2857754f9cdfb02
|
[
"MIT"
] | null | null | null |
resend_kafka_message/logic/client/kafka_client.py
|
Tungnt24/reprocess-kafka-message
|
50a6495675630866b0a800a2b2857754f9cdfb02
|
[
"MIT"
] | null | null | null |
resend_kafka_message/logic/client/kafka_client.py
|
Tungnt24/reprocess-kafka-message
|
50a6495675630866b0a800a2b2857754f9cdfb02
|
[
"MIT"
] | null | null | null |
from kafka import KafkaProducer, KafkaConsumer
from resend_kafka_message.setting import (
KafkaProducerConfig,
KafkaConsumerConfig,
)
import json
from kafka.structs import TopicPartition
from resend_kafka_message.utils.logger import logger
class KafkaBackupProducer:
    """Thin wrapper around KafkaProducer that JSON-encodes message values
    and publishes to the topic named in KafkaProducerConfig."""

    def __init__(self) -> None:
        # Values are serialized to UTF-8 JSON before being sent.
        self.producer = KafkaProducer(
            bootstrap_servers=KafkaProducerConfig.KAFKA_BROKER,
            value_serializer=lambda payload: json.dumps(payload).encode("utf-8"),
        )
        self.topic = KafkaProducerConfig.KAFKA_TOPIC

    def send_message(self, user, event, partition):
        """Publish *event* keyed by *user* to the configured topic on the
        given partition, blocking until the client buffer is flushed."""
        record = dict(
            topic=self.topic,
            key=bytes(user, "utf-8"),
            value=event,
            partition=partition,
        )
        self.producer.send(**record)
        # Flush so the caller knows the message has left the client buffer.
        self.producer.flush()
class KafkaBackupConsumer:
    """Wrapper around KafkaConsumer for replaying messages of one topic
    between two timestamps, by translating timestamps to offsets."""

    def __init__(self) -> None:
        self.consumer = KafkaConsumer(
            bootstrap_servers=KafkaConsumerConfig.KAFKA_BROKER,
            auto_offset_reset=KafkaConsumerConfig.KAFKA_AUTO_OFFSET_RESET,
            value_deserializer=lambda x: json.loads(x.decode("utf-8")),
            enable_auto_commit=KafkaConsumerConfig.KAFKA_ENABLE_AUTO_COMMIT,
            max_poll_records=KafkaConsumerConfig.KAFKA_MAX_POLL_RECORDS,
        )
        self.topic = KafkaConsumerConfig.KAFKA_TOPIC

    def kafka_close(self):
        """Close the consumer without committing offsets."""
        self.consumer.close(autocommit=False)

    def current_possion(self, partition):
        """Return the consumer's current offset for *partition*."""
        # NOTE(review): misspelled name ("position") kept for backward
        # compatibility with existing callers.
        tp = TopicPartition(self.topic, partition)
        return self.consumer.position(tp)

    def assign_partition(self, partition):
        """Manually assign the consumer to a single partition."""
        tp = TopicPartition(self.topic, partition)
        self.consumer.assign([tp])

    def seek_message(self, partition, offset_start):
        """Seek *partition* to *offset_start* and return the consumer."""
        tp = TopicPartition(self.topic, partition)
        self.consumer.seek(tp, offset_start)
        return self.consumer

    def get_offset_and_timestamp(self, tp, timestamp_start, timestamp_end):
        """Look up the OffsetAndTimestamp for both bounds of a time window.

        Returns (start, end) OffsetAndTimestamp objects, or (None, None)
        when the broker has no message at or after one of the timestamps.
        """
        offset_and_timestamp_start = self.consumer.offsets_for_times(
            {tp: int(timestamp_start)}
        )
        offset_and_timestamp_end = self.consumer.offsets_for_times(
            {tp: int(timestamp_end)}
        )
        # offsets_for_times returns a {TopicPartition: OffsetAndTimestamp}
        # mapping with a single entry; unwrap the value.
        offset_and_timestamp_start = list(offset_and_timestamp_start.values())[
            0
        ]
        offset_and_timestamp_end = list(offset_and_timestamp_end.values())[0]
        if (
            offset_and_timestamp_start is None
            or offset_and_timestamp_end is None
        ):
            return None, None
        return offset_and_timestamp_start, offset_and_timestamp_end

    def get_offset(self, partition, timestamp_start, timestamp_end):
        """Translate a [timestamp_start, timestamp_end] window on *partition*
        into (offset_start, offset_end).

        Raises:
            Exception: if either timestamp has no corresponding offset.
        """
        tp = TopicPartition(self.topic, partition)
        (
            offset_timestamp_start,
            offset_timestamp_end,
        ) = self.get_offset_and_timestamp(tp, timestamp_start, timestamp_end)
        # Bug fix: the original condition tested offset_timestamp_start
        # twice, so a missing end offset slipped through and crashed below
        # with AttributeError instead of this explicit error.
        if offset_timestamp_start is None or offset_timestamp_end is None:
            raise Exception("could not find offset and timestamp")
        offset_start = offset_timestamp_start.offset
        offset_end = offset_timestamp_end.offset
        return offset_start, offset_end
| 36.6
| 79
| 0.68306
|
from kafka import KafkaProducer, KafkaConsumer
from resend_kafka_message.setting import (
KafkaProducerConfig,
KafkaConsumerConfig,
)
import json
from kafka.structs import TopicPartition
from resend_kafka_message.utils.logger import logger
class KafkaBackupProducer:
def __init__(self) -> None:
self.producer = KafkaProducer(
bootstrap_servers=KafkaProducerConfig.KAFKA_BROKER,
value_serializer=lambda x: json.dumps(x).encode("utf-8"),
)
self.topic = KafkaProducerConfig.KAFKA_TOPIC
def send_message(self, user, event, partition):
self.producer.send(
topic=self.topic,
key=bytes(user, "utf-8"),
value=event,
partition=partition,
)
self.producer.flush()
class KafkaBackupConsumer:
def __init__(self) -> None:
self.consumer = KafkaConsumer(
bootstrap_servers=KafkaConsumerConfig.KAFKA_BROKER,
auto_offset_reset=KafkaConsumerConfig.KAFKA_AUTO_OFFSET_RESET,
value_deserializer=lambda x: json.loads(x.decode("utf-8")),
enable_auto_commit=KafkaConsumerConfig.KAFKA_ENABLE_AUTO_COMMIT,
max_poll_records=KafkaConsumerConfig.KAFKA_MAX_POLL_RECORDS,
)
self.topic = KafkaConsumerConfig.KAFKA_TOPIC
def kafka_close(self):
self.consumer.close(autocommit=False)
def current_possion(self, partition):
tp = TopicPartition(self.topic, partition)
return self.consumer.position(tp)
def assign_partition(self, partition):
tp = TopicPartition(self.topic, partition)
self.consumer.assign([tp])
def seek_message(self, partition, offset_start):
tp = TopicPartition(self.topic, partition)
self.consumer.seek(tp, offset_start)
return self.consumer
def get_offset_and_timestamp(self, tp, timestamp_start, timestamp_end):
offset_and_timestamp_start = self.consumer.offsets_for_times(
{tp: int(timestamp_start)}
)
offset_and_timestamp_end = self.consumer.offsets_for_times(
{tp: int(timestamp_end)}
)
offset_and_timestamp_start = list(offset_and_timestamp_start.values())[
0
]
offset_and_timestamp_end = list(offset_and_timestamp_end.values())[0]
if (
offset_and_timestamp_start is None
or offset_and_timestamp_end is None
):
return None, None
return offset_and_timestamp_start, offset_and_timestamp_end
def get_offset(self, partition, timestamp_start, timestamp_end):
tp = TopicPartition(self.topic, partition)
(
offset_timestamp_start,
offset_timestamp_end,
) = self.get_offset_and_timestamp(tp, timestamp_start, timestamp_end)
if offset_timestamp_start is None or offset_timestamp_start is None:
raise Exception("could not found offset and timestamp")
offset_start = offset_timestamp_start.offset
offset_end = offset_timestamp_end.offset
return offset_start, offset_end
| true
| true
|
f71a939f803f8836cd5408d397bbd195ac54e34a
| 394
|
py
|
Python
|
Applications/powershell/6.0.2/package.py
|
cashmerepipeline/CashmereRez
|
13a73931d715ffac27c337abcd6df97b5c47534b
|
[
"MIT"
] | null | null | null |
Applications/powershell/6.0.2/package.py
|
cashmerepipeline/CashmereRez
|
13a73931d715ffac27c337abcd6df97b5c47534b
|
[
"MIT"
] | null | null | null |
Applications/powershell/6.0.2/package.py
|
cashmerepipeline/CashmereRez
|
13a73931d715ffac27c337abcd6df97b5c47534b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Rez package definition for Microsoft PowerShell Core 6.0.2 (Windows only).
name = 'powershell'
version = '6.0.2'
author = ['microsoft']
# Executables this package exposes on PATH.
tools = ["pwsh"]
requires = []
variants = [
    ['platform-windows'],
]
def commands():
    # Executed by rez at environment-resolution time; `env` is injected into
    # this function's scope by rez, not defined in this file.
    import os
    applications_path = os.environ["APPLICATIONS_PATH"]
    # Appends <APPLICATIONS_PATH>/powershell/<version> to PATH, normalizing
    # separators for the host platform.
    # NOTE(review): assumes APPLICATIONS_PATH is the shared install root for
    # all packaged applications -- confirm against the deployment layout.
    env.PATH.append(os.path.join(applications_path, "powershell", "%s"%version).replace('/', os.sep))
| 13.586207
| 101
| 0.560914
|
name = 'powershell'
version = '6.0.2'
author = ['microsoft']
tools = ["pwsh"]
requires = []
variants = [
['platform-windows'],
]
def commands():
import os
applications_path = os.environ["APPLICATIONS_PATH"]
env.PATH.append(os.path.join(applications_path, "powershell", "%s"%version).replace('/', os.sep))
| true
| true
|
f71a94dc26cb028f050610ac6539dd762ba81156
| 1,020
|
py
|
Python
|
dataset_utils.py
|
wzkkzw12345/NIPS2019DeepGamblers
|
0d1b595611a8bc653fddfdf1419bd8dbde153532
|
[
"MIT"
] | 25
|
2019-10-24T02:18:37.000Z
|
2021-10-10T05:40:39.000Z
|
dataset_utils.py
|
wzkkzw12345/NIPS2019DeepGamblers
|
0d1b595611a8bc653fddfdf1419bd8dbde153532
|
[
"MIT"
] | 1
|
2019-10-28T03:30:25.000Z
|
2019-12-16T09:50:04.000Z
|
dataset_utils.py
|
wzkkzw12345/NIPS2019DeepGamblers
|
0d1b595611a8bc653fddfdf1419bd8dbde153532
|
[
"MIT"
] | 11
|
2019-11-25T05:00:38.000Z
|
2022-01-18T21:27:23.000Z
|
import os
import torch
from torch.utils.data import Dataset, DataLoader
import torchvision.transforms.functional as F
from torchvision import transforms, utils
from PIL import Image
class resized_dataset(Dataset):
    """In-memory dataset that copies (sample, label) pairs out of *dataset*,
    optionally resizing/center-cropping image samples, and applies an
    optional transform lazily on access.

    Args:
        dataset: any indexable dataset yielding (sample, label) tuples.
        transform: optional callable applied to the sample in __getitem__.
        start, end: half-open index range [start, end) to copy; defaults to
            the whole dataset.
        resize: if given, each image is bilinearly resized then
            center-cropped to this size (uses torchvision's functional API).
    """

    def __init__(self, dataset, transform=None, start=None, end=None, resize=None):
        self.data = []
        # Use identity comparison for None (PEP 8); `== None` can misfire on
        # objects with custom __eq__.
        if start is None:
            start = 0
        if end is None:
            end = len(dataset)
        if resize is None:
            for i in range(start, end):
                # Bug fix: the original `self.data.append((*dataset.__getitem__(i)))`
                # is a SyntaxError (starred expression inside plain parens);
                # store the (sample, label) tuple directly.
                self.data.append(dataset[i])
        else:
            for i in range(start, end):
                item = dataset[i]
                self.data.append(
                    (F.center_crop(F.resize(item[0], resize, Image.BILINEAR), resize), item[1])
                )
        self.transform = transform

    def __len__(self):
        return len(self.data)

    def __getitem__(self, idx):
        if self.transform:
            return (self.transform(self.data[idx][0]), self.data[idx][1])
        return self.data[idx]
| 34
| 105
| 0.611765
|
import os
import torch
from torch.utils.data import Dataset, DataLoader
import torchvision.transforms.functional as F
from torchvision import transforms, utils
from PIL import Image
class resized_dataset(Dataset):
def __init__(self, dataset, transform=None, start=None, end=None, resize=None):
self.data=[]
if start == None: start = 0
if end == None: end = dataset.__len__()
if resize==None:
for i in range(start, end):
self.data.append((*dataset.__getitem__(i)))
else:
for i in range(start, end):
item=dataset.__getitem__(i)
self.data.append((F.center_crop(F.resize(item[0],resize,Image.BILINEAR),resize),item[1]))
self.transform = transform
def __len__(self):
return len(self.data)
def __getitem__(self, idx):
if self.transform:
return (self.transform(self.data[idx][0]), self.data[idx][1])
else:
return self.data[idx]
| false
| true
|
f71a96389c5ecde338aa29ef1117227f29df61b8
| 1,098
|
py
|
Python
|
files/sun/practice/binarytree.py
|
1ta/study_python
|
7623ed019397225f63093c5aaccb155bdf289805
|
[
"MIT"
] | null | null | null |
files/sun/practice/binarytree.py
|
1ta/study_python
|
7623ed019397225f63093c5aaccb155bdf289805
|
[
"MIT"
] | null | null | null |
files/sun/practice/binarytree.py
|
1ta/study_python
|
7623ed019397225f63093c5aaccb155bdf289805
|
[
"MIT"
] | null | null | null |
"""
Definition of TreeNode:
class TreeNode:
def __init__(self, val):
self.val = val
self.left, self.right = None, None
"""
class Solution:
    """
    @param inorder : A list of integers that inorder traversal of a tree
    @param postorder : A list of integers that postorder traversal of a tree
    @return : Root of a tree
    """
    def buildTree(self, inorder, postorder):
        # The last postorder element is always the root of the current
        # subtree; its index in the inorder list splits the remaining values
        # into the left and right subtrees.
        def build(in_seq, post_seq):
            if not in_seq:
                return None
            root_value = post_seq[-1]
            node = TreeNode(root_value)
            split = in_seq.index(root_value)
            if split:
                node.left = build(in_seq[:split], post_seq[:split])
            if split + 1 < len(in_seq):
                node.right = build(in_seq[split + 1:], post_seq[split:-1])
            return node
        return build(inorder, postorder)
| 32.294118
| 76
| 0.583789
|
class Solution:
def buildTree(self, inorder, postorder):
def genTree(inorder,postorder):
if len(inorder)==0:
return None
root_val = postorder[-1]
root = TreeNode(root_val)
n = inorder.index(root_val)
left_inorder = inorder[:n]
left_postorder = postorder[:n]
right_inorder = inorder[n+1:]
right_postorder= postorder[n:len(postorder)-1]
if len(left_inorder) > 0:
root.left = genTree(left_inorder, left_postorder)
if len(right_inorder) > 0:
root.right = genTree(right_inorder, right_postorder)
return root
root = genTree(inorder, postorder)
return root
| true
| true
|
f71a974a3093e8096614977acf39bdfa59c13911
| 6,492
|
py
|
Python
|
utils/dataload.py
|
hobinkwak/Stock-Movements-Classification
|
dac2e90d9ef2294f5c4dc8f6605b9051c71b3f45
|
[
"MIT"
] | null | null | null |
utils/dataload.py
|
hobinkwak/Stock-Movements-Classification
|
dac2e90d9ef2294f5c4dc8f6605b9051c71b3f45
|
[
"MIT"
] | null | null | null |
utils/dataload.py
|
hobinkwak/Stock-Movements-Classification
|
dac2e90d9ef2294f5c4dc8f6605b9051c71b3f45
|
[
"MIT"
] | null | null | null |
from itertools import combinations
import pandas as pd
from utils.utils import *
def load_etf():
    """Load daily ETF closing prices: index = trade date, one column per
    ETF code."""
    raw = pd.read_csv(
        "data/etf_data.csv", encoding="euc_kr", parse_dates=["tdate"]
    )
    # Long format -> (date, code) x data_name -> wide OHLCV table.
    long_values = raw.set_index(["tdate", "etf_code", "data_name"])["value"]
    ohlcv = long_values.unstack()
    # "종가" is the closing-price column of the OHLCV table.
    return ohlcv["종가"].unstack()
def load_macro_data():
    """Load the macro indicator file and return monthly gross changes
    (month-end value / month-start value) for a curated subset of series.

    Returns:
        pd.DataFrame indexed by month with the selected macro columns.
    """
    # Skip the first (header/meta) row, parse dates, forward-fill gaps.
    macro_data = pd.read_csv('외부데이터/macro_final.csv', index_col='Item Name').iloc[1:, :]
    macro_data.index = pd.to_datetime(macro_data.index)
    macro_data = macro_data.fillna(method='ffill')
    # Monthly gross change = last observation of the month / first.
    macro_data = (macro_data.resample('m').last() / macro_data.resample('m').first())
    # Human-readable names for every raw column, in file order.
    macro_data.columns = ['FOMC정책금리', '한국정책금리', '중국정책금리', '미국국채_1m', '미국국채_3m', '미국국채_6m', '미국국채_1y', '미국국채_5y',
                          '미국국채_10y', '리보_달러_1m', '리보_달러_1y', '리보_달러_3m', '리보_달러_6m', '리보_달러_1w',
                          'DDR4 16G (2G*8) 2666 MHZ', 'NAND 16Gb 2Gx8 SLC', 'DDR4 16G (2G*8) eTT MHZ',
                          'DDR3 4Gb 512Mx8 1600/1866Mbps', 'DDR3 4Gb 512Mx8 eTT',
                          'NAND 8Gb 1Gx8 SLC', 'NAND 64Gb 8Gx8 MLC', 'WTI_1M', 'BRENT_1M', 'DUBAI_ASIA1M',
                          '난방유_선물_NYMEX', '천연가스_선물_NYMEX', '가스오일_선물_IPE', '천연가스_선물_IPE', '금_선물', '은_선물', '알루미늄_선물',
                          '전기동_선물', '납_선물', '니켈_선물', '주석_선물', '아연_선물', '10YR BEI', 'T10Y2Y', 'DFF',
                          'HY Ef Yield', 'Trade DI', 'VIX', 'USDKRW', 'Eco Policy Uncertainty']
    # Keep only the series actually used downstream (drops some DRAM price
    # series, DFF, etc.).
    macro_data = macro_data[
        ['FOMC정책금리', '한국정책금리', '중국정책금리', '미국국채_1m', '미국국채_3m', '미국국채_6m', '미국국채_1y', '미국국채_5y', '미국국채_10y', '리보_달러_1m',
         '리보_달러_1y', '리보_달러_3m', '리보_달러_6m', '리보_달러_1w', 'DDR3 4Gb 512Mx8 eTT',
         'NAND 8Gb 1Gx8 SLC', 'WTI_1M', 'BRENT_1M', 'DUBAI_ASIA1M', '난방유_선물_NYMEX', '천연가스_선물_NYMEX', '가스오일_선물_IPE',
         '천연가스_선물_IPE', '금_선물', '은_선물', '알루미늄_선물', '전기동_선물', '납_선물', '니켈_선물', '주석_선물', '아연_선물', '10YR BEI', 'T10Y2Y',
         'HY Ef Yield', 'Trade DI', 'VIX', 'USDKRW', 'Eco Policy Uncertainty']]
    return macro_data
def load_wics_data():
    """Load WICS sector data.

    Returns:
        (top-level sector table with duplicate columns dropped,
         ETF sector-exposure table)
    """
    exposure = process_wics_data("./외부데이터/ETF별 업종 exposure.csv")
    sector_table = process_wics_data("./외부데이터/WICS 업종별 투자정보 데이터.csv")
    # The ten WICS top-level (대분류) sectors used throughout the project.
    top_level_names = [
        "에너지",
        "소재",
        "산업재",
        "경기관련소비재",
        "필수소비재",
        "건강관리",
        "금융",
        "IT",
        "커뮤니케이션서비스",
        "유틸리티",
    ]
    top_level = sector_table[top_level_names]
    # Drop duplicated (sector, metric) columns, preserving column order.
    top_level = top_level.T.drop_duplicates().T
    return top_level, exposure
def features_from_wics(wics):
    """Derive the model feature set from the WICS top-level sector table.

    Args:
        wics: WICS대 table from load_wics_data(); columns are a MultiIndex
            of (sector, metric).

    Returns:
        (wics_price, features) where wics_price is the daily close-index
        frame and features maps feature name -> monthly DataFrame.
        Key order matters downstream: combination_set() treats keys 1..5
        as ratio features and the rest as difference features.
    """
    # Daily close index per sector; basis for momentum features.
    wics_price = wics.xs("종가지수", level=1, axis=1)
    momentums = get_moving_features(wics_price, type='price')
    # Total traded value.
    wics_trd_volume = wics.xs("거래대금", level=1, axis=1)
    trd_volumes = get_moving_features(wics_trd_volume, type='volume')
    # Net buying by retail / foreign / institutional investors (daily);
    # missing days treated as zero flow.
    wics_retail_volume = wics.xs("개인 순매수대금(일간)", level=1, axis=1).fillna(0)
    retail_volumes = get_moving_features(wics_retail_volume, type='volume')
    wics_for_volume = wics.xs("외국인총합계순매수대금(일간)", level=1, axis=1).fillna(0)
    for_volumes = get_moving_features(wics_for_volume, type='volume')
    wics_inst_volume = wics.xs("기관 순매수대금(일간)", level=1,axis=1).fillna(0)
    inst_volumes = get_moving_features(wics_inst_volume, type='volume')
    # Valuation: trailing P/E min-max scaled cross-sectionally per month.
    wics_pe = wics.xs("P/E(FY0)", level=1,axis=1)
    pe_scale = wics_pe.resample('M').last().apply(lambda X: minmaxscale(X), axis=1)
    # Forward-looking valuation / earnings revisions.
    wics_fwd_pe = wics.xs("P/E(Fwd.12M)", level=1,axis=1)
    fwd_pe_changes = get_moving_features(wics_fwd_pe, type='fwd')
    wics_fwd_eps = wics.xs("EPS(Fwd.12M, 지배)", level=1,axis=1)
    fwd_eps_changes =get_moving_features(wics_fwd_eps, type='fwd')
    # Month-end market capitalization.
    size_ = wics.xs("시가총액", level=1,axis=1).resample('M').last()
    features = {
        "macro": load_macro_data(),
        "size": size_,
        "mom_1m": momentums[0],
        "mom_3m": momentums[1],
        "mom_6m": momentums[2],
        "mom_1y": momentums[3],
        "trd_1m": trd_volumes[0],
        "trd_3m": trd_volumes[1],
        "trd_6m": trd_volumes[2],
        "trd_1y": trd_volumes[3],
        "retail_trd_1m": retail_volumes[0],
        "retail_trd_3m": retail_volumes[1],
        "retail_trd_6m": retail_volumes[2],
        "retail_trd_1y": retail_volumes[3],
        "for_trd_1m": for_volumes[0],
        "for_trd_3m": for_volumes[1],
        "for_trd_6m": for_volumes[2],
        "for_trd_1y": for_volumes[3],
        "inst_trd_1m": inst_volumes[0],
        "inst_trd_3m": inst_volumes[1],
        "inst_trd_6m": inst_volumes[2],
        "inst_trd_1y": inst_volumes[3],
        "fwd_pe_1m": fwd_pe_changes[0],
        "fwd_pe_3m": fwd_pe_changes[1],
        "fwd_eps_1m": fwd_eps_changes[0],
        "fwd_eps_3m": fwd_eps_changes[1],
        "pe": pe_scale,
    }
    return wics_price, features
def combination_set(pair, start, end, price, features):
    """Build the supervised (X, y) dataset for one pair of sectors.

    :param pair: tuple of two WICS top-level sector names
    :param start: period start, e.g. '2011-12'
    :param end: period end, e.g. '2021-05'
    :param price: wics_prices (from features_from_wics())
    :param features: feature dict (from features_from_wics())
    :return: (X_data, y_data); y_data['winner'] names the sector with the
        higher monthly return, features are lagged one month so X at month
        t predicts the winner of month t.
    """
    comb_price = price[list(pair)]
    # Monthly gross return = month-end close / month-start close.
    comb_ret = (comb_price.resample('m').last() / comb_price.resample('m').first()).loc[start:end]
    # Bug fix: take an explicit copy -- the .loc slice may be a view of the
    # shared features['macro'] frame, and the column assignments below would
    # then trigger chained-assignment warnings or write through to it.
    feature_table = features['macro'].loc[start:end].copy()
    # Keys 1..5 (size, momentum) enter as cross-sector ratios ...
    for key in list(features.keys())[1:6]:
        feature_table[key] = features[key].apply(lambda x: (x[pair[0]] / x[pair[1]]), axis=1).loc[start:end]
    # ... the remaining keys (flows, valuation changes) as differences.
    for key in list(features.keys())[6:]:
        feature_table[key] = features[key].apply(lambda x: (x[pair[0]] - x[pair[1]]), axis=1).loc[start:end]
    # Label: the sector with the higher return that month (ties go to the
    # second sector of the pair).
    comb_ret['winner'] = comb_ret.apply(
        lambda x: comb_ret.columns[0] if (x[comb_ret.columns[0]] > x[comb_ret.columns[1]]) else comb_ret.columns[1],
        axis=1)
    feature_table = feature_table.replace([-np.inf, np.inf], np.nan).fillna(method='ffill')
    comb_ret = comb_ret.replace([-np.inf, np.inf], np.nan).fillna(method='ffill')
    # Lag features one month relative to the label.
    feature_table = feature_table.shift(1).iloc[1:]
    comb_ret = comb_ret.iloc[1:]
    X_data = feature_table
    y_data = comb_ret[['winner']].astype('category')
    return X_data, y_data
def load_dataset():
    """Build the full training dictionary: one (X, y) dataset per unordered
    pair of WICS top-level sectors over 2011-12 .. 2021-05."""
    wics_top, _ = load_wics_data()
    price, features = features_from_wics(wics_top)
    sector_names = ['에너지', '소재', '산업재', '경기관련소비재', '필수소비재', '건강관리', '금융', 'IT', '커뮤니케이션서비스', '유틸리티']
    dataset = {}
    for pair in combinations(sector_names, 2):
        dataset[pair] = combination_set(pair, '2011-12', '2021-05', price, features)
    return dataset
| 40.074074
| 119
| 0.611214
|
from itertools import combinations
import pandas as pd
from utils.utils import *
def load_etf():
etf_data = pd.read_csv(
"data/etf_data.csv", encoding="euc_kr", parse_dates=["tdate"]
)
etf_ohlcv = etf_data.set_index(["tdate", "etf_code", "data_name"])[
"value"
].unstack()
etf_close = etf_ohlcv["종가"].unstack()
return etf_close
def load_macro_data():
macro_data = pd.read_csv('외부데이터/macro_final.csv', index_col='Item Name').iloc[1:, :]
macro_data.index = pd.to_datetime(macro_data.index)
macro_data = macro_data.fillna(method='ffill')
macro_data = (macro_data.resample('m').last() / macro_data.resample('m').first())
macro_data.columns = ['FOMC정책금리', '한국정책금리', '중국정책금리', '미국국채_1m', '미국국채_3m', '미국국채_6m', '미국국채_1y', '미국국채_5y',
'미국국채_10y', '리보_달러_1m', '리보_달러_1y', '리보_달러_3m', '리보_달러_6m', '리보_달러_1w',
'DDR4 16G (2G*8) 2666 MHZ', 'NAND 16Gb 2Gx8 SLC', 'DDR4 16G (2G*8) eTT MHZ',
'DDR3 4Gb 512Mx8 1600/1866Mbps', 'DDR3 4Gb 512Mx8 eTT',
'NAND 8Gb 1Gx8 SLC', 'NAND 64Gb 8Gx8 MLC', 'WTI_1M', 'BRENT_1M', 'DUBAI_ASIA1M',
'난방유_선물_NYMEX', '천연가스_선물_NYMEX', '가스오일_선물_IPE', '천연가스_선물_IPE', '금_선물', '은_선물', '알루미늄_선물',
'전기동_선물', '납_선물', '니켈_선물', '주석_선물', '아연_선물', '10YR BEI', 'T10Y2Y', 'DFF',
'HY Ef Yield', 'Trade DI', 'VIX', 'USDKRW', 'Eco Policy Uncertainty']
macro_data = macro_data[
['FOMC정책금리', '한국정책금리', '중국정책금리', '미국국채_1m', '미국국채_3m', '미국국채_6m', '미국국채_1y', '미국국채_5y', '미국국채_10y', '리보_달러_1m',
'리보_달러_1y', '리보_달러_3m', '리보_달러_6m', '리보_달러_1w', 'DDR3 4Gb 512Mx8 eTT',
'NAND 8Gb 1Gx8 SLC', 'WTI_1M', 'BRENT_1M', 'DUBAI_ASIA1M', '난방유_선물_NYMEX', '천연가스_선물_NYMEX', '가스오일_선물_IPE',
'천연가스_선물_IPE', '금_선물', '은_선물', '알루미늄_선물', '전기동_선물', '납_선물', '니켈_선물', '주석_선물', '아연_선물', '10YR BEI', 'T10Y2Y',
'HY Ef Yield', 'Trade DI', 'VIX', 'USDKRW', 'Eco Policy Uncertainty']]
return macro_data
def load_wics_data():
WICS대_exposure = process_wics_data("./외부데이터/ETF별 업종 exposure.csv")
WICS업종 = process_wics_data("./외부데이터/WICS 업종별 투자정보 데이터.csv")
WICS대 = WICS업종[
[
"에너지",
"소재",
"산업재",
"경기관련소비재",
"필수소비재",
"건강관리",
"금융",
"IT",
"커뮤니케이션서비스",
"유틸리티",
]
]
WICS대 = WICS대.T.drop_duplicates().T
return WICS대, WICS대_exposure
def features_from_wics(wics):
wics_price = wics.xs("종가지수", level=1, axis=1)
momentums = get_moving_features(wics_price, type='price')
wics_trd_volume = wics.xs("거래대금", level=1, axis=1)
trd_volumes = get_moving_features(wics_trd_volume, type='volume')
wics_retail_volume = wics.xs("개인 순매수대금(일간)", level=1, axis=1).fillna(0)
retail_volumes = get_moving_features(wics_retail_volume, type='volume')
wics_for_volume = wics.xs("외국인총합계순매수대금(일간)", level=1, axis=1).fillna(0)
for_volumes = get_moving_features(wics_for_volume, type='volume')
wics_inst_volume = wics.xs("기관 순매수대금(일간)", level=1,axis=1).fillna(0)
inst_volumes = get_moving_features(wics_inst_volume, type='volume')
wics_pe = wics.xs("P/E(FY0)", level=1,axis=1)
pe_scale = wics_pe.resample('M').last().apply(lambda X: minmaxscale(X), axis=1)
wics_fwd_pe = wics.xs("P/E(Fwd.12M)", level=1,axis=1)
fwd_pe_changes = get_moving_features(wics_fwd_pe, type='fwd')
wics_fwd_eps = wics.xs("EPS(Fwd.12M, 지배)", level=1,axis=1)
fwd_eps_changes =get_moving_features(wics_fwd_eps, type='fwd')
size_ = wics.xs("시가총액", level=1,axis=1).resample('M').last()
features = {
"macro": load_macro_data(),
"size": size_,
"mom_1m": momentums[0],
"mom_3m": momentums[1],
"mom_6m": momentums[2],
"mom_1y": momentums[3],
"trd_1m": trd_volumes[0],
"trd_3m": trd_volumes[1],
"trd_6m": trd_volumes[2],
"trd_1y": trd_volumes[3],
"retail_trd_1m": retail_volumes[0],
"retail_trd_3m": retail_volumes[1],
"retail_trd_6m": retail_volumes[2],
"retail_trd_1y": retail_volumes[3],
"for_trd_1m": for_volumes[0],
"for_trd_3m": for_volumes[1],
"for_trd_6m": for_volumes[2],
"for_trd_1y": for_volumes[3],
"inst_trd_1m": inst_volumes[0],
"inst_trd_3m": inst_volumes[1],
"inst_trd_6m": inst_volumes[2],
"inst_trd_1y": inst_volumes[3],
"fwd_pe_1m": fwd_pe_changes[0],
"fwd_pe_3m": fwd_pe_changes[1],
"fwd_eps_1m": fwd_eps_changes[0],
"fwd_eps_3m": fwd_eps_changes[1],
"pe": pe_scale,
}
return wics_price, features
def combination_set(pair, start, end, price, features):
comb_price = price[list(pair)]
comb_ret = (comb_price.resample('m').last() / comb_price.resample('m').first()).loc[start:end]
feature_table = features['macro'].loc[start:end]
for key in list(features.keys())[1:6]:
feature_table[key] = features[key].apply(lambda x: (x[pair[0]] / x[pair[1]]), axis=1).loc[start:end]
for key in list(features.keys())[6:]:
feature_table[key] = features[key].apply(lambda x: (x[pair[0]] - x[pair[1]]), axis=1).loc[start:end]
comb_ret['winner'] = comb_ret.apply(
lambda x: comb_ret.columns[0] if (x[comb_ret.columns[0]] > x[comb_ret.columns[1]]) else comb_ret.columns[1],
axis=1)
feature_table = feature_table.replace([-np.inf, np.inf], np.nan).fillna(method='ffill')
comb_ret = comb_ret.replace([-np.inf, np.inf], np.nan).fillna(method='ffill')
feature_table = feature_table.shift(1).iloc[1:]
comb_ret = comb_ret.iloc[1:]
X_data = feature_table
y_data = comb_ret[['winner']].astype('category')
return X_data, y_data
def load_dataset():
WICS대,_ = load_wics_data()
price, features = features_from_wics(WICS대)
columns = ['에너지', '소재', '산업재', '경기관련소비재', '필수소비재', '건강관리', '금융', 'IT', '커뮤니케이션서비스', '유틸리티']
pairs = list(combinations(columns, 2))
total_dataset = {pair : combination_set(pair,'2011-12','2021-05', price, features) for pair in pairs}
return total_dataset
| true
| true
|
f71a97da98a14131d787d14e3647b6eaf3f98b88
| 8,968
|
py
|
Python
|
neko/Scanners/CFBFScanner/CFBFScanner.py
|
mebuis/neko
|
c76eacb60c3a3f6adfb6a7a6fd7f61640be2c00d
|
[
"Apache-2.0"
] | 1
|
2018-12-07T02:05:16.000Z
|
2018-12-07T02:05:16.000Z
|
neko/Scanners/CFBFScanner/CFBFScanner.py
|
mebuis/neko
|
c76eacb60c3a3f6adfb6a7a6fd7f61640be2c00d
|
[
"Apache-2.0"
] | null | null | null |
neko/Scanners/CFBFScanner/CFBFScanner.py
|
mebuis/neko
|
c76eacb60c3a3f6adfb6a7a6fd7f61640be2c00d
|
[
"Apache-2.0"
] | null | null | null |
# -*- encoding: UTF-8 -*-
import string
from neko.Common import Threat
from neko.Common.CLSID import CLSID_NULL, LOW_RISK_LEVEL_OBJECTS, HIGH_RISK_LEVEL_OBJECTS
from neko.Common.DataStructures.OLE1 import LengthPrefixedByteArray
from neko.Common.DataStructures.OLE2 import OLEStream, SOAPMoniker, CompositeMoniker, FileMoniker, UrlMoniker
from neko.Parsers.CFBFParser import CFBFParser
from neko.Parsers.CFBFParser.DataStructures import DirectorySectorEntry
class CFBFScanner:
    """Scan a parsed Compound File Binary Format (CFBF / OLE2) document for
    threat indicators: VBA macros, OCX controls, encrypted packages, risky
    embedded objects (by CLSID) and dangerous OLE monikers; embedded stream
    payloads are recursively handed to a fresh Dispatcher.

    NOTE(review): stream names are matched against escaped forms such as
    "\\x01ole" (literal backslash) -- presumably CFBFParser renders control
    bytes in ObjectName as "\\xNN" escapes; confirm against the parser.
    """
    def __init__(self):
        from neko import Dispatcher
        # Both are injected later via Scan(); None until then.
        self.Dispatcher: Dispatcher = None
        self.Parser: CFBFParser = None
        # De-duplicates reports: one threat per type / CLSID per scan.
        self.Flags = set()
    def Scan(self, **kwargs):
        """Entry point; expects 'dispatcher' and 'parser' keyword arguments,
        runs all checks, and returns self."""
        self.Dispatcher = kwargs["dispatcher"]
        self.Parser = kwargs["parser"]
        self.CheckDirectoryEntries()
        self.CheckOLEStreams()
        self.CheckStreamData()
        return self
    def CheckDirectoryEntryNames(self):
        """Flag well-known directory entry names (macros, OCX, encryption)."""
        for entry in self.Parser.DirectoryEntries.values():
            entry_name = entry.ObjectName.lower() # stream names are case-insensitive
            # VBA project streams indicate the presence of macros.
            if ("MACRO" not in self.Flags) and (entry_name in frozenset(["_vba_project", "dir", "_srp_0", "projectlk", "projectwm", "project"])):
                self.Dispatcher.ThreatList.append(
                    Threat(
                        location = self.Dispatcher.Label,
                        type = "FOUND_MACRO",
                        information = {}
                    )
                )
                self.Flags.add("MACRO")
            if ("OCX" not in self.Flags) and (entry_name in frozenset(["\\x03ocxname"])):
                self.Dispatcher.ThreatList.append(
                    Threat(
                        location = self.Dispatcher.Label,
                        type = "FOUND_OLE_CONTROL_EXTENSION",
                        information = {}
                    )
                )
                self.Flags.add("OCX")
            if ("ENCRYPTED_PACKAGE" not in self.Flags) and (entry_name in frozenset(["encryptedpackage"])):
                self.Dispatcher.ThreatList.append(
                    Threat(
                        location = self.Dispatcher.Label,
                        type = "FOUND_ENCRYPTED_PACKAGE",
                        information = {}
                    )
                )
                self.Flags.add("ENCRYPTED_PACKAGE")
    def CheckDirectoryEntryCLSIDs(self):
        """Classify every non-stream entry's CLSID as low-risk, high-risk,
        or unknown, reporting each CLSID at most once."""
        for entry in self.Parser.DirectoryEntries.values():
            if entry.ObjectType.Value == DirectorySectorEntry.STREAM_OBJECT:
                continue
            clsid = str(entry.CLSID)
            if clsid == CLSID_NULL:
                continue # unknown handler
            elif clsid in LOW_RISK_LEVEL_OBJECTS:
                if clsid not in self.Flags:
                    self.Dispatcher.ThreatList.append(
                        Threat(
                            location = self.Dispatcher.Label,
                            type = "FOUND_LOW_RISK_LEVEL_OBJECT",
                            information = {
                                "type": LOW_RISK_LEVEL_OBJECTS[clsid],
                                "clsid": clsid
                            }
                        )
                    )
                    self.Flags.add(clsid)
            elif clsid in HIGH_RISK_LEVEL_OBJECTS:
                if clsid not in self.Flags:
                    self.Dispatcher.ThreatList.append(
                        Threat(
                            location = self.Dispatcher.Label,
                            type = "FOUND_HIGH_RISK_LEVEL_OBJECT",
                            information = {
                                "type": HIGH_RISK_LEVEL_OBJECTS[clsid],
                                "clsid": clsid
                            }
                        )
                    )
                    self.Flags.add(clsid)
            else:
                if clsid not in self.Flags:
                    self.Dispatcher.ThreatList.append(
                        Threat(
                            location = self.Dispatcher.Label,
                            type = "FOUND_UNKNOWN_OBJECT",
                            information = {
                                "clsid": clsid
                            }
                        )
                    )
                    self.Flags.add(clsid)
    def CheckDirectoryEntries(self):
        """Run both directory-entry checks (names, then CLSIDs)."""
        self.CheckDirectoryEntryNames()
        self.CheckDirectoryEntryCLSIDs()
    def CheckOLEStreams(self):
        """Parse every "\\x01ole" stream and report dangerous monikers
        (SOAP, composite, file, URL)."""
        for entry in self.Parser.DirectoryEntries.values():
            entry_name = entry.ObjectName.lower()
            if entry_name != "\\x01ole":
                continue
            olestream = OLEStream().Parse(entry.StreamData)
            relative_moniker_stream = olestream.RelativeMonikerStream
            absolute_moniker_stream = olestream.AbsoluteMonikerStream
            # Prefer the relative moniker; fall back to the absolute one.
            if str(relative_moniker_stream.CLSID) != CLSID_NULL:
                outer_moniker_stream = relative_moniker_stream
            elif str(absolute_moniker_stream.CLSID) != CLSID_NULL:
                outer_moniker_stream = absolute_moniker_stream
            else:
                continue
            outer_moniker = outer_moniker_stream.Moniker
            if isinstance(outer_moniker, SOAPMoniker):
                self.Dispatcher.ThreatList.append(
                    Threat(
                        location = self.Dispatcher.Label,
                        type = "FOUND_SOAP_MONIKER",
                        information = {
                            "url": str(outer_moniker.Url).strip(string.whitespace + "\x00")[5:] # wsdl=
                        }
                    )
                )
            elif isinstance(outer_moniker, CompositeMoniker):
                # A composite moniker wraps an array of inner monikers;
                # inspect each for file/URL targets.
                for inner_moniker_stream in outer_moniker.MonikerArray:
                    inner_moniker = inner_moniker_stream.Moniker
                    if isinstance(inner_moniker, FileMoniker):
                        self.Dispatcher.ThreatList.append(
                            Threat(
                                location = self.Dispatcher.Label,
                                type = "FOUND_COMPOSITED_FILE_MONIKER",
                                information = {
                                    "ansi_path": str(inner_moniker.AnsiPath).strip(string.whitespace + "\x00"),
                                    "unicode_path": str(inner_moniker.UnicodePath).strip(string.whitespace + "\x00")
                                }
                            )
                        )
                    elif isinstance(inner_moniker, UrlMoniker):
                        self.Dispatcher.ThreatList.append(
                            Threat(
                                location = self.Dispatcher.Label,
                                type = "FOUND_COMPOSITED_URL_MONIKER",
                                information = {
                                    "url": str(inner_moniker.Url).strip(string.whitespace + "\x00")
                                }
                            )
                        )
            elif isinstance(outer_moniker, FileMoniker):
                self.Dispatcher.ThreatList.append(
                    Threat(
                        location = self.Dispatcher.Label,
                        type = "FOUND_FILE_MONIKER",
                        information = {
                            "ansi_path": str(outer_moniker.AnsiPath).strip(string.whitespace + "\x00"),
                            "unicode_path": str(outer_moniker.UnicodePath).strip(string.whitespace + "\x00")
                        }
                    )
                )
            elif isinstance(outer_moniker, UrlMoniker):
                self.Dispatcher.ThreatList.append(
                    Threat(
                        location = self.Dispatcher.Label,
                        type = "FOUND_URL_MONIKER",
                        information = {
                            "url": str(outer_moniker.Url).strip(string.whitespace + "\x00")
                        }
                    )
                )
    def CheckStreamData(self):
        """Recursively dispatch the payload of ordinary data streams (and
        unwrapped "\\x01ole10native" streams) to a child Dispatcher."""
        for entry in self.Parser.DirectoryEntries.values():
            if entry.ObjectType.Value != DirectorySectorEntry.STREAM_OBJECT:
                continue
            entry_name = entry.ObjectName.lower()
            # Skip OLE-internal control streams except ole10native, whose
            # payload is attacker-controlled embedded content.
            if entry_name.startswith(("\\x01", "\\x03", "\\x05")) and (entry_name != "\\x01ole10native"):
                continue
            stream_data = entry.StreamData
            if entry_name == "\\x01ole10native":
                # ole10native wraps the payload in a length-prefixed array.
                stream_data = LengthPrefixedByteArray().Parse(stream_data).Data
            if stream_data:
                from neko import Dispatcher
                dispatcher = Dispatcher(label = f"{self.Dispatcher.Label} -> Stream \"{entry.ObjectName}\"")
                dispatcher.Dispatch(stream_data)
                self.Dispatcher.ChildDispatchers.append(dispatcher)
| 41.327189
| 145
| 0.492975
|
import string
from neko.Common import Threat
from neko.Common.CLSID import CLSID_NULL, LOW_RISK_LEVEL_OBJECTS, HIGH_RISK_LEVEL_OBJECTS
from neko.Common.DataStructures.OLE1 import LengthPrefixedByteArray
from neko.Common.DataStructures.OLE2 import OLEStream, SOAPMoniker, CompositeMoniker, FileMoniker, UrlMoniker
from neko.Parsers.CFBFParser import CFBFParser
from neko.Parsers.CFBFParser.DataStructures import DirectorySectorEntry
class CFBFScanner:
def __init__(self):
from neko import Dispatcher
self.Dispatcher: Dispatcher = None
self.Parser: CFBFParser = None
self.Flags = set()
def Scan(self, **kwargs):
self.Dispatcher = kwargs["dispatcher"]
self.Parser = kwargs["parser"]
self.CheckDirectoryEntries()
self.CheckOLEStreams()
self.CheckStreamData()
return self
def CheckDirectoryEntryNames(self):
for entry in self.Parser.DirectoryEntries.values():
entry_name = entry.ObjectName.lower()
if ("MACRO" not in self.Flags) and (entry_name in frozenset(["_vba_project", "dir", "_srp_0", "projectlk", "projectwm", "project"])):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_MACRO",
information = {}
)
)
self.Flags.add("MACRO")
if ("OCX" not in self.Flags) and (entry_name in frozenset(["\\x03ocxname"])):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_OLE_CONTROL_EXTENSION",
information = {}
)
)
self.Flags.add("OCX")
if ("ENCRYPTED_PACKAGE" not in self.Flags) and (entry_name in frozenset(["encryptedpackage"])):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_ENCRYPTED_PACKAGE",
information = {}
)
)
self.Flags.add("ENCRYPTED_PACKAGE")
def CheckDirectoryEntryCLSIDs(self):
for entry in self.Parser.DirectoryEntries.values():
if entry.ObjectType.Value == DirectorySectorEntry.STREAM_OBJECT:
continue
clsid = str(entry.CLSID)
if clsid == CLSID_NULL:
continue
elif clsid in LOW_RISK_LEVEL_OBJECTS:
if clsid not in self.Flags:
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_LOW_RISK_LEVEL_OBJECT",
information = {
"type": LOW_RISK_LEVEL_OBJECTS[clsid],
"clsid": clsid
}
)
)
self.Flags.add(clsid)
elif clsid in HIGH_RISK_LEVEL_OBJECTS:
if clsid not in self.Flags:
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_HIGH_RISK_LEVEL_OBJECT",
information = {
"type": HIGH_RISK_LEVEL_OBJECTS[clsid],
"clsid": clsid
}
)
)
self.Flags.add(clsid)
else:
if clsid not in self.Flags:
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_UNKNOWN_OBJECT",
information = {
"clsid": clsid
}
)
)
self.Flags.add(clsid)
def CheckDirectoryEntries(self):
self.CheckDirectoryEntryNames()
self.CheckDirectoryEntryCLSIDs()
    def CheckOLEStreams(self):
        """Scan every "\\x01ole" stream for moniker-based threats.

        Parses each OLE stream, picks the relative moniker stream when its
        CLSID is non-null (otherwise the absolute one), and records a Threat
        for SOAP, File and URL monikers, including those nested inside a
        composite moniker.
        """
        for entry in self.Parser.DirectoryEntries.values():
            entry_name = entry.ObjectName.lower()
            # Only the OLE control stream is of interest here.
            if entry_name != "\\x01ole":
                continue
            olestream = OLEStream().Parse(entry.StreamData)
            relative_moniker_stream = olestream.RelativeMonikerStream
            absolute_moniker_stream = olestream.AbsoluteMonikerStream
            # Prefer the relative moniker; fall back to the absolute one.
            # A null CLSID means "no moniker present".
            if str(relative_moniker_stream.CLSID) != CLSID_NULL:
                outer_moniker_stream = relative_moniker_stream
            elif str(absolute_moniker_stream.CLSID) != CLSID_NULL:
                outer_moniker_stream = absolute_moniker_stream
            else:
                continue
            outer_moniker = outer_moniker_stream.Moniker
            if isinstance(outer_moniker, SOAPMoniker):
                self.Dispatcher.ThreatList.append(
                    Threat(
                        location = self.Dispatcher.Label,
                        type = "FOUND_SOAP_MONIKER",
                        information = {
                            # [5:] drops the first 5 characters of the URL —
                            # presumably a "url: "-style prefix; TODO confirm.
                            "url": str(outer_moniker.Url).strip(string.whitespace + "\x00")[5:]
                        }
                    )
                )
            elif isinstance(outer_moniker, CompositeMoniker):
                # A composite moniker is a sequence; inspect each element.
                for inner_moniker_stream in outer_moniker.MonikerArray:
                    inner_moniker = inner_moniker_stream.Moniker
                    if isinstance(inner_moniker, FileMoniker):
                        self.Dispatcher.ThreatList.append(
                            Threat(
                                location = self.Dispatcher.Label,
                                type = "FOUND_COMPOSITED_FILE_MONIKER",
                                information = {
                                    "ansi_path": str(inner_moniker.AnsiPath).strip(string.whitespace + "\x00"),
                                    "unicode_path": str(inner_moniker.UnicodePath).strip(string.whitespace + "\x00")
                                }
                            )
                        )
                    elif isinstance(inner_moniker, UrlMoniker):
                        self.Dispatcher.ThreatList.append(
                            Threat(
                                location = self.Dispatcher.Label,
                                type = "FOUND_COMPOSITED_URL_MONIKER",
                                information = {
                                    "url": str(inner_moniker.Url).strip(string.whitespace + "\x00")
                                }
                            )
                        )
            elif isinstance(outer_moniker, FileMoniker):
                self.Dispatcher.ThreatList.append(
                    Threat(
                        location = self.Dispatcher.Label,
                        type = "FOUND_FILE_MONIKER",
                        information = {
                            "ansi_path": str(outer_moniker.AnsiPath).strip(string.whitespace + "\x00"),
                            "unicode_path": str(outer_moniker.UnicodePath).strip(string.whitespace + "\x00")
                        }
                    )
                )
            elif isinstance(outer_moniker, UrlMoniker):
                self.Dispatcher.ThreatList.append(
                    Threat(
                        location = self.Dispatcher.Label,
                        type = "FOUND_URL_MONIKER",
                        information = {
                            "url": str(outer_moniker.Url).strip(string.whitespace + "\x00")
                        }
                    )
                )
def CheckStreamData(self):
for entry in self.Parser.DirectoryEntries.values():
if entry.ObjectType.Value != DirectorySectorEntry.STREAM_OBJECT:
continue
entry_name = entry.ObjectName.lower()
if entry_name.startswith(("\\x01", "\\x03", "\\x05")) and (entry_name != "\\x01ole10native"):
continue
stream_data = entry.StreamData
if entry_name == "\\x01ole10native":
stream_data = LengthPrefixedByteArray().Parse(stream_data).Data
if stream_data:
from neko import Dispatcher
dispatcher = Dispatcher(label = f"{self.Dispatcher.Label} -> Stream \"{entry.ObjectName}\"")
dispatcher.Dispatch(stream_data)
self.Dispatcher.ChildDispatchers.append(dispatcher)
| true
| true
|
f71a97f2cf8061f969605f468dcddb25a7b8ae82
| 1,596
|
py
|
Python
|
progress_bar.py
|
qcrit/LaTeCH-CLfL-2019-GreekClassification
|
0984f88c455d314afd6395be927bcf1383378860
|
[
"MIT"
] | 1
|
2019-11-03T21:10:01.000Z
|
2019-11-03T21:10:01.000Z
|
progress_bar.py
|
qcrit/LaTeCH-CLfL-2019-GreekClassification
|
0984f88c455d314afd6395be927bcf1383378860
|
[
"MIT"
] | null | null | null |
progress_bar.py
|
qcrit/LaTeCH-CLfL-2019-GreekClassification
|
0984f88c455d314afd6395be927bcf1383378860
|
[
"MIT"
] | 2
|
2019-12-23T20:05:32.000Z
|
2019-12-23T20:10:27.000Z
|
# From https://stackoverflow.com/a/34325723
_prev_str_length = None  # length of the last rendered line; None when no bar is live


# Render/refresh an in-place terminal progress bar.
def print_progress_bar(iteration, total, prefix='', suffix='', decimals=1, length=18, fill='█'):
    """Draw or update a progress bar on stdout, overwriting the previous one.

    @params:
        iteration - Required : current iteration (Int)
        total     - Required : total iterations (Int)
        prefix    - Optional : prefix string (Str)
        suffix    - Optional : suffix string (Str)
        decimals  - Optional : positive number of decimals in percent complete (Int)
        length    - Optional : character length of bar (Int)
        fill      - Optional : bar fill character (Str)
    """
    global _prev_str_length
    percent = f"{100 * (iteration / float(total)):.{decimals}f}"
    filled = int(length * iteration // total)
    bar = fill * filled + '-' * (length - filled)
    line = f"{prefix} |{bar}| {percent}% {suffix}"
    if _prev_str_length:
        # Blank out the previous render so a shorter line leaves no residue.
        print(' ' * _prev_str_length, end='\r')
    print(line, end='\r')
    _prev_str_length = len(line)
    # On completion, finish with a newline and reset the tracker.
    if iteration == total:
        _prev_str_length = None
        print()
if __name__ == '__main__':
    #
    # Sample usage: walk a 57-item workload, refreshing the bar each step.
    #
    from time import sleep

    work_items = list(range(0, 57))
    total = len(work_items)
    for step in range(total + 1):
        sleep(0.1)  # simulate a unit of work
        print_progress_bar(step, total, prefix='Progress:', suffix='Complete')
    # Final render looks like:
    # Progress: |██████████████████| 100.0% Complete
| 31.294118
| 123
| 0.641604
|
# Length of the most recently printed bar line; None when no bar is active.
_prev_str_length = None
def print_progress_bar(iteration, total, prefix='', suffix='', decimals=1, length=18, fill='█'):
    """Draw/refresh a terminal progress bar in place on stdout."""
    percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
    filledLength = int(length * iteration // total)
    bar = fill * filledLength + '-' * (length - filledLength)
    s = '%s |%s| %s%% %s' % (prefix, bar, percent, suffix)
    global _prev_str_length
    if _prev_str_length:
        # Overwrite the previous render so a shorter bar leaves no stray text.
        print(' ' * _prev_str_length, end='\r')
    print(s, end='\r')
    _prev_str_length = len(s)
    # On completion, emit the final newline and reset the tracker.
    if iteration == total:
        _prev_str_length = None
        print()
if __name__ == '__main__':
    # Sample usage: animate the bar over 57 items (~0.1 s per step).
    from time import sleep
    items = list(range(0, 57))
    l = len(items)
    for i in range(l + 1):
        sleep(0.1)
        print_progress_bar(i, l, prefix='Progress:', suffix='Complete')
| true
| true
|
f71a984be3f40ce7973e0b35ea72325af786a392
| 3,517
|
py
|
Python
|
app/graph/Node.py
|
OuissalTAIM/jenkins
|
7ea5bcdeb6c0bb3cc14c2826a68e4f521de163c1
|
[
"BSD-1-Clause"
] | null | null | null |
app/graph/Node.py
|
OuissalTAIM/jenkins
|
7ea5bcdeb6c0bb3cc14c2826a68e4f521de163c1
|
[
"BSD-1-Clause"
] | 6
|
2021-02-02T22:52:41.000Z
|
2022-03-12T00:37:30.000Z
|
app/graph/Node.py
|
OuissalTAIM/jenkins
|
7ea5bcdeb6c0bb3cc14c2826a68e4f521de163c1
|
[
"BSD-1-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from app.entity.MineBeneficiation import *
import json
import pandas as pd
from app.graph.Graph import Edge
class NodeJSONEncoder(json.JSONEncoder):
    """json.JSONEncoder that also serializes graph Nodes (as their moniker
    string) and pandas Series (as plain dicts)."""

    def default(self, obj):
        if isinstance(obj, Node):
            return obj.moniker()
        if isinstance(obj, pd.core.series.Series):
            return obj.to_dict()
        # Unknown type: defer to the base class, which raises TypeError.
        return super(NodeJSONEncoder, self).default(obj)
class Node:
    """Wraps a single entity together with its graph connectivity.

    ``upstream``/``downstream`` map entity identifiers to edges; both start
    empty and ``downstream`` is filled by :meth:`add_downstream`.
    """

    def __init__(self, entity):
        """Store the wrapped entity and start with no connections."""
        self.entity = entity
        self.upstream = {}
        self.downstream = {}

    def __repr__(self):
        """Nodes display as their moniker."""
        return self.moniker()

    def __str__(self):
        """Same textual form as ``repr``."""
        return self.moniker()

    def name(self):
        """Name of the wrapped entity."""
        return self.entity.name

    def location(self):
        """Location of the wrapped entity."""
        return self.entity.location

    def nominal_capacity(self):
        """Nominal capacity of the wrapped entity."""
        return self.entity.nominal_capacity

    def moniker(self):
        """Unique moniker of the wrapped entity."""
        return self.entity.moniker

    def layer(self):
        """Pipeline layer of the wrapped entity."""
        return self.entity.layer

    def add_downstream(self, transport, entity_id):
        """Connect this node to downstream entity ``entity_id`` via ``transport``.

        Raises when the entity is unknown, or when the same entity/transport
        pair was already registered.
        """
        if entity_id not in Entity.ENTITIES:
            raise Exception("Downstream entity {0} does not exist".format(entity_id))
        ds_entity = Entity.ENTITIES[entity_id]
        existing = self.downstream.get(entity_id)
        if existing is not None and existing.transport == transport:
            raise Exception("Downstream entity {0} via {1} already exists with node {2}".format(entity_id, transport, self.name()))
        self.downstream[entity_id] = Edge(transport, self.entity, ds_entity)

    def cost_pv(self, downstream_node=None):
        """Present value of costs.

        When ``downstream_node`` is given, the transport cost along the
        connecting edge is added under the "transport" key.
        """
        if downstream_node is None:
            return self.entity.cost_pv()
        edge = self.downstream[downstream_node.moniker()]
        # TODO: make sure that edge.cost() is in same unit as volume,
        # rework this code
        transport_cost = edge.cost() * self.entity.volume()
        cost = self.entity.cost_pv()
        cost["transport"] = (transport_cost.unit, transport_cost.value)
        return cost
class ComboNode(Node):
    """Node combining 2 nodes (an upstream node and a downstream node)."""

    def __init__(self, layer, up_node, down_node):
        """ctor

        :param layer: PipelineLayer
        :param up_node: Node
        :param down_node: Node
        """
        # BUGFIX: this class deliberately does not call Node.__init__ (the
        # entity is built below), but previously it also never initialized
        # the connectivity maps, so inherited methods such as
        # add_downstream()/cost_pv() crashed with AttributeError.
        self.upstream = {}
        self.downstream = {}
        # NOTE(review): this instance attribute shadows Node.layer(); kept
        # as an attribute for backward compatibility with callers that read
        # `.layer` directly.
        self.layer = layer
        self.up_node = up_node
        self.down_node = down_node
        if layer == env.PipelineLayer.MINE_BENEFICIATION:
            self.entity = MineBeneficiationEntity(self.up_node.entity, self.down_node.entity)
        else:
            name = "%s%s%s" % (up_node.name(), env.COMBO_NODES_SEPARATION, down_node.name())
            moniker = "%s%s%s" % (up_node.moniker(), env.COMBO_NODES_SEPARATION, down_node.moniker())
            self.entity = Entity(name=name, layer=layer, id=moniker)
| 27.476563
| 131
| 0.59056
|
from app.entity.MineBeneficiation import *
import json
import pandas as pd
from app.graph.Graph import Edge
class NodeJSONEncoder(json.JSONEncoder):
    """JSON encoder aware of graph Nodes (moniker string) and pandas Series (dict)."""
    def default(self, o):
        if isinstance(o, Node):
            return o.moniker()
        if isinstance(o, pd.core.series.Series):
            return o.to_dict()
        # Anything else: base class raises the usual TypeError.
        return json.JSONEncoder.default(self, o)
class Node:
    """An entity plus its upstream/downstream connectivity (id -> Edge maps)."""
    def __init__(self, entity):
        """Wrap `entity` with empty connectivity maps."""
        self.entity = entity
        self.upstream = {}
        self.downstream = {}
    def __repr__(self):
        # Nodes display as their moniker.
        return self.moniker()
    def __str__(self):
        return self.moniker()
    def name(self):
        """Primary entity name."""
        return self.entity.name
    def location(self):
        """Entity location."""
        return self.entity.location
    def nominal_capacity(self):
        """Entity nominal capacity."""
        return self.entity.nominal_capacity
    def moniker(self):
        """Unique entity moniker."""
        return self.entity.moniker
    def layer(self):
        """Entity pipeline layer."""
        return self.entity.layer
    def add_downstream(self, transport, entity_id):
        """Register a downstream edge to `entity_id` via `transport`; raises on
        unknown entity or duplicate entity/transport pair."""
        if entity_id not in Entity.ENTITIES:
            raise Exception("Downstream entity {0} does not exist".format(entity_id))
        ds_entity = Entity.ENTITIES[entity_id]
        if entity_id in self.downstream and self.downstream[entity_id].transport == transport:
            raise Exception("Downstream entity {0} via {1} already exists with node {2}".format(entity_id, transport, self.name()))
        self.downstream[entity_id] = Edge(transport, self.entity, ds_entity)
    def cost_pv(self, downstream_node=None):
        """Cost PV; when `downstream_node` is given, adds the connecting
        edge's transport cost under the "transport" key."""
        if downstream_node is None:
            return self.entity.cost_pv()
        edge = self.downstream[downstream_node.moniker()]
        # NOTE(review): edge.cost() unit vs entity volume unit — confirm consistent.
        transport_cost = edge.cost() * self.entity.volume()
        cost = self.entity.cost_pv()
        cost["transport"] = (transport_cost.unit, transport_cost.value)
        return cost
class ComboNode(Node):
    """Node combining two nodes into one entity."""
    def __init__(self, layer, up_node, down_node):
        """Build a combo entity from `up_node` and `down_node` for `layer`."""
        # NOTE(review): does not call Node.__init__, so upstream/downstream
        # are never initialized — inherited methods relying on them would
        # fail; confirm this is intended.  Also, this attribute shadows
        # Node.layer().
        self.layer = layer
        self.up_node = up_node
        self.down_node = down_node
        if layer == env.PipelineLayer.MINE_BENEFICIATION:
            self.entity = MineBeneficiationEntity(self.up_node.entity, self.down_node.entity)
        else:
            name = "%s%s%s" % (up_node.name(), env.COMBO_NODES_SEPARATION, down_node.name())
            moniker = "%s%s%s" % (up_node.moniker(), env.COMBO_NODES_SEPARATION, down_node.moniker())
            self.entity = Entity(name=name, layer=layer, id=moniker)
| true
| true
|
f71a987a8a837ab3584bf058b3896b72ab4cd67f
| 33,495
|
py
|
Python
|
third_party/scons/scons-local/SCons/Builder.py
|
rwatson/chromium-capsicum
|
b03da8e897f897c6ad2cda03ceda217b760fd528
|
[
"BSD-3-Clause"
] | 11
|
2015-03-20T04:08:08.000Z
|
2021-11-15T15:51:36.000Z
|
third_party/scons/scons-local/SCons/Builder.py
|
changbai1980/chromium
|
c4625eefca763df86471d798ee5a4a054b4716ae
|
[
"BSD-3-Clause"
] | null | null | null |
third_party/scons/scons-local/SCons/Builder.py
|
changbai1980/chromium
|
c4625eefca763df86471d798ee5a4a054b4716ae
|
[
"BSD-3-Clause"
] | null | null | null |
"""SCons.Builder
Builder object subsystem.
A Builder object is a callable that encapsulates information about how
to execute actions to create a target Node (file) from source Nodes
(files), and how to create those dependencies for tracking.
The main entry point here is the Builder() factory method. This provides
a procedural interface that creates the right underlying Builder object
based on the keyword arguments supplied and the types of the arguments.
The goal is for this external interface to be simple enough that the
vast majority of users can create new Builders as necessary to support
building new types of files in their configurations, without having to
dive any deeper into this subsystem.
The base class here is BuilderBase. This is a concrete base class which
does, in fact, represent the Builder objects that we (or users) create.
There is also a proxy that looks like a Builder:
CompositeBuilder
This proxies for a Builder with an action that is actually a
dictionary that knows how to map file suffixes to a specific
action. This is so that we can invoke different actions
(compilers, compile options) for different flavors of source
files.
Builders and their proxies have the following public interface methods
used by other modules:
__call__()
THE public interface. Calling a Builder object (with the
use of internal helper methods) sets up the target and source
dependencies, appropriate mapping to a specific action, and the
environment manipulation necessary for overridden construction
variable. This also takes care of warning about possible mistakes
in keyword arguments.
add_emitter()
Adds an emitter for a specific file suffix, used by some Tool
modules to specify that (for example) a yacc invocation on a .y
can create a .h *and* a .c file.
add_action()
Adds an action for a specific file suffix, heavily used by
Tool modules to add their specific action(s) for turning
a source file into an object file to the global static
and shared object file Builders.
There are the following methods for internal use within this module:
_execute()
The internal method that handles the heavily lifting when a
Builder is called. This is used so that the __call__() methods
can set up warning about possible mistakes in keyword-argument
overrides, and *then* execute all of the steps necessary so that
the warnings only occur once.
get_name()
Returns the Builder's name within a specific Environment,
primarily used to try to return helpful information in error
messages.
adjust_suffix()
get_prefix()
get_suffix()
get_src_suffix()
set_src_suffix()
Miscellaneous stuff for handling the prefix and suffix
manipulation we use in turning source file names into target
file names.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Builder.py 3897 2009/01/13 06:45:54 scons"
import UserDict
import UserList
import SCons.Action
from SCons.Debug import logInstanceCreation
from SCons.Errors import InternalError, UserError
import SCons.Executor
import SCons.Memoize
import SCons.Node
import SCons.Node.FS
import SCons.Util
import SCons.Warnings
# Private sentinel for "argument not supplied" where None is a legitimate
# value (e.g. the chdir keyword to BuilderBase).
class _Null:
    pass
# Note: _null is the class object itself, not an instance; identity tests
# use `is _null`.
_null = _Null
def match_splitext(path, suffixes = []):
    # Split `path` into [root, suffix], preferring the longest suffix from
    # `suffixes` that matches; fall back to SCons.Util.splitext.
    # (The mutable default [] is safe: `suffixes` is never mutated.)
    if suffixes:
        matchsuf = filter(lambda S,path=path: path[-len(S):] == S,
                          suffixes)
        if matchsuf:
            # Python 2 idiom: map(None, lens, sufs) zips (length, suffix)
            # pairs; max picks the longest (ties broken lexicographically).
            suf = max(map(None, map(len, matchsuf), matchsuf))[1]
            return [path[:-len(suf)], path[-len(suf):]]
    return SCons.Util.splitext(path)
class DictCmdGenerator(SCons.Util.Selector):
    """This is a callable class that can be used as a
    command generator function. It holds on to a dictionary
    mapping file suffixes to Actions. It uses that dictionary
    to return the proper action based on the file suffix of
    the source file."""
    def __init__(self, dict=None, source_ext_match=1):
        # source_ext_match: when true, every source file must share one
        # extension; when false, only the first source's extension counts.
        SCons.Util.Selector.__init__(self, dict)
        self.source_ext_match = source_ext_match
    def src_suffixes(self):
        # The mapping's keys double as the set of known source suffixes.
        return self.keys()
    def add_action(self, suffix, action):
        """Add a suffix-action pair to the mapping.
        """
        self[suffix] = action
    def __call__(self, target, source, env, for_signature):
        """Select the Action for this target/source set by source suffix."""
        if not source:
            return []
        if self.source_ext_match:
            # All sources must agree on a single extension.
            suffixes = self.src_suffixes()
            ext = None
            for src in map(str, source):
                my_ext = match_splitext(src, suffixes)[1]
                if ext and my_ext != ext:
                    raise UserError("While building `%s' from `%s': Cannot build multiple sources with different extensions: %s, %s" % (repr(map(str, target)), src, ext, my_ext))
                ext = my_ext
        else:
            ext = match_splitext(str(source[0]), self.src_suffixes())[1]
        if not ext:
            raise UserError("While building `%s': Cannot deduce file extension from source files: %s" % (repr(map(str, target)), repr(map(str, source))))
        try:
            ret = SCons.Util.Selector.__call__(self, env, source, ext)
        except KeyError, e:
            # Two mapping keys substituted to the same suffix for this env.
            raise UserError("Ambiguous suffixes after environment substitution: %s == %s == %s" % (e[0], e[1], e[2]))
        if ret is None:
            raise UserError("While building `%s' from `%s': Don't know how to build from a source file with suffix `%s'. Expected a suffix in this list: %s." % \
                            (repr(map(str, target)), repr(map(str, source)), ext, repr(self.keys())))
        return ret
class CallableSelector(SCons.Util.Selector):
    """A Selector whose selected value may itself be callable; when it is,
    the callable is invoked with (env, source) and its result returned."""
    def __call__(self, env, source):
        selected = SCons.Util.Selector.__call__(self, env, source)
        if callable(selected):
            selected = selected(env, source)
        return selected
class DictEmitter(SCons.Util.Selector):
    """Suffix-keyed emitter dictionary.

    Selects the emitter registered for the first source file's suffix and
    applies it to compute the final target/source lists; when no emitter
    matches, the original lists pass through unchanged.
    """
    def __call__(self, target, source, env):
        chosen = SCons.Util.Selector.__call__(self, env, source)
        if not chosen:
            return (target, source)
        target, source = chosen(target, source, env)
        return (target, source)
class ListEmitter(UserList.UserList):
    """Aggregates several emitters; applies each in order, threading the
    (target, source) pair through the chain and returning the final result.
    """
    def __call__(self, target, source, env):
        for emitter in self.data:
            target, source = emitter(target, source, env)
        return (target, source)
# These are common errors when calling a Builder:
# they look like the real 'target' and 'source' keyword args to builders,
# so we issue warnings when we see them.  The warnings can, of course,
# be disabled.
misleading_keywords = {
    'targets' : 'target',
    'sources' : 'source',
}
class OverrideWarner(UserDict.UserDict):
    """Collects keyword overrides from a Builder call and warns (once) about
    suspicious ones.

    A single Builder call may fan out to several underlying Builders; one
    OverrideWarner is shared between them so each misleading keyword is
    reported only once no matter how many Builders run.
    """
    def __init__(self, dict):
        UserDict.UserDict.__init__(self, dict)
        if __debug__: logInstanceCreation(self, 'Builder.OverrideWarner')
        self.already_warned = None
    def warn(self):
        if self.already_warned:
            return
        for key in self.keys():
            if key in misleading_keywords:
                suggestion = misleading_keywords[key]
                msg = "Did you mean to use `%s' instead of `%s'?" % (suggestion, key)
                SCons.Warnings.warn(SCons.Warnings.MisleadingKeywordsWarning, msg)
        self.already_warned = 1
def Builder(**kw):
    """A factory for builder objects.

    Normalizes the keyword arguments (generator vs. action, dict-valued
    actions, string/dict/list emitters) and returns a BuilderBase, wrapped
    in a CompositeBuilder when the action was a suffix->action dictionary.
    """
    composite = None
    if kw.has_key('generator'):
        # 'generator' and 'action' are mutually exclusive ways to say the
        # same thing.
        if kw.has_key('action'):
            raise UserError, "You must not specify both an action and a generator."
        kw['action'] = SCons.Action.CommandGeneratorAction(kw['generator'], {})
        del kw['generator']
    elif kw.has_key('action'):
        source_ext_match = kw.get('source_ext_match', 1)
        if kw.has_key('source_ext_match'):
            del kw['source_ext_match']
        if SCons.Util.is_Dict(kw['action']):
            # A dict maps source suffixes to actions; selection happens at
            # build time through the DictCmdGenerator.
            composite = DictCmdGenerator(kw['action'], source_ext_match)
            kw['action'] = SCons.Action.CommandGeneratorAction(composite, {})
            kw['src_suffix'] = composite.src_suffixes()
        else:
            kw['action'] = SCons.Action.Action(kw['action'])
    if kw.has_key('emitter'):
        emitter = kw['emitter']
        if SCons.Util.is_String(emitter):
            # This allows users to pass in an Environment
            # variable reference (like "$FOO") as an emitter.
            # We will look in that Environment variable for
            # a callable to use as the actual emitter.
            var = SCons.Util.get_environment_var(emitter)
            if not var:
                raise UserError, "Supplied emitter '%s' does not appear to refer to an Environment variable" % emitter
            kw['emitter'] = EmitterProxy(var)
        elif SCons.Util.is_Dict(emitter):
            kw['emitter'] = DictEmitter(emitter)
        elif SCons.Util.is_List(emitter):
            kw['emitter'] = ListEmitter(emitter)
    result = apply(BuilderBase, (), kw)
    if not composite is None:
        result = CompositeBuilder(result, composite)
    return result
def _node_errors(builder, env, tlist, slist):
    """Validate that the lists of target and source nodes are
    legal for this builder and environment.  Raise errors or
    issue warnings as appropriate.
    """
    # First, figure out if there are any errors in the way the targets
    # were specified.
    for t in tlist:
        if t.side_effect:
            raise UserError, "Multiple ways to build the same target were specified for: %s" % t
        if t.has_explicit_builder():
            if not t.env is None and not t.env is env:
                action = t.builder.action
                t_contents = action.get_contents(tlist, slist, t.env)
                contents = action.get_contents(tlist, slist, env)
                # Two environments with identical actions are only worth a
                # warning; different actions are a hard error.
                if t_contents == contents:
                    msg = "Two different environments were specified for target %s,\n\tbut they appear to have the same action: %s" % (t, action.genstring(tlist, slist, t.env))
                    SCons.Warnings.warn(SCons.Warnings.DuplicateEnvironmentWarning, msg)
                else:
                    msg = "Two environments with different actions were specified for the same target: %s" % t
                    raise UserError, msg
            if builder.multi:
                if t.builder != builder:
                    msg = "Two different builders (%s and %s) were specified for the same target: %s" % (t.builder.get_name(env), builder.get_name(env), t)
                    raise UserError, msg
                # TODO(batch): list constructed each time!
                if t.get_executor().get_all_targets() != tlist:
                    msg = "Two different target lists have a target in common: %s (from %s and from %s)" % (t, map(str, t.get_executor().get_all_targets()), map(str, tlist))
                    raise UserError, msg
            elif t.sources != slist:
                msg = "Multiple ways to build the same target were specified for: %s (from %s and from %s)" % (t, map(str, t.sources), map(str, slist))
                raise UserError, msg
    if builder.single_source:
        # A single-source builder accepts at most one source per call.
        if len(slist) > 1:
            raise UserError, "More than one source given for single-source builder: targets=%s sources=%s" % (map(str,tlist), map(str,slist))
class EmitterProxy:
    """Emitter that defers to a construction variable.

    Holds the name of an Environment variable; at build time the variable
    is resolved (recursively, while it keeps naming other variables) and,
    when the result is a callable or a list of callables, it is applied as
    the actual emitter.
    """
    def __init__(self, var):
        self.var = SCons.Util.to_String(var)
    def __call__(self, target, source, env):
        emitter = self.var
        # Follow the chain of variable references until we reach something
        # that is not a string, or a variable that is not set.  env.subst()
        # is no help here: it only produces strings.
        while SCons.Util.is_String(emitter):
            if not env.has_key(emitter):
                break
            emitter = env[emitter]
        if callable(emitter):
            target, source = emitter(target, source, env)
        elif SCons.Util.is_List(emitter):
            for sub_emitter in emitter:
                target, source = sub_emitter(target, source, env)
        return (target, source)
    def __cmp__(self, other):
        # Proxies compare by the variable name they watch (Python 2 cmp).
        return cmp(self.var, other.var)
class BuilderBase:
"""Base class for Builders, objects that create output
nodes (files) from input nodes (files).
"""
if SCons.Memoize.use_memoizer:
__metaclass__ = SCons.Memoize.Memoized_Metaclass
memoizer_counters = []
    def __init__(self, action = None,
                 prefix = '',
                 suffix = '',
                 src_suffix = '',
                 target_factory = None,
                 source_factory = None,
                 target_scanner = None,
                 source_scanner = None,
                 emitter = None,
                 multi = 0,
                 env = None,
                 single_source = 0,
                 name = None,
                 chdir = _null,
                 is_explicit = 1,
                 src_builder = None,
                 ensure_suffix = False,
                 **overrides):
        # Stores the builder configuration.  `chdir` defaults to the _null
        # sentinel so that an explicit chdir=None remains distinguishable
        # from "not supplied".  Remaining keywords land in self.overrides
        # and are applied as environment overrides at call time.
        if __debug__: logInstanceCreation(self, 'Builder.BuilderBase')
        self._memo = {}
        self.action = action
        self.multi = multi
        # Dict-valued prefix/suffix become per-source selectors.
        if SCons.Util.is_Dict(prefix):
            prefix = CallableSelector(prefix)
        self.prefix = prefix
        if SCons.Util.is_Dict(suffix):
            suffix = CallableSelector(suffix)
        self.env = env
        self.single_source = single_source
        if overrides.has_key('overrides'):
            SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning,
                "The \"overrides\" keyword to Builder() creation has been deprecated;\n" +\
                "\tspecify the items as keyword arguments to the Builder() call instead.")
            overrides.update(overrides['overrides'])
            del overrides['overrides']
        if overrides.has_key('scanner'):
            SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning,
                                "The \"scanner\" keyword to Builder() creation has been deprecated;\n"
                                "\tuse: source_scanner or target_scanner as appropriate.")
            del overrides['scanner']
        self.overrides = overrides
        # set_suffix() handles the (possibly selector-wrapped) suffix above.
        self.set_suffix(suffix)
        self.set_src_suffix(src_suffix)
        self.ensure_suffix = ensure_suffix
        self.target_factory = target_factory
        self.source_factory = source_factory
        self.target_scanner = target_scanner
        self.source_scanner = source_scanner
        self.emitter = emitter
        # Optional Builder name should only be used for Builders
        # that don't get attached to construction environments.
        if name:
            self.name = name
        self.executor_kw = {}
        if not chdir is _null:
            self.executor_kw['chdir'] = chdir
        self.is_explicit = is_explicit
        if src_builder is None:
            src_builder = []
        elif not SCons.Util.is_List(src_builder):
            src_builder = [ src_builder ]
        self.src_builder = src_builder
    def __nonzero__(self):
        # Builders must never be truth-tested directly (Node.builder may be
        # a placeholder); force callers through Node.has_builder().
        raise InternalError, "Do not test for the Node.builder attribute directly; use Node.has_builder() instead"
def get_name(self, env):
"""Attempts to get the name of the Builder.
Look at the BUILDERS variable of env, expecting it to be a
dictionary containing this Builder, and return the key of the
dictionary. If there's no key, then return a directly-configured
name (if there is one) or the name of the class (by default)."""
try:
index = env['BUILDERS'].values().index(self)
return env['BUILDERS'].keys()[index]
except (AttributeError, KeyError, TypeError, ValueError):
try:
return self.name
except AttributeError:
return str(self.__class__)
    def __cmp__(self, other):
        # Builders compare by their entire configuration (Python 2 cmp).
        return cmp(self.__dict__, other.__dict__)
def splitext(self, path, env=None):
if not env:
env = self.env
if env:
suffixes = self.src_suffixes(env)
else:
suffixes = []
return match_splitext(path, suffixes)
def _adjustixes(self, files, pre, suf, ensure_suffix=False):
if not files:
return []
result = []
if not SCons.Util.is_List(files):
files = [files]
for f in files:
if SCons.Util.is_String(f):
f = SCons.Util.adjustixes(f, pre, suf, ensure_suffix)
result.append(f)
return result
    def _create_nodes(self, env, target = None, source = None):
        """Create and return lists of target and source nodes.

        Sources get the source suffix applied and are turned into Nodes;
        when no target is given, one is derived from the first source via
        its target_from_source() hook.  Any configured emitter then gets a
        chance to rewrite both lists.
        """
        src_suf = self.get_src_suffix(env)
        target_factory = env.get_factory(self.target_factory)
        source_factory = env.get_factory(self.source_factory)
        source = self._adjustixes(source, None, src_suf)
        slist = env.arg2nodes(source, source_factory)
        pre = self.get_prefix(env, slist)
        suf = self.get_suffix(env, slist)
        if target is None:
            # Derive the target from the first source, if it knows how.
            try:
                t_from_s = slist[0].target_from_source
            except AttributeError:
                raise UserError("Do not know how to create a target from source `%s'" % slist[0])
            except IndexError:
                tlist = []
            else:
                splitext = lambda S,self=self,env=env: self.splitext(S,env)
                tlist = [ t_from_s(pre, suf, splitext) ]
        else:
            target = self._adjustixes(target, pre, suf, self.ensure_suffix)
            tlist = env.arg2nodes(target, target_factory, target=target, source=source)
        if self.emitter:
            # The emitter is going to do str(node), but because we're
            # being called *from* a builder invocation, the new targets
            # don't yet have a builder set on them and will look like
            # source files.  Fool the emitter's str() calls by setting
            # up a temporary builder on the new targets.
            new_targets = []
            for t in tlist:
                if not t.is_derived():
                    t.builder_set(self)
                    new_targets.append(t)
            orig_tlist = tlist[:]
            orig_slist = slist[:]
            target, source = self.emitter(target=tlist, source=slist, env=env)
            # Now delete the temporary builders that we attached to any
            # new targets, so that _node_errors() doesn't do weird stuff
            # to them because it thinks they already have builders.
            for t in new_targets:
                if t.builder is self:
                    # Only delete the temporary builder if the emitter
                    # didn't change it on us.
                    t.builder_set(None)
            # Have to call arg2nodes yet again, since it is legal for
            # emitters to spit out strings as well as Node instances.
            tlist = env.arg2nodes(target, target_factory,
                                  target=orig_tlist, source=orig_slist)
            slist = env.arg2nodes(source, source_factory,
                                  target=orig_tlist, source=orig_slist)
        return tlist, slist
    def _execute(self, env, target, source, overwarn={}, executor_kw={}):
        """Core of a Builder call: build source Nodes (running src_builders
        first), create target Nodes, validate them, and attach a (possibly
        shared/batched) Executor.  Returns a NodeList of targets.
        """
        # We now assume that target and source are lists or None.
        if self.src_builder:
            source = self.src_builder_sources(env, source, overwarn)
        if self.single_source and len(source) > 1 and target is None:
            # Fan a multi-source call out into one recursive call per source.
            result = []
            if target is None: target = [None]*len(source)
            for tgt, src in zip(target, source):
                if not tgt is None: tgt = [tgt]
                if not src is None: src = [src]
                result.extend(self._execute(env, tgt, src, overwarn))
            return SCons.Node.NodeList(result)
        overwarn.warn()
        tlist, slist = self._create_nodes(env, target, source)
        # Check for errors with the specified target/source lists.
        _node_errors(self, env, tlist, slist)
        # The targets are fine, so find or make the appropriate Executor to
        # build this particular list of targets from this particular list of
        # sources.
        executor = None
        key = None
        if self.multi:
            # Reuse the first target's existing executor, if any.
            try:
                executor = tlist[0].get_executor(create = 0)
            except (AttributeError, IndexError):
                pass
            else:
                executor.add_sources(slist)
        if executor is None:
            if not self.action:
                fmt = "Builder %s must have an action to build %s."
                raise UserError, fmt % (self.get_name(env or self.env),
                                        map(str,tlist))
            # Batchable actions share one executor per batch key.
            key = self.action.batch_key(env or self.env, tlist, slist)
            if key:
                try:
                    executor = SCons.Executor.GetBatchExecutor(key)
                except KeyError:
                    pass
                else:
                    executor.add_batch(tlist, slist)
        if executor is None:
            executor = SCons.Executor.Executor(self.action, env, [],
                                               tlist, slist, executor_kw)
            if key:
                SCons.Executor.AddBatchExecutor(key, executor)
        # Now set up the relevant information in the target Nodes themselves.
        for t in tlist:
            t.cwd = env.fs.getcwd()
            t.builder_set(self)
            t.env_set(env)
            t.add_source(slist)
            t.set_executor(executor)
            t.set_explicit(self.is_explicit)
        return SCons.Node.NodeList(tlist)
    def __call__(self, env, target=None, source=None, chdir=_null, **kw):
        """Public Builder entry point: apply keyword overrides to `env`,
        normalize srcdir-relative sources, and delegate to _execute().
        """
        # We now assume that target and source are lists or None.
        # The caller (typically Environment.BuilderWrapper) is
        # responsible for converting any scalar values to lists.
        if chdir is _null:
            ekw = self.executor_kw
        else:
            ekw = self.executor_kw.copy()
            ekw['chdir'] = chdir
        if kw:
            if kw.has_key('srcdir'):
                # Make relative string sources relative to srcdir.
                def prependDirIfRelative(f, srcdir=kw['srcdir']):
                    import os.path
                    if SCons.Util.is_String(f) and not os.path.isabs(f):
                        f = os.path.join(srcdir, f)
                    return f
                if not SCons.Util.is_List(source):
                    source = [source]
                source = map(prependDirIfRelative, source)
                del kw['srcdir']
            if self.overrides:
                # Call-time keywords win over Builder-creation overrides.
                env_kw = self.overrides.copy()
                env_kw.update(kw)
            else:
                env_kw = kw
        else:
            env_kw = self.overrides
        env = env.Override(env_kw)
        return self._execute(env, target, source, OverrideWarner(kw), ekw)
def adjust_suffix(self, suff):
if suff and not suff[0] in [ '.', '_', '$' ]:
return '.' + suff
return suff
def get_prefix(self, env, sources=[]):
prefix = self.prefix
if callable(prefix):
prefix = prefix(env, sources)
return env.subst(prefix)
def set_suffix(self, suffix):
if not callable(suffix):
suffix = self.adjust_suffix(suffix)
self.suffix = suffix
def get_suffix(self, env, sources=[]):
suffix = self.suffix
if callable(suffix):
suffix = suffix(env, sources)
return env.subst(suffix)
def set_src_suffix(self, src_suffix):
if not src_suffix:
src_suffix = []
elif not SCons.Util.is_List(src_suffix):
src_suffix = [ src_suffix ]
adjust = lambda suf, s=self: \
callable(suf) and suf or s.adjust_suffix(suf)
self.src_suffix = map(adjust, src_suffix)
def get_src_suffix(self, env):
"""Get the first src_suffix in the list of src_suffixes."""
ret = self.src_suffixes(env)
if not ret:
return ''
return ret[0]
def add_emitter(self, suffix, emitter):
"""Add a suffix-emitter mapping to this Builder.
This assumes that emitter has been initialized with an
appropriate dictionary type, and will throw a TypeError if
not, so the caller is responsible for knowing that this is an
appropriate method to call for the Builder in question.
"""
self.emitter[suffix] = emitter
def add_src_builder(self, builder):
"""
Add a new Builder to the list of src_builders.
This requires wiping out cached values so that the computed
lists of source suffixes get re-calculated.
"""
self._memo = {}
self.src_builder.append(builder)
def _get_sdict(self, env):
"""
Returns a dictionary mapping all of the source suffixes of all
src_builders of this Builder to the underlying Builder that
should be called first.
This dictionary is used for each target specified, so we save a
lot of extra computation by memoizing it for each construction
environment.
Note that this is re-computed each time, not cached, because there
might be changes to one of our source Builders (or one of their
source Builders, and so on, and so on...) that we can't "see."
The underlying methods we call cache their computed values,
though, so we hope repeatedly aggregating them into a dictionary
like this won't be too big a hit. We may need to look for a
better way to do this if performance data show this has turned
into a significant bottleneck.
"""
sdict = {}
for bld in self.get_src_builders(env):
for suf in bld.src_suffixes(env):
sdict[suf] = bld
return sdict
    def src_builder_sources(self, env, source, overwarn={}):
        # Run each source through the appropriate source Builder (e.g.
        # .y -> .c before .c -> .o), returning the list of intermediate
        # nodes this Builder should actually consume.
        sdict = self._get_sdict(env)
        src_suffixes = self.src_suffixes(env)
        lengths = list(set(map(len, src_suffixes)))
        def match_src_suffix(name, src_suffixes=src_suffixes, lengths=lengths):
            # Compare only the distinct suffix lengths, so we slice the
            # name once per length instead of once per suffix.
            node_suffixes = map(lambda l, n=name: n[-l:], lengths)
            for suf in src_suffixes:
                if suf in node_suffixes:
                    return suf
            return None
        result = []
        for s in SCons.Util.flatten(source):
            if SCons.Util.is_String(s):
                match_suffix = match_src_suffix(env.subst(s))
                if not match_suffix and not '.' in s:
                    # A bare name with no extension: assume the default
                    # source suffix for this Builder.
                    src_suf = self.get_src_suffix(env)
                    s = self._adjustixes(s, None, src_suf)[0]
            else:
                match_suffix = match_src_suffix(s.name)
            if match_suffix:
                try:
                    bld = sdict[match_suffix]
                except KeyError:
                    # Suffix belongs to this Builder directly; no
                    # intermediate build step is needed.
                    result.append(s)
                else:
                    tlist = bld._execute(env, None, [s], overwarn)
                    # If the subsidiary Builder returned more than one
                    # target, then filter out any sources that this
                    # Builder isn't capable of building.
                    if len(tlist) > 1:
                        mss = lambda t, m=match_src_suffix: m(t.name)
                        tlist = filter(mss, tlist)
                    result.extend(tlist)
            else:
                result.append(s)
        source_factory = env.get_factory(self.source_factory)
        return env.arg2nodes(result, source_factory)
    def _get_src_builders_key(self, env):
        # Memoization key for get_src_builders(): results are cached
        # per construction environment identity.
        return id(env)

    memoizer_counters.append(SCons.Memoize.CountDict('get_src_builders', _get_src_builders_key))
    def get_src_builders(self, env):
        """
        Returns the list of source Builders for this Builder.
        This exists mainly to look up Builders referenced as
        strings in the 'BUILDER' variable of the construction
        environment and cache the result.
        """
        memo_key = id(env)
        try:
            memo_dict = self._memo['get_src_builders']
        except KeyError:
            # First call ever: create the per-method memo dictionary.
            memo_dict = {}
            self._memo['get_src_builders'] = memo_dict
        else:
            try:
                return memo_dict[memo_key]
            except KeyError:
                # Memo dict exists but not for this environment.
                pass
        builders = []
        for bld in self.src_builder:
            if SCons.Util.is_String(bld):
                try:
                    bld = env['BUILDERS'][bld]
                except KeyError:
                    # Name not registered in this environment; skip it
                    # silently rather than failing the whole lookup.
                    continue
            builders.append(bld)
        memo_dict[memo_key] = builders
        return builders
    def _subst_src_suffixes_key(self, env):
        # Memoization key for subst_src_suffixes(): cached per
        # construction environment identity.
        return id(env)

    memoizer_counters.append(SCons.Memoize.CountDict('subst_src_suffixes', _subst_src_suffixes_key))
    def subst_src_suffixes(self, env):
        """
        The suffix list may contain construction variable expansions,
        so we have to evaluate the individual strings.  To avoid doing
        this over and over, we memoize the results for each construction
        environment.
        """
        memo_key = id(env)
        try:
            memo_dict = self._memo['subst_src_suffixes']
        except KeyError:
            memo_dict = {}
            self._memo['subst_src_suffixes'] = memo_dict
        else:
            try:
                return memo_dict[memo_key]
            except KeyError:
                pass
        # Expand each raw suffix (which may contain $VARS) through env.
        suffixes = map(lambda x, s=self, e=env: e.subst(x), self.src_suffix)
        memo_dict[memo_key] = suffixes
        return suffixes
    def src_suffixes(self, env):
        """
        Returns the list of source suffixes for all src_builders of this
        Builder.
        This is essentially a recursive descent of the src_builder "tree."
        (This value isn't cached because there may be changes in a
        src_builder many levels deep that we can't see.)
        """
        # sdict is used purely for de-duplication while preserving the
        # order in which suffixes are first seen.
        sdict = {}
        suffixes = self.subst_src_suffixes(env)
        for s in suffixes:
            sdict[s] = 1
        for builder in self.get_src_builders(env):
            for s in builder.src_suffixes(env):
                if not sdict.has_key(s):
                    sdict[s] = 1
                    suffixes.append(s)
        return suffixes
class CompositeBuilder(SCons.Util.Proxy):
    """A Builder Proxy whose main purpose is to always have
    a DictCmdGenerator as its action, and to provide access
    to the DictCmdGenerator's add_action() method.
    """

    def __init__(self, builder, cmdgen):
        if __debug__: logInstanceCreation(self, 'Builder.CompositeBuilder')
        SCons.Util.Proxy.__init__(self, builder)

        # cmdgen should always be an instance of DictCmdGenerator.
        self.cmdgen = cmdgen
        self.builder = builder

    def add_action(self, suffix, action):
        # Register the new suffix->action pair, then refresh the
        # wrapped builder's source-suffix list to include it.
        self.cmdgen.add_action(suffix, action)
        self.set_src_suffix(self.cmdgen.src_suffixes())
| 38.812283
| 178
| 0.604986
|
"""SCons.Builder
Builder object subsystem.
A Builder object is a callable that encapsulates information about how
to execute actions to create a target Node (file) from source Nodes
(files), and how to create those dependencies for tracking.
The main entry point here is the Builder() factory method. This provides
a procedural interface that creates the right underlying Builder object
based on the keyword arguments supplied and the types of the arguments.
The goal is for this external interface to be simple enough that the
vast majority of users can create new Builders as necessary to support
building new types of files in their configurations, without having to
dive any deeper into this subsystem.
The base class here is BuilderBase. This is a concrete base class which
does, in fact, represent the Builder objects that we (or users) create.
There is also a proxy that looks like a Builder:
CompositeBuilder
This proxies for a Builder with an action that is actually a
dictionary that knows how to map file suffixes to a specific
action. This is so that we can invoke different actions
(compilers, compile options) for different flavors of source
files.
Builders and their proxies have the following public interface methods
used by other modules:
__call__()
THE public interface. Calling a Builder object (with the
use of internal helper methods) sets up the target and source
dependencies, appropriate mapping to a specific action, and the
environment manipulation necessary for overridden construction
variable. This also takes care of warning about possible mistakes
in keyword arguments.
add_emitter()
Adds an emitter for a specific file suffix, used by some Tool
modules to specify that (for example) a yacc invocation on a .y
can create a .h *and* a .c file.
add_action()
Adds an action for a specific file suffix, heavily used by
Tool modules to add their specific action(s) for turning
a source file into an object file to the global static
and shared object file Builders.
There are the following methods for internal use within this module:
_execute()
        The internal method that handles the heavy lifting when a
Builder is called. This is used so that the __call__() methods
can set up warning about possible mistakes in keyword-argument
overrides, and *then* execute all of the steps necessary so that
the warnings only occur once.
get_name()
Returns the Builder's name within a specific Environment,
primarily used to try to return helpful information in error
messages.
adjust_suffix()
get_prefix()
get_suffix()
get_src_suffix()
set_src_suffix()
Miscellaneous stuff for handling the prefix and suffix
manipulation we use in turning source file names into target
file names.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Builder.py 3897 2009/01/13 06:45:54 scons"
import UserDict
import UserList
import SCons.Action
from SCons.Debug import logInstanceCreation
from SCons.Errors import InternalError, UserError
import SCons.Executor
import SCons.Memoize
import SCons.Node
import SCons.Node.FS
import SCons.Util
import SCons.Warnings
class _Null:
    # Unique sentinel type used to distinguish "argument not supplied"
    # from legitimate values such as None.
    pass

# NOTE: _null is the class object itself, not an instance; callers
# compare with identity ("chdir is _null").
_null = _Null
def match_splitext(path, suffixes = []):
    # Split path into [root, suffix], preferring the longest entry in
    # `suffixes` that matches the end of path; fall back to the generic
    # SCons.Util.splitext() when nothing matches.  (Python 2 idioms:
    # filter() returns a list here, and map(None, lens, items) pairs
    # each suffix with its length so max() picks the longest.)
    if suffixes:
        matchsuf = filter(lambda S,path=path: path[-len(S):] == S,
                          suffixes)
        if matchsuf:
            suf = max(map(None, map(len, matchsuf), matchsuf))[1]
            return [path[:-len(suf)], path[-len(suf):]]
    return SCons.Util.splitext(path)
class DictCmdGenerator(SCons.Util.Selector):
    """This is a callable class that can be used as a
    command generator function.  It holds on to a dictionary
    mapping file suffixes to Actions.  It uses that dictionary
    to return the proper action based on the file suffix of
    the source file."""

    def __init__(self, dict=None, source_ext_match=1):
        SCons.Util.Selector.__init__(self, dict)
        # When true, every source file must share a single extension.
        self.source_ext_match = source_ext_match

    def src_suffixes(self):
        return self.keys()

    def add_action(self, suffix, action):
        """Add a suffix-action pair to the mapping.
        """
        self[suffix] = action

    def __call__(self, target, source, env, for_signature):
        # Select the Action for this build based on the source suffix.
        if not source:
            return []

        if self.source_ext_match:
            # All sources must agree on one extension; mixing (e.g.
            # .c and .cpp) is ambiguous and rejected outright.
            suffixes = self.src_suffixes()
            ext = None
            for src in map(str, source):
                my_ext = match_splitext(src, suffixes)[1]
                if ext and my_ext != ext:
                    raise UserError("While building `%s' from `%s': Cannot build multiple sources with different extensions: %s, %s" % (repr(map(str, target)), src, ext, my_ext))
                ext = my_ext
        else:
            # Only the first source's extension matters.
            ext = match_splitext(str(source[0]), self.src_suffixes())[1]

        if not ext:
            # No usable extension means we can't pick an action.
            raise UserError("While building `%s': Cannot deduce file extension from source files: %s" % (repr(map(str, target)), repr(map(str, source))))
        try:
            ret = SCons.Util.Selector.__call__(self, env, source, ext)
        except KeyError, e:
            raise UserError("Ambiguous suffixes after environment substitution: %s == %s == %s" % (e[0], e[1], e[2]))
        if ret is None:
            raise UserError("While building `%s' from `%s': Don't know how to build from a source file with suffix `%s'. Expected a suffix in this list: %s." % \
                            (repr(map(str, target)), repr(map(str, source)), ext, repr(self.keys())))
        return ret
class CallableSelector(SCons.Util.Selector):
    """A callable dictionary that will, in turn, call the value it
    finds if it can."""
    def __call__(self, env, source):
        selected = SCons.Util.Selector.__call__(self, env, source)
        # A callable entry is resolved against the same (env, source).
        if callable(selected):
            return selected(env, source)
        return selected
class DictEmitter(SCons.Util.Selector):
    """A callable dictionary mapping file suffixes to emitters.
    Calling it selects the emitter registered for the suffix of the
    first source file and delegates to it for the (target, source)
    lists; when no emitter is registered for that suffix, the original
    target and source are returned unchanged.
    """
    def __call__(self, target, source, env):
        selected = SCons.Util.Selector.__call__(self, env, source)
        if selected:
            target, source = selected(target, source, env)
        return (target, source)
class ListEmitter(UserList.UserList):
    """A callable list of emitters, applied in order: each emitter's
    (target, source) result is fed to the next, and the final pair is
    returned.
    """
    def __call__(self, target, source, env):
        for emit in self.data:
            target, source = emit(target, source, env)
        return (target, source)
# These are common errors when calling a Builder: they look like the
# real 'target' and 'source' keyword arguments, so we issue warnings
# when we see them.  The warnings can, of course, be disabled.
misleading_keywords = {
    'targets'   : 'target',
    'sources'   : 'source',
}
class OverrideWarner(UserDict.UserDict):
    """A class for warning about keyword arguments that we use as
    overrides in a Builder call.
    This class exists to handle the fact that a single Builder call
    can actually invoke multiple builders.  This class only emits the
    warnings once, no matter how many Builders are invoked.
    """
    def __init__(self, dict):
        UserDict.UserDict.__init__(self, dict)
        if __debug__: logInstanceCreation(self, 'Builder.OverrideWarner')
        # Flag flipped to true after the first warn() so the same
        # keywords are not reported for every sub-builder invocation.
        self.already_warned = None
    def warn(self):
        if self.already_warned:
            return
        for k in self.keys():
            if misleading_keywords.has_key(k):
                alt = misleading_keywords[k]
                msg = "Did you mean to use `%s' instead of `%s'?" % (alt, k)
                SCons.Warnings.warn(SCons.Warnings.MisleadingKeywordsWarning, msg)
        self.already_warned = 1
def Builder(**kw):
    """A factory for builder objects.

    Normalizes the keyword arguments (generator vs. action, dictionary
    actions, string/dict/list emitters) and returns either a plain
    BuilderBase or, for dictionary actions, a CompositeBuilder proxy.
    """
    composite = None
    if kw.has_key('generator'):
        # 'generator' and 'action' are mutually exclusive ways of
        # specifying what the builder does.
        if kw.has_key('action'):
            raise UserError, "You must not specify both an action and a generator."
        kw['action'] = SCons.Action.CommandGeneratorAction(kw['generator'], {})
        del kw['generator']
    elif kw.has_key('action'):
        source_ext_match = kw.get('source_ext_match', 1)
        if kw.has_key('source_ext_match'):
            del kw['source_ext_match']
        if SCons.Util.is_Dict(kw['action']):
            # A dict action maps source suffixes to actions; wrap it in
            # a DictCmdGenerator and remember it so we can return a
            # CompositeBuilder that exposes add_action().
            composite = DictCmdGenerator(kw['action'], source_ext_match)
            kw['action'] = SCons.Action.CommandGeneratorAction(composite, {})
            kw['src_suffix'] = composite.src_suffixes()
        else:
            kw['action'] = SCons.Action.Action(kw['action'])

    if kw.has_key('emitter'):
        emitter = kw['emitter']
        if SCons.Util.is_String(emitter):
            # This allows users to pass in an Environment
            # variable reference (like "$FOO") as an emitter.
            # We will look in that Environment variable for
            # a callable to use as the actual emitter.
            var = SCons.Util.get_environment_var(emitter)
            if not var:
                raise UserError, "Supplied emitter '%s' does not appear to refer to an Environment variable" % emitter
            kw['emitter'] = EmitterProxy(var)
        elif SCons.Util.is_Dict(emitter):
            kw['emitter'] = DictEmitter(emitter)
        elif SCons.Util.is_List(emitter):
            kw['emitter'] = ListEmitter(emitter)

    result = apply(BuilderBase, (), kw)

    if not composite is None:
        result = CompositeBuilder(result, composite)

    return result
def _node_errors(builder, env, tlist, slist):
    """Validate that the lists of target and source nodes are
    legal for this builder and environment.  Raise errors or
    issue warnings as appropriate.
    """
    # First, figure out if there are any errors in the way the targets
    # were specified.
    for t in tlist:
        if t.side_effect:
            raise UserError, "Multiple ways to build the same target were specified for: %s" % t
        if t.has_explicit_builder():
            if not t.env is None and not t.env is env:
                action = t.builder.action
                t_contents = action.get_contents(tlist, slist, t.env)
                contents = action.get_contents(tlist, slist, env)

                # Same action under both environments is merely
                # suspicious (warn); different actions are a hard error.
                if t_contents == contents:
                    msg = "Two different environments were specified for target %s,\n\tbut they appear to have the same action: %s" % (t, action.genstring(tlist, slist, t.env))
                    SCons.Warnings.warn(SCons.Warnings.DuplicateEnvironmentWarning, msg)
                else:
                    msg = "Two environments with different actions were specified for the same target: %s" % t
                    raise UserError, msg
            if builder.multi:
                if t.builder != builder:
                    msg = "Two different builders (%s and %s) were specified for the same target: %s" % (t.builder.get_name(env), builder.get_name(env), t)
                    raise UserError, msg
                # TODO(batch):  list constructed each time!
                if t.get_executor().get_all_targets() != tlist:
                    msg = "Two different target lists have a target in common: %s  (from %s and from %s)" % (t, map(str, t.get_executor().get_all_targets()), map(str, tlist))
                    raise UserError, msg
            elif t.sources != slist:
                msg = "Multiple ways to build the same target were specified for: %s  (from %s and from %s)" % (t, map(str, t.sources), map(str, slist))
                raise UserError, msg

    if builder.single_source:
        if len(slist) > 1:
            raise UserError, "More than one source given for single-source builder: targets=%s sources=%s" % (map(str,tlist), map(str,slist))
class EmitterProxy:
    """This is a callable class that can act as a
    Builder emitter.  It holds on to a string that
    is a key into an Environment dictionary, and will
    look there at actual build time to see if it holds
    a callable.  If so, we will call that as the actual
    emitter."""
    def __init__(self, var):
        self.var = SCons.Util.to_String(var)

    def __call__(self, target, source, env):
        emitter = self.var

        # Recursively resolve the variable: follow string values
        # through the environment until we reach a non-string.  We
        # can't use env.subst() because it deals only in strings, and
        # the stored value may be a callable or a list of callables.
        while SCons.Util.is_String(emitter) and env.has_key(emitter):
            emitter = env[emitter]
        if callable(emitter):
            target, source = emitter(target, source, env)
        elif SCons.Util.is_List(emitter):
            for e in emitter:
                target, source = e(target, source, env)

        return (target, source)

    def __cmp__(self, other):
        return cmp(self.var, other.var)
class BuilderBase:
    """Base class for Builders, objects that create output
    nodes (files) from input nodes (files).
    """

    # When memoization instrumentation is enabled, the metaclass wraps
    # the methods registered below in memoizer_counters.
    if SCons.Memoize.use_memoizer:
        __metaclass__ = SCons.Memoize.Memoized_Metaclass

    memoizer_counters = []

    def __init__(self,  action = None,
                        prefix = '',
                        suffix = '',
                        src_suffix = '',
                        target_factory = None,
                        source_factory = None,
                        target_scanner = None,
                        source_scanner = None,
                        emitter = None,
                        multi = 0,
                        env = None,
                        single_source = 0,
                        name = None,
                        chdir = _null,
                        is_explicit = 1,
                        src_builder = None,
                        ensure_suffix = False,
                        **overrides):
        if __debug__: logInstanceCreation(self, 'Builder.BuilderBase')
        self._memo = {}
        self.action = action
        self.multi = multi
        # A dictionary prefix/suffix selects per-source values; wrap it
        # so it can be called like a function at build time.
        if SCons.Util.is_Dict(prefix):
            prefix = CallableSelector(prefix)
        self.prefix = prefix
        if SCons.Util.is_Dict(suffix):
            suffix = CallableSelector(suffix)
        self.env = env
        self.single_source = single_source
        if overrides.has_key('overrides'):
            SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning,
                "The \"overrides\" keyword to Builder() creation has been deprecated;\n" +\
                "\tspecify the items as keyword arguments to the Builder() call instead.")
            overrides.update(overrides['overrides'])
            del overrides['overrides']
        if overrides.has_key('scanner'):
            SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning,
                                "The \"scanner\" keyword to Builder() creation has been deprecated;\n"
                                "\tuse: source_scanner or target_scanner as appropriate.")
            del overrides['scanner']
        self.overrides = overrides

        self.set_suffix(suffix)
        self.set_src_suffix(src_suffix)
        self.ensure_suffix = ensure_suffix

        self.target_factory = target_factory
        self.source_factory = source_factory
        self.target_scanner = target_scanner
        self.source_scanner = source_scanner

        self.emitter = emitter

        # Optional user-visible name; otherwise get_name() derives one
        # from the BUILDERS dictionary or the class name.
        if name:
            self.name = name
        self.executor_kw = {}
        if not chdir is _null:
            self.executor_kw['chdir'] = chdir
        self.is_explicit = is_explicit

        if src_builder is None:
            src_builder = []
        elif not SCons.Util.is_List(src_builder):
            src_builder = [ src_builder ]
        self.src_builder = src_builder

    def __nonzero__(self):
        # Guard against "if node.builder:" which would defeat the
        # Node.has_builder() abstraction.
        raise InternalError, "Do not test for the Node.builder attribute directly; use Node.has_builder() instead"

    def get_name(self, env):
        """Attempts to get the name of the Builder.
        Look at the BUILDERS variable of env, expecting it to be a
        dictionary containing this Builder, and return the key of the
        dictionary.  If there's no key, then return a directly-configured
        name (if there is one) or the name of the class (by default)."""
        try:
            index = env['BUILDERS'].values().index(self)
            return env['BUILDERS'].keys()[index]
        except (AttributeError, KeyError, TypeError, ValueError):
            try:
                return self.name
            except AttributeError:
                return str(self.__class__)

    def __cmp__(self, other):
        return cmp(self.__dict__, other.__dict__)

    def splitext(self, path, env=None):
        # Like os.path.splitext(), but aware of this Builder's
        # multi-character source suffixes.
        if not env:
            env = self.env
        if env:
            suffixes = self.src_suffixes(env)
        else:
            suffixes = []
        return match_splitext(path, suffixes)

    def _adjustixes(self, files, pre, suf, ensure_suffix=False):
        # Apply prefix/suffix to each string entry in files; Node
        # objects pass through untouched.  Always returns a list.
        if not files:
            return []
        result = []
        if not SCons.Util.is_List(files):
            files = [files]

        for f in files:
            if SCons.Util.is_String(f):
                f = SCons.Util.adjustixes(f, pre, suf, ensure_suffix)
            result.append(f)
        return result

    def _create_nodes(self, env, target = None, source = None):
        """Create and return lists of target and source nodes.
        """
        src_suf = self.get_src_suffix(env)

        target_factory = env.get_factory(self.target_factory)
        source_factory = env.get_factory(self.source_factory)

        source = self._adjustixes(source, None, src_suf)
        slist = env.arg2nodes(source, source_factory)

        pre = self.get_prefix(env, slist)
        suf = self.get_suffix(env, slist)

        if target is None:
            # No explicit target: derive one from the first source via
            # its target_from_source() method.
            try:
                t_from_s = slist[0].target_from_source
            except AttributeError:
                raise UserError("Do not know how to create a target from source `%s'" % slist[0])
            except IndexError:
                tlist = []
            else:
                splitext = lambda S,self=self,env=env: self.splitext(S,env)
                tlist = [ t_from_s(pre, suf, splitext) ]
        else:
            target = self._adjustixes(target, pre, suf, self.ensure_suffix)
            tlist = env.arg2nodes(target, target_factory, target=target, source=source)

        if self.emitter:
            # Before calling the emitter, temporarily mark any target
            # node that is not already derived as being built by this
            # builder, so the emitter sees the targets in a consistent
            # state.  The temporary assignment is undone below.
            new_targets = []
            for t in tlist:
                if not t.is_derived():
                    t.builder_set(self)
                    new_targets.append(t)

            orig_tlist = tlist[:]
            orig_slist = slist[:]

            target, source = self.emitter(target=tlist, source=slist, env=env)

            # Undo the temporary builder assignments made above.
            for t in new_targets:
                if t.builder is self:
                    # Only delete the temporary builder if the emitter
                    # didn't change it on us.
                    t.builder_set(None)

            # Convert whatever the emitter returned back into Nodes.
            tlist = env.arg2nodes(target, target_factory,
                                  target=orig_tlist, source=orig_slist)
            slist = env.arg2nodes(source, source_factory,
                                  target=orig_tlist, source=orig_slist)

        return tlist, slist

    def _execute(self, env, target, source, overwarn={}, executor_kw={}):
        # Core build-setup logic shared by __call__() and
        # src_builder_sources().  Returns the list of target Nodes.

        # First pass any source files through a source builder chain
        # (e.g. .y -> .c) if one is configured.
        if self.src_builder:
            source = self.src_builder_sources(env, source, overwarn)

        if self.single_source and len(source) > 1 and target is None:
            # Fan out: build each source individually, pairing it with
            # its own derived target.
            result = []
            if target is None: target = [None]*len(source)
            for tgt, src in zip(target, source):
                if not tgt is None: tgt = [tgt]
                if not src is None: src = [src]
                result.extend(self._execute(env, tgt, src, overwarn))
            return SCons.Node.NodeList(result)

        overwarn.warn()

        tlist, slist = self._create_nodes(env, target, source)

        # Check for errors with the specified target/source lists.
        _node_errors(self, env, tlist, slist)

        # The targets are fine, so find or make the appropriate Executor
        # to build this particular list of targets from this particular
        # list of sources.
        executor = None
        key = None

        if self.multi:
            try:
                executor = tlist[0].get_executor(create = 0)
            except (AttributeError, IndexError):
                pass
            else:
                executor.add_sources(slist)

        if executor is None:
            if not self.action:
                fmt = "Builder %s must have an action to build %s."
                raise UserError, fmt % (self.get_name(env or self.env),
                                        map(str,tlist))
            key = self.action.batch_key(env or self.env, tlist, slist)
            if key:
                # Batchable action: reuse an existing batch Executor
                # for the same key if one was already created.
                try:
                    executor = SCons.Executor.GetBatchExecutor(key)
                except KeyError:
                    pass
                else:
                    executor.add_batch(tlist, slist)

        if executor is None:
            executor = SCons.Executor.Executor(self.action, env, [],
                                               tlist, slist, executor_kw)
            if key:
                SCons.Executor.AddBatchExecutor(key, executor)

        # Now set up the relevant information in the target Nodes
        # themselves.
        for t in tlist:
            t.cwd = env.fs.getcwd()
            t.builder_set(self)
            t.env_set(env)
            t.add_source(slist)
            t.set_executor(executor)
            t.set_explicit(self.is_explicit)

        return SCons.Node.NodeList(tlist)

    def __call__(self, env, target=None, source=None, chdir=_null, **kw):
        # THE public interface: sets up targets, sources and the
        # override environment, then delegates to _execute().

        # chdir overrides the Executor keywords for this call only.
        if chdir is _null:
            ekw = self.executor_kw
        else:
            ekw = self.executor_kw.copy()
            ekw['chdir'] = chdir
        if kw:
            if kw.has_key('srcdir'):
                # Interpret relative source paths relative to srcdir.
                def prependDirIfRelative(f, srcdir=kw['srcdir']):
                    import os.path
                    if SCons.Util.is_String(f) and not os.path.isabs(f):
                        f = os.path.join(srcdir, f)
                    return f
                if not SCons.Util.is_List(source):
                    source = [source]
                source = map(prependDirIfRelative, source)
                del kw['srcdir']
            if self.overrides:
                env_kw = self.overrides.copy()
                env_kw.update(kw)
            else:
                env_kw = kw
        else:
            env_kw = self.overrides
        env = env.Override(env_kw)
        return self._execute(env, target, source, OverrideWarner(kw), ekw)

    def adjust_suffix(self, suff):
        # Prepend '.' unless the suffix is empty or already starts with
        # '.', '_' or '$' (a construction-variable reference).
        if suff and not suff[0] in [ '.', '_', '$' ]:
            return '.' + suff
        return suff

    def get_prefix(self, env, sources=[]):
        # A callable prefix is resolved with (env, sources) first, then
        # the result is expanded through the environment.
        prefix = self.prefix
        if callable(prefix):
            prefix = prefix(env, sources)
        return env.subst(prefix)

    def set_suffix(self, suffix):
        # Callables are stored as-is and resolved by get_suffix().
        if not callable(suffix):
            suffix = self.adjust_suffix(suffix)
        self.suffix = suffix

    def get_suffix(self, env, sources=[]):
        # A callable suffix is resolved with (env, sources) first, then
        # the result is expanded through the environment.
        suffix = self.suffix
        if callable(suffix):
            suffix = suffix(env, sources)
        return env.subst(suffix)

    def set_src_suffix(self, src_suffix):
        # Normalize to a list; callables are kept, strings are
        # normalized via adjust_suffix().
        if not src_suffix:
            src_suffix = []
        elif not SCons.Util.is_List(src_suffix):
            src_suffix = [ src_suffix ]
        adjust = lambda suf, s=self: \
                        callable(suf) and suf or s.adjust_suffix(suf)
        self.src_suffix = map(adjust, src_suffix)

    def get_src_suffix(self, env):
        """Get the first src_suffix in the list of src_suffixes."""
        ret = self.src_suffixes(env)
        if not ret:
            return ''
        return ret[0]

    def add_emitter(self, suffix, emitter):
        """Add a suffix-emitter mapping to this Builder.
        This assumes that emitter has been initialized with an
        appropriate dictionary type, and will throw a TypeError if
        not, so the caller is responsible for knowing that this is an
        appropriate method to call for the Builder in question.
        """
        self.emitter[suffix] = emitter

    def add_src_builder(self, builder):
        """
        Add a new Builder to the list of src_builders.
        This requires wiping out cached values so that the computed
        lists of source suffixes get re-calculated.
        """
        self._memo = {}
        self.src_builder.append(builder)

    def _get_sdict(self, env):
        """
        Returns a dictionary mapping all of the source suffixes of all
        src_builders of this Builder to the underlying Builder that
        should be called first.
        This dictionary is used for each target specified, so we save a
        lot of extra computation by memoizing it for each construction
        environment.
        Note that this is re-computed each time, not cached, because there
        might be changes to one of our source Builders (or one of their
        source Builders, and so on, and so on...) that we can't "see."
        The underlying methods we call cache their computed values,
        though, so we hope repeatedly aggregating them into a dictionary
        like this won't be too big a hit.  We may need to look for a
        better way to do this if performance data show this has turned
        into a significant bottleneck.
        """
        sdict = {}
        for bld in self.get_src_builders(env):
            for suf in bld.src_suffixes(env):
                sdict[suf] = bld
        return sdict

    def src_builder_sources(self, env, source, overwarn={}):
        # Run each source through the appropriate source Builder chain
        # (e.g. .y -> .c before .c -> .o) and return the nodes this
        # Builder should actually consume.
        sdict = self._get_sdict(env)

        src_suffixes = self.src_suffixes(env)

        lengths = list(set(map(len, src_suffixes)))

        def match_src_suffix(name, src_suffixes=src_suffixes, lengths=lengths):
            # Slice the name once per distinct suffix length, then test
            # membership against the known suffixes.
            node_suffixes = map(lambda l, n=name: n[-l:], lengths)
            for suf in src_suffixes:
                if suf in node_suffixes:
                    return suf
            return None

        result = []
        for s in SCons.Util.flatten(source):
            if SCons.Util.is_String(s):
                match_suffix = match_src_suffix(env.subst(s))
                if not match_suffix and not '.' in s:
                    # Extension-less name: assume the default source
                    # suffix for this Builder.
                    src_suf = self.get_src_suffix(env)
                    s = self._adjustixes(s, None, src_suf)[0]
            else:
                match_suffix = match_src_suffix(s.name)
            if match_suffix:
                try:
                    bld = sdict[match_suffix]
                except KeyError:
                    result.append(s)
                else:
                    tlist = bld._execute(env, None, [s], overwarn)
                    # If the subsidiary Builder returned more than one
                    # target, filter out any that this Builder cannot
                    # itself build.
                    if len(tlist) > 1:
                        mss = lambda t, m=match_src_suffix: m(t.name)
                        tlist = filter(mss, tlist)
                    result.extend(tlist)
            else:
                result.append(s)

        source_factory = env.get_factory(self.source_factory)

        return env.arg2nodes(result, source_factory)

    def _get_src_builders_key(self, env):
        # Memoization key: per construction environment identity.
        return id(env)

    memoizer_counters.append(SCons.Memoize.CountDict('get_src_builders', _get_src_builders_key))

    def get_src_builders(self, env):
        """
        Returns the list of source Builders for this Builder.
        This exists mainly to look up Builders referenced as
        strings in the 'BUILDER' variable of the construction
        environment and cache the result.
        """
        memo_key = id(env)
        try:
            memo_dict = self._memo['get_src_builders']
        except KeyError:
            memo_dict = {}
            self._memo['get_src_builders'] = memo_dict
        else:
            try:
                return memo_dict[memo_key]
            except KeyError:
                pass

        builders = []
        for bld in self.src_builder:
            if SCons.Util.is_String(bld):
                try:
                    bld = env['BUILDERS'][bld]
                except KeyError:
                    # Name not registered in this environment; skip it.
                    continue
            builders.append(bld)

        memo_dict[memo_key] = builders
        return builders

    def _subst_src_suffixes_key(self, env):
        # Memoization key: per construction environment identity.
        return id(env)

    memoizer_counters.append(SCons.Memoize.CountDict('subst_src_suffixes', _subst_src_suffixes_key))

    def subst_src_suffixes(self, env):
        """
        The suffix list may contain construction variable expansions,
        so we have to evaluate the individual strings.  To avoid doing
        this over and over, we memoize the results for each construction
        environment.
        """
        memo_key = id(env)
        try:
            memo_dict = self._memo['subst_src_suffixes']
        except KeyError:
            memo_dict = {}
            self._memo['subst_src_suffixes'] = memo_dict
        else:
            try:
                return memo_dict[memo_key]
            except KeyError:
                pass
        suffixes = map(lambda x, s=self, e=env: e.subst(x), self.src_suffix)
        memo_dict[memo_key] = suffixes
        return suffixes

    def src_suffixes(self, env):
        """
        Returns the list of source suffixes for all src_builders of this
        Builder.
        This is essentially a recursive descent of the src_builder "tree."
        (This value isn't cached because there may be changes in a
        src_builder many levels deep that we can't see.)
        """
        # sdict is used only for de-duplication while preserving the
        # order in which suffixes are first encountered.
        sdict = {}
        suffixes = self.subst_src_suffixes(env)
        for s in suffixes:
            sdict[s] = 1
        for builder in self.get_src_builders(env):
            for s in builder.src_suffixes(env):
                if not sdict.has_key(s):
                    sdict[s] = 1
                    suffixes.append(s)
        return suffixes
class CompositeBuilder(SCons.Util.Proxy):
    """A Builder Proxy whose main purpose is to always have
    a DictCmdGenerator as its action, and to provide access
    to the DictCmdGenerator's add_action() method.
    """

    def __init__(self, builder, cmdgen):
        if __debug__: logInstanceCreation(self, 'Builder.CompositeBuilder')
        SCons.Util.Proxy.__init__(self, builder)

        # cmdgen should always be an instance of DictCmdGenerator.
        self.cmdgen = cmdgen
        self.builder = builder

    def add_action(self, suffix, action):
        # Register the new suffix->action pair, then refresh the
        # wrapped builder's source-suffix list to include it.
        self.cmdgen.add_action(suffix, action)
        self.set_src_suffix(self.cmdgen.src_suffixes())
| false
| true
|
f71a9945ebfc1939e5f3b7f7596845dbf01070cf
| 2,311
|
py
|
Python
|
Semester 4/Open Source Technology/exp1.py
|
atharva8300/Engineering-Practical-Experiments
|
3f7fe4abbbe69a3bbb8aa19892dd7209e70c69ac
|
[
"Unlicense"
] | 7
|
2020-04-20T19:32:23.000Z
|
2021-08-03T16:50:15.000Z
|
Semester 4/Open Source Technology/exp1.py
|
atharva8300/Engineering-Practical-Experiments
|
3f7fe4abbbe69a3bbb8aa19892dd7209e70c69ac
|
[
"Unlicense"
] | null | null | null |
Semester 4/Open Source Technology/exp1.py
|
atharva8300/Engineering-Practical-Experiments
|
3f7fe4abbbe69a3bbb8aa19892dd7209e70c69ac
|
[
"Unlicense"
] | 5
|
2019-04-20T06:35:25.000Z
|
2021-12-12T12:25:08.000Z
|
print("String example")
s = "this is a test String"
print(f"String: {s}")
print(f"String Capitalized: {s.capitalize()}")
print(f"String Finding index: {s.find('e')}")
print(f"String Lowercase: {s.lower()}")
print(f"String Uppercase: {s.upper()}")
print(f"String Length: {len(s)}")
print(f"String Replace: {s.replace('this', 'THIS')}")
print(f"String Swapcase: {s.swapcase()}")
print(f"String Title: {s.title()}")
print()
print("List examples")
L = ['C++', 'Java', 'Python']
print(f"List: {L}")
print(f"List slicing: {L[1:]}")
print(f"List slicing: {L[::-1]}")
print(f"List slicing: {L[0:2]}")
L = [1, 2, 3, 4, 5, 6, 7, 8, 9]
print(f"List: {L}")
L.append(10)
print(f"List Appending:{L}")
print(f"List Popping:{L.pop()}")
L.insert(4, 20)
print(f"List Inserting : {L}") # position, value
L.reverse()
print(f"List Reversed: {L}")
L.sort()
reversed_list = reversed(L)
print("Reversed list: {}".format(reversed_list))
for i in reversed_list:
print(i)
print(f"List Sorted: {L}")
print("\nTuple example")
tup1 = ('physics', 'chemistry', 1997, 2000)
tup2 = (1, 2, 3, 4, 5, 6, 7)
print(f"tup1[0]: {tup1[0]}")
print(f"tup2[1:5]: {tup2[1:5]}")
tup3 = tup1 + tup2
print(f"Creating new from existing: tup3: {tup3}")
print("\nDictionary examples")
d = {'Name': 'Test', 'Age': 99, 'Class': 'failed'}
print(f"Dicstionary d: {d}")
d['Age'] = 0 # update existing entry
d['School'] = "Under a tree" # Add new entry
print(f"Updating d['Age']: {d['Age']}")
print(f"Updating d['School']: {d['School']}")
print(f"Dictionary d: {d}")
print(f"Get Qualification : {d.get('Qualification', 'NA')}")
print(f"Dictionary items: {d.items()}")
print(f"Dictionary keys: {d.keys()}")
print(f"Dictionary values: {d.values()}")
print("\nSets example")
my_set = {1, 3}
print(my_set)
my_set.add(2) # add an element
print(my_set)
my_set.update([2, 3, 4]) # add multiple elements
print(my_set)
my_set.update([4, 5], {1, 6, 8}) # add list and set
print(my_set)
my_set.remove(6)
print(my_set)
my_set.pop() # pop another random element
print(my_set)
A = {1, 2, 3, 4, 5}
B = {4, 5, 6, 7, 8}
print(A | B) # Union or A.union(B)
print(A & B) # Intersection or A.intersection(B)
print(A - B) # Difference or A.difference(B)
A = frozenset([1, 2, 3, 4])
B = frozenset([3, 4, 5, 6])
print(A.difference(B))
print(A | B)
print(A.add(3)) # Error
| 28.182927
| 60
| 0.633059
|
print("String example")
s = "this is a test String"
print(f"String: {s}")
print(f"String Capitalized: {s.capitalize()}")
print(f"String Finding index: {s.find('e')}")
print(f"String Lowercase: {s.lower()}")
print(f"String Uppercase: {s.upper()}")
print(f"String Length: {len(s)}")
print(f"String Replace: {s.replace('this', 'THIS')}")
print(f"String Swapcase: {s.swapcase()}")
print(f"String Title: {s.title()}")
print()
print("List examples")
L = ['C++', 'Java', 'Python']
print(f"List: {L}")
print(f"List slicing: {L[1:]}")
print(f"List slicing: {L[::-1]}")
print(f"List slicing: {L[0:2]}")
L = [1, 2, 3, 4, 5, 6, 7, 8, 9]
print(f"List: {L}")
L.append(10)
print(f"List Appending:{L}")
print(f"List Popping:{L.pop()}")
L.insert(4, 20)
print(f"List Inserting : {L}")
L.reverse()
print(f"List Reversed: {L}")
L.sort()
reversed_list = reversed(L)
print("Reversed list: {}".format(reversed_list))
for i in reversed_list:
print(i)
print(f"List Sorted: {L}")
print("\nTuple example")
tup1 = ('physics', 'chemistry', 1997, 2000)
tup2 = (1, 2, 3, 4, 5, 6, 7)
print(f"tup1[0]: {tup1[0]}")
print(f"tup2[1:5]: {tup2[1:5]}")
tup3 = tup1 + tup2
print(f"Creating new from existing: tup3: {tup3}")
print("\nDictionary examples")
d = {'Name': 'Test', 'Age': 99, 'Class': 'failed'}
print(f"Dicstionary d: {d}")
d['Age'] = 0
d['School'] = "Under a tree"
print(f"Updating d['Age']: {d['Age']}")
print(f"Updating d['School']: {d['School']}")
print(f"Dictionary d: {d}")
print(f"Get Qualification : {d.get('Qualification', 'NA')}")
print(f"Dictionary items: {d.items()}")
print(f"Dictionary keys: {d.keys()}")
print(f"Dictionary values: {d.values()}")
print("\nSets example")
my_set = {1, 3}
print(my_set)
my_set.add(2)
print(my_set)
my_set.update([2, 3, 4])
print(my_set)
my_set.update([4, 5], {1, 6, 8})
print(my_set)
my_set.remove(6)
print(my_set)
my_set.pop()
print(my_set)
A = {1, 2, 3, 4, 5}
B = {4, 5, 6, 7, 8}
print(A | B)
print(A & B)
print(A - B)
A = frozenset([1, 2, 3, 4])
B = frozenset([3, 4, 5, 6])
print(A.difference(B))
print(A | B)
print(A.add(3))
| true
| true
|
f71a9a29fc9f435c927a8cf78515482f4439afa0
| 105
|
py
|
Python
|
checks/root_path.py
|
Amourspirit/python-ooouno-ex
|
523dd9b89a74aaf887edbcfe1dda316a04c7125b
|
[
"MIT"
] | null | null | null |
checks/root_path.py
|
Amourspirit/python-ooouno-ex
|
523dd9b89a74aaf887edbcfe1dda316a04c7125b
|
[
"MIT"
] | 2
|
2022-03-28T19:03:21.000Z
|
2022-03-29T00:03:34.000Z
|
checks/root_path.py
|
Amourspirit/python-ooouno-ex
|
523dd9b89a74aaf887edbcfe1dda316a04c7125b
|
[
"MIT"
] | null | null | null |
# coding: utf-8
import sys
from pathlib import Path
# Prepend the repository root (the parent of this script's directory) to
# sys.path so project modules are importable when checks run directly.
sys.path.insert(0, str(Path(__file__).parent.parent))
| 26.25
| 53
| 0.771429
|
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent))
| true
| true
|
f71a9a81693d0910320d55fb9df477edf8edac0a
| 209,992
|
py
|
Python
|
cinder/tests/unit/volume/drivers/huawei/test_huawei_drivers.py
|
2020human/cinder
|
04528318848620e4ce2639ea2dd5323783dc7a1f
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/unit/volume/drivers/huawei/test_huawei_drivers.py
|
2020human/cinder
|
04528318848620e4ce2639ea2dd5323783dc7a1f
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/unit/volume/drivers/huawei/test_huawei_drivers.py
|
2020human/cinder
|
04528318848620e4ce2639ea2dd5323783dc7a1f
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2016 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for huawei drivers."""
import collections
import copy
import ddt
import json
import mock
import re
import tempfile
import unittest
from xml.dom import minidom
from cinder import context
from cinder import exception
from cinder import test
from cinder.tests.unit.consistencygroup import fake_cgsnapshot
from cinder.tests.unit.consistencygroup import fake_consistencygroup
from cinder.tests.unit import fake_snapshot
from cinder.tests.unit import fake_volume
from cinder.tests.unit import utils
from cinder.volume import configuration as conf
from cinder.volume.drivers.huawei import constants
from cinder.volume.drivers.huawei import fc_zone_helper
from cinder.volume.drivers.huawei import huawei_conf
from cinder.volume.drivers.huawei import huawei_driver
from cinder.volume.drivers.huawei import huawei_utils
from cinder.volume.drivers.huawei import hypermetro
from cinder.volume.drivers.huawei import replication
from cinder.volume.drivers.huawei import rest_client
from cinder.volume.drivers.huawei import smartx
from cinder.volume import qos_specs
from cinder.volume import volume_types
# Shared fixtures for the Huawei driver tests.
# NOTE(review): "admin_contex" is a long-standing typo for "admin_context";
# left as-is because other tests may reference this name.
admin_contex = context.get_admin_context()
# Lightweight stand-in for a volume object used by some tests.
vol_attrs = ('id', 'lun_type', 'provider_location', 'metadata')
Volume = collections.namedtuple('Volume', vol_attrs)
PROVIDER_LOCATION = '11'  # fake backend LUN id used as provider_location
HOST = 'ubuntu001@backend001#OpenStack_Pool'
ID = '21ec7341-9256-497b-97d9-ef48edcf0635'  # fake volume UUID
# Backend-side name derived from the volume UUID (matches fake responses).
ENCODE_NAME = huawei_utils.encode_name(ID)
ADMIN_METADATA = {'huawei_lun_wwn': '6643e8c1004c5f6723e9f454003'}
TEST_PAIR_ID = "3400a30d844d0004"  # fake replication pair id
REPLICA_DRIVER_DATA = '{"pair_id": "%s", "rmt_lun_id": "1"}' % TEST_PAIR_ID
VOL_METADATA = [{'key': 'hypermetro_id', 'value': '11'},
                {'key': 'remote_lun_id', 'value': '1'}]
hypermetro_devices = """{
"remote_device": {
"RestURL": "http://192.0.2.69:8082/deviceManager/rest",
"UserName": "admin",
"UserPassword": "Admin@storage1",
"StoragePool": "OpenStack_Pool",
"domain_name": "hypermetro-domain",
"remote_target_ip": "192.0.2.241"
}
}
"""
fake_smartx_value = {'smarttier': 'true',
'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': False,
'policy': '2',
'cachename': 'cache-test',
'partitionname': 'partition-test',
}
fake_hypermetro_opts = {'hypermetro': 'true',
'smarttier': False,
'smartcache': False,
'smartpartition': False,
'thin_provisioning_support': False,
'thick_provisioning_support': False,
}
sync_replica_specs = {'replication_enabled': '<is> True',
'replication_type': '<in> sync'}
async_replica_specs = {'replication_enabled': '<is> True',
'replication_type': '<in> async'}
replica_hypermetro_specs = {'hypermetro': '<is> True',
'replication_enabled': '<is> True'}
test_host = {'host': 'ubuntu001@backend001#OpenStack_Pool',
'capabilities': {'smartcache': True,
'location_info': '210235G7J20000000000',
'QoS_support': True,
'pool_name': 'OpenStack_Pool',
'timestamp': '2015-07-13T11:41:00.513549',
'smartpartition': True,
'allocated_capacity_gb': 0,
'volume_backend_name': 'HuaweiFCDriver',
'free_capacity_gb': 20.0,
'driver_version': '1.1.0',
'total_capacity_gb': 20.0,
'smarttier': True,
'hypermetro': True,
'reserved_percentage': 0,
'vendor_name': None,
'thick_provisioning_support': False,
'thin_provisioning_support': True,
'storage_protocol': 'FC',
}
}
test_new_type = {
'name': u'new_type',
'qos_specs_id': None,
'deleted': False,
'created_at': None,
'updated_at': None,
'extra_specs': {
'smarttier': '<is> true',
'smartcache': '<is> true',
'smartpartition': '<is> true',
'thin_provisioning_support': '<is> true',
'thick_provisioning_support': '<is> False',
'policy': '2',
'smartcache:cachename': 'cache-test',
'smartpartition:partitionname': 'partition-test',
},
'is_public': True,
'deleted_at': None,
'id': u'530a56e1-a1a4-49f3-ab6c-779a6e5d999f',
'description': None,
}
test_new_replication_type = {
'name': u'new_type',
'qos_specs_id': None,
'deleted': False,
'created_at': None,
'updated_at': None,
'extra_specs': {
'replication_enabled': '<is> True',
'replication_type': '<in> sync',
},
'is_public': True,
'deleted_at': None,
'id': u'530a56e1-a1a4-49f3-ab6c-779a6e5d999f',
'description': None,
}
test_hypermetro_type = {
'name': u'new_type',
'qos_specs_id': None,
'deleted': False,
'created_at': None,
'updated_at': None,
'extra_specs': {
'hypermetro': '<is> True'
},
'is_public': True,
'deleted_at': None,
'id': u'550c089b-bfdd-4f7f-86e1-3ba88125555c',
'description': None,
}
hypermetro_devices = """
{
"remote_device": {
"RestURL": "http://192.0.2.69:8082/deviceManager/rest",
"UserName":"admin",
"UserPassword":"Admin@storage2",
"StoragePool":"OpenStack_Pool",
"domain_name":"hypermetro_test"}
}
"""
FAKE_FIND_POOL_RESPONSE = {'CAPACITY': '985661440',
'ID': '0',
'TOTALCAPACITY': '985661440'}
FAKE_CREATE_VOLUME_RESPONSE = {"ID": "1",
"NAME": "5mFHcBv4RkCcD+JyrWc0SA",
"WWN": '6643e8c1004c5f6723e9f454003'}
FakeConnector = {'initiator': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'multipath': False,
'wwpns': ['10000090fa0d6754'],
'wwnns': ['10000090fa0d6755'],
'host': 'ubuntuc',
}
smarttier_opts = {'smarttier': 'true',
'smartpartition': False,
'smartcache': False,
'thin_provisioning_support': True,
'thick_provisioning_support': False,
'policy': '3',
'readcachepolicy': '1',
'writecachepolicy': None,
}
fake_fabric_mapping = {
'swd1': {
'target_port_wwn_list': ['2000643e8c4c5f66'],
'initiator_port_wwn_list': ['10000090fa0d6754']
}
}
fake_fabric_mapping_no_ports = {
'swd1': {
'target_port_wwn_list': [],
'initiator_port_wwn_list': ['10000090fa0d6754']
}
}
fake_fabric_mapping_no_wwn = {
'swd1': {
'target_port_wwn_list': ['2000643e8c4c5f66'],
'initiator_port_wwn_list': []
}
}
CHANGE_OPTS = {'policy': ('1', '2'),
'partitionid': (['1', 'partition001'], ['2', 'partition002']),
'cacheid': (['1', 'cache001'], ['2', 'cache002']),
'qos': (['11', {'MAXIOPS': '100', 'IOType': '1'}],
{'MAXIOPS': '100', 'IOType': '2',
'MIN': 1, 'LATENCY': 1}),
'host': ('ubuntu@huawei#OpenStack_Pool',
'ubuntu@huawei#OpenStack_Pool'),
'LUNType': ('0', '1'),
}
# A fake response of create a host
FAKE_CREATE_HOST_RESPONSE = """
{
"error": {
"code": 0
},
"data":{"NAME": "ubuntuc001",
"ID": "1"}
}
"""
FAKE_GET_HOST_RESPONSE = """
{
"error": {
"code": 0
},
"data":{"NAME": "ubuntuc001",
"ID": "1",
"ISADD2HOSTGROUP": "true"}
}
"""
# A fake response of success response storage
FAKE_COMMON_SUCCESS_RESPONSE = """
{
"error": {
"code": 0,
"description": "None"
},
"data":{}
}
"""
# A fake response of fail response storage
FAKE_COMMON_FAIL_RESPONSE = """
{
"error": {
"code": 50331651,
"description": "An error occurs to the parameter."
},
"data":{}
}
"""
# A fake response of login huawei storage
FAKE_GET_LOGIN_STORAGE_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"username": "admin",
"iBaseToken": "2001031430",
"deviceid": "210235G7J20000000000",
"accountstate": 2
}
}
"""
# A fake response of login out huawei storage
FAKE_LOGIN_OUT_STORAGE_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"ID": 11
}
}
"""
# A fake response of mock storage pool info
FAKE_STORAGE_POOL_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"USERFREECAPACITY": "985661440",
"ID": "0",
"NAME": "OpenStack_Pool",
"USERTOTALCAPACITY": "985661440",
"TIER0CAPACITY": "100",
"TIER1CAPACITY": "0",
"TIER2CAPACITY": "0"
}]
}
"""
# A fake response of lun or lungroup response
FAKE_LUN_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"ID": "1",
"NAME": "5mFHcBv4RkCcD+JyrWc0SA",
"WWN": "6643e8c1004c5f6723e9f454003",
"DESCRIPTION": "21ec7341-9256-497b-97d9-ef48edcf0635",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "27",
"ALLOCTYPE": "1",
"CAPACITY": "2097152"
}
}
"""
# A fake report of mock storage pool info
FAKE_POOLS_UNSUPPORT_REPORT = {
'pool_name': 'StoragePool',
'location_info': '2102350BVB10F2000020',
'QoS_support': False,
'smartcache': False,
'thick_provisioning_support': False,
'splitmirror': False,
'allocated_capacity_gb': 7,
'thin_provisioning_support': True,
'free_capacity_gb': 400.0,
'smartpartition': False,
'total_capacity_gb': 400.0,
'reserved_percentage': 0,
'max_over_subscription_ratio': 20.0,
'luncopy': False
}
FAKE_POOLS_SUPPORT_REPORT = {
'pool_name': 'StoragePool',
'location_info': '2102350BVB10F2000020',
'QoS_support': True,
'smartcache': True,
'thick_provisioning_support': True,
'splitmirror': True,
'allocated_capacity_gb': 7,
'thin_provisioning_support': True,
'free_capacity_gb': 400.0,
'smartpartition': True,
'total_capacity_gb': 400.0,
'reserved_percentage': 0,
'max_over_subscription_ratio': 20.0,
'luncopy': True,
'hypermetro': True,
'consistencygroup_support': True
}
FAKE_LUN_GET_SUCCESS_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"ID": "11",
"IOCLASSID": "11",
"NAME": "5mFHcBv4RkCcD+JyrWc0SA",
"DESCRIPTION": "21ec7341-9256-497b-97d9-ef48edcf0635",
"RUNNINGSTATUS": "10",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "27",
"LUNLIST": "",
"ALLOCTYPE": "1",
"CAPACITY": "2097152",
"WRITEPOLICY": "1",
"MIRRORPOLICY": "0",
"PREFETCHPOLICY": "1",
"PREFETCHVALUE": "20",
"DATATRANSFERPOLICY": "1",
"READCACHEPOLICY": "2",
"WRITECACHEPOLICY": "5",
"OWNINGCONTROLLER": "0B",
"SMARTCACHEPARTITIONID": "",
"CACHEPARTITIONID": "",
"WWN": "6643e8c1004c5f6723e9f454003",
"PARENTNAME": "OpenStack_Pool"
}
}
"""
FAKE_QUERY_ALL_LUN_RESPONSE = {
"error": {
"code": 0
},
"data": [{
"ID": "1",
"NAME": ENCODE_NAME
}]
}
FAKE_LUN_ASSOCIATE_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"ID":"11"
}]
}
"""
FAKE_QUERY_LUN_GROUP_INFO_RESPONSE = """
{
"error": {
"code":0
},
"data":[{
"NAME":"OpenStack_LunGroup_1",
"DESCRIPTION":"5mFHcBv4RkCcD+JyrWc0SA",
"ID":"11",
"TYPE":256
}]
}
"""
FAKE_QUERY_LUN_GROUP_RESPONSE = """
{
"error": {
"code":0
},
"data":{
"NAME":"5mFHcBv4RkCcD+JyrWc0SA",
"DESCRIPTION":"5mFHcBv4RkCcD+JyrWc0SA",
"ID":"11",
"TYPE":256
}
}
"""
FAKE_QUERY_LUN_GROUP_ASSOCIAT_RESPONSE = """
{
"error":{
"code":0
},
"data":{
"NAME":"5mFHcBv4RkCcD+JyrWc0SA",
"DESCRIPTION":"5mFHcBv4RkCcD+JyrWc0SA",
"ID":"11",
"TYPE":256
}
}
"""
FAKE_LUN_COUNT_RESPONSE = """
{
"data":{
"COUNT":"0"
},
"error":{
"code":0,
"description":"0"
}
}
"""
# A fake response of snapshot list response
FAKE_SNAPSHOT_LIST_INFO_RESPONSE = {
"error": {
"code": 0,
"description": "0"
},
"data": [{
"ID": 11,
"NAME": ENCODE_NAME
}, ]
}
# A fake response of create snapshot response
FAKE_CREATE_SNAPSHOT_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"ID": 11,
"NAME": "YheUoRwbSX2BxN7"
}
}
"""
# A fake response of get snapshot response
FAKE_GET_SNAPSHOT_INFO_RESPONSE = """
{
"error": {
"code": 0,
"description": "0"
},
"data": {
"ID": 11,
"NAME": "YheUoRwbSX2BxN7"
}
}
"""
FAKE_SNAPSHOT_COUNT_RESPONSE = """
{
"data":{
"COUNT":"2"
},
"error":{
"code":0,
"description":"0"
}
}
"""
# A fake response of get iscsi response
FAKE_GET_ISCSI_INFO_RESPONSE = """
{
"data": [{
"ETHPORTID": "139267",
"ID": "0+iqn.oceanstor:21004846fb8ca15f::22004:192.0.2.1,t,0x2005",
"TPGT": "8197",
"TYPE": 249
},
{
"ETHPORTID": "139268",
"ID": "1+iqn.oceanstor:21004846fb8ca15f::22003:192.0.2.2,t,0x2004",
"TPGT": "8196",
"TYPE": 249
}
],
"error": {
"code": 0,
"description": "0"
}
}
"""
# A fake response of get eth info response
FAKE_GET_ETH_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"PARENTTYPE": 209,
"MACADDRESS": "00:22:a1:0a:79:57",
"ETHNEGOTIATE": "-1",
"ERRORPACKETS": "0",
"IPV4ADDR": "192.0.2.2",
"IPV6GATEWAY": "",
"IPV6MASK": "0",
"OVERFLOWEDPACKETS": "0",
"ISCSINAME": "P0",
"HEALTHSTATUS": "1",
"ETHDUPLEX": "2",
"ID": "16909568",
"LOSTPACKETS": "0",
"TYPE": 213,
"NAME": "P0",
"INIORTGT": "4",
"RUNNINGSTATUS": "10",
"IPV4GATEWAY": "",
"BONDNAME": "",
"STARTTIME": "1371684218",
"SPEED": "1000",
"ISCSITCPPORT": "0",
"IPV4MASK": "255.255.0.0",
"IPV6ADDR": "",
"LOGICTYPE": "0",
"LOCATION": "ENG0.A5.P0",
"MTU": "1500",
"PARENTID": "1.5"
},
{
"PARENTTYPE": 209,
"MACADDRESS": "00:22:a1:0a:79:57",
"ETHNEGOTIATE": "-1",
"ERRORPACKETS": "0",
"IPV4ADDR": "192.0.2.1",
"IPV6GATEWAY": "",
"IPV6MASK": "0",
"OVERFLOWEDPACKETS": "0",
"ISCSINAME": "P0",
"HEALTHSTATUS": "1",
"ETHDUPLEX": "2",
"ID": "16909568",
"LOSTPACKETS": "0",
"TYPE": 213,
"NAME": "P0",
"INIORTGT": "4",
"RUNNINGSTATUS": "10",
"IPV4GATEWAY": "",
"BONDNAME": "",
"STARTTIME": "1371684218",
"SPEED": "1000",
"ISCSITCPPORT": "0",
"IPV4MASK": "255.255.0.0",
"IPV6ADDR": "",
"LOGICTYPE": "0",
"LOCATION": "ENG0.A5.P3",
"MTU": "1500",
"PARENTID": "1.5"
}]
}
"""
FAKE_GET_ETH_ASSOCIATE_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"IPV4ADDR": "192.0.2.1",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "10"
},
{
"IPV4ADDR": "192.0.2.2",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "10"
}
]
}
"""
# A fake response of get iscsi device info response
FAKE_GET_ISCSI_DEVICE_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"CMO_ISCSI_DEVICE_NAME": "iqn.2006-08.com.huawei:oceanstor:21000022a:"
}]
}
"""
# A fake response of get iscsi device info response
FAKE_GET_ALL_HOST_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"PARENTTYPE": 245,
"NAME": "ubuntuc",
"DESCRIPTION": "",
"RUNNINGSTATUS": "1",
"IP": "",
"PARENTNAME": "",
"OPERATIONSYSTEM": "0",
"LOCATION": "",
"HEALTHSTATUS": "1",
"MODEL": "",
"ID": "1",
"PARENTID": "",
"NETWORKNAME": "",
"TYPE": 21
},
{
"PARENTTYPE": 245,
"NAME": "ubuntu",
"DESCRIPTION": "",
"RUNNINGSTATUS": "1",
"IP": "",
"PARENTNAME": "",
"OPERATIONSYSTEM": "0",
"LOCATION": "",
"HEALTHSTATUS": "1",
"MODEL": "",
"ID": "2",
"PARENTID": "",
"NETWORKNAME": "",
"TYPE": 21
}]
}
"""
# A fake response of get host or hostgroup info response
FAKE_GET_ALL_HOST_GROUP_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"NAME":"ubuntuc",
"DESCRIPTION":"",
"ID":"0",
"TYPE":14
},
{"NAME":"OpenStack_HostGroup_1",
"DESCRIPTION":"",
"ID":"0",
"TYPE":14
}
]
}
"""
FAKE_GET_HOST_GROUP_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data":{
"NAME":"ubuntuc",
"DESCRIPTION":"",
"ID":"0",
"TYPE":14
}
}
"""
# A fake response of lun copy info response
FAKE_GET_LUN_COPY_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"COPYSTOPTIME": "-1",
"HEALTHSTATUS": "1",
"NAME": "w1PSNvu6RumcZMmSh4/l+Q==",
"RUNNINGSTATUS": "36",
"DESCRIPTION": "w1PSNvu6RumcZMmSh4/l+Q==",
"ID": "0",
"LUNCOPYTYPE": "1",
"COPYPROGRESS": "0",
"COPYSPEED": "2",
"TYPE": 219,
"COPYSTARTTIME": "-1"
}
}
"""
# A fake response of lun copy list info response
FAKE_GET_LUN_COPY_LIST_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"COPYSTOPTIME": "1372209335",
"HEALTHSTATUS": "1",
"NAME": "w1PSNvu6RumcZMmSh4/l+Q==",
"RUNNINGSTATUS": "40",
"DESCRIPTION": "w1PSNvu6RumcZMmSh4/l+Q==",
"ID": "0",
"LUNCOPYTYPE": "1",
"COPYPROGRESS": "100",
"COPYSPEED": "2",
"TYPE": 219,
"COPYSTARTTIME": "1372209329"
}]
}
"""
# A fake response of mappingview info response
FAKE_GET_MAPPING_VIEW_INFO_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"WORKMODE":"255",
"HEALTHSTATUS":"1",
"NAME":"OpenStack_Mapping_View_1",
"RUNNINGSTATUS":"27",
"DESCRIPTION":"",
"ENABLEINBANDCOMMAND":"true",
"ID":"1",
"INBANDLUNWWN":"",
"TYPE":245
},
{
"WORKMODE":"255",
"HEALTHSTATUS":"1",
"NAME":"YheUoRwbSX2BxN767nvLSw",
"RUNNINGSTATUS":"27",
"DESCRIPTION":"",
"ENABLEINBANDCOMMAND":"true",
"ID":"2",
"INBANDLUNWWN": "",
"TYPE": 245
}]
}
"""
FAKE_GET_MAPPING_VIEW_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"WORKMODE":"255",
"HEALTHSTATUS":"1",
"NAME":"mOWtSXnaQKi3hpB3tdFRIQ",
"RUNNINGSTATUS":"27",
"DESCRIPTION":"",
"ENABLEINBANDCOMMAND":"true",
"ID":"11",
"INBANDLUNWWN":"",
"TYPE": 245,
"AVAILABLEHOSTLUNIDLIST": ""
}]
}
"""
FAKE_GET_SPEC_MAPPING_VIEW_RESPONSE = """
{
"error":{
"code":0
},
"data":{
"WORKMODE":"255",
"HEALTHSTATUS":"1",
"NAME":"mOWtSXnaQKi3hpB3tdFRIQ",
"RUNNINGSTATUS":"27",
"DESCRIPTION":"",
"ENABLEINBANDCOMMAND":"true",
"ID":"1",
"INBANDLUNWWN":"",
"TYPE":245,
"AVAILABLEHOSTLUNIDLIST": "[1]"
}
}
"""
FAKE_FC_INFO_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"HEALTHSTATUS":"1",
"NAME":"",
"MULTIPATHTYPE":"1",
"ISFREE":"true",
"RUNNINGSTATUS":"27",
"ID":"10000090fa0d6754",
"OPERATIONSYSTEM":"255",
"TYPE":223
},
{
"HEALTHSTATUS":"1",
"NAME":"",
"MULTIPATHTYPE":"1",
"ISFREE":"true",
"RUNNINGSTATUS":"27",
"ID":"10000090fa0d6755",
"OPERATIONSYSTEM":"255",
"TYPE":223
}]
}
"""
FAKE_ISCSI_INITIATOR_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"CHAPNAME":"mm-user",
"HEALTHSTATUS":"1",
"ID":"iqn.1993-08.org.debian:01:9073aba6c6f",
"ISFREE":"true",
"MULTIPATHTYPE":"1",
"NAME":"",
"OPERATIONSYSTEM":"255",
"RUNNINGSTATUS":"28",
"TYPE":222,
"USECHAP":"true"
},
{
"ISFREE":"true",
"ID":"ini-1"
},
{
"ISFREE":"false",
"ID":"ini-2",
"PARENTNAME":"Host2",
"PARENTID":"2"
}]
}
"""
FAKE_HOST_LINK_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"PARENTTYPE":21,
"TARGET_ID":"0000000000000000",
"INITIATOR_NODE_WWN":"20000090fa0d6754",
"INITIATOR_TYPE":"223",
"RUNNINGSTATUS":"27",
"PARENTNAME":"ubuntuc",
"INITIATOR_ID":"10000090fa0d6754",
"TARGET_PORT_WWN":"24000022a10a2a39",
"HEALTHSTATUS":"1",
"INITIATOR_PORT_WWN":"10000090fa0d6754",
"ID":"010000090fa0d675-0000000000110400",
"TARGET_NODE_WWN":"21000022a10a2a39",
"PARENTID":"1",
"CTRL_ID":"0",
"TYPE":255,
"TARGET_TYPE":"212"
}]
}
"""
FAKE_PORT_GROUP_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"ID":11,
"NAME": "portgroup-test"
}]
}
"""
FAKE_ERROR_INFO_RESPONSE = """
{
"error":{
"code":31755596
}
}
"""
FAKE_ERROR_CONNECT_RESPONSE = """
{
"error":{
"code":-403
}
}
"""
FAKE_ERROR_LUN_INFO_RESPONSE = """
{
"error":{
"code":0
},
"data":{
"ID":"11",
"IOCLASSID":"11",
"NAME":"5mFHcBv4RkCcD+JyrWc0SA",
"ALLOCTYPE": "0",
"DATATRANSFERPOLICY": "0",
"SMARTCACHEPARTITIONID": "0",
"CACHEPARTITIONID": "0"
}
}
"""
FAKE_GET_FC_INI_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"ID":"10000090fa0d6754",
"ISFREE":"true"
}]
}
"""
FAKE_SYSTEM_VERSION_RESPONSE = """
{
"error":{
"code": 0
},
"data":{
"PRODUCTVERSION": "V100R001C10",
"wwn": "21003400a30d844d"
}
}
"""
FAKE_GET_LUN_MIGRATION_RESPONSE = """
{
"data":[{"ENDTIME":"1436816174",
"ID":"9",
"PARENTID":"11",
"PARENTNAME":"xmRBHMlVRruql5vwthpPXQ",
"PROCESS":"-1",
"RUNNINGSTATUS":"76",
"SPEED":"2",
"STARTTIME":"1436816111",
"TARGETLUNID":"1",
"TARGETLUNNAME":"4924891454902893639",
"TYPE":253,
"WORKMODE":"0"
}],
"error":{"code":0,
"description":"0"}
}
"""
FAKE_HYPERMETRODOMAIN_RESPONSE = """
{
"error":{
"code": 0
},
"data":[{
"PRODUCTVERSION": "V100R001C10",
"ID": "11",
"NAME": "hypermetro_test",
"RUNNINGSTATUS": "1",
"HEALTHSTATUS": "0"
}]
}
"""
FAKE_HYPERMETRO_RESPONSE = """
{
"error":{
"code": 0
},
"data":{
"PRODUCTVERSION": "V100R001C10",
"ID": "11",
"NAME": "hypermetro_test",
"RUNNINGSTATUS": "1",
"HEALTHSTATUS": "1"
}
}
"""
FAKE_QOS_INFO_RESPONSE = """
{
"error":{
"code": 0
},
"data":{
"ID": "11"
}
}
"""
FAKE_GET_FC_PORT_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"RUNNINGSTATUS":"10",
"WWN":"2000643e8c4c5f66",
"PARENTID":"0A.1",
"ID": "1114368",
"RUNSPEED": "16000"
},
{
"RUNNINGSTATUS":"10",
"WWN":"2000643e8c4c5f67",
"PARENTID":"0A.1",
"ID": "1114369",
"RUNSPEED": "16000"
}]
}
"""
FAKE_SMARTCACHEPARTITION_RESPONSE = """
{
"error":{
"code":0
},
"data":{
"ID":"11",
"NAME":"cache-name"
}
}
"""
FAKE_CONNECT_FC_RESPONSE = {
"driver_volume_type": 'fibre_channel',
"data": {
"target_wwn": ["10000090fa0d6754"],
"target_lun": "1",
"volume_id": ID
}
}
FAKE_METRO_INFO_RESPONSE = {
"PRODUCTVERSION": "V100R001C10",
"ID": "11",
"NAME": "hypermetro_test",
"RUNNINGSTATUS": "42",
"HEALTHSTATUS": "0"
}
FAKE_METRO_INFO_NEW_RESPONSE = """{
"error": {
"code": 0
},
"data": {
"PRODUCTVERSION": "V100R001C10",
"ID": "11",
"NAME": "hypermetro_test",
"RUNNINGSTATUS": "1",
"HEALTHSTATUS": "1"
}
}
"""
FAKE_CREATE_METROROUP_RESPONSE = """
{
"data": {
"DESCRIPTION": "",
"DOMAINID": "643e8c4c5f670100",
"DOMAINNAME": "hypermetro-domain",
"HEALTHSTATUS": "1",
"ID": "3400a30d844d8002",
"ISEMPTY": "true",
"NAME": "6F7kdHZcQJ2zbzxHmBl4FQ",
"PRIORITYSTATIONTYPE": "0",
"RECOVERYPOLICY": "1",
"RESOURCETYPE": "11",
"RUNNINGSTATUS": "41",
"SPEED": "2",
"SYNCDIRECTION": "1",
"TYPE": 15364
},
"error": {
"code": 0,
"description": "0"
}
}
"""
FAKE_GET_METROROUP_RESPONSE = {
"data": [{
"DESCRIPTION": "",
"DOMAINID": "643e8c4c5f670100",
"DOMAINNAME": "hypermetro-domain",
"HEALTHSTATUS": "1",
"ID": "11",
"ISEMPTY": "true",
"NAME": huawei_utils.encode_name(ID),
"PRIORITYSTATIONTYPE": "0",
"RECOVERYPOLICY": "1",
"RESOURCETYPE": "11",
"RUNNINGSTATUS": "41",
"SPEED": "2",
"SYNCDIRECTION": "1",
"TYPE": 15364
}],
"error": {
"code": 0,
"description": "0"
},
}
FAKE_GET_METROROUP_ID_RESPONSE = """
{
"data": {
"DESCRIPTION": "",
"DOMAINID": "643e8c4c5f670100",
"DOMAINNAME": "hypermetro-domain",
"HEALTHSTATUS": "1",
"ID": "11",
"ISEMPTY": "false",
"NAME": "IexzQZJWSXuX2e9I7c8GNQ",
"PRIORITYSTATIONTYPE": "0",
"RECOVERYPOLICY": "1",
"RESOURCETYPE": "11",
"RUNNINGSTATUS": "1",
"SPEED": "2",
"SYNCDIRECTION": "1",
"TYPE": 15364
},
"error": {
"code": 0,
"description": "0"
}
}
"""
# mock login info map
MAP_COMMAND_TO_FAKE_RESPONSE = {}
MAP_COMMAND_TO_FAKE_RESPONSE['/xx/sessions'] = (
FAKE_GET_LOGIN_STORAGE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/sessions'] = (
FAKE_LOGIN_OUT_STORAGE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUN_MIGRATION/POST'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUN_MIGRATION?range=[0-256]/GET'] = (
FAKE_GET_LUN_MIGRATION_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUN_MIGRATION/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
# mock storage info map
MAP_COMMAND_TO_FAKE_RESPONSE['/storagepool'] = (
FAKE_STORAGE_POOL_RESPONSE)
# mock lun info map
MAP_COMMAND_TO_FAKE_RESPONSE['/lun'] = (
FAKE_LUN_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/11/GET'] = (
FAKE_LUN_GET_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/1/GET'] = (
FAKE_LUN_GET_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/1/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/11/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun?filter=NAME::%s/GET' % ENCODE_NAME] = (
json.dumps(FAKE_QUERY_ALL_LUN_RESPONSE))
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate?TYPE=11&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate?TYPE=11&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=12/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate?ID=1&TYPE=11&ASSOCIATEOBJTYPE=21'
'&ASSOCIATEOBJID=0/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate?TYPE=11&ASSOCIATEOBJTYPE=21'
'&ASSOCIATEOBJID=1/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate/cachepartition?ID=1'
'&ASSOCIATEOBJTYPE=11&ASSOCIATEOBJID=11'
'/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/associate?TYPE=27&ASSOCIATEOBJTYPE=21'
'&ASSOCIATEOBJID=1/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/associate?TYPE=27&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup?range=[0-8191]/GET'] = (
FAKE_QUERY_LUN_GROUP_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup'] = (
FAKE_QUERY_LUN_GROUP_RESPONSE)
# Canned responses served by FakeClient.do_call(). The key is the REST
# command: URL path with host/device-id prefix stripped, plus "/<METHOD>"
# appended for non-default HTTP methods.
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate'] = (
    FAKE_QUERY_LUN_GROUP_ASSOCIAT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUNGroup/11/DELETE'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?ID=11&ASSOCIATEOBJTYPE=11'
                             '&ASSOCIATEOBJID=1/DELETE'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?TYPE=256&ASSOCIATEOBJTYPE=11'
                             '&ASSOCIATEOBJID=11/GET'] = (
    FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?TYPE=256&ASSOCIATEOBJTYPE=11'
                             '&ASSOCIATEOBJID=1/GET'] = (
    FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?ID=11&ASSOCIATEOBJTYPE=11'
                             '&ASSOCIATEOBJID=11/DELETE'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?ID=11&ASSOCIATEOBJTYPE=27'
                             '&ASSOCIATEOBJID=11/DELETE'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/count?TYPE=11&ASSOCIATEOBJTYPE=256'
                             '&ASSOCIATEOBJID=11/GET'] = (
    FAKE_LUN_COUNT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/count?TYPE=27&ASSOCIATEOBJTYPE=256'
                             '&ASSOCIATEOBJID=1/GET'] = (
    FAKE_SNAPSHOT_COUNT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/count?TYPE=27&ASSOCIATEOBJTYPE=256'
                             '&ASSOCIATEOBJID=11/GET'] = (
    FAKE_SNAPSHOT_COUNT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?TYPE=256&ASSOCIATEOBJTYPE=27'
                             '&ASSOCIATEOBJID=11/GET'] = (
    FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/expand/PUT'] = (
    FAKE_LUN_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?ID=12&ASSOCIATEOBJTYPE=11'
                             '&ASSOCIATEOBJID=12/DELETE'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
# mock snapshot info map
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot'] = (
    FAKE_CREATE_SNAPSHOT_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/11/GET'] = (
    FAKE_GET_SNAPSHOT_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/activate'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/stop/PUT'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/11/DELETE'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot?filter=NAME::%s/GET' % ENCODE_NAME] = (
    json.dumps(FAKE_SNAPSHOT_LIST_INFO_RESPONSE))
# mock QoS info map
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/11/GET'] = (
    FAKE_LUN_GET_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/11/DELETE'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/11/PUT'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/active/11/PUT'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/'] = (
    FAKE_QOS_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/count'] = (
    FAKE_COMMON_FAIL_RESPONSE)
# mock iscsi info map
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_tgt_port/GET'] = (
    FAKE_GET_ISCSI_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/eth_port/GET'] = (
    FAKE_GET_ETH_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/eth_port/associate?TYPE=213&ASSOCIATEOBJTYPE'
                             '=257&ASSOCIATEOBJID=11/GET'] = (
    FAKE_GET_ETH_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsidevicename'] = (
    FAKE_GET_ISCSI_DEVICE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator?range=[0-256]/GET'] = (
    FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/'] = (
    FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/POST'] = (
    FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/PUT'] = (
    FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator?PARENTTYPE=21&PARENTID'
                             '=1/GET'] = (
    FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/remove_iscsi_from_host/PUT'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/'
                             'iqn.1993-08.debian:01:ec2bff7ac3a3/PUT'] = (
    FAKE_ISCSI_INITIATOR_RESPONSE)
# mock host info map
MAP_COMMAND_TO_FAKE_RESPONSE['/host?range=[0-65535]/GET'] = (
    FAKE_GET_ALL_HOST_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/1/DELETE'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/1/GET'] = (
    FAKE_GET_HOST_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host'] = (
    FAKE_CREATE_HOST_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hostgroup?range=[0-8191]/GET'] = (
    FAKE_GET_ALL_HOST_GROUP_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hostgroup'] = (
    FAKE_GET_HOST_GROUP_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/associate?TYPE=14&ID=0'
                             '&ASSOCIATEOBJTYPE=21&ASSOCIATEOBJID=1'
                             '/DELETE'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/associate?TYPE=14&ID=0'
                             '&ASSOCIATEOBJID=0/GET'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/associate?TYPE=21&'
                             'ASSOCIATEOBJTYPE=14&ASSOCIATEOBJID=0/GET'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hostgroup/0/DELETE'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hostgroup/associate'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
# mock copy info map
MAP_COMMAND_TO_FAKE_RESPONSE['/luncopy'] = (
    FAKE_GET_LUN_COPY_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUNCOPY?range=[0-1023]/GET'] = (
    FAKE_GET_LUN_COPY_LIST_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUNCOPY/start/PUT'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUNCOPY/0/DELETE'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
# mock mapping view info map
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview?range=[0-8191]/GET'] = (
    FAKE_GET_MAPPING_VIEW_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview'] = (
    FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/PUT'] = (
    FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/MAPPINGVIEW/1/GET'] = (
    FAKE_GET_SPEC_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/1/DELETE'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/REMOVE_ASSOCIATE/PUT'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate/lungroup?TYPE=256&'
                             'ASSOCIATEOBJTYPE=245&ASSOCIATEOBJID=1/GET'] = (
    FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate?TYPE=245&'
                             'ASSOCIATEOBJTYPE=14&ASSOCIATEOBJID=0/GET'] = (
    FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate?TYPE=245&'
                             'ASSOCIATEOBJTYPE=256&ASSOCIATEOBJID=11/GET'] = (
    FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate?TYPE=245&'
                             'ASSOCIATEOBJTYPE=257&ASSOCIATEOBJID=0/GET'] = (
    FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate?TYPE=245&'
                             'ASSOCIATEOBJTYPE=257&ASSOCIATEOBJID=11/GET'] = (
    FAKE_GET_MAPPING_VIEW_RESPONSE)
# Single-engine controller answer for capability probing.
FAKE_GET_ENGINES_RESPONSE = """
{
    "error":{
        "code": 0
    },
    "data":[{
        "NODELIST": "[]",
        "ID": "0"
    }]
}
"""

MAP_COMMAND_TO_FAKE_RESPONSE['/storageengine/GET'] = (
    FAKE_GET_ENGINES_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/associate?ASSOCIATEOBJTYPE=245&'
                             'ASSOCIATEOBJID=1&range=[0-8191]/GET'] = (
    FAKE_GET_MAPPING_VIEW_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/MAPPINGVIEW/CREATE_ASSOCIATE/PUT'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)

# mock FC info map
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator?ISFREE=true&'
                             'range=[0-8191]/GET'] = (
    FAKE_FC_INFO_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator/10000090fa0d6754/GET'] = (
    FAKE_FC_INFO_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator/10000090fa0d6754/PUT'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/host_link?INITIATOR_TYPE=223'
                             '&INITIATOR_PORT_WWN=10000090fa0d6754/GET'] = (
    FAKE_HOST_LINK_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup?range=[0-8191]&TYPE=257/GET'] = (
    FAKE_PORT_GROUP_RESPONSE)

# mock system info map
MAP_COMMAND_TO_FAKE_RESPONSE['/system//GET'] = (
    FAKE_SYSTEM_VERSION_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator?range=[0-256]/GET'] = (
    FAKE_GET_FC_INI_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/fc_port/GET'] = (
    FAKE_GET_FC_PORT_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator/GET'] = (
    FAKE_GET_FC_PORT_RESPONSE)

# NOTE(review): key below lacks the leading '/' every other entry has --
# likely never matched by do_call(); kept as-is, verify against callers.
MAP_COMMAND_TO_FAKE_RESPONSE['fc_initiator?range=[0-256]/GET'] = (
    FAKE_GET_FC_PORT_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator?PARENTTYPE=21&PARENTID=1/GET'] = (
    FAKE_GET_FC_PORT_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate/cachepartition/POST'] = (
    FAKE_SYSTEM_VERSION_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator?range=[0-256]&PARENTID=1/GET'] = (
    FAKE_GET_FC_PORT_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/SMARTCACHEPARTITION/0/GET'] = (
    FAKE_SMARTCACHEPARTITION_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/SMARTCACHEPARTITION/REMOVE_ASSOCIATE/PUT'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/SMARTCACHEPARTITION/count'] = (
    FAKE_COMMON_FAIL_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/cachepartition/0/GET'] = (
    FAKE_SMARTCACHEPARTITION_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroDomain?range=[0-32]/GET'] = (
    FAKE_HYPERMETRODOMAIN_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/POST'] = (
    FAKE_HYPERMETRO_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/3400a30d844d0007/GET'] = (
    FAKE_METRO_INFO_NEW_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/disable_hcpair/PUT'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/hyperMetro/associate/pair/POST'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/hyperMetro/associate/pair/DELETE'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/11/DELETE'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/11/GET'] = (
    FAKE_HYPERMETRO_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair?range=[0-4095]/GET'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/synchronize_hcpair/PUT'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/splitmirror?range=[0-8191]/GET'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/splitmirror/count'] = (
    FAKE_COMMON_FAIL_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/smartcachepool/count'] = (
    FAKE_COMMON_FAIL_RESPONSE)
FAKE_GET_PORTG_BY_VIEW = """
{
"data": [{
"DESCRIPTION": "Please do NOT modify this. Engine ID: 0",
"ID": "0",
"NAME": "OpenStack_PortGroup_1",
"TYPE": 257
}],
"error": {
"code": 0
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/associate/mappingview?TYPE=257&AS'
'SOCIATEOBJTYPE=245&ASSOCIATEOBJID=1/GET'] = (
FAKE_GET_PORTG_BY_VIEW)
FAKE_GET_PORT_BY_PORTG = """
{
"data":[{
"CONFSPEED":"0","FCCONFMODE":"3",
"FCRUNMODE":"0","HEALTHSTATUS":"1","ID":"2000643e8c4c5f66",
"MAXSUPPORTSPEED":"16000","NAME":"P0","PARENTID":"0B.1",
"PARENTTYPE":209,"RUNNINGSTATUS":"10","RUNSPEED":"8000",
"WWN":"2000643e8c4c5f66"
}],
"error":{
"code":0,"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_port/associate/portgroup?TYPE=212&ASSOCI'
'ATEOBJTYPE=257&ASSOCIATEOBJID=0/GET'] = (
FAKE_GET_PORT_BY_PORTG)
FAKE_GET_PORTG = """
{
"data": {
"TYPE": 257,
"NAME": "OpenStack_PortGroup_1",
"DESCRIPTION": "Please DO NOT change thefollowing message: 0",
"ID": "0"
},
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/0/GET'] = FAKE_GET_PORTG
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/0/PUT'] = FAKE_GET_PORTG
MAP_COMMAND_TO_FAKE_RESPONSE['/port/associate/portgroup/POST'] = (
FAKE_GET_PORT_BY_PORTG)
MAP_COMMAND_TO_FAKE_RESPONSE['/port/associate/portgroup?ID=0&TYPE=257&ASSOCIA'
'TEOBJTYPE=212&ASSOCIATEOBJID=2000643e8c4c5f66/DE'
'LETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
FAKE_CREATE_PORTG = """
{
"data": {
"DESCRIPTION": "Please DO NOT change the following message: 0",
"ID": "0",
"NAME": "OpenStack_PortGroup_1",
"TYPE": 257
},
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/PortGroup/POST'] = FAKE_CREATE_PORTG
MAP_COMMAND_TO_FAKE_RESPONSE['/PortGroup/1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
FAKE_GET_PORTG_FROM_PORT = """
{
"data": [{
"TYPE": 257,
"NAME": "OpenStack_PortGroup_1",
"DESCRIPTION": "PleaseDONOTchangethefollowingmessage: 0",
"ID": "0"
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/associate/fc_port?TYPE=257&ASSOCIA'
'TEOBJTYPE=212&ASSOCIATEOBJID=1114368/GET'] = (
FAKE_GET_PORTG_FROM_PORT)
FAKE_GET_VIEW_BY_PORTG = """
{
"data": [{
"ASSOCIATEOBJID": "0",
"COUNT": "0",
"ASSOCIATEOBJTYPE": "0",
"INBANDLUNWWN": "",
"FORFILESYSTEM": "false",
"ID": "2",
"ENABLEINBANDCOMMAND": "false",
"NAME": "OpenStack_Mapping_View_1",
"WORKMODE": "0",
"TYPE": 245,
"HOSTLUNID": "0",
"DESCRIPTION": ""
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate/portgroup?TYPE=245&ASS'
'OCIATEOBJTYPE=257&ASSOCIATEOBJID=0/GET'] = (
FAKE_GET_VIEW_BY_PORTG)
FAKE_GET_LUNG_BY_VIEW = """
{
"data": [{
"TYPE": 256,
"NAME": "OpenStack_LunGroup_1",
"DESCRIPTION": "OpenStack_LunGroup_1",
"ID": "1"
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate/mappingview?TYPE=256&ASSO'
'CIATEOBJTYPE=245&ASSOCIATEOBJID=2/GET'] = (
FAKE_GET_LUNG_BY_VIEW)
FAKE_LUN_COUNT_RESPONSE_1 = """
{
"data":{
"COUNT":"2"
},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/count?TYPE=11&ASSOCIATEOB'
'JTYPE=256&ASSOCIATEOBJID=1/GET'] = (
FAKE_LUN_COUNT_RESPONSE_1)
FAKE_PORTS_IN_PG_RESPONSE = """
{
"data": [{
"ID": "1114114",
"WWN": "2002643e8c4c5f66"
},
{
"ID": "1114113",
"WWN": "2001643e8c4c5f66"
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_port/associate?TYPE=213&ASSOCIATEOBJTYPE='
'257&ASSOCIATEOBJID=0/GET'] = (
FAKE_PORTS_IN_PG_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetro_ConsistentGroup/POST'] = (
FAKE_CREATE_METROROUP_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup?type"
"='15364'/GET"] = (
json.dumps(FAKE_GET_METROROUP_RESPONSE))
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup/11/GET"] = (
FAKE_GET_METROROUP_ID_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup/11/DELETE"] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup/stop/PUT"] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup/sync/PUT"] = (
FAKE_COMMON_SUCCESS_RESPONSE)
FAKE_GET_REMOTEDEV_RESPONSE = """
{
"data":[{
"ARRAYTYPE":"1",
"HEALTHSTATUS":"1",
"ID":"0",
"NAME":"Huawei.Storage",
"RUNNINGSTATUS":"1",
"WWN":"21003400a30d844d"
}],
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/remote_device/GET'] = (
FAKE_GET_REMOTEDEV_RESPONSE)
FAKE_CREATE_PAIR_RESPONSE = """
{
"data":{
"ID":"%s"
},
"error":{
"code":0,
"description":"0"
}
}
""" % TEST_PAIR_ID
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/POST'] = (
FAKE_CREATE_PAIR_RESPONSE)
FAKE_DELETE_PAIR_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/%s/DELETE' % TEST_PAIR_ID] = (
FAKE_DELETE_PAIR_RESPONSE)
FAKE_SET_PAIR_ACCESS_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/%s/PUT' % TEST_PAIR_ID] = (
FAKE_SET_PAIR_ACCESS_RESPONSE)
FAKE_GET_PAIR_NORMAL_RESPONSE = """
{
"data":{
"REPLICATIONMODEL": "1",
"RUNNINGSTATUS": "1",
"SECRESACCESS": "2",
"HEALTHSTATUS": "1",
"ISPRIMARY": "true"
},
"error":{
"code":0,
"description":"0"
}
}
"""
FAKE_GET_PAIR_SPLIT_RESPONSE = """
{
"data":{
"REPLICATIONMODEL": "1",
"RUNNINGSTATUS": "26",
"SECRESACCESS": "2",
"ISPRIMARY": "true"
},
"error":{
"code":0,
"description":"0"
}
}
"""
FAKE_GET_PAIR_SYNC_RESPONSE = """
{
"data":{
"REPLICATIONMODEL": "1",
"RUNNINGSTATUS": "23",
"SECRESACCESS": "2"
},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/%s/GET' % TEST_PAIR_ID] = (
FAKE_GET_PAIR_NORMAL_RESPONSE)
FAKE_SYNC_PAIR_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/sync/PUT'] = (
FAKE_SYNC_PAIR_RESPONSE)
FAKE_SPLIT_PAIR_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/split/PUT'] = (
FAKE_SPLIT_PAIR_RESPONSE)
FAKE_SWITCH_PAIR_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/switch/PUT'] = (
FAKE_SWITCH_PAIR_RESPONSE)
# NOTE: FAKE_PORTS_IN_PG_RESPONSE and the '/fc_port/associate?...' mapping
# were defined earlier with identical content; the byte-for-byte duplicate
# definitions have been removed here. Only the new entries remain.
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/associate/fc_port?TYPE=257&ASSOCIA'
                             'TEOBJTYPE=212&ASSOCIATEOBJID=1114369/GET'] = (
    FAKE_PORTS_IN_PG_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate/portgroup?TYPE=245&ASSOC'
                             'IATEOBJTYPE=257&ASSOCIATEOBJID=1114114/GET'] = (
    FAKE_SWITCH_PAIR_RESPONSE)

MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate/portgroup?TYPE=245&ASSOC'
                             'IATEOBJTYPE=257&ASSOCIATEOBJID=1114113/GET'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)

# Backend id used for the fake replication target configuration.
REPLICA_BACKEND_ID = 'huawei-replica-1'
class FakeHuaweiConf(huawei_conf.HuaweiConf):
    """Config double that injects a fixed driver configuration.

    Instead of parsing the XML config file, ``update_config_value``
    writes a canned set of options straight onto the mock conf object.
    """

    def __init__(self, conf, protocol):
        self.conf = conf
        self.protocol = protocol

    def safe_get(self, key):
        """Return the named config attribute, or None when missing."""
        try:
            return getattr(self.conf, key)
        except Exception:
            return None

    def update_config_value(self):
        """Populate self.conf with the fixed test configuration."""
        cfg = self.conf
        cfg.volume_backend_name = 'huawei_storage'
        cfg.san_address = ['http://192.0.2.69:8082/deviceManager/rest/']
        cfg.san_user = 'admin'
        cfg.san_password = 'Admin@storage'
        cfg.san_product = 'V3'
        cfg.san_protocol = self.protocol
        cfg.lun_type = constants.THICK_LUNTYPE
        cfg.lun_ready_wait_interval = 2
        cfg.lun_copy_wait_interval = 2
        cfg.lun_timeout = 43200
        cfg.lun_write_type = '1'
        cfg.lun_mirror_switch = '1'
        cfg.lun_prefetch_type = '1'
        cfg.lun_prefetch_value = '0'
        cfg.lun_policy = '0'
        cfg.lun_read_cache_policy = '2'
        cfg.lun_write_cache_policy = '5'
        cfg.storage_pools = ['OpenStack_Pool']
        cfg.iscsi_default_target_ip = ['192.0.2.68']
        cfg.metro_san_address = [
            'https://192.0.2.240:8088/deviceManager/rest/']
        cfg.metro_storage_pools = 'OpenStack_Pool'
        cfg.metro_san_user = 'admin'
        cfg.metro_san_password = 'Admin@storage1'
        cfg.metro_domain_name = 'hypermetro_test'

        # Local iSCSI initiator description (already parsed form).
        cfg.iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
                           'TargetIP': '192.0.2.2',
                           'CHAPinfo': 'mm-user;mm-user@storage',
                           'ALUA': '1',
                           'TargetPortGroup': 'portgroup-test', }]

        # Remote initiator info in the raw string form that still needs
        # to be parsed by get_replication_devices().
        rmt_iscsi_info = ('{ Name: iqn.1993-08.debian:01:ec2bff7acxxx;\n'
                          'TargetIP:1.1.1.1;CHAPinfo:mm-user#mm-user@storage;'
                          'ALUA:1; TargetPortGroup:portgroup-test};\t\n '
                          '{ Name: iqn.1993-08.debian:01:ec2bff7acyyy;\n'
                          'TargetIP:2.2.2.2;CHAPinfo:nn-user#nn-user@storage;'
                          'ALUA:0; TargetPortGroup:portgroup-test1}\t\n')

        cfg.replication_device = [
            {'backend_id': REPLICA_BACKEND_ID,
             'storage_pool': 'OpenStack_Pool',
             'san_address':
                 'https://192.0.2.69:8088/deviceManager/rest/',
             'san_user': 'admin',
             'san_password': 'Admin@storage1',
             'iscsi_info': rmt_iscsi_info}]
        cfg.safe_get = self.safe_get
class FakeClient(rest_client.RestClient):
    """REST client stub serving canned responses.

    ``do_call`` never touches the network: it normalizes the URL into a
    command key and looks it up in MAP_COMMAND_TO_FAKE_RESPONSE. The
    ``test_fail`` / ``test_multi_url_flag`` one-shot flags let a test force
    the next matched call to return an error payload.
    """

    def __init__(self, configuration):
        san_address = configuration.san_address
        san_user = configuration.san_user
        san_password = configuration.san_password
        rest_client.RestClient.__init__(self, configuration,
                                        san_address,
                                        san_user,
                                        san_password)
        # One-shot error injection switches, reset after they fire.
        self.test_fail = False
        self.test_multi_url_flag = False
        self.cache_not_exist = False
        self.partition_not_exist = False

    def _get_snapshotid_by_name(self, snapshot_name):
        # Fixed snapshot id used by the snapshot fixtures.
        return "11"

    def _check_snapshot_exist(self, snapshot_id):
        return True

    def get_partition_id_by_name(self, name):
        # Simulate a missing cache partition when requested by the test.
        if self.partition_not_exist:
            return None
        return "11"

    def get_cache_id_by_name(self, name):
        # Simulate a missing SmartCache partition when requested.
        if self.cache_not_exist:
            return None
        return "11"

    def add_lun_to_cache(self, lunid, cache_id):
        pass

    def do_call(self, url=False, data=None, method=None, calltimeout=4,
                log_filter_flag=False):
        """Return the canned response for the command encoded in *url*."""
        url = url.replace('http://192.0.2.69:8082/deviceManager/rest', '')
        command = url.replace('/210235G7J20000000000/', '')
        data = json.dumps(data) if data else None
        if method:
            command = command + "/" + method

        # Direct dict lookup instead of the original linear scan over
        # every key -- same result, O(1) instead of O(n).
        if command in MAP_COMMAND_TO_FAKE_RESPONSE:
            data = MAP_COMMAND_TO_FAKE_RESPONSE[command]
            if self.test_fail:
                data = FAKE_ERROR_INFO_RESPONSE
                if command == 'lun/11/GET':
                    data = FAKE_ERROR_LUN_INFO_RESPONSE
                self.test_fail = False
            if self.test_multi_url_flag:
                data = FAKE_ERROR_CONNECT_RESPONSE
                self.test_multi_url_flag = False
        return json.loads(data)
class FakeReplicaPairManager(replication.ReplicaPairManager):
    # Replace the real remote-client bootstrap with a FakeClient so no
    # network connection is attempted.
    def _init_rmt_client(self):
        self.rmt_client = FakeClient(self.conf)
class FakeISCSIStorage(huawei_driver.HuaweiISCSIDriver):
    """Fake Huawei Storage, Rewrite some methods of HuaweiISCSIDriver."""

    def __init__(self, configuration):
        self.configuration = configuration
        self.huawei_conf = FakeHuaweiConf(self.configuration, 'iSCSI')
        self.active_backend_id = None
        self.replica = None
        self.support_func = None

    def do_setup(self):
        """Wire the driver to fake clients instead of a real array."""
        self.metro_flag = True
        self.huawei_conf.update_config_value()
        self.get_local_and_remote_dev_conf()

        def make_client():
            return FakeClient(configuration=self.configuration)

        self.client = make_client()
        self.rmt_client = make_client()
        self.replica_client = make_client()
        self.metro = hypermetro.HuaweiHyperMetro(
            self.client, self.rmt_client, self.configuration)
        self.replica = FakeReplicaPairManager(
            self.client, self.replica_client, self.configuration)
class FakeFCStorage(huawei_driver.HuaweiFCDriver):
    """Fake Huawei Storage, Rewrite some methods of HuaweiISCSIDriver."""

    def __init__(self, configuration):
        self.configuration = configuration
        self.fcsan = None
        self.huawei_conf = FakeHuaweiConf(self.configuration, 'iSCSI')
        self.active_backend_id = None
        self.replica = None
        self.support_func = None

    def do_setup(self):
        """Wire the driver to fake clients instead of a real array."""
        self.metro_flag = True
        self.huawei_conf.update_config_value()
        self.get_local_and_remote_dev_conf()

        def make_client():
            return FakeClient(configuration=self.configuration)

        self.client = make_client()
        self.rmt_client = make_client()
        self.replica_client = make_client()
        self.metro = hypermetro.HuaweiHyperMetro(
            self.client, self.rmt_client, self.configuration)
        self.replica = FakeReplicaPairManager(
            self.client, self.replica_client, self.configuration)
@ddt.ddt
class HuaweiTestBase(test.TestCase):
    """Base class for Huawei test cases.

    Implement common setup operations or test cases in this class.
    """

    def setUp(self):
        super(HuaweiTestBase, self).setUp()

        # Driver wired to fake clients; no array access happens.
        self.configuration = mock.Mock(spec=conf.Configuration)
        self.driver = FakeISCSIStorage(configuration=self.configuration)
        self.driver.do_setup()

        # Volume/snapshot fixtures sharing the same fake id.
        self.volume = fake_volume.fake_volume_obj(
            admin_contex, host=HOST, provider_location=PROVIDER_LOCATION,
            admin_metadata=ADMIN_METADATA, id=ID)

        self.snapshot = fake_snapshot.fake_snapshot_obj(
            admin_contex, provider_location=PROVIDER_LOCATION, id=ID)

        self.snapshot.volume = self.volume

        # Volume carrying replication driver data for replica tests.
        self.replica_volume = fake_volume.fake_volume_obj(
            admin_contex, host=HOST, provider_location=PROVIDER_LOCATION,
            admin_metadata=ADMIN_METADATA, replication_status='disabled',
            replication_driver_data=REPLICA_DRIVER_DATA, id=ID)

        # Volume carrying hypermetro metadata.
        self.hyper_volume = fake_volume.fake_volume_obj(
            admin_contex, host=HOST, provider_location=PROVIDER_LOCATION,
            volume_metadata=VOL_METADATA, id=ID)

        self.original_volume = fake_volume.fake_volume_obj(admin_contex,
                                                           id=ID)

        self.current_volume = fake_volume.fake_volume_obj(
            admin_contex, id=ID, provider_location=PROVIDER_LOCATION,
            name_id=ID)

        self.cgsnapshot = fake_cgsnapshot.fake_cgsnapshot_obj(
            admin_contex, id=ID, consistencygroup_id=ID, status='available')

        self.cg = fake_consistencygroup.fake_consistencyobject_obj(
            admin_contex, id=ID, status='available')

    def test_encode_name(self):
        lun_name = huawei_utils.encode_name(self.volume.id)

        # The hash value is different between py27 and py34.
        # So we use assertIn.
        self.assertIn(lun_name, ('21ec7341-4687000622165227970',
                                 '21ec7341-7953146827712520106'))

    @mock.patch.object(rest_client, 'RestClient')
    def test_create_snapshot_success(self, mock_client):
        # Snapshot creation works both with and without volume_id set.
        lun_info = self.driver.create_snapshot(self.snapshot)
        self.assertEqual(11, lun_info['provider_location'])

        self.snapshot.volume_id = ID
        self.snapshot.volume = self.volume
        lun_info = self.driver.create_snapshot(self.snapshot)
        self.assertEqual(11, lun_info['provider_location'])

    @ddt.data('1', '', '0')
    def test_copy_volume(self, input_speed):
        # The copyspeed metadata value is passed through to create_luncopy.
        self.driver.configuration.lun_copy_wait_interval = 0
        self.volume.metadata = {'copyspeed': input_speed}

        mocker = self.mock_object(
            self.driver.client, 'create_luncopy',
            mock.Mock(wraps=self.driver.client.create_luncopy))

        self.driver._copy_volume(self.volume,
                                 'fake_copy_name',
                                 'fake_src_lun',
                                 'fake_tgt_lun')

        mocker.assert_called_once_with('fake_copy_name',
                                       'fake_src_lun',
                                       'fake_tgt_lun',
                                       input_speed)

    @ddt.data({'input_speed': '1',
               'actual_speed': '1'},
              {'input_speed': '',
               'actual_speed': '2'},
              {'input_speed': None,
               'actual_speed': '2'},
              {'input_speed': '5',
               'actual_speed': '2'})
    @ddt.unpack
    def test_client_create_luncopy(self, input_speed, actual_speed):
        # Invalid/empty speeds fall back to the default speed '2'.
        mocker = self.mock_object(
            self.driver.client, 'call',
            mock.Mock(wraps=self.driver.client.call))

        self.driver.client.create_luncopy('fake_copy_name',
                                          'fake_src_lun',
                                          'fake_tgt_lun',
                                          input_speed)

        mocker.assert_called_once_with(
            mock.ANY,
            {"TYPE": 219,
             "NAME": 'fake_copy_name',
             "DESCRIPTION": 'fake_copy_name',
             "COPYSPEED": actual_speed,
             "LUNCOPYTYPE": "1",
             "SOURCELUN": "INVALID;fake_src_lun;INVALID;INVALID;INVALID",
             "TARGETLUN": "INVALID;fake_tgt_lun;INVALID;INVALID;INVALID"}
        )
@ddt.ddt
class HuaweiISCSIDriverTestCase(HuaweiTestBase):
    def setUp(self):
        """Build an iSCSI fake driver plus the expected target constants."""
        super(HuaweiISCSIDriverTestCase, self).setUp()
        self.configuration = mock.Mock(spec=conf.Configuration)
        self.configuration.hypermetro_devices = hypermetro_devices
        self.flags(rpc_backend='oslo_messaging._drivers.impl_fake')
        self.driver = FakeISCSIStorage(configuration=self.configuration)
        self.driver.do_setup()
        # Expected values matching the canned iSCSI fixtures above.
        self.portgroup = 'portgroup-test'
        self.iscsi_iqns = ['iqn.2006-08.com.huawei:oceanstor:21000022a:'
                           ':20503:192.0.2.1',
                           'iqn.2006-08.com.huawei:oceanstor:21000022a:'
                           ':20500:192.0.2.2']
        self.target_ips = ['192.0.2.1',
                           '192.0.2.2']
        self.portgroup_id = 11
        self.driver.client.login()
    def test_parse_rmt_iscsi_info(self):
        """Raw replication iscsi_info string is parsed into dicts."""
        rmt_devs = self.driver.huawei_conf.get_replication_devices()
        iscsi_info = rmt_devs[0]['iscsi_info']
        # '#' in CHAPinfo becomes ';' after parsing.
        expected_iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7acxxx',
                                'TargetIP': '1.1.1.1',
                                'CHAPinfo': 'mm-user;mm-user@storage',
                                'ALUA': '1',
                                'TargetPortGroup': 'portgroup-test'},
                               {'Name': 'iqn.1993-08.debian:01:ec2bff7acyyy',
                                'TargetIP': '2.2.2.2',
                                'CHAPinfo': 'nn-user;nn-user@storage',
                                'ALUA': '0',
                                'TargetPortGroup': 'portgroup-test1'}]
        self.assertEqual(expected_iscsi_info, iscsi_info)
    def test_parse_rmt_iscsi_info_without_iscsi_configuration(self):
        """An empty iscsi_info string parses to an empty list."""
        self.configuration.replication_device[0]['iscsi_info'] = ''
        rmt_devs = self.driver.huawei_conf.get_replication_devices()
        iscsi_info = rmt_devs[0]['iscsi_info']
        self.assertEqual([], iscsi_info)
    def test_login_success(self):
        """Login returns the device id from the canned login response."""
        device_id = self.driver.client.login()
        self.assertEqual('210235G7J20000000000', device_id)
    @ddt.data(constants.PWD_EXPIRED, constants.PWD_RESET)
    def test_login_password_expires_and_reset_fail(self, state):
        """An expired/reset password account raises and forces a logout."""
        with mock.patch.object(self.driver.client, 'logout') as mock_logout:
            self.mock_object(FakeClient, 'do_call',
                             return_value={"error": {"code": 0},
                                           "data": {
                                               "username": "admin",
                                               "iBaseToken": "2001031430",
                                               "deviceid": "210235G7J20000000000",
                                               "accountstate": state}})
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.driver.client.login)
            mock_logout.assert_called_once_with()
    def test_login_logout_fail(self):
        """Bad account state still raises even when the logout call fails."""
        login_info = {"error": {"code": 0},
                      "data": {"username": "admin",
                               "iBaseToken": "2001031430",
                               "deviceid": "210235G7J20000000000",
                               "accountstate": 3}}
        logout_info = {"error": {"code": 1}, "data": {}}
        self.mock_object(FakeClient, 'do_call',
                         side_effect=[login_info, logout_info])
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.client.login)
    def test_check_volume_exist_on_array(self):
        """A missing LUN in warn mode must not raise."""
        self.mock_object(rest_client.RestClient, 'get_lun_id_by_name',
                         return_value=None)
        self.driver._check_volume_exist_on_array(
            self.volume, constants.VOLUME_NOT_EXISTS_WARN)
    def test_create_volume_success(self):
        """Volume creation works with and without pool info in the host."""
        # Have pool info in the volume.
        self.volume.host = 'ubuntu001@backend001#OpenStack_Pool'
        lun_info = self.driver.create_volume(self.volume)
        self.assertEqual('1', lun_info['provider_location'])
        # No pool info in the volume.
        self.volume.host = 'ubuntu001@backend001'
        lun_info = self.driver.create_volume(self.volume)
        self.assertEqual('1', lun_info['provider_location'])
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    def test_delete_replication_fail(self, pool_data):
        """A backend failure while deleting a replicated LUN propagates."""
        self.driver.support_func = pool_data
        self.mock_object(replication.ReplicaCommonDriver, 'split')
        self.mock_object(
            huawei_driver.HuaweiBaseDriver,
            '_get_volume_type',
            return_value={'extra_specs': sync_replica_specs})
        self.mock_object(rest_client.RestClient,
                         'delete_lun',
                         side_effect=exception.VolumeBackendAPIException(
                             data='err'))
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.delete_volume, self.replica_volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_success_no_data(self, pool_data):
self.driver.support_func = pool_data
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "11",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "76",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
moved = False
empty_dict = {}
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
side_effect=[{}, task_info])
moved, model_update = self.driver.migrate_volume(None,
self.volume,
test_host,
None)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_success_with_replication(self, pool_data):
self.driver.support_func = pool_data
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "11",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "76",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
moved = False
empty_dict = {}
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
return_value=task_info)
moved, model_update = self.driver.migrate_volume(None,
self.replica_volume,
test_host,
None)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    def test_migrate_volume_fail_migration_fault(self, pool_data):
        """A faulted migration task (RUNNINGSTATUS 74) raises."""
        self.driver.support_func = pool_data
        task_info = {"data": [{"ENDTIME": "1436816174",
                               "ID": "9",
                               "PARENTID": "11",
                               "PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
                               "PROCESS": "-1",
                               "RUNNINGSTATUS": "74",
                               "SPEED": "2",
                               "STARTTIME": "1436816111",
                               "TARGETLUNID": "1",
                               "TARGETLUNNAME": "4924891454902893639",
                               "TYPE": 253,
                               "WORKMODE": "0"
                               }],
                     "error": {"code": 0,
                               "description": "0"}
                     }
        self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
                         return_value=task_info)
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.migrate_volume,
                          None, self.volume, test_host, None)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    def test_migrate_volume_fail_no_migrate_task(self, pool_data):
        """No task matching the LUN (PARENTID differs) raises."""
        self.driver.support_func = pool_data
        task_info = {"data": [{"ENDTIME": "1436816174",
                               "ID": "9",
                               "PARENTID": "12",
                               "PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
                               "PROCESS": "-1",
                               "RUNNINGSTATUS": "76",
                               "SPEED": "2",
                               "STARTTIME": "1436816111",
                               "TARGETLUNID": "1",
                               "TARGETLUNNAME": "4924891454902893639",
                               "TYPE": 253,
                               "WORKMODE": "0"
                               }],
                     "error": {"code": 0,
                               "description": "0"}
                     }
        self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
                         return_value=task_info)
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.migrate_volume,
                          None, self.volume, test_host, None)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    def test_migrate_volume_with_type_id(self, pool_data):
        """Migration also works when the volume carries a volume type."""
        self.driver.support_func = pool_data
        self.volume.volume_type_id = '550c089b-bfdd-4f7f-86e1-3ba88125555c'
        task_info = {"data": [{"ENDTIME": "1436816174",
                               "ID": "9",
                               "PARENTID": "11",
                               "PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
                               "PROCESS": "-1",
                               "RUNNINGSTATUS": "76",
                               "SPEED": "2",
                               "STARTTIME": "1436816111",
                               "TARGETLUNID": "1",
                               "TARGETLUNNAME": "4924891454902893639",
                               "TYPE": 253,
                               "WORKMODE": "0"
                               }],
                     "error": {"code": 0,
                               "description": "0"}
                     }
        empty_dict = {}
        self.mock_object(volume_types, 'get_volume_type',
                         return_value=test_new_type)
        self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
                         return_value=task_info)
        moved, model_update = self.driver.migrate_volume(None,
                                                         self.volume,
                                                         test_host,
                                                         None)
        self.assertTrue(moved)
        self.assertEqual(empty_dict, model_update)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    def test_manage_existing_fail(self, pool_data):
        # NOTE(review): despite the name, this path asserts nothing and
        # manage_existing is expected to complete -- verify the intent.
        self.driver.support_func = pool_data
        self.mock_object(rest_client.RestClient, 'get_lun_info',
                         return_value={'CAPACITY': 2097152, 'ALLOCTYPE': 1})
        self.mock_object(rest_client.RestClient, 'get_lun_id_by_name',
                         return_value='ID1')
        self.mock_object(rest_client.RestClient, 'rename_lun')
        self.mock_object(huawei_driver.HuaweiBaseDriver,
                         '_get_lun_info_by_ref',
                         return_value={
                             'PARENTNAME': 'OpenStack_Pool',
                             'SNAPSHOTIDS': [],
                             'ID': 'ID1',
                             'HEALTHSTATUS': constants.STATUS_HEALTH,
                             'WWN': '6643e8c1004c5f6723e9f454003'})
        self.mock_object(volume_types, 'get_volume_type',
                         return_value={'extra_specs': test_new_type})
        self.mock_object(huawei_driver.HuaweiBaseDriver,
                         '_check_needed_changes',
                         return_value={})
        external_ref = {'source-name': 'test1',
                        'source-id': 'ID1'}
        self.driver.manage_existing(self.volume, external_ref)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    def test_delete_volume_success(self, pool_data):
        """delete_volume completes without error for a plain volume."""
        self.driver.support_func = pool_data
        self.driver.delete_volume(self.volume)
    def test_delete_snapshot_success(self):
        """delete_snapshot completes without error."""
        self.driver.delete_snapshot(self.snapshot)
    @unittest.skip("Skip until bug #1578986 is fixed")
    def test_create_volume_from_snapsuccess(self):
        """create_volume_from_snapshot fills in the replication fields.

        NOTE(review): self.volume is passed as both the volume and the
        snapshot argument -- presumably acceptable for the fakes, but
        confirm once bug #1578986 is fixed and this test is re-enabled.
        """
        self.mock_object(
            huawei_driver.HuaweiBaseDriver,
            '_get_volume_type',
            return_value={'extra_specs': sync_replica_specs})
        self.mock_object(replication.ReplicaCommonDriver, 'sync')
        model_update = self.driver.create_volume_from_snapshot(self.volume,
                                                               self.volume)
        self.assertEqual('1', model_update['provider_location'])
        driver_data = {'pair_id': TEST_PAIR_ID,
                       'rmt_lun_id': '1'}
        driver_data = replication.to_string(driver_data)
        self.assertEqual(driver_data, model_update['replication_driver_data'])
        self.assertEqual('available', model_update['replication_status'])
    @mock.patch.object(huawei_driver.HuaweiISCSIDriver,
                       'initialize_connection',
                       return_value={"data": {'target_lun': 1}})
    def test_initialize_connection_snapshot_success(self, mock_iscsi_init):
        """Snapshot attach delegates to initialize_connection.

        The snapshot is wrapped in a Volume with lun_type '27' before the
        delegation, which this test verifies via assert_called_with.
        """
        iscsi_properties = self.driver.initialize_connection_snapshot(
            self.snapshot, FakeConnector)
        volume = Volume(id=self.snapshot.id,
                        provider_location=self.snapshot.provider_location,
                        lun_type='27',
                        metadata=None)
        self.assertEqual(1, iscsi_properties['data']['target_lun'])
        mock_iscsi_init.assert_called_with(volume, FakeConnector)
def test_initialize_connection_success_multipath_portgroup(self):
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.mock_object(rest_client.RestClient, 'get_tgt_port_group',
return_value = '11')
iscsi_properties = self.driver.initialize_connection(self.volume,
temp_connector)
self.assertEqual([1, 1], iscsi_properties['data']['target_luns'])
def test_initialize_connection_fail_multipath_portgroup(self):
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.mock_object(rest_client.RestClient, 'get_tgt_port_group',
return_value = '12')
self.mock_object(rest_client.RestClient, '_get_tgt_ip_from_portgroup',
return_value = [])
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.volume, temp_connector)
def test_initialize_connection_success_multipath_targetip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'TargetIP': '192.0.2.2',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
configuration = mock.Mock(spec = conf.Configuration)
configuration.hypermetro_devices = hypermetro_devices
driver = FakeISCSIStorage(configuration = self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
iscsi_properties = driver.initialize_connection(self.volume,
temp_connector)
self.assertEqual([1], iscsi_properties['data']['target_luns'])
def test_initialize_connection_fail_multipath_targetip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'TargetIP': '192.0.2.6',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
configuration = mock.Mock(spec = conf.Configuration)
configuration.hypermetro_devices = hypermetro_devices
driver = FakeISCSIStorage(configuration = self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.assertRaises(exception.VolumeBackendAPIException,
driver.initialize_connection,
self.volume, temp_connector)
def test_initialize_connection_success_multipath_defaultip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
default_target_ip = ['192.0.2.2']
configuration = mock.Mock(spec = conf.Configuration)
configuration.hypermetro_devices = hypermetro_devices
driver = FakeISCSIStorage(configuration = self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
driver.configuration.iscsi_default_target_ip = default_target_ip
driver.client.iscsi_default_target_ip = default_target_ip
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
iscsi_properties = driver.initialize_connection(self.volume,
temp_connector)
self.assertEqual([1], iscsi_properties['data']['target_luns'])
def test_initialize_connection_fail_multipath_defaultip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
default_target_ip = ['192.0.2.6']
configuration = mock.Mock(spec = conf.Configuration)
configuration.hypermetro_devices = hypermetro_devices
driver = FakeISCSIStorage(configuration = self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
driver.configuration.iscsi_default_target_ip = default_target_ip
driver.client.iscsi_default_target_ip = default_target_ip
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.assertRaises(exception.VolumeBackendAPIException,
driver.initialize_connection,
self.volume, temp_connector)
    def test_initialize_connection_fail_no_port_in_portgroup(self):
        """Attach fails when the port group contains no usable target IPs.

        NOTE(review): near-duplicate of
        test_initialize_connection_fail_multipath_portgroup (port group
        '11' vs '12') -- consider consolidating.
        """
        temp_connector = copy.deepcopy(FakeConnector)
        temp_connector['multipath'] = True
        self.mock_object(rest_client.RestClient, 'get_tgt_port_group',
                         return_value='11')
        self.mock_object(rest_client.RestClient, '_get_tgt_ip_from_portgroup',
                         return_value=[])
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.initialize_connection,
                          self.volume, temp_connector)
def test_initialize_connection_fail_multipath_no_ip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
configuration = mock.Mock(spec = conf.Configuration)
configuration.hypermetro_devices = hypermetro_devices
driver = FakeISCSIStorage(configuration = self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
driver.configuration.iscsi_default_target_ip = None
driver.client.iscsi_default_target_ip = None
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.assertRaises(exception.VolumeBackendAPIException,
driver.initialize_connection,
self.volume, temp_connector)
    @mock.patch.object(huawei_driver.HuaweiISCSIDriver,
                       'terminate_connection')
    def test_terminate_connection_snapshot_success(self, mock_iscsi_term):
        """Snapshot detach delegates to terminate_connection with a
        snapshot-backed Volume (lun_type '27')."""
        self.driver.terminate_connection_snapshot(self.snapshot,
                                                  FakeConnector)
        volume = Volume(id=self.snapshot.id,
                        provider_location=self.snapshot.provider_location,
                        lun_type='27',
                        metadata=None)
        mock_iscsi_term.assert_called_with(volume, FakeConnector)
    def test_terminate_connection_success(self):
        """terminate_connection completes without error."""
        self.driver.terminate_connection(self.volume, FakeConnector)
    def test_get_volume_status(self):
        """get_volume_stats reports the driver's VERSION string."""
        data = self.driver.get_volume_stats()
        self.assertEqual(self.driver.VERSION, data['driver_version'])
    @mock.patch.object(rest_client.RestClient, 'get_lun_info',
                       return_value={"CAPACITY": 6291456})
    @mock.patch.object(rest_client.RestClient, 'extend_lun')
    def test_extend_volume_size_equal(self, mock_extend, mock_lun_info):
        """extend_volume is a no-op when the LUN already has the target size.

        6291456 sectors * 512 B = 3 GiB, the requested size.
        """
        self.driver.extend_volume(self.volume, 3)
        self.assertEqual(0, mock_extend.call_count)
    @mock.patch.object(rest_client.RestClient, 'get_lun_info',
                       return_value={"CAPACITY": 5291456})
    @mock.patch.object(rest_client.RestClient, 'extend_lun')
    def test_extend_volume_success(self, mock_extend, mock_lun_info):
        """extend_volume calls extend_lun when the LUN is smaller than 3 GB."""
        self.driver.extend_volume(self.volume, 3)
        self.assertEqual(1, mock_extend.call_count)
    @mock.patch.object(rest_client.RestClient, 'get_lun_info',
                       return_value={"CAPACITY": 7291456})
    def test_extend_volume_fail(self, mock_lun_info):
        """extend_volume raises when the LUN is already larger than 3 GB."""
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.extend_volume, self.volume, 3)
    def test_extend_nonexistent_volume(self):
        """extend_volume raises when the LUN cannot be found by name."""
        self.volume = fake_volume.fake_volume_obj(admin_contex)
        self.mock_object(rest_client.RestClient,
                         'get_lun_id_by_name',
                         return_value=None)
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.extend_volume,
                          self.volume, 3)
def test_get_admin_metadata(self):
metadata = [{'key': 'huawei_lun_wwn', 'value': '1'}]
tmp_volume = fake_volume.fake_volume_obj(
admin_contex, volume_admin_metadata=metadata)
expected_value = {'huawei_lun_wwn': '1'}
admin_metadata = huawei_utils.get_admin_metadata(tmp_volume)
self.assertEqual(expected_value, admin_metadata)
metadata = {'huawei_lun_wwn': '1'}
tmp_volume = fake_volume.fake_volume_obj(admin_contex)
tmp_volume.admin_metadata = metadata
admin_metadata = huawei_utils.get_admin_metadata(tmp_volume)
self.assertEqual(expected_value, admin_metadata)
    def test_login_fail(self):
        """login raises when the fake client is forced to fail."""
        self.driver.client.test_fail = True
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.client.login)
    def test_create_snapshot_fail(self):
        """create_snapshot propagates a backend failure."""
        self.driver.client.test_fail = True
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_snapshot, self.snapshot)
    def test_create_volume_fail(self):
        """create_volume propagates a backend failure."""
        self.driver.client.test_fail = True
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_volume, self.volume)
    def test_delete_volume_fail(self):
        """delete_volume does not raise on a backend failure (best effort)."""
        self.driver.client.test_fail = True
        self.driver.delete_volume(self.volume)
    def test_delete_snapshot_fail(self):
        """delete_snapshot does not raise on a backend failure (best effort)."""
        self.driver.client.test_fail = True
        self.driver.delete_snapshot(self.snapshot)
    def test_delete_snapshot_with_snapshot_nonexistent(self):
        """delete_snapshot succeeds when provider_location is missing."""
        self.snapshot.provider_location = None
        self.driver.delete_snapshot(self.snapshot)
    def test_initialize_connection_fail(self):
        """initialize_connection propagates a backend failure."""
        self.driver.client.test_fail = True
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.initialize_connection,
                          self.volume, FakeConnector)
    def test_lun_is_associated_to_lungroup(self):
        """_is_lun_associated_to_lungroup is True after associating the LUN."""
        self.driver.client.associate_lun_to_lungroup('11', '11')
        result = self.driver.client._is_lun_associated_to_lungroup('11',
                                                                   '11')
        self.assertTrue(result)
    def test_lun_is_not_associated_to_lun_group(self):
        """_is_lun_associated_to_lungroup is False after removal."""
        self.driver.client.associate_lun_to_lungroup('12', '12')
        self.driver.client.remove_lun_from_lungroup('12', '12')
        result = self.driver.client._is_lun_associated_to_lungroup('12', '12')
        self.assertFalse(result)
    def test_get_tgtip(self):
        """_get_tgt_ip_from_portgroup returns the port group's target IPs."""
        portg_id = self.driver.client.get_tgt_port_group(self.portgroup)
        target_ip = self.driver.client._get_tgt_ip_from_portgroup(portg_id)
        self.assertEqual(self.target_ips, target_ip)
def test_find_chap_info(self):
tmp_dict = {}
tmp_dict['Name'] = 'iqn.1993-08.debian:01:ec2bff7ac3a3'
tmp_dict['CHAPinfo'] = 'mm-user;mm-user@storage'
iscsi_info = [tmp_dict]
initiator_name = FakeConnector['initiator']
chapinfo = self.driver.client.find_chap_info(iscsi_info,
initiator_name)
chap_username, chap_password = chapinfo.split(';')
self.assertEqual('mm-user', chap_username)
self.assertEqual('mm-user@storage', chap_password)
def test_find_alua_info(self):
tmp_dict = {}
tmp_dict['Name'] = 'iqn.1993-08.debian:01:ec2bff7ac3a3'
tmp_dict['ALUA'] = '1'
iscsi_info = [tmp_dict]
initiator_name = FakeConnector['initiator']
type = self.driver.client._find_alua_info(iscsi_info,
initiator_name)
self.assertEqual('1', type)
def test_get_pool_info(self):
pools = [{"NAME": "test001",
"ID": "0",
"USERFREECAPACITY": "36",
"USERTOTALCAPACITY": "48",
"USAGETYPE": constants.BLOCK_STORAGE_POOL_TYPE,
"TIER0CAPACITY": "48",
"TIER1CAPACITY": "0",
"TIER2CAPACITY": "0"},
{"NAME": "test002",
"ID": "1",
"USERFREECAPACITY": "37",
"USERTOTALCAPACITY": "49",
"USAGETYPE": constants.FILE_SYSTEM_POOL_TYPE,
"TIER0CAPACITY": "0",
"TIER1CAPACITY": "49",
"TIER2CAPACITY": "0"},
{"NAME": "test003",
"ID": "0",
"USERFREECAPACITY": "36",
"DATASPACE": "35",
"USERTOTALCAPACITY": "48",
"USAGETYPE": constants.BLOCK_STORAGE_POOL_TYPE,
"TIER0CAPACITY": "0",
"TIER1CAPACITY": "0",
"TIER2CAPACITY": "48"}]
pool_name = 'test001'
test_info = {'CAPACITY': '36', 'ID': '0', 'TOTALCAPACITY': '48',
'TIER0CAPACITY': '48', 'TIER1CAPACITY': '0',
'TIER2CAPACITY': '0'}
pool_info = self.driver.client.get_pool_info(pool_name, pools)
self.assertEqual(test_info, pool_info)
pool_name = 'test002'
test_info = {}
pool_info = self.driver.client.get_pool_info(pool_name, pools)
self.assertEqual(test_info, pool_info)
pool_name = 'test000'
test_info = {}
pool_info = self.driver.client.get_pool_info(pool_name, pools)
self.assertEqual(test_info, pool_info)
pool_name = 'test003'
test_info = {'CAPACITY': '35', 'ID': '0', 'TOTALCAPACITY': '48',
'TIER0CAPACITY': '0', 'TIER1CAPACITY': '0',
'TIER2CAPACITY': '48'}
pool_info = self.driver.client.get_pool_info(pool_name, pools)
self.assertEqual(test_info, pool_info)
    def test_get_smartx_specs_opts(self):
        """SmartX spec parsing extracts the smarttier policy value."""
        smartx_opts = smartx.SmartX().get_smartx_specs_opts(smarttier_opts)
        self.assertEqual('3', smartx_opts['policy'])
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    @mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
                       return_value={'MAXIOPS': '100',
                                     'IOType': '2'})
    def test_create_smartqos(self, mock_qos_value, pool_data):
        """create_volume succeeds with SmartQoS specs on the volume type.

        NOTE(review): with @ddt.data stacked above mock.patch, the data
        value is normally passed before the appended mocks -- verify
        'mock_qos_value' and 'pool_data' actually receive what their
        names claim.
        """
        self.driver.support_func = pool_data
        lun_info = self.driver.create_volume(self.volume)
        self.assertEqual('1', lun_info['provider_location'])
    @mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                       return_value={'smarttier': 'true',
                                     'smartcache': 'true',
                                     'smartpartition': 'true',
                                     'thin_provisioning_support': 'true',
                                     'thick_provisioning_support': 'false',
                                     'policy': '2',
                                     'cachename': 'cache-test',
                                     'partitionname': 'partition-test'})
    @mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_type',
                       return_value={'qos_specs_id': u'025ce295-15e9-41a7'})
    @mock.patch.object(qos_specs, 'get_qos_specs',
                       return_value={'specs': {'maxBandWidth': '100',
                                               'IOType': '0'},
                                     'consumer': 'back-end'})
    def test_create_smartqos_success(self,
                                     mock_qos_specs,
                                     mock_value_type,
                                     mock_volume_params):
        """create_volume applies back-end QoS specs from the volume type."""
        self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
        lun_info = self.driver.create_volume(self.volume)
        self.assertEqual('1', lun_info['provider_location'])
    @ddt.data([{'specs': {'maxBandWidth': '100', 'IOType': '3'}},
               FAKE_POOLS_UNSUPPORT_REPORT],
              [{'specs': {'maxBandWidth': '100', 'IOType': '3'}},
               FAKE_POOLS_SUPPORT_REPORT],
              [{'specs': {'minBandWidth': '0', 'IOType': '2'}},
               FAKE_POOLS_UNSUPPORT_REPORT],
              [{'specs': {'minBandWidth': '0', 'IOType': '2'}},
               FAKE_POOLS_SUPPORT_REPORT])
    @ddt.unpack
    def test_create_smartqos_failed(self, qos_specs_value, pool_data):
        """create_volume raises for invalid QoS specs.

        Covers an unsupported IOType ('3') and a zero lower-bound spec
        (minBandWidth '0'), on both pool capability reports.
        """
        self.driver.support_func = pool_data
        self.mock_object(
            huawei_driver.HuaweiBaseDriver,
            '_get_volume_params',
            return_value={'smarttier': 'true',
                          'smartcache': 'true',
                          'smartpartition': 'true',
                          'thin_provisioning_support': 'true',
                          'thick_provisioning_support': 'false',
                          'policy': '2',
                          'cachename': 'cache-test',
                          'partitionname': 'partition-test'})
        self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_type',
                         return_value={'qos_specs_id': u'025ce295-15e9-41a7'})
        self.mock_object(qos_specs, 'get_qos_specs',
                         return_value=qos_specs_value)
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_volume, self.volume)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    def test_create_smartqos_without_huawei_type(self, pool_data):
        """create_volume raises when QoS specs lack a Huawei control type."""
        self.driver.support_func = pool_data
        self.mock_object(
            huawei_driver.HuaweiBaseDriver,
            '_get_volume_params',
            return_value={'smarttier': 'true',
                          'smartcache': 'true',
                          'smartpartition': 'true',
                          'thin_provisioning_support': 'true',
                          'thick_provisioning_support': 'false',
                          'policy': '2',
                          'cachename': 'cache-test',
                          'partitionname': 'partition-test'})
        self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_type',
                         return_value={'qos_specs_id': u'025ce295-15e9-41a7'})
        self.mock_object(qos_specs, 'get_qos_specs',
                         return_value={'specs': {'fake_qos_type': '100',
                                                 'IOType': '2'}})
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_volume, self.volume)
    @mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
                       return_value={'MAXIOPS': '100',
                                     'IOType': '2'})
    @mock.patch.object(rest_client.RestClient, 'find_array_version',
                       return_value='V300R003C00')
    @mock.patch.object(rest_client.RestClient, 'find_available_qos',
                       return_value=(None, []))
    def test_create_smartqos_on_v3r3_with_no_qos(self,
                                                 mock_find_available_qos,
                                                 mock_qos_value,
                                                 mock_array_version):
        """On V3R3, create_volume succeeds when no reusable QoS is found."""
        self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
        lun_info = self.driver.create_volume(self.volume)
        self.assertEqual('1', lun_info['provider_location'])
    @mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
                       return_value={'MINIOPS': '100',
                                     'IOType': '2'})
    @mock.patch.object(rest_client.RestClient, 'find_array_version',
                       return_value='V300R003C00')
    @mock.patch.object(rest_client.RestClient, 'find_available_qos',
                       return_value=('11', u'["0", "2", "3"]'))
    def test_create_smartqos_on_v3r3_with_qos(self,
                                              mock_find_available_qos,
                                              mock_qos_value,
                                              mock_array_version):
        """On V3R3, create_volume succeeds when a compatible QoS exists."""
        self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
        lun_info = self.driver.create_volume(self.volume)
        self.assertEqual('1', lun_info['provider_location'])
    @mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
                       return_value={'MINIOPS': '100',
                                     'IOType': '2'})
    @mock.patch.object(rest_client.RestClient, 'find_array_version',
                       return_value='V300R003C00')
    @mock.patch.object(rest_client.RestClient, 'find_available_qos',
                       return_value=('11', u'["0", "2", "3"]'))
    def test_create_smartqos_on_v3r3_with_unsupport_qos(
            self, mock_find_available_qos,
            mock_qos_value, mock_array_version):
        """create_volume raises when the pool report does not support QoS."""
        self.driver.support_func = FAKE_POOLS_UNSUPPORT_REPORT
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_volume, self.volume)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    @mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
                       return_value={'MINIOPS': '100',
                                     'IOType': '2'})
    @mock.patch.object(rest_client.RestClient, 'find_array_version',
                       return_value='V300R003C00')
    @mock.patch.object(rest_client.RestClient, 'find_available_qos',
                       return_value=(None, []))
    @mock.patch.object(rest_client.RestClient, 'activate_deactivate_qos')
    def test_create_smartqos_on_v3r3_active_failed(self,
                                                   pool_data,
                                                   mock_activate_qos,
                                                   mock_find_available_qos,
                                                   mock_qos_value,
                                                   mock_array_version):
        """create_volume fails when activating the new QoS policy fails."""
        self.driver.support_func = pool_data
        mock_activate_qos.side_effect = (
            exception.VolumeBackendAPIException(data='Activate or deactivate '
                                                     'QoS error. '))
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_volume, self.volume)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    @mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
                       return_value={'MINIOPS': '100',
                                     'IOType': '2'})
    @mock.patch.object(rest_client.RestClient, 'find_array_version',
                       return_value='V300R003C00')
    @mock.patch.object(rest_client.RestClient, 'find_available_qos',
                       return_value=(None, []))
    @mock.patch.object(rest_client.RestClient, 'create_qos_policy')
    def test_create_smartqos_on_v3r3_qos_failed(self,
                                                pool_data,
                                                mock_create_qos,
                                                mock_find_available_qos,
                                                mock_qos_value,
                                                mock_array_version):
        """create_volume fails when creating the QoS policy fails."""
        self.driver.support_func = pool_data
        mock_create_qos.side_effect = (
            exception.VolumeBackendAPIException(data='Create QoS policy '
                                                     'error.'))
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_volume, self.volume)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    @mock.patch.object(rest_client.RestClient, 'get_qos_info',
                       return_value={"LUNLIST": u'["1", "2", "3"]',
                                     "RUNNINGSTATUS": "2"})
    def test_delete_smartqos_with_lun_left(self, mock_qos_info, pool_data):
        """delete_volume completes when other LUNs still use the QoS policy."""
        self.driver.support_func = pool_data
        self.driver.delete_volume(self.volume)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    @mock.patch.object(rest_client.RestClient, 'get_qos_info',
                       return_value={"LUNLIST": u'["1"]',
                                     "RUNNINGSTATUS": "2"})
    def test_delete_smartqos_with_no_lun_left(self, mock_qos_info, pool_data):
        """delete_volume completes when the volume is the QoS policy's
        last LUN."""
        self.driver.support_func = pool_data
        self.driver.delete_volume(self.volume)
    @mock.patch.object(rest_client.RestClient, 'add_lun_to_partition')
    @mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                       return_value={'smarttier': 'true',
                                     'smartcache': 'true',
                                     'smartpartition': 'true',
                                     'thin_provisioning_support': 'true',
                                     'thick_provisioning_support': 'false',
                                     'policy': '2',
                                     'cachename': 'cache-test',
                                     'partitionname': 'partition-test'})
    def test_create_smartx(self, mock_volume_types, mock_add_lun_to_partition):
        """create_volume succeeds with all SmartX features enabled."""
        lun_info = self.driver.create_volume(self.volume)
        self.assertEqual('1', lun_info['provider_location'])
    @ddt.data([{'smarttier': 'true', 'smartcache': 'true',
                'smartpartition': 'true',
                'thin_provisioning_support': 'true',
                'thick_provisioning_support': 'false',
                'policy': '2', 'cachename': None,
                'partitionname': 'partition-test'},
               FAKE_POOLS_UNSUPPORT_REPORT],
              [{'smarttier': 'true', 'smartcache': 'true',
                'smartpartition': 'true',
                'thin_provisioning_support': 'true',
                'thick_provisioning_support': 'false',
                'policy': '2', 'cachename': 'cache-test',
                'partitionname': None},
               FAKE_POOLS_SUPPORT_REPORT],
              [{'smarttier': 'true', 'smartcache': 'true',
                'smartpartition': 'true',
                'thin_provisioning_support': 'true',
                'thick_provisioning_support': 'false',
                'policy': '2', 'cachename': None,
                'partitionname': 'partition-test'},
               FAKE_POOLS_SUPPORT_REPORT],
              [{'smarttier': 'true', 'smartcache': 'true',
                'smartpartition': 'true',
                'thin_provisioning_support': 'true',
                'thick_provisioning_support': 'false',
                'policy': '2', 'cachename': 'cache-test',
                'partitionname': None},
               FAKE_POOLS_UNSUPPORT_REPORT])
    @ddt.unpack
    def test_create_smartCache_failed(self, opts, pool_data):
        """create_volume raises when smartcache/smartpartition is enabled
        but the corresponding cachename/partitionname is None."""
        self.driver.support_func = pool_data
        self.mock_object(
            huawei_driver.HuaweiBaseDriver,
            '_get_volume_params',
            return_value=opts)
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_volume, self.volume)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    @mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                       return_value={'smarttier': 'true',
                                     'smartcache': 'true',
                                     'smartpartition': 'true',
                                     'thin_provisioning_support': 'true',
                                     'thick_provisioning_support': 'false',
                                     'policy': '2',
                                     'cachename': 'cache-test',
                                     'partitionname': 'partition-test'})
    def test_create_smartCache_failed_with_no_cacheid(self,
                                                      mock_volume_type,
                                                      pool_data):
        """create_volume raises when the named SmartCache does not exist."""
        self.driver.client.cache_not_exist = True
        self.driver.support_func = pool_data
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_volume, self.volume)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    @mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                       return_value={'smarttier': 'true',
                                     'smartcache': 'true',
                                     'smartpartition': 'true',
                                     'thin_provisioning_support': 'true',
                                     'thick_provisioning_support': 'false',
                                     'policy': '2',
                                     'cachename': 'cache-test',
                                     'partitionname': 'partition-test'})
    def test_create_smartPartition_failed_with_no_partid(self,
                                                         mock_volume_type,
                                                         pool_data):
        """create_volume raises when the named SmartPartition does not
        exist."""
        self.driver.client.partition_not_exist = True
        self.driver.support_func = pool_data
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_volume, self.volume)
    def test_find_available_qos(self):
        """find_available_qos skips QoS policies already holding 64 LUNs.

        NOTE(review): the 64-LUN fixture below contains doubled commas
        (e.g. '"10", ,"11"') inside the LUNLIST string -- presumably the
        client only counts entries, but confirm it does not json-parse
        this value.
        """
        qos = {'MAXIOPS': '100', 'IOType': '2'}
        fake_qos_info_response_equal = {
            "error": {
                "code": 0
            },
            "data": [{
                "ID": "11",
                "MAXIOPS": "100",
                "LATENCY": "0",
                "IOType": "2",
                "FSLIST": u'[""]',
                'RUNNINGSTATUS': "2",
                "NAME": "OpenStack_57_20151225102851",
                "LUNLIST": u'["1", "2", "3", "4", "5", "6", "7", "8", "9",\
                "10", ,"11", "12", "13", "14", "15", "16", "17", "18", "19",\
                "20", ,"21", "22", "23", "24", "25", "26", "27", "28", "29",\
                "30", ,"31", "32", "33", "34", "35", "36", "37", "38", "39",\
                "40", ,"41", "42", "43", "44", "45", "46", "47", "48", "49",\
                "50", ,"51", "52", "53", "54", "55", "56", "57", "58", "59",\
                "60", ,"61", "62", "63", "64"]'
            }]
        }
        # Number of LUNs in QoS is equal to 64
        with mock.patch.object(rest_client.RestClient, 'get_qos',
                               return_value=fake_qos_info_response_equal):
            (qos_id, lun_list) = self.driver.client.find_available_qos(qos)
            self.assertEqual((None, []), (qos_id, lun_list))

        # Number of LUNs in QoS is less than 64
        fake_qos_info_response_less = {
            "error": {
                "code": 0
            },
            "data": [{
                "ID": "11",
                "MAXIOPS": "100",
                "LATENCY": "0",
                "IOType": "2",
                "FSLIST": u'[""]',
                'RUNNINGSTATUS': "2",
                "NAME": "OpenStack_57_20151225102851",
                "LUNLIST": u'["0", "1", "2"]'
            }]
        }
        with mock.patch.object(rest_client.RestClient, 'get_qos',
                               return_value=fake_qos_info_response_less):
            (qos_id, lun_list) = self.driver.client.find_available_qos(qos)
            self.assertEqual(("11", u'["0", "1", "2"]'), (qos_id, lun_list))
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value=fake_hypermetro_opts)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
return_value=True)
def test_create_hypermetro_success(self,
mock_volume_ready,
mock_hyper_domain,
mock_pool_info,
mock_all_pool_info,
mock_login_return):
metadata = {"hypermetro_id": '11',
"remote_lun_id": '1'}
lun_info = self.driver.create_volume(self.hyper_volume)
self.assertEqual(metadata, lun_info['metadata'])
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    @mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                       return_value=fake_hypermetro_opts)
    @mock.patch.object(rest_client.RestClient, 'get_all_pools',
                       return_value=FAKE_STORAGE_POOL_RESPONSE)
    @mock.patch.object(rest_client.RestClient, 'get_pool_info',
                       return_value=FAKE_FIND_POOL_RESPONSE)
    @mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
                       return_value='11')
    @mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
                       return_value=True)
    @mock.patch.object(hypermetro.HuaweiHyperMetro,
                       '_create_hypermetro_pair')
    @mock.patch.object(rest_client.RestClient, 'delete_lun')
    def test_create_hypermetro_fail(self,
                                    pool_data,
                                    mock_delete_lun,
                                    mock_hyper_pair_info,
                                    mock_volume_ready,
                                    mock_hyper_domain,
                                    mock_pool_info,
                                    mock_all_pool_info,
                                    mock_hypermetro_opts
                                    ):
        """When creating the hypermetro pair fails, the local LUN ('1')
        is rolled back via delete_lun."""
        self.driver.client.login()
        self.driver.support_func = pool_data
        mock_hyper_pair_info.side_effect = exception.VolumeBackendAPIException(
            data='Create hypermetro error.')
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_volume, self.hyper_volume)
        mock_delete_lun.assert_called_with('1')
    @mock.patch.object(rest_client.RestClient, 'get_all_pools',
                       return_value=FAKE_STORAGE_POOL_RESPONSE)
    @mock.patch.object(rest_client.RestClient, 'get_pool_info',
                       return_value={})
    def test_create_hypermetro_remote_pool_none_fail(self,
                                                     mock_pool_info,
                                                     mock_all_pool_info):
        """create_hypermetro raises when the remote pool cannot be found."""
        param = {'TYPE': '11',
                 'PARENTID': ''}
        self.driver.client.login()
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.metro.create_hypermetro,
                          '2', param)
    @mock.patch.object(rest_client.RestClient, 'get_all_pools',
                       return_value=FAKE_STORAGE_POOL_RESPONSE)
    @mock.patch.object(rest_client.RestClient, 'get_pool_info',
                       return_value=FAKE_FIND_POOL_RESPONSE)
    @mock.patch.object(rest_client.RestClient, 'create_lun',
                       return_value={'CAPACITY': '2097152',
                                     'DESCRIPTION': '2f0635',
                                     'HEALTHSTATUS': '1',
                                     'ALLOCTYPE': '1',
                                     'WWN': '6643e8c1004c5f6723e9f454003',
                                     'ID': '1',
                                     'RUNNINGSTATUS': '27',
                                     'NAME': '5mFHcBv4RkCcD'})
    @mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
                       return_value='11')
    @mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
                       return_value=True)
    def test_create_hypermetro_remote_pool_parentid(self,
                                                    mock_volume_ready,
                                                    mock_hyper_domain,
                                                    mock_create_lun,
                                                    mock_pool_info,
                                                    mock_all_pool_info):
        """The remote LUN's PARENTID is taken from the remote pool lookup."""
        param = {'TYPE': '11',
                 'PARENTID': ''}
        self.driver.metro.create_hypermetro('2', param)
        lun_PARENTID = mock_create_lun.call_args[0][0]['PARENTID']
        self.assertEqual(FAKE_FIND_POOL_RESPONSE['ID'], lun_PARENTID)
    @mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
                       return_value={'hypermetro_id': '3400a30d844d0007',
                                     'remote_lun_id': '1'})
    def test_hypermetro_none_map_info_fail(self, mock_metadata):
        """connect_volume_fc raises when no FC mapping info is available."""
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.metro.connect_volume_fc,
                          self.volume,
                          FakeConnector)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    @mock.patch.object(rest_client.RestClient, 'check_lun_exist',
                       return_value=True)
    @mock.patch.object(rest_client.RestClient, 'check_hypermetro_exist',
                       return_value=True)
    @mock.patch.object(rest_client.RestClient, 'delete_hypermetro',
                       return_value=FAKE_COMMON_SUCCESS_RESPONSE)
    @mock.patch.object(rest_client.RestClient, 'delete_lun',
                       return_value=None)
    def test_delete_hypermetro_success(self,
                                       mock_delete_lun,
                                       mock_delete_hypermetro,
                                       mock_check_hyermetro,
                                       mock_lun_exit,
                                       pool_data):
        """delete_volume tears down the hypermetro pair and the LUN.

        NOTE(review): pool_data is declared last here but first in
        test_delete_hypermetro_fail; with @ddt.data stacked above
        mock.patch the data value is normally passed before the appended
        mocks -- verify each parameter receives what its name claims.
        """
        self.driver.support_func = pool_data
        self.driver.delete_volume(self.hyper_volume)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    @mock.patch.object(rest_client.RestClient, 'check_lun_exist',
                       return_value=True)
    @mock.patch.object(rest_client.RestClient, 'check_hypermetro_exist',
                       return_value=True)
    @mock.patch.object(rest_client.RestClient, 'get_hypermetro_by_id',
                       return_value=FAKE_METRO_INFO_RESPONSE)
    @mock.patch.object(rest_client.RestClient, 'delete_hypermetro')
    @mock.patch.object(rest_client.RestClient, 'delete_lun',
                       return_value=None)
    def test_delete_hypermetro_fail(self,
                                    pool_data,
                                    mock_delete_lun,
                                    mock_delete_hypermetro,
                                    mock_metro_info,
                                    mock_check_hyermetro,
                                    mock_lun_exit):
        """delete_volume still deletes the LUN ('11') when tearing down
        the hypermetro pair fails, and the error propagates."""
        self.driver.support_func = pool_data
        mock_delete_hypermetro.side_effect = (
            exception.VolumeBackendAPIException(data='Delete hypermetro '
                                                     'error.'))
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.delete_volume, self.hyper_volume)
        mock_delete_lun.assert_called_with('11')
def test_manage_existing_get_size_invalid_reference(self):
# Can't find LUN by source-name.
external_ref = {'source-name': 'LUN1'}
with mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value=None):
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing_get_size,
self.volume, external_ref)
self.assertIsNotNone(re.search('please check the source-name '
'or source-id', ex.msg))
# Can't find LUN by source-id.
external_ref = {'source-id': 'ID1'}
with mock.patch.object(rest_client.RestClient, 'get_lun_info') as m_gt:
m_gt.side_effect = exception.VolumeBackendAPIException(
data='Error')
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.manage_existing_get_size,
self.volume, external_ref)
self.assertIsNotNone(re.search('please check the source-name '
'or source-id', ex.msg))
    @ddt.data({'source-id': 'ID1'}, {'source-name': 'LUN1'},
              {'source-name': 'LUN1', 'source-id': 'ID1'})
    @mock.patch.object(rest_client.RestClient, 'get_lun_info',
                       return_value={'CAPACITY': 3097152})
    @mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                       return_value='ID1')
    def test_manage_existing_get_size_success(self, mock_get_lun_id_by_name,
                                              mock_get_lun_info,
                                              external_ref):
        """manage_existing_get_size reports 2 GB for a 3097152-sector LUN.

        3097152 * 512 B is ~1.48 GiB; the reported size is 2, presumably
        rounded up to whole gigabytes -- confirm against the driver.
        """
        size = self.driver.manage_existing_get_size(self.volume,
                                                    external_ref)
        self.assertEqual(2, size)
    @mock.patch.object(rest_client.RestClient, 'get_lun_info',
                       return_value={'CAPACITY': 2097152,
                                     'ID': 'ID1',
                                     'PARENTNAME': 'OpenStack_Pool'})
    @mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                       return_value='ID1')
    def test_manage_existing_pool_mismatch(self, mock_get_by_name,
                                           mock_get_info):
        """manage_existing rejects a LUN that lives in a different pool."""
        # LUN does not belong to the specified pool.
        with mock.patch.object(huawei_driver.HuaweiBaseDriver,
                               '_get_lun_info_by_ref',
                               return_value={'PARENTNAME': 'StoragePool'}):
            external_ref = {'source-name': 'LUN1'}
            ex = self.assertRaises(exception.ManageExistingInvalidReference,
                                   self.driver.manage_existing,
                                   self.volume, external_ref)
            self.assertIsNotNone(re.search('The specified LUN does not belong'
                                           ' to the given pool', ex.msg))
    @mock.patch.object(rest_client.RestClient, 'get_lun_info',
                       return_value={'CAPACITY': 2097152,
                                     'ID': 'ID1',
                                     'PARENTNAME': 'OpenStack_Pool'})
    @mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                       return_value='ID1')
    def test_manage_existing_lun_abnormal(self, mock_get_by_name,
                                          mock_get_info):
        """Managing a LUN whose health status is abnormal must fail."""
        # Status is not normal.
        ret = {'PARENTNAME': "OpenStack_Pool",
               'HEALTHSTATUS': '2'}
        with mock.patch.object(huawei_driver.HuaweiBaseDriver,
                               '_get_lun_info_by_ref',
                               return_value=ret):
            external_ref = {'source-name': 'LUN1'}
            ex = self.assertRaises(exception.ManageExistingInvalidReference,
                                   self.driver.manage_existing,
                                   self.volume, external_ref)
            self.assertIsNotNone(re.search('LUN status is not normal', ex.msg))
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    @mock.patch.object(rest_client.RestClient, 'get_hypermetro_pairs',
                       return_value=[{'LOCALOBJID': 'ID1'}])
    @mock.patch.object(rest_client.RestClient, 'get_lun_info',
                       return_value={'CAPACITY': 2097152,
                                     'ID': 'ID1',
                                     'PARENTNAME': 'OpenStack_Pool',
                                     'HEALTHSTATUS': constants.STATUS_HEALTH})
    @mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                       return_value='ID1')
    def test_manage_existing_with_hypermetro(self, mock_get_by_name,
                                             mock_get_info,
                                             mock_get_hyper_pairs,
                                             pool_data):
        """A LUN that is part of a HyperMetroPair cannot be managed.

        NOTE(review): ddt feeds its data value right after ``self`` while
        mock appends mocks afterwards, so listing ``pool_data`` last may
        bind it to a mock — confirm against ddt/mock argument ordering.
        The inner with-patch below also duplicates the decorator above.
        """
        self.driver.support_func = pool_data
        # Exists in a HyperMetroPair.
        with mock.patch.object(rest_client.RestClient,
                               'get_hypermetro_pairs',
                               return_value=[{'LOCALOBJID': 'ID1'}]):
            external_ref = {'source-name': 'LUN1'}
            ex = self.assertRaises(exception.ManageExistingInvalidReference,
                                   self.driver.manage_existing,
                                   self.volume, external_ref)
            self.assertIsNotNone(re.search('HyperMetroPair', ex.msg))
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    @mock.patch.object(rest_client.RestClient, 'get_hypermetro_pairs')
    @mock.patch.object(rest_client.RestClient, 'rename_lun')
    @mock.patch.object(rest_client.RestClient, 'get_lun_info',
                       return_value={'CAPACITY': 2097152,
                                     'ID': 'ID1',
                                     'PARENTNAME': 'OpenStack_Pool',
                                     'HEALTHSTATUS': constants.STATUS_HEALTH,
                                     'WWN': '6643e8c1004c5f6723e9f454003'})
    @mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                       return_value='ID1')
    def test_manage_existing_with_lower_version(self, pool_data,
                                                mock_get_by_name,
                                                mock_get_info, mock_rename,
                                                mock_get_hyper_pairs):
        """Manage succeeds when the hypermetro-pairs query itself errors.

        Older arrays raise on get_hypermetro_pairs; the driver must treat
        that as "no pairs" and still manage the LUN.
        """
        self.driver.support_func = pool_data
        mock_get_hyper_pairs.side_effect = (
            exception.VolumeBackendAPIException(data='err'))
        external_ref = {'source-name': 'LUN1'}
        model_update = self.driver.manage_existing(self.volume,
                                                   external_ref)
        expected_val = {
            'admin_metadata': {
                'huawei_lun_wwn': '6643e8c1004c5f6723e9f454003'
            },
            'provider_location': 'ID1'}
        self.assertEqual(expected_val, model_update)
    @ddt.data([[{'PRILUNID': 'ID1'}], []],
              [[{'PRILUNID': 'ID2'}], ['ID1', 'ID2']])
    @mock.patch.object(rest_client.RestClient, 'get_lun_info',
                       return_value={'CAPACITY': 2097152,
                                     'ID': 'ID1',
                                     'PARENTNAME': 'OpenStack_Pool',
                                     'HEALTHSTATUS': constants.STATUS_HEALTH})
    @mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                       return_value='ID1')
    def test_manage_existing_with_splitmirror(self, ddt_data,
                                              mock_get_by_name,
                                              mock_get_info):
        """A LUN involved in a SplitMirror cannot be managed.

        ddt_data is [split mirror list, target LUN id list]: the LUN is
        matched either as a primary LUN or as one of the target LUNs.
        """
        self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
        # Exists in a SplitMirror.
        with mock.patch.object(rest_client.RestClient, 'get_split_mirrors',
                               return_value=ddt_data[0]), \
            mock.patch.object(rest_client.RestClient, 'get_target_luns',
                              return_value=ddt_data[1]):
            external_ref = {'source-name': 'LUN1'}
            ex = self.assertRaises(exception.ManageExistingInvalidReference,
                                   self.driver.manage_existing,
                                   self.volume, external_ref)

            self.assertIsNotNone(re.search('SplitMirror', ex.msg))
    @ddt.data([[{'PARENTID': 'ID1'}], FAKE_POOLS_UNSUPPORT_REPORT],
              [[{'TARGETLUNID': 'ID1'}], FAKE_POOLS_UNSUPPORT_REPORT],
              [[{'PARENTID': 'ID1'}], FAKE_POOLS_SUPPORT_REPORT],
              [[{'TARGETLUNID': 'ID1'}], FAKE_POOLS_SUPPORT_REPORT])
    @mock.patch.object(rest_client.RestClient, 'get_lun_info',
                       return_value={'CAPACITY': 2097152,
                                     'ID': 'ID1',
                                     'PARENTNAME': 'OpenStack_Pool',
                                     'HEALTHSTATUS': constants.STATUS_HEALTH})
    @mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                       return_value='ID1')
    @ddt.unpack
    def test_manage_existing_under_migration(self, ddt_data, pool_data,
                                             mock_get_by_name,
                                             mock_get_info):
        """A LUN that is source or target of a migration cannot be managed."""
        self.driver.support_func = pool_data
        # Exists in a migration task.
        with mock.patch.object(rest_client.RestClient, 'get_migration_task',
                               return_value=ddt_data):
            external_ref = {'source-name': 'LUN1'}
            ex = self.assertRaises(exception.ManageExistingInvalidReference,
                                   self.driver.manage_existing,
                                   self.volume, external_ref)
            self.assertIsNotNone(re.search('migration', ex.msg))
    @mock.patch.object(rest_client.RestClient, 'get_lun_info',
                       return_value={'CAPACITY': 2097152,
                                     'ID': 'ID1',
                                     'PARENTNAME': 'OpenStack_Pool',
                                     'SNAPSHOTIDS': [],
                                     'ISADD2LUNGROUP': 'true',
                                     'HEALTHSTATUS': constants.STATUS_HEALTH})
    @mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                       return_value='ID1')
    def test_manage_existing_with_lungroup(self, mock_get_by_name,
                                           mock_get_info):
        """A LUN already added to a LUN group cannot be managed."""
        # Already in LUN group.
        external_ref = {'source-name': 'LUN1'}
        ex = self.assertRaises(exception.ManageExistingInvalidReference,
                               self.driver.manage_existing,
                               self.volume, external_ref)
        self.assertIsNotNone(re.search('Already exists in a LUN group',
                                       ex.msg))
    @ddt.data([{'source-name': 'LUN1'}, FAKE_POOLS_UNSUPPORT_REPORT],
              [{'source-name': 'LUN1'}, FAKE_POOLS_SUPPORT_REPORT],
              [{'source-id': 'ID1'}, FAKE_POOLS_UNSUPPORT_REPORT],
              [{'source-id': 'ID1'}, FAKE_POOLS_SUPPORT_REPORT])
    @mock.patch.object(rest_client.RestClient, 'rename_lun')
    @mock.patch.object(huawei_driver.HuaweiBaseDriver,
                       '_get_lun_info_by_ref',
                       return_value={'PARENTNAME': 'OpenStack_Pool',
                                     'SNAPSHOTIDS': [],
                                     'ID': 'ID1',
                                     'HEALTHSTATUS': constants.STATUS_HEALTH,
                                     'WWN': '6643e8c1004c5f6723e9f454003'})
    @mock.patch.object(rest_client.RestClient, 'get_lun_info',
                       return_value={'CAPACITY': 2097152,
                                     'ALLOCTYPE': 1})
    @mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                       return_value='ID1')
    @ddt.unpack
    def test_manage_existing_success(self, mock_get_by_name, mock_get_info,
                                     mock_check_lun, mock_rename,
                                     external_ref, pool_data):
        """Managing a healthy, unmapped LUN returns its WWN and location.

        NOTE(review): ddt feeds the unpacked data values right after
        ``self`` and mock appends its mocks afterwards, so listing the
        mocks before ``external_ref``/``pool_data`` may bind arguments to
        the wrong names — confirm against ddt/mock argument ordering.
        """
        self.driver.support_func = pool_data
        model_update = self.driver.manage_existing(self.volume,
                                                   external_ref)
        expected_val = {
            'admin_metadata': {
                'huawei_lun_wwn': '6643e8c1004c5f6723e9f454003'
            },
            'provider_location': 'ID1'}
        self.assertEqual(expected_val, model_update)
    def test_unmanage(self):
        """unmanage must complete without raising."""
        self.driver.unmanage(self.volume)
    def test_manage_existing_snapshot_abnormal(self):
        """Managing a snapshot with abnormal health status must fail."""
        with mock.patch.object(huawei_driver.HuaweiBaseDriver,
                               '_get_snapshot_info_by_ref',
                               return_value={'HEALTHSTATUS': '2',
                                             'PARENTID': '11'}):
            external_ref = {'source-name': 'test1'}
            ex = self.assertRaises(exception.ManageExistingInvalidReference,
                                   self.driver.manage_existing_snapshot,
                                   self.snapshot, external_ref)
            self.assertIsNotNone(re.search('Snapshot status is not normal',
                                           ex.msg))
    @mock.patch.object(rest_client.RestClient, 'get_snapshot_info',
                       return_value={'ID': 'ID1',
                                     'EXPOSEDTOINITIATOR': 'true',
                                     'NAME': 'test1',
                                     'PARENTID': '11',
                                     'USERCAPACITY': 2097152,
                                     'HEALTHSTATUS': constants.STATUS_HEALTH})
    @mock.patch.object(rest_client.RestClient, 'get_snapshot_id_by_name',
                       return_value='ID1')
    def test_manage_existing_snapshot_with_lungroup(self, mock_get_by_name,
                                                    mock_get_info):
        """A snapshot already exposed to an initiator cannot be managed."""
        # Snapshot is exposed to an initiator (EXPOSEDTOINITIATOR is
        # 'true').  The original comment said "Already in LUN group.",
        # which does not match the assertion below.
        external_ref = {'source-name': 'test1'}
        ex = self.assertRaises(exception.ManageExistingInvalidReference,
                               self.driver.manage_existing_snapshot,
                               self.snapshot, external_ref)
        self.assertIsNotNone(re.search('Snapshot is exposed to initiator',
                                       ex.msg))
    @mock.patch.object(rest_client.RestClient, 'rename_snapshot')
    @mock.patch.object(huawei_driver.HuaweiBaseDriver,
                       '_get_snapshot_info_by_ref',
                       return_value={'ID': 'ID1',
                                     'EXPOSEDTOINITIATOR': 'false',
                                     'NAME': 'test1',
                                     'PARENTID': '11',
                                     'USERCAPACITY': 2097152,
                                     'HEALTHSTATUS': constants.STATUS_HEALTH})
    def test_manage_existing_snapshot_success(self, mock_get_info,
                                              mock_rename):
        """A healthy, unexposed snapshot is managed by name or by id."""
        external_ref = {'source-name': 'test1'}
        model_update = self.driver.manage_existing_snapshot(self.snapshot,
                                                            external_ref)
        self.assertEqual({'provider_location': 'ID1'}, model_update)
        external_ref = {'source-id': 'ID1'}
        model_update = self.driver.manage_existing_snapshot(self.snapshot,
                                                            external_ref)
        self.assertEqual({'provider_location': 'ID1'}, model_update)
    @mock.patch.object(rest_client.RestClient, 'get_snapshot_info',
                       return_value={'ID': 'ID1',
                                     'EXPOSEDTOINITIATOR': 'false',
                                     'NAME': 'test1',
                                     'USERCAPACITY': 2097152,
                                     'PARENTID': '12',
                                     'HEALTHSTATUS': constants.STATUS_HEALTH})
    @mock.patch.object(rest_client.RestClient, 'get_snapshot_id_by_name',
                       return_value='ID1')
    def test_manage_existing_snapshot_mismatch_lun(self, mock_get_by_name,
                                                   mock_get_info):
        """A snapshot whose parent LUN differs from the volume is rejected."""
        external_ref = {'source-name': 'test1'}
        ex = self.assertRaises(exception.ManageExistingInvalidReference,
                               self.driver.manage_existing_snapshot,
                               self.snapshot, external_ref)
        self.assertIsNotNone(re.search("Snapshot doesn't belong to volume",
                                       ex.msg))
    @mock.patch.object(rest_client.RestClient, 'get_snapshot_info',
                       return_value={'USERCAPACITY': 3097152})
    @mock.patch.object(rest_client.RestClient, 'get_snapshot_id_by_name',
                       return_value='ID1')
    def test_manage_existing_snapshot_get_size_success(self,
                                                       mock_get_id_by_name,
                                                       mock_get_info):
        """All reference styles (name+id, name, id) yield the same size."""
        external_ref = {'source-name': 'test1',
                        'source-id': 'ID1'}
        size = self.driver.manage_existing_snapshot_get_size(self.snapshot,
                                                             external_ref)
        self.assertEqual(2, size)
        external_ref = {'source-name': 'test1'}
        size = self.driver.manage_existing_snapshot_get_size(self.snapshot,
                                                             external_ref)
        self.assertEqual(2, size)
        external_ref = {'source-id': 'ID1'}
        size = self.driver.manage_existing_snapshot_get_size(self.snapshot,
                                                             external_ref)
        self.assertEqual(2, size)
    def test_unmanage_snapshot(self):
        """unmanage_snapshot must complete without raising."""
        self.driver.unmanage_snapshot(self.snapshot)
    @ddt.data(sync_replica_specs, async_replica_specs)
    def test_create_replication_success(self, mock_type):
        """Creating a replicated volume records pair id and remote LUN id."""
        self.mock_object(replication.ReplicaCommonDriver, 'sync')
        self.mock_object(
            huawei_driver.HuaweiBaseDriver,
            '_get_volume_type',
            return_value={'extra_specs': mock_type})

        model_update = self.driver.create_volume(self.replica_volume)
        driver_data = {'pair_id': TEST_PAIR_ID,
                       'rmt_lun_id': '1'}
        driver_data = replication.to_string(driver_data)
        self.assertEqual(driver_data, model_update['replication_driver_data'])
        self.assertEqual('available', model_update['replication_status'])
    @ddt.data(
        [
            rest_client.RestClient,
            'get_array_info',
            mock.Mock(
                side_effect=exception.VolumeBackendAPIException(data='err')),
            FAKE_POOLS_UNSUPPORT_REPORT
        ],
        [
            rest_client.RestClient,
            'get_remote_devices',
            mock.Mock(
                side_effect=exception.VolumeBackendAPIException(data='err')),
            FAKE_POOLS_UNSUPPORT_REPORT
        ],
        [
            rest_client.RestClient,
            'get_remote_devices',
            mock.Mock(return_value={}),
            FAKE_POOLS_UNSUPPORT_REPORT
        ],
        [
            replication.ReplicaPairManager,
            'wait_volume_online',
            mock.Mock(side_effect=[
                None,
                exception.VolumeBackendAPIException(data='err')]),
            FAKE_POOLS_UNSUPPORT_REPORT
        ],
        [
            rest_client.RestClient,
            'create_pair',
            mock.Mock(
                side_effect=exception.VolumeBackendAPIException(data='err')),
            FAKE_POOLS_UNSUPPORT_REPORT
        ],
        [
            replication.ReplicaCommonDriver,
            'sync',
            mock.Mock(
                side_effect=exception.VolumeBackendAPIException(data='err')),
            FAKE_POOLS_UNSUPPORT_REPORT
        ],
        [
            rest_client.RestClient,
            'get_array_info',
            mock.Mock(
                side_effect=exception.VolumeBackendAPIException(data='err')),
            FAKE_POOLS_SUPPORT_REPORT
        ],
        [
            rest_client.RestClient,
            'get_remote_devices',
            mock.Mock(
                side_effect=exception.VolumeBackendAPIException(data='err')),
            FAKE_POOLS_SUPPORT_REPORT
        ],
        [
            rest_client.RestClient,
            'get_remote_devices',
            mock.Mock(return_value={}),
            FAKE_POOLS_SUPPORT_REPORT
        ],
        [
            replication.ReplicaPairManager,
            'wait_volume_online',
            mock.Mock(side_effect=[
                None,
                exception.VolumeBackendAPIException(data='err')]),
            FAKE_POOLS_SUPPORT_REPORT
        ],
        [
            rest_client.RestClient,
            'create_pair',
            mock.Mock(
                side_effect=exception.VolumeBackendAPIException(data='err')),
            FAKE_POOLS_SUPPORT_REPORT
        ],
        [
            replication.ReplicaCommonDriver,
            'sync',
            mock.Mock(
                side_effect=exception.VolumeBackendAPIException(data='err')),
            FAKE_POOLS_SUPPORT_REPORT
        ],
    )
    @ddt.unpack
    def test_create_replication_fail(self, mock_module, mock_func,
                                     mock_value, pool_data):
        """Every injected backend failure must abort replica creation.

        Each ddt entry is (module, function name, failing mock, pool
        capability report); the patched call fails and create_volume must
        raise VolumeBackendAPIException after cleaning up the pair.
        """
        self.driver.support_func = pool_data
        self.mock_object(
            huawei_driver.HuaweiBaseDriver,
            '_get_volume_type',
            return_value={'extra_specs': sync_replica_specs})
        self.mock_object(replication.ReplicaPairManager, '_delete_pair')
        self.mock_object(mock_module, mock_func, mock_value)
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.driver.create_volume, self.replica_volume)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    def test_delete_replication_success(self, pool_data):
        """Deleting a replicated volume works whether or not the LUN exists."""
        self.driver.support_func = pool_data
        self.mock_object(replication.ReplicaCommonDriver, 'split')
        self.mock_object(
            huawei_driver.HuaweiBaseDriver,
            '_get_volume_type',
            return_value={'extra_specs': sync_replica_specs})
        self.driver.delete_volume(self.replica_volume)

        # Second pass: the LUN is already gone from the array.
        self.mock_object(rest_client.RestClient, 'check_lun_exist',
                         return_value=False)
        self.driver.delete_volume(self.replica_volume)
@unittest.skip("Skip until bug #1578986 is fixed")
def test_wait_volume_online(self):
replica = FakeReplicaPairManager(self.driver.client,
self.driver.replica_client,
self.configuration)
lun_info = {'ID': '11'}
replica.wait_volume_online(self.driver.client, lun_info)
offline_status = {'RUNNINGSTATUS': '28'}
replica.wait_volume_online(self.driver.client, lun_info)
with mock.patch.object(rest_client.RestClient, 'get_lun_info',
offline_status):
self.assertRaises(exception.VolumeBackendAPIException,
replica.wait_volume_online,
self.driver.client,
lun_info)
    @unittest.skip("Skip until bug #1578986 is fixed")
    def test_wait_second_access(self):
        """wait_second_access returns on a matching access mode, else raises."""
        pair_id = '1'
        access_ro = constants.REPLICA_SECOND_RO
        access_rw = constants.REPLICA_SECOND_RW
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)
        self.mock_object(replication.PairOp, 'get_replica_info',
                         return_value={'SECRESACCESS': access_ro})
        # Fake clock so the mismatching wait exhausts its timeout at once.
        self.mock_object(huawei_utils.time, 'time',
                         side_effect=utils.generate_timeout_series(
                             constants.DEFAULT_REPLICA_WAIT_TIMEOUT))
        common_driver.wait_second_access(pair_id, access_ro)
        self.assertRaises(exception.VolumeBackendAPIException,
                          common_driver.wait_second_access, pair_id, access_rw)
    @unittest.skip("Skip until bug #1578986 is fixed")
    def test_wait_replica_ready(self):
        """wait_replica_ready handles normal, syncing and split pair states."""
        normal_status = {
            'RUNNINGSTATUS': constants.REPLICA_RUNNING_STATUS_NORMAL,
            'HEALTHSTATUS': constants.REPLICA_HEALTH_STATUS_NORMAL
        }
        split_status = {
            'RUNNINGSTATUS': constants.REPLICA_RUNNING_STATUS_SPLIT,
            'HEALTHSTATUS': constants.REPLICA_HEALTH_STATUS_NORMAL
        }
        sync_status = {
            'RUNNINGSTATUS': constants.REPLICA_RUNNING_STATUS_SYNC,
            'HEALTHSTATUS': constants.REPLICA_HEALTH_STATUS_NORMAL
        }
        pair_id = '1'
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)
        # Already normal: returns immediately.
        with mock.patch.object(replication.PairOp, 'get_replica_info',
                               return_value=normal_status):
            common_driver.wait_replica_ready(pair_id)

        # Syncing then normal: waits one round and returns.
        with mock.patch.object(
                replication.PairOp,
                'get_replica_info',
                side_effect=[sync_status, normal_status]):
            common_driver.wait_replica_ready(pair_id)

        # Stuck split: must raise.
        with mock.patch.object(replication.PairOp, 'get_replica_info',
                               return_value=split_status):
            self.assertRaises(exception.VolumeBackendAPIException,
                              common_driver.wait_replica_ready, pair_id)
def test_failover_to_current(self):
driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
secondary_id, volumes_update = driver.failover_host(
None, [self.volume], 'default')
self.assertIn(driver.active_backend_id, ('', None))
self.assertEqual(old_client, driver.client)
self.assertEqual(old_replica_client, driver.replica_client)
self.assertEqual(old_replica, driver.replica)
self.assertEqual('default', secondary_id)
self.assertEqual(0, len(volumes_update))
    def test_failover_normal_volumes(self):
        """Non-replicated volumes go to 'error' on failover.

        The clients swap, and the volume's previous status is stashed in
        metadata['old_status'] so failback can restore it.
        """
        driver = FakeISCSIStorage(configuration=self.configuration)
        driver.do_setup()
        old_client = driver.client
        old_replica_client = driver.replica_client
        old_replica = driver.replica
        secondary_id, volumes_update = driver.failover_host(
            None, [self.volume], REPLICA_BACKEND_ID)
        self.assertEqual(REPLICA_BACKEND_ID, driver.active_backend_id)
        self.assertEqual(old_client, driver.replica_client)
        self.assertEqual(old_replica_client, driver.client)
        self.assertNotEqual(old_replica, driver.replica)
        self.assertEqual(REPLICA_BACKEND_ID, secondary_id)
        self.assertEqual(1, len(volumes_update))
        v_id = volumes_update[0]['volume_id']
        v_update = volumes_update[0]['updates']
        self.assertEqual(self.volume.id, v_id)
        self.assertEqual('error', v_update['status'])
        self.assertEqual(self.volume['status'],
                         v_update['metadata']['old_status'])
    def test_failback_to_current(self):
        """Failing over to the secondary we are already on is a no-op."""
        driver = FakeISCSIStorage(configuration=self.configuration)
        driver.active_backend_id = REPLICA_BACKEND_ID
        driver.do_setup()
        old_client = driver.client
        old_replica_client = driver.replica_client
        old_replica = driver.replica
        secondary_id, volumes_update = driver.failover_host(
            None, [self.volume], REPLICA_BACKEND_ID)
        self.assertEqual(REPLICA_BACKEND_ID, driver.active_backend_id)
        self.assertEqual(old_client, driver.client)
        self.assertEqual(old_replica_client, driver.replica_client)
        self.assertEqual(old_replica, driver.replica)
        self.assertEqual(REPLICA_BACKEND_ID, secondary_id)
        self.assertEqual(0, len(volumes_update))
    def test_failback_normal_volumes(self):
        """Failback restores a non-replicated volume's saved status.

        The 'old_status' stashed at failover time becomes the status
        again and is removed from the volume's metadata.
        """
        self.volume.status = 'error'
        self.volume.metadata = {'old_status': 'available'}

        driver = FakeISCSIStorage(configuration=self.configuration)
        driver.active_backend_id = REPLICA_BACKEND_ID
        driver.do_setup()
        old_client = driver.client
        old_replica_client = driver.replica_client
        old_replica = driver.replica
        secondary_id, volumes_update = driver.failover_host(
            None, [self.volume], 'default')
        self.assertIn(driver.active_backend_id, ('', None))
        self.assertEqual(old_client, driver.replica_client)
        self.assertEqual(old_replica_client, driver.client)
        self.assertNotEqual(old_replica, driver.replica)
        self.assertEqual('default', secondary_id)
        self.assertEqual(1, len(volumes_update))
        v_id = volumes_update[0]['volume_id']
        v_update = volumes_update[0]['updates']
        self.assertEqual(self.volume.id, v_id)
        self.assertEqual('available', v_update['status'])
        self.assertNotIn('old_status', v_update['metadata'])
    def test_failover_replica_volumes(self):
        """Failover of a replicated volume swaps location and driver data.

        provider_location becomes the remote LUN id and the driver data's
        rmt_lun_id becomes the old local LUN id.
        """
        driver = FakeISCSIStorage(configuration=self.configuration)
        driver.do_setup()
        old_client = driver.client
        old_replica_client = driver.replica_client
        old_replica = driver.replica
        self.mock_object(replication.ReplicaCommonDriver, 'failover')
        self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                         return_value={'replication_enabled': 'true'})
        secondary_id, volumes_update = driver.failover_host(
            None, [self.replica_volume], REPLICA_BACKEND_ID)
        self.assertEqual(REPLICA_BACKEND_ID, driver.active_backend_id)
        self.assertEqual(old_client, driver.replica_client)
        self.assertEqual(old_replica_client, driver.client)
        self.assertNotEqual(old_replica, driver.replica)
        self.assertEqual(REPLICA_BACKEND_ID, secondary_id)
        self.assertEqual(1, len(volumes_update))
        v_id = volumes_update[0]['volume_id']
        v_update = volumes_update[0]['updates']
        self.assertEqual(self.replica_volume.id, v_id)
        self.assertEqual('1', v_update['provider_location'])
        self.assertEqual('failed-over', v_update['replication_status'])
        new_drv_data = {'pair_id': TEST_PAIR_ID,
                        'rmt_lun_id': self.replica_volume.provider_location}
        new_drv_data = replication.to_string(new_drv_data)
        self.assertEqual(new_drv_data, v_update['replication_driver_data'])
@ddt.data({}, {'pair_id': TEST_PAIR_ID})
def test_failover_replica_volumes_invalid_drv_data(self, mock_drv_data):
volume = self.replica_volume
volume['replication_driver_data'] = replication.to_string(
mock_drv_data)
driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'replication_enabled': 'true'})
secondary_id, volumes_update = driver.failover_host(
None, [volume], REPLICA_BACKEND_ID)
self.assertEqual(driver.active_backend_id, REPLICA_BACKEND_ID)
self.assertEqual(old_client, driver.replica_client)
self.assertEqual(old_replica_client, driver.client)
self.assertNotEqual(old_replica, driver.replica)
self.assertEqual(REPLICA_BACKEND_ID, secondary_id)
self.assertEqual(1, len(volumes_update))
v_id = volumes_update[0]['volume_id']
v_update = volumes_update[0]['updates']
self.assertEqual(volume.id, v_id)
self.assertEqual('error', v_update['replication_status'])
    def test_failback_replica_volumes(self):
        """Failback of a replicated volume restores 'available' status."""
        self.mock_object(replication.ReplicaCommonDriver, 'enable')
        self.mock_object(replication.ReplicaCommonDriver, 'wait_replica_ready')
        self.mock_object(replication.ReplicaCommonDriver, 'failover')
        self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                         return_value={'replication_enabled': 'true'})
        volume = self.replica_volume

        driver = FakeISCSIStorage(configuration=self.configuration)
        driver.active_backend_id = REPLICA_BACKEND_ID
        driver.do_setup()
        old_client = driver.client
        old_replica_client = driver.replica_client
        old_replica = driver.replica
        secondary_id, volumes_update = driver.failover_host(
            None, [volume], 'default')
        self.assertIn(driver.active_backend_id, ('', None))
        self.assertEqual(old_client, driver.replica_client)
        self.assertEqual(old_replica_client, driver.client)
        self.assertNotEqual(old_replica, driver.replica)
        self.assertEqual('default', secondary_id)
        self.assertEqual(1, len(volumes_update))
        v_id = volumes_update[0]['volume_id']
        v_update = volumes_update[0]['updates']
        self.assertEqual(self.replica_volume.id, v_id)
        self.assertEqual('1', v_update['provider_location'])
        self.assertEqual('available', v_update['replication_status'])
        new_drv_data = {'pair_id': TEST_PAIR_ID,
                        'rmt_lun_id': self.replica_volume.provider_location}
        new_drv_data = replication.to_string(new_drv_data)
        self.assertEqual(new_drv_data, v_update['replication_driver_data'])
    @ddt.data({}, {'pair_id': TEST_PAIR_ID})
    def test_failback_replica_volumes_invalid_drv_data(self, mock_drv_data):
        """Failback marks replicas with broken driver data as 'error'."""
        self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                         return_value={'replication_enabled': 'true'})
        volume = self.replica_volume
        volume['replication_driver_data'] = replication.to_string(
            mock_drv_data)

        driver = FakeISCSIStorage(configuration=self.configuration)
        driver.active_backend_id = REPLICA_BACKEND_ID
        driver.do_setup()
        old_client = driver.client
        old_replica_client = driver.replica_client
        old_replica = driver.replica
        secondary_id, volumes_update = driver.failover_host(
            None, [volume], 'default')
        self.assertIn(driver.active_backend_id, ('', None))
        self.assertEqual(old_client, driver.replica_client)
        self.assertEqual(old_replica_client, driver.client)
        self.assertNotEqual(old_replica, driver.replica)
        self.assertEqual('default', secondary_id)
        self.assertEqual(1, len(volumes_update))
        v_id = volumes_update[0]['volume_id']
        v_update = volumes_update[0]['updates']
        self.assertEqual(self.replica_volume.id, v_id)
        self.assertEqual('error', v_update['replication_status'])
    @unittest.skip("Skip until bug #1578986 is fixed")
    @mock.patch('oslo_service.loopingcall.FixedIntervalLoopingCall',
                new=utils.ZeroIntervalLoopingCall)
    @mock.patch.object(replication.PairOp, 'is_primary',
                       side_effect=[False, True])
    @mock.patch.object(replication.ReplicaCommonDriver, 'split')
    @mock.patch.object(replication.ReplicaCommonDriver, 'unprotect_second')
    def test_replication_driver_enable_success(self,
                                               mock_unprotect,
                                               mock_split,
                                               mock_is_primary):
        """enable() must split, consult primariness and unprotect the second."""
        replica_id = TEST_PAIR_ID
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)
        common_driver.enable(replica_id)
        self.assertTrue(mock_unprotect.called)
        self.assertTrue(mock_split.called)
        self.assertTrue(mock_is_primary.called)
    @mock.patch.object(replication.PairOp, 'is_primary', return_value=False)
    @mock.patch.object(replication.ReplicaCommonDriver, 'split')
    def test_replication_driver_failover_success(self,
                                                 mock_split,
                                                 mock_is_primary):
        """failover() of a secondary pair splits it after the primary check."""
        replica_id = TEST_PAIR_ID
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)
        common_driver.failover(replica_id)
        self.assertTrue(mock_split.called)
        self.assertTrue(mock_is_primary.called)
    @mock.patch.object(replication.PairOp, 'is_primary', return_value=True)
    def test_replication_driver_failover_fail(self, mock_is_primary):
        """failover() must raise when the local LUN is the pair's primary."""
        replica_id = TEST_PAIR_ID
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)
        self.assertRaises(
            exception.VolumeBackendAPIException,
            common_driver.failover,
            replica_id)
    @ddt.data(constants.REPLICA_SECOND_RW, constants.REPLICA_SECOND_RO)
    def test_replication_driver_protect_second(self, mock_access):
        """protect/unprotect_second run cleanly for both RW and RO access."""
        replica_id = TEST_PAIR_ID
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)
        self.mock_object(replication.ReplicaCommonDriver, 'wait_second_access')
        self.mock_object(
            replication.PairOp,
            'get_replica_info',
            return_value={'SECRESACCESS': mock_access})

        common_driver.protect_second(replica_id)
        common_driver.unprotect_second(replica_id)
    @unittest.skip("Skip until bug #1578986 is fixed")
    def test_replication_driver_sync(self):
        """sync() runs cleanly on a normal async pair, waiting or not."""
        replica_id = TEST_PAIR_ID
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)
        async_normal_status = {
            'REPLICATIONMODEL': constants.REPLICA_ASYNC_MODEL,
            'RUNNINGSTATUS': constants.REPLICA_RUNNING_STATUS_NORMAL,
            'HEALTHSTATUS': constants.REPLICA_HEALTH_STATUS_NORMAL
        }
        self.mock_object(replication.ReplicaCommonDriver, 'protect_second')
        self.mock_object(replication.PairOp, 'get_replica_info',
                         return_value=async_normal_status)
        common_driver.sync(replica_id, True)
        common_driver.sync(replica_id, False)
    def test_replication_driver_split(self):
        """split() must tolerate a backend error from the pair-split call."""
        replica_id = TEST_PAIR_ID
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)

        self.mock_object(replication.ReplicaCommonDriver, 'wait_expect_state')
        self.mock_object(
            replication.PairOp, 'split',
            side_effect=exception.VolumeBackendAPIException(data='err'))
        common_driver.split(replica_id)
    @mock.patch.object(replication.PairOp, 'split')
    @ddt.data(constants.REPLICA_RUNNING_STATUS_SPLIT,
              constants.REPLICA_RUNNING_STATUS_INVALID,
              constants.REPLICA_RUNNING_STATUS_ERRUPTED)
    def test_replication_driver_split_already_disabled(self, mock_status,
                                                       mock_op_split):
        """split() is a no-op when the pair is already split/invalid/errupted."""
        replica_id = TEST_PAIR_ID
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)

        pair_info = json.loads(FAKE_GET_PAIR_NORMAL_RESPONSE)['data']
        pair_info['RUNNINGSTATUS'] = mock_status
        self.mock_object(rest_client.RestClient, 'get_pair_by_id',
                         return_value=pair_info)
        common_driver.split(replica_id)
        self.assertFalse(mock_op_split.called)
    def test_replication_base_op(self):
        """Smoke-test AbsReplicaOp: every base method must be callable."""
        replica_id = '1'
        op = replication.AbsReplicaOp(None)
        op.create()
        op.delete(replica_id)
        op.protect_second(replica_id)
        op.unprotect_second(replica_id)
        op.sync(replica_id)
        op.split(replica_id)
        op.switch(replica_id)
        op.is_primary({})
        op.get_replica_info(replica_id)
        op._is_status(None, {'key': 'volue'}, None)
    @mock.patch.object(rest_client.RestClient, 'call',
                       return_value={"error": {"code": 0}})
    def test_get_tgt_port_group_no_portg_exist(self, mock_call):
        """get_tgt_port_group returns None when no port group matches."""
        portg = self.driver.client.get_tgt_port_group('test_portg')
        self.assertIsNone(portg)
    def test_get_tgt_iqn_from_rest_match(self):
        """The target IQN whose text contains the portal IP is returned."""
        match_res = {
            'data': [{
                'TYPE': 249,
                'ID': '0+iqn.2006-08.com: 210048cee9d: 111.111.111.19,t,0x01'
            }, {
                'TYPE': 249,
                'ID': '0+iqn.2006-08.com: 210048cee9d: 111.111.111.191,t,0x01'
            }],
            'error': {
                'code': 0
            }
        }
        ip = '111.111.111.19'
        expected_iqn = 'iqn.2006-08.com: 210048cee9d: 111.111.111.19'
        self.mock_object(rest_client.RestClient, 'call',
                         return_value=match_res)
        iqn = self.driver.client._get_tgt_iqn_from_rest(ip)
        self.assertEqual(expected_iqn, iqn)
    def test_get_tgt_iqn_from_rest_mismatch(self):
        """No IQN is returned when no target entry matches the portal IP."""
        match_res = {
            'data': [{
                'TYPE': 249,
                'ID': '0+iqn.2006-08.com: 210048cee9d: 192.0.2.191,t,0x01'
            }, {
                'TYPE': 249,
                'ID': '0+iqn.2006-08.com: 210048cee9d: 192.0.2.192,t,0x01'
            }],
            'error': {
                'code': 0
            }
        }
        ip = '192.0.2.19'
        self.mock_object(rest_client.RestClient, 'call',
                         return_value=match_res)
        iqn = self.driver.client._get_tgt_iqn_from_rest(ip)
        self.assertIsNone(iqn)
    def test_create_cgsnapshot(self):
        """create_cgsnapshot reports all member snapshots as available."""
        test_snapshots = [self.snapshot]
        ctxt = context.get_admin_context()
        model, snapshots = self.driver.create_cgsnapshot(ctxt,
                                                         self.cgsnapshot,
                                                         test_snapshots)
        snapshots_model_update = [{'id': '21ec7341-9256-497b-97d9'
                                   '-ef48edcf0635',
                                   'status': 'available',
                                   'provider_location': 11}]
        self.assertEqual(snapshots_model_update, snapshots)
        self.assertEqual('available', model['status'])
    def test_create_cgsnapshot_create_snapshot_fail(self):
        """A backend failure creating a member snapshot aborts the cgsnap."""
        test_snapshots = [self.snapshot]
        ctxt = context.get_admin_context()
        self.mock_object(rest_client.RestClient, 'create_snapshot',
                         side_effect=(
                             exception.VolumeBackendAPIException(data='err')))
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_cgsnapshot,
                          ctxt,
                          self.cgsnapshot,
                          test_snapshots)
    def test_create_cgsnapshot_active_snapshot_fail(self):
        """A backend failure activating the snapshots aborts the cgsnap."""
        test_snapshots = [self.snapshot]
        ctxt = context.get_admin_context()
        self.mock_object(rest_client.RestClient, 'activate_snapshot',
                         side_effect=(
                             exception.VolumeBackendAPIException(data='err')))
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_cgsnapshot,
                          ctxt,
                          self.cgsnapshot,
                          test_snapshots)
    def test_delete_cgsnapshot(self):
        """delete_cgsnapshot must complete without raising."""
        test_snapshots = [self.snapshot]
        ctxt = context.get_admin_context()
        self.driver.delete_cgsnapshot(ctxt,
                                      self.cgsnapshot,
                                      test_snapshots)
class FCSanLookupService(object):
    """Fake FC SAN lookup service returning a canned fabric mapping."""

    def get_device_mapping_from_network(self, initiator_list,
                                        target_list):
        """Return the module-level fake fabric mapping, ignoring inputs."""
        return fake_fabric_mapping
@ddt.ddt
class HuaweiFCDriverTestCase(HuaweiTestBase):
    def setUp(self):
        """Build a fake FC driver from the canned Huawei conf and log in."""
        super(HuaweiFCDriverTestCase, self).setUp()
        self.configuration = mock.Mock(spec=conf.Configuration)
        self.flags(rpc_backend='oslo_messaging._drivers.impl_fake')
        self.huawei_conf = FakeHuaweiConf(self.configuration, 'FC')
        self.configuration.hypermetro_devices = hypermetro_devices
        driver = FakeFCStorage(configuration=self.configuration)
        self.driver = driver
        self.driver.do_setup()
        self.driver.client.login()
def test_login_success(self):
device_id = self.driver.client.login()
self.assertEqual('210235G7J20000000000', device_id)
def test_create_volume_success(self):
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    def test_delete_volume_success(self, pool_data):
        """delete_volume must succeed for both pool capability reports."""
        self.driver.support_func = pool_data
        self.driver.delete_volume(self.volume)
    def test_delete_snapshot_success(self):
        """delete_snapshot must complete without raising."""
        self.driver.delete_snapshot(self.snapshot)
    @unittest.skip("Skip until bug #1578986 is fixed")
    def test_create_volume_from_snapsuccess(self):
        """create_volume_from_snapshot reports the new LUN id.

        NOTE(review): ``self.volume`` is passed as both arguments, where
        the second should presumably be a snapshot — confirm when the
        skip is lifted.
        """
        lun_info = self.driver.create_volume_from_snapshot(self.volume,
                                                           self.volume)
        self.assertEqual('1', lun_info['provider_location'])
    @mock.patch.object(huawei_driver.HuaweiFCDriver,
                       'initialize_connection',
                       return_value={"data": {'target_lun': 1}})
    def test_initialize_connection_snapshot_success(self, mock_fc_init):
        """Snapshot attach delegates to initialize_connection.

        The snapshot is wrapped in a snapshot-typed (lun_type '27')
        Volume object carrying the same id and provider_location.
        """
        iscsi_properties = self.driver.initialize_connection_snapshot(
            self.snapshot, FakeConnector)
        volume = Volume(id=self.snapshot.id,
                        provider_location=self.snapshot.provider_location,
                        lun_type='27',
                        metadata=None)
        self.assertEqual(1, iscsi_properties['data']['target_lun'])
        mock_fc_init.assert_called_with(volume, FakeConnector)
def test_initialize_connection_success(self):
iscsi_properties = self.driver.initialize_connection(self.volume,
FakeConnector)
self.assertEqual(1, iscsi_properties['data']['target_lun'])
def test_initialize_connection_fail_no_online_wwns_in_host(self):
self.mock_object(rest_client.RestClient, 'get_online_free_wwns',
return_value=[])
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.volume, FakeConnector)
def test_initialize_connection_no_local_ini_tgt_map(self):
self.mock_object(rest_client.RestClient, 'get_init_targ_map',
return_value=('', ''))
self.mock_object(huawei_driver.HuaweiFCDriver, '_get_same_hostid',
return_value='')
self.mock_object(rest_client.RestClient, 'change_hostlun_id',
return_value=None)
self.mock_object(rest_client.RestClient, 'do_mapping',
return_value={'lun_id': '1',
'view_id': '1',
'aval_luns': '[1]'})
self.driver.initialize_connection(self.hyper_volume, FakeConnector)
def test_hypermetro_connection_success(self):
self.mock_object(rest_client.RestClient, 'find_array_version',
return_value='V300R003C00')
fc_properties = self.driver.initialize_connection(self.hyper_volume,
FakeConnector)
self.assertEqual(1, fc_properties['data']['target_lun'])
@mock.patch.object(huawei_driver.HuaweiFCDriver,
'terminate_connection')
def test_terminate_connection_snapshot_success(self, mock_fc_term):
self.driver.terminate_connection_snapshot(self.snapshot,
FakeConnector)
volume = Volume(id=self.snapshot.id,
provider_location=self.snapshot.provider_location,
lun_type='27',
metadata=None)
mock_fc_term.assert_called_with(volume, FakeConnector)
def test_terminate_connection_success(self):
self.driver.client.terminateFlag = True
self.driver.terminate_connection(self.volume, FakeConnector)
self.assertTrue(self.driver.client.terminateFlag)
def test_terminate_connection_portgroup_associated(self):
self.mock_object(rest_client.RestClient,
'is_portgroup_associated_to_view',
return_value=True)
self.mock_object(huawei_driver.HuaweiFCDriver,
'_delete_zone_and_remove_fc_initiators',
return_value=({}, 1))
self.driver.terminate_connection(self.volume, FakeConnector)
def test_terminate_connection_fc_initiators_exist_in_host(self):
self.mock_object(rest_client.RestClient,
'check_fc_initiators_exist_in_host',
return_value=True)
self.driver.terminate_connection(self.volume, FakeConnector)
def test_terminate_connection_hypermetro_in_metadata(self):
self.driver.terminate_connection(self.hyper_volume, FakeConnector)
def test_get_volume_status(self):
remote_device_info = {"ARRAYTYPE": "1",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "10"}
self.mock_object(
replication.ReplicaPairManager,
'get_remote_device_by_wwn',
return_value=remote_device_info)
data = self.driver.get_volume_stats()
self.assertEqual(self.driver.VERSION, data['driver_version'])
self.assertTrue(data['pools'][0]['replication_enabled'])
self.assertListEqual(['sync', 'async'],
data['pools'][0]['replication_type'])
self.mock_object(
replication.ReplicaPairManager,
'get_remote_device_by_wwn',
return_value={})
data = self.driver.get_volume_stats()
self.assertNotIn('replication_enabled', data['pools'][0])
self.mock_object(
replication.ReplicaPairManager,
'try_get_remote_wwn',
return_value={})
data = self.driver.get_volume_stats()
self.assertEqual(self.driver.VERSION, data['driver_version'])
self.assertNotIn('replication_enabled', data['pools'][0])
@ddt.data({'TIER0CAPACITY': '100',
'TIER1CAPACITY': '0',
'TIER2CAPACITY': '0',
'disktype': 'ssd'},
{'TIER0CAPACITY': '0',
'TIER1CAPACITY': '100',
'TIER2CAPACITY': '0',
'disktype': 'sas'},
{'TIER0CAPACITY': '0',
'TIER1CAPACITY': '0',
'TIER2CAPACITY': '100',
'disktype': 'nl_sas'},
{'TIER0CAPACITY': '100',
'TIER1CAPACITY': '100',
'TIER2CAPACITY': '100',
'disktype': 'mix'},
{'TIER0CAPACITY': '0',
'TIER1CAPACITY': '0',
'TIER2CAPACITY': '0',
'disktype': ''})
def test_get_volume_disk_type(self, disk_type_value):
response_dict = json.loads(FAKE_STORAGE_POOL_RESPONSE)
storage_pool_sas = copy.deepcopy(response_dict)
storage_pool_sas['data'][0]['TIER0CAPACITY'] = (
disk_type_value['TIER0CAPACITY'])
storage_pool_sas['data'][0]['TIER1CAPACITY'] = (
disk_type_value['TIER1CAPACITY'])
storage_pool_sas['data'][0]['TIER2CAPACITY'] = (
disk_type_value['TIER2CAPACITY'])
driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
driver.replica = None
self.mock_object(rest_client.RestClient, 'get_all_pools',
return_value=storage_pool_sas['data'])
data = driver.get_volume_stats()
if disk_type_value['disktype']:
self.assertEqual(disk_type_value['disktype'],
data['pools'][0]['disk_type'])
else:
self.assertIsNone(data['pools'][0].get('disk_type'))
def test_get_disk_type_pool_info_none(self):
driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
driver.replica = None
self.mock_object(rest_client.RestClient, 'get_pool_info',
return_value=None)
data = driver.get_volume_stats()
self.assertIsNone(data['pools'][0].get('disk_type'))
def test_extend_volume(self):
self.driver.extend_volume(self.volume, 3)
def test_login_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.client.login)
def test_create_snapshot_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_snapshot, self.snapshot)
def test_create_volume_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
def test_delete_volume_fail(self):
self.driver.client.test_fail = True
self.driver.delete_volume(self.volume)
def test_delete_snapshot_fail(self):
self.driver.client.test_fail = True
self.driver.delete_snapshot(self.snapshot)
def test_initialize_connection_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.volume, FakeConnector)
def test_lun_is_associated_to_lungroup(self):
self.driver.client.associate_lun_to_lungroup('11', '11')
result = self.driver.client._is_lun_associated_to_lungroup('11',
'11')
self.assertTrue(result)
def test_lun_is_not_associated_to_lun_group(self):
self.driver.client.associate_lun_to_lungroup('12', '12')
self.driver.client.remove_lun_from_lungroup('12', '12')
result = self.driver.client._is_lun_associated_to_lungroup('12',
'12')
self.assertFalse(result)
@unittest.skip("Skip until bug #1578986 is fixed")
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client, 'RestClient')
def test_migrate_volume_success(self, mock_add_lun_to_partition,
pool_data):
# Migrate volume without new type.
empty_dict = {}
self.driver.support_func = pool_data
moved, model_update = self.driver.migrate_volume(None,
self.volume,
test_host,
None)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
# Migrate volume with new type.
empty_dict = {}
new_type = {'extra_specs':
{'smarttier': '<is> true',
'smartcache': '<is> true',
'smartpartition': '<is> true',
'thin_provisioning_support': '<is> true',
'thick_provisioning_support': '<is> False',
'policy': '2',
'smartcache:cachename': 'cache-test',
'smartpartition:partitionname': 'partition-test'}}
moved, model_update = self.driver.migrate_volume(None,
self.volume,
test_host,
new_type)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
def test_migrate_volume_fail(self):
self.driver.client.test_fail = True
# Migrate volume without new type.
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.migrate_volume, None,
self.volume, test_host, None)
# Migrate volume with new type.
new_type = {'extra_specs':
{'smarttier': '<is> true',
'smartcache': '<is> true',
'thin_provisioning_support': '<is> true',
'thick_provisioning_support': '<is> False',
'policy': '2',
'smartcache:cachename': 'cache-test',
'partitionname': 'partition-test'}}
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.migrate_volume, None,
self.volume, test_host, new_type)
def test_check_migration_valid(self):
is_valid = self.driver._check_migration_valid(test_host,
self.volume)
self.assertTrue(is_valid)
# No pool_name in capabilities.
invalid_host1 = {'host': 'ubuntu001@backend002#OpenStack_Pool',
'capabilities':
{'location_info': '210235G7J20000000000',
'allocated_capacity_gb': 0,
'volume_backend_name': 'HuaweiFCDriver',
'storage_protocol': 'FC'}}
is_valid = self.driver._check_migration_valid(invalid_host1,
self.volume)
self.assertFalse(is_valid)
# location_info in capabilities is not matched.
invalid_host2 = {'host': 'ubuntu001@backend002#OpenStack_Pool',
'capabilities':
{'location_info': '210235G7J20000000001',
'allocated_capacity_gb': 0,
'pool_name': 'OpenStack_Pool',
'volume_backend_name': 'HuaweiFCDriver',
'storage_protocol': 'FC'}}
is_valid = self.driver._check_migration_valid(invalid_host2,
self.volume)
self.assertFalse(is_valid)
# storage_protocol is not match current protocol and volume status is
# 'in-use'.
volume_in_use = {'name': 'volume-21ec7341-9256-497b-97d9-ef48edcf0635',
'size': 2,
'volume_name': 'vol1',
'id': ID,
'volume_id': '21ec7341-9256-497b-97d9-ef48edcf0635',
'volume_attachment': 'in-use',
'provider_location': '11'}
invalid_host2 = {'host': 'ubuntu001@backend002#OpenStack_Pool',
'capabilities':
{'location_info': '210235G7J20000000001',
'allocated_capacity_gb': 0,
'pool_name': 'OpenStack_Pool',
'volume_backend_name': 'HuaweiFCDriver',
'storage_protocol': 'iSCSI'}}
is_valid = self.driver._check_migration_valid(invalid_host2,
volume_in_use)
self.assertFalse(is_valid)
# pool_name is empty.
invalid_host3 = {'host': 'ubuntu001@backend002#OpenStack_Pool',
'capabilities':
{'location_info': '210235G7J20000000001',
'allocated_capacity_gb': 0,
'pool_name': '',
'volume_backend_name': 'HuaweiFCDriver',
'storage_protocol': 'iSCSI'}}
is_valid = self.driver._check_migration_valid(invalid_host3,
self.volume)
self.assertFalse(is_valid)
@mock.patch.object(rest_client.RestClient, 'rename_lun')
def test_update_migrated_volume_success(self, mock_rename_lun):
model_update = self.driver.update_migrated_volume(None,
self.original_volume,
self.current_volume,
'available')
self.assertEqual({'_name_id': None}, model_update)
@mock.patch.object(rest_client.RestClient, 'rename_lun')
def test_update_migrated_volume_fail(self, mock_rename_lun):
mock_rename_lun.side_effect = exception.VolumeBackendAPIException(
data='Error occurred.')
model_update = self.driver.update_migrated_volume(None,
self.original_volume,
self.current_volume,
'available')
self.assertEqual(self.current_volume.name_id,
model_update['_name_id'])
@mock.patch.object(rest_client.RestClient, 'add_lun_to_partition')
def test_retype_volume_success(self, mock_add_lun_to_partition):
self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
retype = self.driver.retype(None, self.volume,
test_new_type, None, test_host)
self.assertTrue(retype)
@unittest.skip("Skip until bug #1578986 is fixed")
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client, 'RestClient')
@mock.patch.object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
def test_retype_replication_volume_success(self, mock_get_type,
mock_add_lun_to_partition,
pool_data):
self.driver.support_func = pool_data
retype = self.driver.retype(None, self.volume,
test_new_replication_type, None, test_host)
self.assertTrue(retype)
@ddt.data(
[
replication.ReplicaPairManager,
'create_replica',
exception.VolumeBackendAPIException(
data='Can\'t support smarttier on the array.'),
FAKE_POOLS_UNSUPPORT_REPORT
],
[
replication.ReplicaPairManager,
'create_replica',
exception.VolumeBackendAPIException(
data='Can\'t support smarttier on the array.'),
FAKE_POOLS_SUPPORT_REPORT
],
[
replication.ReplicaPairManager,
'delete_replica',
exception.VolumeBackendAPIException(
data='Can\'t support smarttier on the array.'),
FAKE_POOLS_SUPPORT_REPORT
],
[
replication.ReplicaPairManager,
'delete_replica',
exception.VolumeBackendAPIException(
data='Can\'t support smarttier on the array.'),
FAKE_POOLS_UNSUPPORT_REPORT
],
)
@ddt.unpack
def test_retype_replication_volume_fail(self,
mock_module,
mock_func,
side_effect,
pool_data):
self.driver.support_func = pool_data
self.mock_object(mock_module, mock_func, side_effect=side_effect)
self.mock_object(rest_client.RestClient, 'add_lun_to_partition')
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
retype = self.driver.retype(None, self.volume,
test_new_replication_type, None, test_host)
self.assertFalse(retype)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_retype_volume_cache_fail(self, pool_data):
self.driver.client.cache_not_exist = True
self.driver.support_func = pool_data
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.retype, None,
self.volume, test_new_type, None, test_host)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_retype_volume_partition_fail(self, pool_data):
self.driver.support_func = pool_data
self.driver.client.partition_not_exist = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.retype, None,
self.volume, test_new_type, None, test_host)
@mock.patch.object(rest_client.RestClient, 'add_lun_to_partition')
def test_retype_volume_fail(self, mock_add_lun_to_partition):
self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
mock_add_lun_to_partition.side_effect = (
exception.VolumeBackendAPIException(data='Error occurred.'))
retype = self.driver.retype(None, self.volume,
test_new_type, None, test_host)
self.assertFalse(retype)
@mock.patch.object(rest_client.RestClient, 'get_all_engines',
return_value=[{'NODELIST': '["0A","0B"]', 'ID': '0'}])
def test_build_ini_targ_map_engie_recorded(self, mock_engines):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
(tgt_wwns, portg_id, init_targ_map) = zone_helper.build_ini_targ_map(
['10000090fa0d6754'], '1', '11')
target_port_wwns = ['2000643e8c4c5f66']
self.assertEqual(target_port_wwns, tgt_wwns)
self.assertEqual({}, init_targ_map)
@ddt.data(fake_fabric_mapping_no_ports, fake_fabric_mapping_no_wwn)
def test_filter_by_fabric_fail(self, ddt_map):
self.mock_object(
FCSanLookupService, 'get_device_mapping_from_network',
return_value=ddt_map)
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
self.assertRaises(exception.VolumeBackendAPIException,
zone_helper._filter_by_fabric, ['10000090fa0d6754'],
None)
@mock.patch.object(rest_client.RestClient, 'get_all_engines',
return_value=[{'NODELIST': '["0A"]', 'ID': '0'},
{'NODELIST': '["0B"]', 'ID': '1'}])
@mock.patch.object(fc_zone_helper.FCZoneHelper, '_build_contr_port_map',
return_value={'0B': ['2000643e8c4c5f67']})
def test_build_ini_targ_map_engie_not_recorded(self, mock_engines, map):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
(tgt_wwns, portg_id, init_targ_map) = zone_helper.build_ini_targ_map(
['10000090fa0d6754'], '1', '11')
expected_wwns = ['2000643e8c4c5f67', '2000643e8c4c5f66']
expected_map = {'10000090fa0d6754': expected_wwns}
self.assertEqual(expected_wwns, tgt_wwns)
self.assertEqual(expected_map, init_targ_map)
@mock.patch.object(rest_client.RestClient, 'get_all_engines',
return_value=[{'NODELIST': '["0A", "0B"]', 'ID': '0'}])
def test_build_ini_targ_map_no_map(self, mock_engines):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
# Host with id '5' has no map on the array.
(tgt_wwns, portg_id, init_targ_map) = zone_helper.build_ini_targ_map(
['10000090fa0d6754'], '5', '11')
expected_wwns = ['2000643e8c4c5f66']
expected_map = {'10000090fa0d6754': ['2000643e8c4c5f66']}
self.assertEqual(expected_wwns, tgt_wwns)
self.assertEqual(expected_map, init_targ_map)
@mock.patch.object(rest_client.RestClient, 'get_all_engines',
return_value=[{'NODELIST': '["0A", "0B"]', 'ID': '0'}])
@mock.patch.object(rest_client.RestClient, 'get_tgt_port_group',
return_value='0')
@mock.patch.object(rest_client.RestClient, 'delete_portgroup')
def test_build_ini_targ_map_exist_portg(self, delete, engines, portg):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
# Host with id '5' has no map on the array.
(tgt_wwns, portg_id, init_targ_map) = zone_helper.build_ini_targ_map(
['10000090fa0d6754'], '5', '11')
expected_wwns = ['2000643e8c4c5f66']
expected_map = {'10000090fa0d6754': ['2000643e8c4c5f66']}
self.assertEqual(expected_wwns, tgt_wwns)
self.assertEqual(expected_map, init_targ_map)
self.assertEqual(1, delete.call_count)
def test_get_init_targ_map(self):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
(tgt_wwns, portg_id, init_targ_map) = zone_helper.get_init_targ_map(
['10000090fa0d6754'], '1')
expected_wwns = ['2000643e8c4c5f66']
expected_map = {'10000090fa0d6754': ['2000643e8c4c5f66']}
self.assertEqual(expected_wwns, tgt_wwns)
self.assertEqual(expected_map, init_targ_map)
def test_get_init_targ_map_no_host(self):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
ret = zone_helper.get_init_targ_map(
['10000090fa0d6754'], None)
expected_ret = ([], None, {})
self.assertEqual(expected_ret, ret)
def test_multi_resturls_success(self):
self.driver.client.test_multi_url_flag = True
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
def test_get_id_from_result(self):
result = {}
name = 'test_name'
key = 'NAME'
re = self.driver.client._get_id_from_result(result, name, key)
self.assertIsNone(re)
result = {'data': {}}
re = self.driver.client._get_id_from_result(result, name, key)
self.assertIsNone(re)
result = {'data': [{'COUNT': 1, 'ID': '1'},
{'COUNT': 2, 'ID': '2'}]}
re = self.driver.client._get_id_from_result(result, name, key)
self.assertIsNone(re)
result = {'data': [{'NAME': 'test_name1', 'ID': '1'},
{'NAME': 'test_name2', 'ID': '2'}]}
re = self.driver.client._get_id_from_result(result, name, key)
self.assertIsNone(re)
result = {'data': [{'NAME': 'test_name', 'ID': '1'},
{'NAME': 'test_name2', 'ID': '2'}]}
re = self.driver.client._get_id_from_result(result, name, key)
self.assertEqual('1', re)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value={'ID': 1,
'CAPACITY': 110362624,
'TOTALCAPACITY': 209715200})
def test_get_capacity(self, mock_get_pool_info):
expected_pool_capacity = {'total_capacity': 100.0,
'free_capacity': 52.625}
pool_capacity = self.driver.client._get_capacity(None,
None)
self.assertEqual(expected_pool_capacity, pool_capacity)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value=fake_hypermetro_opts)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
return_value=True)
@mock.patch.object(hypermetro.HuaweiHyperMetro,
'_create_hypermetro_pair',
return_value={"ID": '11',
"NAME": 'hypermetro-pair'})
@mock.patch.object(rest_client.RestClient, 'logout',
return_value=None)
def test_create_hypermetro_success(self, mock_hypermetro_opts,
mock_login_return,
mock_all_pool_info,
mock_pool_info,
mock_hyper_domain,
mock_volume_ready,
mock_logout):
metadata = {"hypermetro_id": '11',
"remote_lun_id": '1'}
lun_info = self.driver.create_volume(self.hyper_volume)
self.assertEqual(metadata, lun_info['metadata'])
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value=fake_hypermetro_opts)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
return_value=True)
@mock.patch.object(rest_client.RestClient, 'create_hypermetro')
def test_create_hypermetro_fail(self,
pool_data,
mock_pair_info,
mock_hypermetro_opts,
mock_all_pool_info,
mock_pool_info,
mock_hyper_domain,
mock_volume_ready
):
self.driver.support_func = pool_data
mock_pair_info.side_effect = (
exception.VolumeBackendAPIException(data='Error occurred.'))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.metro.create_hypermetro, "11", {})
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '1'})
@mock.patch.object(rest_client.RestClient, 'do_mapping',
return_value={'lun_id': '1',
'view_id': '1',
'aval_luns': '[1]'})
def test_hypermetro_connection_success_2(self, mock_map, mock_metadata):
fc_properties = self.driver.metro.connect_volume_fc(self.volume,
FakeConnector)
self.assertEqual(1, fc_properties['data']['target_lun'])
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '1'})
def test_terminate_hypermetro_connection_success(self, mock_metradata):
self.driver.metro.disconnect_volume_fc(self.volume, FakeConnector)
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': None})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value=None)
def test_hypermetroid_none_fail(self, mock_metadata, moke_metro_name):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.metro.connect_volume_fc,
self.volume,
FakeConnector)
@unittest.skip("Skip until bug #1578986 is fixed")
def test_wait_volume_ready_success(self):
flag = self.driver.metro._wait_volume_ready("11")
self.assertIsNone(flag)
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '1'})
@mock.patch.object(rest_client.RestClient, 'get_online_free_wwns',
return_value=[])
@mock.patch.object(rest_client.RestClient, 'get_host_iscsi_initiators',
return_value=[])
def test_hypermetro_connection_fail(self, mock_metadata,
mock_fc_initiator,
mock_host_initiators):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.metro.connect_volume_fc,
self.volume,
FakeConnector)
def test_create_snapshot_fail_hypermetro(self):
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': replica_hypermetro_specs})
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume_from_snapshot,
self.volume, self.snapshot)
def test_create_snapshot_fail_no_snapshot_id(self):
self.snapshot.provider_location = None
self.mock_object(rest_client.RestClient, 'get_snapshot_id_by_name',
return_value=None)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume_from_snapshot,
self.volume, self.snapshot)
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"data": [{"RUNNINGSTATUS": "27",
"ID": '1'},
{"RUNNINGSTATUS": "26",
"ID": '2'}],
"error": {"code": 0}})
def test_get_online_free_wwns(self, mock_call):
wwns = self.driver.client.get_online_free_wwns()
self.assertEqual(['1'], wwns)
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"data": {"ID": 1}, "error": {"code": 0}})
def test_rename_lun(self, mock_call):
des = 'This LUN is renamed.'
new_name = 'test_name'
self.driver.client.rename_lun('1', new_name, des)
self.assertEqual(1, mock_call.call_count)
url = "/lun/1"
data = {"NAME": new_name, "DESCRIPTION": des}
mock_call.assert_called_once_with(url, data, "PUT")
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"data": {}})
def test_is_host_associated_to_hostgroup_no_data(self, mock_call):
res = self.driver.client.is_host_associated_to_hostgroup('1')
self.assertFalse(res)
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"data": {'ISADD2HOSTGROUP': 'true'}})
def test_is_host_associated_to_hostgroup_true(self, mock_call):
res = self.driver.client.is_host_associated_to_hostgroup('1')
self.assertTrue(res)
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"data": {'ISADD2HOSTGROUP': 'false'}})
def test_is_host_associated_to_hostgroup_false(self, mock_call):
res = self.driver.client.is_host_associated_to_hostgroup('1')
self.assertFalse(res)
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "true"})
def test_create_hypermetro_consistencygroup_success(self, mock_grouptype):
"""Test that create_consistencygroup return successfully."""
ctxt = context.get_admin_context()
# Create consistency group
model_update = self.driver.create_consistencygroup(ctxt, self.cg)
self.assertEqual('available',
model_update['status'],
"Consistency Group created failed")
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "false"})
def test_create_normal_consistencygroup_success(self,
mock_grouptype):
"""Test that create_consistencygroup return successfully."""
ctxt = context.get_admin_context()
# Create consistency group
model_update = self.driver.create_consistencygroup(ctxt, self.cg)
self.assertEqual('available',
model_update['status'],
"Consistency Group created failed")
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "true"})
def test_delete_hypermetro_consistencygroup_success(self, mock_grouptype):
"""Test that create_consistencygroup return successfully."""
test_volumes = [self.volume]
ctxt = context.get_admin_context()
# Create consistency group
model, volumes = self.driver.delete_consistencygroup(ctxt,
self.cg,
test_volumes)
self.assertEqual('available',
model['status'],
"Consistency Group created failed")
def test_delete_normal_consistencygroup_success(self):
ctxt = context.get_admin_context()
test_volumes = [self.volume]
self.mock_object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "false"})
model, volumes = self.driver.delete_consistencygroup(ctxt,
self.cg,
test_volumes)
self.assertEqual('available',
model['status'],
"Consistency Group created failed")
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "true"})
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '59'})
def test_update_consistencygroup_success(self,
mock_grouptype,
mock_metadata):
"""Test that create_consistencygroup return successfully."""
ctxt = context.get_admin_context()
add_volumes = [self.volume]
remove_volumes = [self.volume]
# Create consistency group
model_update = self.driver.update_consistencygroup(ctxt,
self.cg,
add_volumes,
remove_volumes)
self.assertEqual('available',
model_update[0]['status'],
"Consistency Group update failed")
def test_create_hypermetro_consistencygroup_success_2(self):
ctxt = context.get_admin_context()
# Create consistency group
temp_cg = copy.deepcopy(self.cg)
temp_cg['volume_type_id'] = '550c089b-bfdd-4f7f-86e1-3ba88125555c,'
self.mock_object(volume_types, 'get_volume_type',
return_value=test_hypermetro_type)
model_update = self.driver.create_consistencygroup(ctxt, temp_cg)
self.assertEqual('available',
model_update['status'],
"Consistency Group created failed")
def test_is_initiator_associated_to_host_raise(self):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.client.is_initiator_associated_to_host,
'ini-2', '1')
def test_is_initiator_associated_to_host_true(self):
ret = self.driver.client.is_initiator_associated_to_host('ini-1', '1')
self.assertFalse(ret)
ret = self.driver.client.is_initiator_associated_to_host('ini-2', '2')
self.assertTrue(ret)
class HuaweiConfTestCase(test.TestCase):
    """Tests for huawei_conf.HuaweiConf, the driver's XML config parser.

    A fake cinder_huawei_conf.xml is generated on demand in a temporary
    directory and handed to HuaweiConf through a mocked Configuration.
    """

    def setUp(self):
        super(HuaweiConfTestCase, self).setUp()
        # NOTE(review): the temp dir is never removed after the test run;
        # consider self.addCleanup(shutil.rmtree, self.tmp_dir) -- needs
        # a shutil import at module level.
        self.tmp_dir = tempfile.mkdtemp()
        self.fake_xml_file = self.tmp_dir + '/cinder_huawei_conf.xml'
        self.conf = mock.Mock()
        self.conf.cinder_huawei_conf_file = self.fake_xml_file
        self.huawei_conf = huawei_conf.HuaweiConf(self.conf)

    def _create_fake_conf_file(self):
        """Create a fake Config file.

        Huawei storage customize a XML configuration file, the configuration
        file is used to set the Huawei storage custom parameters, therefore,
        in the UT test we need to simulate such a configuration file.
        """
        doc = minidom.Document()

        def add_text_node(parent, tag, text):
            # Append <tag>text</tag> under parent and return the element.
            node = doc.createElement(tag)
            node.appendChild(doc.createTextNode(text))
            parent.appendChild(node)
            return node

        config = doc.createElement('config')
        doc.appendChild(config)

        # <Storage>: array endpoint, credentials, product and protocol.
        storage = doc.createElement('Storage')
        config.appendChild(storage)
        add_text_node(storage, 'RestURL',
                      'http://192.0.2.69:8082/deviceManager/rest/')
        add_text_node(storage, 'UserName', 'admin')
        add_text_node(storage, 'UserPassword', 'Admin@storage')
        add_text_node(storage, 'Product', 'V3')
        add_text_node(storage, 'Protocol', 'iSCSI')

        # <LUN>: provisioning type, wait intervals, caching knobs and pool.
        lun = doc.createElement('LUN')
        config.appendChild(lun)
        add_text_node(lun, 'LUNType', 'Thick')
        add_text_node(lun, 'LUNReadyWaitInterval', '2')
        add_text_node(lun, 'LUNcopyWaitInterval', '2')
        add_text_node(lun, 'Timeout', '43200')
        add_text_node(lun, 'WriteType', '1')
        add_text_node(lun, 'MirrorSwitch', '1')
        prefetch = doc.createElement('Prefetch')
        prefetch.setAttribute('Type', '1')
        prefetch.setAttribute('Value', '0')
        lun.appendChild(prefetch)
        add_text_node(lun, 'StoragePool', 'OpenStack_Pool')

        # <iSCSI>: default target IP plus one fully attributed initiator.
        iscsi = doc.createElement('iSCSI')
        config.appendChild(iscsi)
        add_text_node(iscsi, 'DefaultTargetIP', '192.0.2.68')
        initiator = doc.createElement('Initiator')
        initiator.setAttribute('Name', 'iqn.1993-08.debian:01:ec2bff7ac3a3')
        initiator.setAttribute('TargetIP', '192.0.2.2')
        initiator.setAttribute('CHAPinfo', 'mm-user;mm-user@storage')
        initiator.setAttribute('ALUA', '1')
        initiator.setAttribute('TargetPortGroup', 'PortGroup001')
        iscsi.appendChild(initiator)

        # Use a context manager so the handle is closed even if the write
        # raises (the original open()/write()/close() leaked the handle on
        # exception).
        with open(self.conf.cinder_huawei_conf_file, 'w') as fakefile:
            fakefile.write(doc.toprettyxml(indent=''))
| 39.524186
| 79
| 0.574789
|
import collections
import copy
import ddt
import json
import mock
import re
import tempfile
import unittest
from xml.dom import minidom
from cinder import context
from cinder import exception
from cinder import test
from cinder.tests.unit.consistencygroup import fake_cgsnapshot
from cinder.tests.unit.consistencygroup import fake_consistencygroup
from cinder.tests.unit import fake_snapshot
from cinder.tests.unit import fake_volume
from cinder.tests.unit import utils
from cinder.volume import configuration as conf
from cinder.volume.drivers.huawei import constants
from cinder.volume.drivers.huawei import fc_zone_helper
from cinder.volume.drivers.huawei import huawei_conf
from cinder.volume.drivers.huawei import huawei_driver
from cinder.volume.drivers.huawei import huawei_utils
from cinder.volume.drivers.huawei import hypermetro
from cinder.volume.drivers.huawei import replication
from cinder.volume.drivers.huawei import rest_client
from cinder.volume.drivers.huawei import smartx
from cinder.volume import qos_specs
from cinder.volume import volume_types
# Admin-privileged request context shared by the tests in this module.
admin_contex = context.get_admin_context()
# Minimal volume stand-in exposing only the attributes the driver code reads.
vol_attrs = ('id', 'lun_type', 'provider_location', 'metadata')
Volume = collections.namedtuple('Volume', vol_attrs)
# Canned identifiers reused throughout the fake REST responses below.
PROVIDER_LOCATION = '11'
HOST = 'ubuntu001@backend001#OpenStack_Pool'
ID = '21ec7341-9256-497b-97d9-ef48edcf0635'
# Array-side LUN name derived from the volume id via the driver's encoder.
ENCODE_NAME = huawei_utils.encode_name(ID)
ADMIN_METADATA = {'huawei_lun_wwn': '6643e8c1004c5f6723e9f454003'}
TEST_PAIR_ID = "3400a30d844d0004"
REPLICA_DRIVER_DATA = '{"pair_id": "%s", "rmt_lun_id": "1"}' % TEST_PAIR_ID
VOL_METADATA = [{'key': 'hypermetro_id', 'value': '11'},
                {'key': 'remote_lun_id', 'value': '1'}]
# NOTE(review): this binding is shadowed by a second ``hypermetro_devices``
# assignment further down in this module; only the later value takes effect.
hypermetro_devices = """{
"remote_device": {
"RestURL": "http://192.0.2.69:8082/deviceManager/rest",
"UserName": "admin",
"UserPassword": "Admin@storage1",
"StoragePool": "OpenStack_Pool",
"domain_name": "hypermetro-domain",
"remote_target_ip": "192.0.2.241"
}
}
"""
fake_smartx_value = {'smarttier': 'true',
'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': False,
'policy': '2',
'cachename': 'cache-test',
'partitionname': 'partition-test',
}
fake_hypermetro_opts = {'hypermetro': 'true',
'smarttier': False,
'smartcache': False,
'smartpartition': False,
'thin_provisioning_support': False,
'thick_provisioning_support': False,
}
sync_replica_specs = {'replication_enabled': '<is> True',
'replication_type': '<in> sync'}
async_replica_specs = {'replication_enabled': '<is> True',
'replication_type': '<in> async'}
replica_hypermetro_specs = {'hypermetro': '<is> True',
'replication_enabled': '<is> True'}
test_host = {'host': 'ubuntu001@backend001#OpenStack_Pool',
'capabilities': {'smartcache': True,
'location_info': '210235G7J20000000000',
'QoS_support': True,
'pool_name': 'OpenStack_Pool',
'timestamp': '2015-07-13T11:41:00.513549',
'smartpartition': True,
'allocated_capacity_gb': 0,
'volume_backend_name': 'HuaweiFCDriver',
'free_capacity_gb': 20.0,
'driver_version': '1.1.0',
'total_capacity_gb': 20.0,
'smarttier': True,
'hypermetro': True,
'reserved_percentage': 0,
'vendor_name': None,
'thick_provisioning_support': False,
'thin_provisioning_support': True,
'storage_protocol': 'FC',
}
}
test_new_type = {
'name': u'new_type',
'qos_specs_id': None,
'deleted': False,
'created_at': None,
'updated_at': None,
'extra_specs': {
'smarttier': '<is> true',
'smartcache': '<is> true',
'smartpartition': '<is> true',
'thin_provisioning_support': '<is> true',
'thick_provisioning_support': '<is> False',
'policy': '2',
'smartcache:cachename': 'cache-test',
'smartpartition:partitionname': 'partition-test',
},
'is_public': True,
'deleted_at': None,
'id': u'530a56e1-a1a4-49f3-ab6c-779a6e5d999f',
'description': None,
}
test_new_replication_type = {
'name': u'new_type',
'qos_specs_id': None,
'deleted': False,
'created_at': None,
'updated_at': None,
'extra_specs': {
'replication_enabled': '<is> True',
'replication_type': '<in> sync',
},
'is_public': True,
'deleted_at': None,
'id': u'530a56e1-a1a4-49f3-ab6c-779a6e5d999f',
'description': None,
}
test_hypermetro_type = {
'name': u'new_type',
'qos_specs_id': None,
'deleted': False,
'created_at': None,
'updated_at': None,
'extra_specs': {
'hypermetro': '<is> True'
},
'is_public': True,
'deleted_at': None,
'id': u'550c089b-bfdd-4f7f-86e1-3ba88125555c',
'description': None,
}
# Effective hypermetro remote-device config string; this re-binding
# overrides the earlier ``hypermetro_devices`` assignment above.
hypermetro_devices = """
{
"remote_device": {
"RestURL": "http://192.0.2.69:8082/deviceManager/rest",
"UserName":"admin",
"UserPassword":"Admin@storage2",
"StoragePool":"OpenStack_Pool",
"domain_name":"hypermetro_test"}
}
"""
FAKE_FIND_POOL_RESPONSE = {'CAPACITY': '985661440',
'ID': '0',
'TOTALCAPACITY': '985661440'}
FAKE_CREATE_VOLUME_RESPONSE = {"ID": "1",
"NAME": "5mFHcBv4RkCcD+JyrWc0SA",
"WWN": '6643e8c1004c5f6723e9f454003'}
FakeConnector = {'initiator': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'multipath': False,
'wwpns': ['10000090fa0d6754'],
'wwnns': ['10000090fa0d6755'],
'host': 'ubuntuc',
}
smarttier_opts = {'smarttier': 'true',
'smartpartition': False,
'smartcache': False,
'thin_provisioning_support': True,
'thick_provisioning_support': False,
'policy': '3',
'readcachepolicy': '1',
'writecachepolicy': None,
}
fake_fabric_mapping = {
'swd1': {
'target_port_wwn_list': ['2000643e8c4c5f66'],
'initiator_port_wwn_list': ['10000090fa0d6754']
}
}
fake_fabric_mapping_no_ports = {
'swd1': {
'target_port_wwn_list': [],
'initiator_port_wwn_list': ['10000090fa0d6754']
}
}
fake_fabric_mapping_no_wwn = {
'swd1': {
'target_port_wwn_list': ['2000643e8c4c5f66'],
'initiator_port_wwn_list': []
}
}
CHANGE_OPTS = {'policy': ('1', '2'),
'partitionid': (['1', 'partition001'], ['2', 'partition002']),
'cacheid': (['1', 'cache001'], ['2', 'cache002']),
'qos': (['11', {'MAXIOPS': '100', 'IOType': '1'}],
{'MAXIOPS': '100', 'IOType': '2',
'MIN': 1, 'LATENCY': 1}),
'host': ('ubuntu@huawei#OpenStack_Pool',
'ubuntu@huawei#OpenStack_Pool'),
'LUNType': ('0', '1'),
}
FAKE_CREATE_HOST_RESPONSE = """
{
"error": {
"code": 0
},
"data":{"NAME": "ubuntuc001",
"ID": "1"}
}
"""
FAKE_GET_HOST_RESPONSE = """
{
"error": {
"code": 0
},
"data":{"NAME": "ubuntuc001",
"ID": "1",
"ISADD2HOSTGROUP": "true"}
}
"""
FAKE_COMMON_SUCCESS_RESPONSE = """
{
"error": {
"code": 0,
"description": "None"
},
"data":{}
}
"""
FAKE_COMMON_FAIL_RESPONSE = """
{
"error": {
"code": 50331651,
"description": "An error occurs to the parameter."
},
"data":{}
}
"""
FAKE_GET_LOGIN_STORAGE_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"username": "admin",
"iBaseToken": "2001031430",
"deviceid": "210235G7J20000000000",
"accountstate": 2
}
}
"""
FAKE_LOGIN_OUT_STORAGE_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"ID": 11
}
}
"""
FAKE_STORAGE_POOL_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"USERFREECAPACITY": "985661440",
"ID": "0",
"NAME": "OpenStack_Pool",
"USERTOTALCAPACITY": "985661440",
"TIER0CAPACITY": "100",
"TIER1CAPACITY": "0",
"TIER2CAPACITY": "0"
}]
}
"""
FAKE_LUN_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"ID": "1",
"NAME": "5mFHcBv4RkCcD+JyrWc0SA",
"WWN": "6643e8c1004c5f6723e9f454003",
"DESCRIPTION": "21ec7341-9256-497b-97d9-ef48edcf0635",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "27",
"ALLOCTYPE": "1",
"CAPACITY": "2097152"
}
}
"""
FAKE_POOLS_UNSUPPORT_REPORT = {
'pool_name': 'StoragePool',
'location_info': '2102350BVB10F2000020',
'QoS_support': False,
'smartcache': False,
'thick_provisioning_support': False,
'splitmirror': False,
'allocated_capacity_gb': 7,
'thin_provisioning_support': True,
'free_capacity_gb': 400.0,
'smartpartition': False,
'total_capacity_gb': 400.0,
'reserved_percentage': 0,
'max_over_subscription_ratio': 20.0,
'luncopy': False
}
FAKE_POOLS_SUPPORT_REPORT = {
'pool_name': 'StoragePool',
'location_info': '2102350BVB10F2000020',
'QoS_support': True,
'smartcache': True,
'thick_provisioning_support': True,
'splitmirror': True,
'allocated_capacity_gb': 7,
'thin_provisioning_support': True,
'free_capacity_gb': 400.0,
'smartpartition': True,
'total_capacity_gb': 400.0,
'reserved_percentage': 0,
'max_over_subscription_ratio': 20.0,
'luncopy': True,
'hypermetro': True,
'consistencygroup_support': True
}
# Canned "query LUN detail" REST payload.
# NOTE(review): the JSON body lists "RUNNINGSTATUS" twice ("10" then "27");
# json.loads keeps the last occurrence, so consumers observe "27".
FAKE_LUN_GET_SUCCESS_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"ID": "11",
"IOCLASSID": "11",
"NAME": "5mFHcBv4RkCcD+JyrWc0SA",
"DESCRIPTION": "21ec7341-9256-497b-97d9-ef48edcf0635",
"RUNNINGSTATUS": "10",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "27",
"LUNLIST": "",
"ALLOCTYPE": "1",
"CAPACITY": "2097152",
"WRITEPOLICY": "1",
"MIRRORPOLICY": "0",
"PREFETCHPOLICY": "1",
"PREFETCHVALUE": "20",
"DATATRANSFERPOLICY": "1",
"READCACHEPOLICY": "2",
"WRITECACHEPOLICY": "5",
"OWNINGCONTROLLER": "0B",
"SMARTCACHEPARTITIONID": "",
"CACHEPARTITIONID": "",
"WWN": "6643e8c1004c5f6723e9f454003",
"PARENTNAME": "OpenStack_Pool"
}
}
"""
FAKE_QUERY_ALL_LUN_RESPONSE = {
"error": {
"code": 0
},
"data": [{
"ID": "1",
"NAME": ENCODE_NAME
}]
}
FAKE_LUN_ASSOCIATE_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"ID":"11"
}]
}
"""
FAKE_QUERY_LUN_GROUP_INFO_RESPONSE = """
{
"error": {
"code":0
},
"data":[{
"NAME":"OpenStack_LunGroup_1",
"DESCRIPTION":"5mFHcBv4RkCcD+JyrWc0SA",
"ID":"11",
"TYPE":256
}]
}
"""
FAKE_QUERY_LUN_GROUP_RESPONSE = """
{
"error": {
"code":0
},
"data":{
"NAME":"5mFHcBv4RkCcD+JyrWc0SA",
"DESCRIPTION":"5mFHcBv4RkCcD+JyrWc0SA",
"ID":"11",
"TYPE":256
}
}
"""
FAKE_QUERY_LUN_GROUP_ASSOCIAT_RESPONSE = """
{
"error":{
"code":0
},
"data":{
"NAME":"5mFHcBv4RkCcD+JyrWc0SA",
"DESCRIPTION":"5mFHcBv4RkCcD+JyrWc0SA",
"ID":"11",
"TYPE":256
}
}
"""
FAKE_LUN_COUNT_RESPONSE = """
{
"data":{
"COUNT":"0"
},
"error":{
"code":0,
"description":"0"
}
}
"""
FAKE_SNAPSHOT_LIST_INFO_RESPONSE = {
"error": {
"code": 0,
"description": "0"
},
"data": [{
"ID": 11,
"NAME": ENCODE_NAME
}, ]
}
FAKE_CREATE_SNAPSHOT_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"ID": 11,
"NAME": "YheUoRwbSX2BxN7"
}
}
"""
FAKE_GET_SNAPSHOT_INFO_RESPONSE = """
{
"error": {
"code": 0,
"description": "0"
},
"data": {
"ID": 11,
"NAME": "YheUoRwbSX2BxN7"
}
}
"""
FAKE_SNAPSHOT_COUNT_RESPONSE = """
{
"data":{
"COUNT":"2"
},
"error":{
"code":0,
"description":"0"
}
}
"""
FAKE_GET_ISCSI_INFO_RESPONSE = """
{
"data": [{
"ETHPORTID": "139267",
"ID": "0+iqn.oceanstor:21004846fb8ca15f::22004:192.0.2.1,t,0x2005",
"TPGT": "8197",
"TYPE": 249
},
{
"ETHPORTID": "139268",
"ID": "1+iqn.oceanstor:21004846fb8ca15f::22003:192.0.2.2,t,0x2004",
"TPGT": "8196",
"TYPE": 249
}
],
"error": {
"code": 0,
"description": "0"
}
}
"""
FAKE_GET_ETH_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"PARENTTYPE": 209,
"MACADDRESS": "00:22:a1:0a:79:57",
"ETHNEGOTIATE": "-1",
"ERRORPACKETS": "0",
"IPV4ADDR": "192.0.2.2",
"IPV6GATEWAY": "",
"IPV6MASK": "0",
"OVERFLOWEDPACKETS": "0",
"ISCSINAME": "P0",
"HEALTHSTATUS": "1",
"ETHDUPLEX": "2",
"ID": "16909568",
"LOSTPACKETS": "0",
"TYPE": 213,
"NAME": "P0",
"INIORTGT": "4",
"RUNNINGSTATUS": "10",
"IPV4GATEWAY": "",
"BONDNAME": "",
"STARTTIME": "1371684218",
"SPEED": "1000",
"ISCSITCPPORT": "0",
"IPV4MASK": "255.255.0.0",
"IPV6ADDR": "",
"LOGICTYPE": "0",
"LOCATION": "ENG0.A5.P0",
"MTU": "1500",
"PARENTID": "1.5"
},
{
"PARENTTYPE": 209,
"MACADDRESS": "00:22:a1:0a:79:57",
"ETHNEGOTIATE": "-1",
"ERRORPACKETS": "0",
"IPV4ADDR": "192.0.2.1",
"IPV6GATEWAY": "",
"IPV6MASK": "0",
"OVERFLOWEDPACKETS": "0",
"ISCSINAME": "P0",
"HEALTHSTATUS": "1",
"ETHDUPLEX": "2",
"ID": "16909568",
"LOSTPACKETS": "0",
"TYPE": 213,
"NAME": "P0",
"INIORTGT": "4",
"RUNNINGSTATUS": "10",
"IPV4GATEWAY": "",
"BONDNAME": "",
"STARTTIME": "1371684218",
"SPEED": "1000",
"ISCSITCPPORT": "0",
"IPV4MASK": "255.255.0.0",
"IPV6ADDR": "",
"LOGICTYPE": "0",
"LOCATION": "ENG0.A5.P3",
"MTU": "1500",
"PARENTID": "1.5"
}]
}
"""
FAKE_GET_ETH_ASSOCIATE_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"IPV4ADDR": "192.0.2.1",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "10"
},
{
"IPV4ADDR": "192.0.2.2",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "10"
}
]
}
"""
FAKE_GET_ISCSI_DEVICE_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"CMO_ISCSI_DEVICE_NAME": "iqn.2006-08.com.huawei:oceanstor:21000022a:"
}]
}
"""
FAKE_GET_ALL_HOST_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"PARENTTYPE": 245,
"NAME": "ubuntuc",
"DESCRIPTION": "",
"RUNNINGSTATUS": "1",
"IP": "",
"PARENTNAME": "",
"OPERATIONSYSTEM": "0",
"LOCATION": "",
"HEALTHSTATUS": "1",
"MODEL": "",
"ID": "1",
"PARENTID": "",
"NETWORKNAME": "",
"TYPE": 21
},
{
"PARENTTYPE": 245,
"NAME": "ubuntu",
"DESCRIPTION": "",
"RUNNINGSTATUS": "1",
"IP": "",
"PARENTNAME": "",
"OPERATIONSYSTEM": "0",
"LOCATION": "",
"HEALTHSTATUS": "1",
"MODEL": "",
"ID": "2",
"PARENTID": "",
"NETWORKNAME": "",
"TYPE": 21
}]
}
"""
FAKE_GET_ALL_HOST_GROUP_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"NAME":"ubuntuc",
"DESCRIPTION":"",
"ID":"0",
"TYPE":14
},
{"NAME":"OpenStack_HostGroup_1",
"DESCRIPTION":"",
"ID":"0",
"TYPE":14
}
]
}
"""
FAKE_GET_HOST_GROUP_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data":{
"NAME":"ubuntuc",
"DESCRIPTION":"",
"ID":"0",
"TYPE":14
}
}
"""
FAKE_GET_LUN_COPY_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"COPYSTOPTIME": "-1",
"HEALTHSTATUS": "1",
"NAME": "w1PSNvu6RumcZMmSh4/l+Q==",
"RUNNINGSTATUS": "36",
"DESCRIPTION": "w1PSNvu6RumcZMmSh4/l+Q==",
"ID": "0",
"LUNCOPYTYPE": "1",
"COPYPROGRESS": "0",
"COPYSPEED": "2",
"TYPE": 219,
"COPYSTARTTIME": "-1"
}
}
"""
FAKE_GET_LUN_COPY_LIST_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"COPYSTOPTIME": "1372209335",
"HEALTHSTATUS": "1",
"NAME": "w1PSNvu6RumcZMmSh4/l+Q==",
"RUNNINGSTATUS": "40",
"DESCRIPTION": "w1PSNvu6RumcZMmSh4/l+Q==",
"ID": "0",
"LUNCOPYTYPE": "1",
"COPYPROGRESS": "100",
"COPYSPEED": "2",
"TYPE": 219,
"COPYSTARTTIME": "1372209329"
}]
}
"""
FAKE_GET_MAPPING_VIEW_INFO_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"WORKMODE":"255",
"HEALTHSTATUS":"1",
"NAME":"OpenStack_Mapping_View_1",
"RUNNINGSTATUS":"27",
"DESCRIPTION":"",
"ENABLEINBANDCOMMAND":"true",
"ID":"1",
"INBANDLUNWWN":"",
"TYPE":245
},
{
"WORKMODE":"255",
"HEALTHSTATUS":"1",
"NAME":"YheUoRwbSX2BxN767nvLSw",
"RUNNINGSTATUS":"27",
"DESCRIPTION":"",
"ENABLEINBANDCOMMAND":"true",
"ID":"2",
"INBANDLUNWWN": "",
"TYPE": 245
}]
}
"""
FAKE_GET_MAPPING_VIEW_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"WORKMODE":"255",
"HEALTHSTATUS":"1",
"NAME":"mOWtSXnaQKi3hpB3tdFRIQ",
"RUNNINGSTATUS":"27",
"DESCRIPTION":"",
"ENABLEINBANDCOMMAND":"true",
"ID":"11",
"INBANDLUNWWN":"",
"TYPE": 245,
"AVAILABLEHOSTLUNIDLIST": ""
}]
}
"""
FAKE_GET_SPEC_MAPPING_VIEW_RESPONSE = """
{
"error":{
"code":0
},
"data":{
"WORKMODE":"255",
"HEALTHSTATUS":"1",
"NAME":"mOWtSXnaQKi3hpB3tdFRIQ",
"RUNNINGSTATUS":"27",
"DESCRIPTION":"",
"ENABLEINBANDCOMMAND":"true",
"ID":"1",
"INBANDLUNWWN":"",
"TYPE":245,
"AVAILABLEHOSTLUNIDLIST": "[1]"
}
}
"""
FAKE_FC_INFO_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"HEALTHSTATUS":"1",
"NAME":"",
"MULTIPATHTYPE":"1",
"ISFREE":"true",
"RUNNINGSTATUS":"27",
"ID":"10000090fa0d6754",
"OPERATIONSYSTEM":"255",
"TYPE":223
},
{
"HEALTHSTATUS":"1",
"NAME":"",
"MULTIPATHTYPE":"1",
"ISFREE":"true",
"RUNNINGSTATUS":"27",
"ID":"10000090fa0d6755",
"OPERATIONSYSTEM":"255",
"TYPE":223
}]
}
"""
FAKE_ISCSI_INITIATOR_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"CHAPNAME":"mm-user",
"HEALTHSTATUS":"1",
"ID":"iqn.1993-08.org.debian:01:9073aba6c6f",
"ISFREE":"true",
"MULTIPATHTYPE":"1",
"NAME":"",
"OPERATIONSYSTEM":"255",
"RUNNINGSTATUS":"28",
"TYPE":222,
"USECHAP":"true"
},
{
"ISFREE":"true",
"ID":"ini-1"
},
{
"ISFREE":"false",
"ID":"ini-2",
"PARENTNAME":"Host2",
"PARENTID":"2"
}]
}
"""
FAKE_HOST_LINK_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"PARENTTYPE":21,
"TARGET_ID":"0000000000000000",
"INITIATOR_NODE_WWN":"20000090fa0d6754",
"INITIATOR_TYPE":"223",
"RUNNINGSTATUS":"27",
"PARENTNAME":"ubuntuc",
"INITIATOR_ID":"10000090fa0d6754",
"TARGET_PORT_WWN":"24000022a10a2a39",
"HEALTHSTATUS":"1",
"INITIATOR_PORT_WWN":"10000090fa0d6754",
"ID":"010000090fa0d675-0000000000110400",
"TARGET_NODE_WWN":"21000022a10a2a39",
"PARENTID":"1",
"CTRL_ID":"0",
"TYPE":255,
"TARGET_TYPE":"212"
}]
}
"""
FAKE_PORT_GROUP_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"ID":11,
"NAME": "portgroup-test"
}]
}
"""
FAKE_ERROR_INFO_RESPONSE = """
{
"error":{
"code":31755596
}
}
"""
FAKE_ERROR_CONNECT_RESPONSE = """
{
"error":{
"code":-403
}
}
"""
FAKE_ERROR_LUN_INFO_RESPONSE = """
{
"error":{
"code":0
},
"data":{
"ID":"11",
"IOCLASSID":"11",
"NAME":"5mFHcBv4RkCcD+JyrWc0SA",
"ALLOCTYPE": "0",
"DATATRANSFERPOLICY": "0",
"SMARTCACHEPARTITIONID": "0",
"CACHEPARTITIONID": "0"
}
}
"""
FAKE_GET_FC_INI_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"ID":"10000090fa0d6754",
"ISFREE":"true"
}]
}
"""
FAKE_SYSTEM_VERSION_RESPONSE = """
{
"error":{
"code": 0
},
"data":{
"PRODUCTVERSION": "V100R001C10",
"wwn": "21003400a30d844d"
}
}
"""
FAKE_GET_LUN_MIGRATION_RESPONSE = """
{
"data":[{"ENDTIME":"1436816174",
"ID":"9",
"PARENTID":"11",
"PARENTNAME":"xmRBHMlVRruql5vwthpPXQ",
"PROCESS":"-1",
"RUNNINGSTATUS":"76",
"SPEED":"2",
"STARTTIME":"1436816111",
"TARGETLUNID":"1",
"TARGETLUNNAME":"4924891454902893639",
"TYPE":253,
"WORKMODE":"0"
}],
"error":{"code":0,
"description":"0"}
}
"""
FAKE_HYPERMETRODOMAIN_RESPONSE = """
{
"error":{
"code": 0
},
"data":[{
"PRODUCTVERSION": "V100R001C10",
"ID": "11",
"NAME": "hypermetro_test",
"RUNNINGSTATUS": "1",
"HEALTHSTATUS": "0"
}]
}
"""
FAKE_HYPERMETRO_RESPONSE = """
{
"error":{
"code": 0
},
"data":{
"PRODUCTVERSION": "V100R001C10",
"ID": "11",
"NAME": "hypermetro_test",
"RUNNINGSTATUS": "1",
"HEALTHSTATUS": "1"
}
}
"""
FAKE_QOS_INFO_RESPONSE = """
{
"error":{
"code": 0
},
"data":{
"ID": "11"
}
}
"""
FAKE_GET_FC_PORT_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"RUNNINGSTATUS":"10",
"WWN":"2000643e8c4c5f66",
"PARENTID":"0A.1",
"ID": "1114368",
"RUNSPEED": "16000"
},
{
"RUNNINGSTATUS":"10",
"WWN":"2000643e8c4c5f67",
"PARENTID":"0A.1",
"ID": "1114369",
"RUNSPEED": "16000"
}]
}
"""
FAKE_SMARTCACHEPARTITION_RESPONSE = """
{
"error":{
"code":0
},
"data":{
"ID":"11",
"NAME":"cache-name"
}
}
"""
FAKE_CONNECT_FC_RESPONSE = {
"driver_volume_type": 'fibre_channel',
"data": {
"target_wwn": ["10000090fa0d6754"],
"target_lun": "1",
"volume_id": ID
}
}
FAKE_METRO_INFO_RESPONSE = {
"PRODUCTVERSION": "V100R001C10",
"ID": "11",
"NAME": "hypermetro_test",
"RUNNINGSTATUS": "42",
"HEALTHSTATUS": "0"
}
FAKE_METRO_INFO_NEW_RESPONSE = """{
"error": {
"code": 0
},
"data": {
"PRODUCTVERSION": "V100R001C10",
"ID": "11",
"NAME": "hypermetro_test",
"RUNNINGSTATUS": "1",
"HEALTHSTATUS": "1"
}
}
"""
FAKE_CREATE_METROROUP_RESPONSE = """
{
"data": {
"DESCRIPTION": "",
"DOMAINID": "643e8c4c5f670100",
"DOMAINNAME": "hypermetro-domain",
"HEALTHSTATUS": "1",
"ID": "3400a30d844d8002",
"ISEMPTY": "true",
"NAME": "6F7kdHZcQJ2zbzxHmBl4FQ",
"PRIORITYSTATIONTYPE": "0",
"RECOVERYPOLICY": "1",
"RESOURCETYPE": "11",
"RUNNINGSTATUS": "41",
"SPEED": "2",
"SYNCDIRECTION": "1",
"TYPE": 15364
},
"error": {
"code": 0,
"description": "0"
}
}
"""
FAKE_GET_METROROUP_RESPONSE = {
"data": [{
"DESCRIPTION": "",
"DOMAINID": "643e8c4c5f670100",
"DOMAINNAME": "hypermetro-domain",
"HEALTHSTATUS": "1",
"ID": "11",
"ISEMPTY": "true",
"NAME": huawei_utils.encode_name(ID),
"PRIORITYSTATIONTYPE": "0",
"RECOVERYPOLICY": "1",
"RESOURCETYPE": "11",
"RUNNINGSTATUS": "41",
"SPEED": "2",
"SYNCDIRECTION": "1",
"TYPE": 15364
}],
"error": {
"code": 0,
"description": "0"
},
}
FAKE_GET_METROROUP_ID_RESPONSE = """
{
"data": {
"DESCRIPTION": "",
"DOMAINID": "643e8c4c5f670100",
"DOMAINNAME": "hypermetro-domain",
"HEALTHSTATUS": "1",
"ID": "11",
"ISEMPTY": "false",
"NAME": "IexzQZJWSXuX2e9I7c8GNQ",
"PRIORITYSTATIONTYPE": "0",
"RECOVERYPOLICY": "1",
"RESOURCETYPE": "11",
"RUNNINGSTATUS": "1",
"SPEED": "2",
"SYNCDIRECTION": "1",
"TYPE": 15364
},
"error": {
"code": 0,
"description": "0"
}
}
"""
# Registry mapping REST URL keys (most suffixed with the HTTP verb, e.g.
# '/lun/11/GET') to canned JSON response strings — presumably consumed by a
# fake REST client elsewhere in this module; verify against its url handler.
MAP_COMMAND_TO_FAKE_RESPONSE = {}
MAP_COMMAND_TO_FAKE_RESPONSE['/xx/sessions'] = (
    FAKE_GET_LOGIN_STORAGE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/sessions'] = (
    FAKE_LOGIN_OUT_STORAGE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUN_MIGRATION/POST'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUN_MIGRATION?range=[0-256]/GET'] = (
FAKE_GET_LUN_MIGRATION_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUN_MIGRATION/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/storagepool'] = (
FAKE_STORAGE_POOL_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun'] = (
FAKE_LUN_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/11/GET'] = (
FAKE_LUN_GET_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/1/GET'] = (
FAKE_LUN_GET_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/1/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/11/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun?filter=NAME::%s/GET' % ENCODE_NAME] = (
json.dumps(FAKE_QUERY_ALL_LUN_RESPONSE))
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate?TYPE=11&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate?TYPE=11&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=12/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate?ID=1&TYPE=11&ASSOCIATEOBJTYPE=21'
'&ASSOCIATEOBJID=0/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate?TYPE=11&ASSOCIATEOBJTYPE=21'
'&ASSOCIATEOBJID=1/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate/cachepartition?ID=1'
'&ASSOCIATEOBJTYPE=11&ASSOCIATEOBJID=11'
'/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/associate?TYPE=27&ASSOCIATEOBJTYPE=21'
'&ASSOCIATEOBJID=1/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/associate?TYPE=27&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup?range=[0-8191]/GET'] = (
FAKE_QUERY_LUN_GROUP_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup'] = (
FAKE_QUERY_LUN_GROUP_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate'] = (
FAKE_QUERY_LUN_GROUP_ASSOCIAT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUNGroup/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?ID=11&ASSOCIATEOBJTYPE=11'
'&ASSOCIATEOBJID=1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?TYPE=256&ASSOCIATEOBJTYPE=11'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?TYPE=256&ASSOCIATEOBJTYPE=11'
'&ASSOCIATEOBJID=1/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?ID=11&ASSOCIATEOBJTYPE=11'
'&ASSOCIATEOBJID=11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?ID=11&ASSOCIATEOBJTYPE=27'
'&ASSOCIATEOBJID=11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/count?TYPE=11&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_LUN_COUNT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/count?TYPE=27&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=1/GET'] = (
FAKE_SNAPSHOT_COUNT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/count?TYPE=27&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_SNAPSHOT_COUNT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?TYPE=256&ASSOCIATEOBJTYPE=27'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/expand/PUT'] = (
FAKE_LUN_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?ID=12&ASSOCIATEOBJTYPE=11'
'&ASSOCIATEOBJID=12/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot'] = (
FAKE_CREATE_SNAPSHOT_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/11/GET'] = (
FAKE_GET_SNAPSHOT_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/activate'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/stop/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot?filter=NAME::%s/GET' % ENCODE_NAME] = (
json.dumps(FAKE_SNAPSHOT_LIST_INFO_RESPONSE))
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/11/GET'] = (
FAKE_LUN_GET_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/11/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/active/11/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/'] = (
FAKE_QOS_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/count'] = (
FAKE_COMMON_FAIL_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_tgt_port/GET'] = (
FAKE_GET_ISCSI_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/eth_port/GET'] = (
FAKE_GET_ETH_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/eth_port/associate?TYPE=213&ASSOCIATEOBJTYPE'
'=257&ASSOCIATEOBJID=11/GET'] = (
FAKE_GET_ETH_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsidevicename'] = (
FAKE_GET_ISCSI_DEVICE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator?range=[0-256]/GET'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/POST'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/PUT'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator?PARENTTYPE=21&PARENTID'
'=1/GET'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/remove_iscsi_from_host/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/'
'iqn.1993-08.debian:01:ec2bff7ac3a3/PUT'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host?range=[0-65535]/GET'] = (
FAKE_GET_ALL_HOST_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/1/GET'] = (
FAKE_GET_HOST_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host'] = (
FAKE_CREATE_HOST_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hostgroup?range=[0-8191]/GET'] = (
FAKE_GET_ALL_HOST_GROUP_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hostgroup'] = (
FAKE_GET_HOST_GROUP_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/associate?TYPE=14&ID=0'
'&ASSOCIATEOBJTYPE=21&ASSOCIATEOBJID=1'
'/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/associate?TYPE=14&ID=0'
'&ASSOCIATEOBJID=0/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
# Host-association and hostgroup-delete fake responses.
# NOTE(review): the '/host/associate?TYPE=21&...' GET key was registered
# twice with the identical value (before and after '/hostgroup/0/DELETE');
# the redundant re-assignment was removed — final dict state is unchanged.
MAP_COMMAND_TO_FAKE_RESPONSE['/host/associate?TYPE=21&'
                             'ASSOCIATEOBJTYPE=14&ASSOCIATEOBJID=0/GET'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hostgroup/0/DELETE'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hostgroup/associate'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/luncopy'] = (
FAKE_GET_LUN_COPY_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUNCOPY?range=[0-1023]/GET'] = (
FAKE_GET_LUN_COPY_LIST_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUNCOPY/start/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUNCOPY/0/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview?range=[0-8191]/GET'] = (
FAKE_GET_MAPPING_VIEW_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/PUT'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/MAPPINGVIEW/1/GET'] = (
FAKE_GET_SPEC_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/REMOVE_ASSOCIATE/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate/lungroup?TYPE=256&'
'ASSOCIATEOBJTYPE=245&ASSOCIATEOBJID=1/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate?TYPE=245&'
'ASSOCIATEOBJTYPE=14&ASSOCIATEOBJID=0/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate?TYPE=245&'
'ASSOCIATEOBJTYPE=256&ASSOCIATEOBJID=11/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate?TYPE=245&'
'ASSOCIATEOBJTYPE=257&ASSOCIATEOBJID=0/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate?TYPE=245&'
'ASSOCIATEOBJTYPE=257&ASSOCIATEOBJID=11/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
FAKE_GET_ENGINES_RESPONSE = """
{
"error":{
"code": 0
},
"data":[{
"NODELIST": "[]",
"ID": "0"
}]
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/storageengine/GET'] = (
FAKE_GET_ENGINES_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/associate?ASSOCIATEOBJTYPE=245&'
'ASSOCIATEOBJID=1&range=[0-8191]/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
# Mapping-view association and free FC initiator fake responses.
# NOTE(review): both registrations below appeared twice back to back with
# identical values; the duplicate pair was removed (re-assigning the same
# dict key with the same value is a no-op, so module state is unchanged).
MAP_COMMAND_TO_FAKE_RESPONSE['/MAPPINGVIEW/CREATE_ASSOCIATE/PUT'] = (
    FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator?ISFREE=true&'
                             'range=[0-8191]/GET'] = (
    FAKE_FC_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator/10000090fa0d6754/GET'] = (
FAKE_FC_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator/10000090fa0d6754/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host_link?INITIATOR_TYPE=223'
'&INITIATOR_PORT_WWN=10000090fa0d6754/GET'] = (
FAKE_HOST_LINK_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup?range=[0-8191]&TYPE=257/GET'] = (
FAKE_PORT_GROUP_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/system//GET'] = (
FAKE_SYSTEM_VERSION_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator?range=[0-256]/GET'] = (
FAKE_GET_FC_INI_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_port/GET'] = (
FAKE_GET_FC_PORT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator/GET'] = (
FAKE_GET_FC_PORT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['fc_initiator?range=[0-256]/GET'] = (
FAKE_GET_FC_PORT_RESPONSE)
# FC-initiator lookup and cache-partition association fake responses.
# NOTE(review): '/fc_initiator?PARENTTYPE=21&PARENTID=1/GET' was registered
# twice with the same value; the duplicate was dropped — dict contents are
# unchanged.
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator?PARENTTYPE=21&PARENTID=1/GET'] = (
    FAKE_GET_FC_PORT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate/cachepartition/POST'] = (
    FAKE_SYSTEM_VERSION_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator?range=[0-256]&PARENTID=1/GET'] = (
    FAKE_GET_FC_PORT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/SMARTCACHEPARTITION/0/GET'] = (
FAKE_SMARTCACHEPARTITION_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/SMARTCACHEPARTITION/REMOVE_ASSOCIATE/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/SMARTCACHEPARTITION/count'] = (
FAKE_COMMON_FAIL_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/cachepartition/0/GET'] = (
FAKE_SMARTCACHEPARTITION_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroDomain?range=[0-32]/GET'] = (
FAKE_HYPERMETRODOMAIN_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/POST'] = (
FAKE_HYPERMETRO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/3400a30d844d0007/GET'] = (
FAKE_METRO_INFO_NEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/disable_hcpair/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hyperMetro/associate/pair/POST'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hyperMetro/associate/pair/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/11/GET'] = (
FAKE_HYPERMETRO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair?range=[0-4095]/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/synchronize_hcpair/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/splitmirror?range=[0-8191]/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/splitmirror/count'] = (
FAKE_COMMON_FAIL_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/smartcachepool/count'] = (
FAKE_COMMON_FAIL_RESPONSE)
FAKE_GET_PORTG_BY_VIEW = """
{
"data": [{
"DESCRIPTION": "Please do NOT modify this. Engine ID: 0",
"ID": "0",
"NAME": "OpenStack_PortGroup_1",
"TYPE": 257
}],
"error": {
"code": 0
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/associate/mappingview?TYPE=257&AS'
'SOCIATEOBJTYPE=245&ASSOCIATEOBJID=1/GET'] = (
FAKE_GET_PORTG_BY_VIEW)
FAKE_GET_PORT_BY_PORTG = """
{
"data":[{
"CONFSPEED":"0","FCCONFMODE":"3",
"FCRUNMODE":"0","HEALTHSTATUS":"1","ID":"2000643e8c4c5f66",
"MAXSUPPORTSPEED":"16000","NAME":"P0","PARENTID":"0B.1",
"PARENTTYPE":209,"RUNNINGSTATUS":"10","RUNSPEED":"8000",
"WWN":"2000643e8c4c5f66"
}],
"error":{
"code":0,"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_port/associate/portgroup?TYPE=212&ASSOCI'
'ATEOBJTYPE=257&ASSOCIATEOBJID=0/GET'] = (
FAKE_GET_PORT_BY_PORTG)
FAKE_GET_PORTG = """
{
"data": {
"TYPE": 257,
"NAME": "OpenStack_PortGroup_1",
"DESCRIPTION": "Please DO NOT change thefollowing message: 0",
"ID": "0"
},
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/0/GET'] = FAKE_GET_PORTG
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/0/PUT'] = FAKE_GET_PORTG
MAP_COMMAND_TO_FAKE_RESPONSE['/port/associate/portgroup/POST'] = (
FAKE_GET_PORT_BY_PORTG)
MAP_COMMAND_TO_FAKE_RESPONSE['/port/associate/portgroup?ID=0&TYPE=257&ASSOCIA'
'TEOBJTYPE=212&ASSOCIATEOBJID=2000643e8c4c5f66/DE'
'LETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
FAKE_CREATE_PORTG = """
{
"data": {
"DESCRIPTION": "Please DO NOT change the following message: 0",
"ID": "0",
"NAME": "OpenStack_PortGroup_1",
"TYPE": 257
},
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/PortGroup/POST'] = FAKE_CREATE_PORTG
MAP_COMMAND_TO_FAKE_RESPONSE['/PortGroup/1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
FAKE_GET_PORTG_FROM_PORT = """
{
"data": [{
"TYPE": 257,
"NAME": "OpenStack_PortGroup_1",
"DESCRIPTION": "PleaseDONOTchangethefollowingmessage: 0",
"ID": "0"
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/associate/fc_port?TYPE=257&ASSOCIA'
'TEOBJTYPE=212&ASSOCIATEOBJID=1114368/GET'] = (
FAKE_GET_PORTG_FROM_PORT)
FAKE_GET_VIEW_BY_PORTG = """
{
"data": [{
"ASSOCIATEOBJID": "0",
"COUNT": "0",
"ASSOCIATEOBJTYPE": "0",
"INBANDLUNWWN": "",
"FORFILESYSTEM": "false",
"ID": "2",
"ENABLEINBANDCOMMAND": "false",
"NAME": "OpenStack_Mapping_View_1",
"WORKMODE": "0",
"TYPE": 245,
"HOSTLUNID": "0",
"DESCRIPTION": ""
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate/portgroup?TYPE=245&ASS'
'OCIATEOBJTYPE=257&ASSOCIATEOBJID=0/GET'] = (
FAKE_GET_VIEW_BY_PORTG)
FAKE_GET_LUNG_BY_VIEW = """
{
"data": [{
"TYPE": 256,
"NAME": "OpenStack_LunGroup_1",
"DESCRIPTION": "OpenStack_LunGroup_1",
"ID": "1"
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate/mappingview?TYPE=256&ASSO'
'CIATEOBJTYPE=245&ASSOCIATEOBJID=2/GET'] = (
FAKE_GET_LUNG_BY_VIEW)
FAKE_LUN_COUNT_RESPONSE_1 = """
{
"data":{
"COUNT":"2"
},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/count?TYPE=11&ASSOCIATEOB'
'JTYPE=256&ASSOCIATEOBJID=1/GET'] = (
FAKE_LUN_COUNT_RESPONSE_1)
FAKE_PORTS_IN_PG_RESPONSE = """
{
"data": [{
"ID": "1114114",
"WWN": "2002643e8c4c5f66"
},
{
"ID": "1114113",
"WWN": "2001643e8c4c5f66"
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_port/associate?TYPE=213&ASSOCIATEOBJTYPE='
'257&ASSOCIATEOBJID=0/GET'] = (
FAKE_PORTS_IN_PG_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetro_ConsistentGroup/POST'] = (
FAKE_CREATE_METROROUP_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup?type"
"='15364'/GET"] = (
json.dumps(FAKE_GET_METROROUP_RESPONSE))
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup/11/GET"] = (
FAKE_GET_METROROUP_ID_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup/11/DELETE"] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup/stop/PUT"] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup/sync/PUT"] = (
FAKE_COMMON_SUCCESS_RESPONSE)
FAKE_GET_REMOTEDEV_RESPONSE = """
{
"data":[{
"ARRAYTYPE":"1",
"HEALTHSTATUS":"1",
"ID":"0",
"NAME":"Huawei.Storage",
"RUNNINGSTATUS":"1",
"WWN":"21003400a30d844d"
}],
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/remote_device/GET'] = (
FAKE_GET_REMOTEDEV_RESPONSE)
FAKE_CREATE_PAIR_RESPONSE = """
{
"data":{
"ID":"%s"
},
"error":{
"code":0,
"description":"0"
}
}
""" % TEST_PAIR_ID
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/POST'] = (
FAKE_CREATE_PAIR_RESPONSE)
FAKE_DELETE_PAIR_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/%s/DELETE' % TEST_PAIR_ID] = (
FAKE_DELETE_PAIR_RESPONSE)
FAKE_SET_PAIR_ACCESS_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/%s/PUT' % TEST_PAIR_ID] = (
FAKE_SET_PAIR_ACCESS_RESPONSE)
FAKE_GET_PAIR_NORMAL_RESPONSE = """
{
"data":{
"REPLICATIONMODEL": "1",
"RUNNINGSTATUS": "1",
"SECRESACCESS": "2",
"HEALTHSTATUS": "1",
"ISPRIMARY": "true"
},
"error":{
"code":0,
"description":"0"
}
}
"""
FAKE_GET_PAIR_SPLIT_RESPONSE = """
{
"data":{
"REPLICATIONMODEL": "1",
"RUNNINGSTATUS": "26",
"SECRESACCESS": "2",
"ISPRIMARY": "true"
},
"error":{
"code":0,
"description":"0"
}
}
"""
FAKE_GET_PAIR_SYNC_RESPONSE = """
{
"data":{
"REPLICATIONMODEL": "1",
"RUNNINGSTATUS": "23",
"SECRESACCESS": "2"
},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/%s/GET' % TEST_PAIR_ID] = (
FAKE_GET_PAIR_NORMAL_RESPONSE)
FAKE_SYNC_PAIR_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/sync/PUT'] = (
FAKE_SYNC_PAIR_RESPONSE)
FAKE_SPLIT_PAIR_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/split/PUT'] = (
FAKE_SPLIT_PAIR_RESPONSE)
FAKE_SWITCH_PAIR_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/switch/PUT'] = (
FAKE_SWITCH_PAIR_RESPONSE)
FAKE_PORTS_IN_PG_RESPONSE = """
{
"data": [{
"ID": "1114114",
"WWN": "2002643e8c4c5f66"
},
{
"ID": "1114113",
"WWN": "2001643e8c4c5f66"
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_port/associate?TYPE=213&ASSOCIATEOBJTYPE='
'257&ASSOCIATEOBJID=0/GET'] = (
FAKE_PORTS_IN_PG_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/associate/fc_port?TYPE=257&ASSOCIA'
'TEOBJTYPE=212&ASSOCIATEOBJID=1114369/GET'] = (
FAKE_PORTS_IN_PG_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate/portgroup?TYPE=245&ASSOC'
'IATEOBJTYPE=257&ASSOCIATEOBJID=1114114/GET'] = (
FAKE_SWITCH_PAIR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate/portgroup?TYPE=245&ASSOC'
'IATEOBJTYPE=257&ASSOCIATEOBJID=1114113/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
REPLICA_BACKEND_ID = 'huawei-replica-1'
class FakeHuaweiConf(huawei_conf.HuaweiConf):
    """Stand-in for HuaweiConf that injects a fixed test configuration.

    Instead of parsing the driver's XML config file, update_config_value()
    stamps a canned set of attributes onto the (mocked) configuration
    object, including hypermetro and replication device settings.
    """

    def __init__(self, conf, protocol):
        self.conf = conf
        self.protocol = protocol

    def safe_get(self, key):
        """Return the configuration value for *key*, or None on any failure."""
        try:
            value = getattr(self.conf, key)
        except Exception:
            return None
        return value

    def update_config_value(self):
        """Populate self.conf with the canned driver configuration."""
        # Local-array iSCSI target description matching the fake responses.
        iscsi_info = {'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
                      'TargetIP': '192.0.2.2',
                      'CHAPinfo': 'mm-user;mm-user@storage',
                      'ALUA': '1',
                      'TargetPortGroup': 'portgroup-test', }
        # Raw (still-unparsed) remote iSCSI info string, exercising the
        # driver's replication-device iscsi_info parser.
        rmt_iscsi_info = ('{ Name: iqn.1993-08.debian:01:ec2bff7acxxx;\n'
                          'TargetIP:1.1.1.1;CHAPinfo:mm-user#mm-user@storage;'
                          'ALUA:1; TargetPortGroup:portgroup-test};\t\n '
                          '{ Name: iqn.1993-08.debian:01:ec2bff7acyyy;\n'
                          'TargetIP:2.2.2.2;CHAPinfo:nn-user#nn-user@storage;'
                          'ALUA:0; TargetPortGroup:portgroup-test1}\t\n')
        targets = [{'backend_id': REPLICA_BACKEND_ID,
                    'storage_pool': 'OpenStack_Pool',
                    'san_address':
                        'https://192.0.2.69:8088/deviceManager/rest/',
                    'san_user': 'admin',
                    'san_password': 'Admin@storage1',
                    'iscsi_info': rmt_iscsi_info}]
        settings = {
            'volume_backend_name': 'huawei_storage',
            'san_address': ['http://192.0.2.69:8082/deviceManager/rest/'],
            'san_user': 'admin',
            'san_password': 'Admin@storage',
            'san_product': 'V3',
            'san_protocol': self.protocol,
            'lun_type': constants.THICK_LUNTYPE,
            'lun_ready_wait_interval': 2,
            'lun_copy_wait_interval': 2,
            'lun_timeout': 43200,
            'lun_write_type': '1',
            'lun_mirror_switch': '1',
            'lun_prefetch_type': '1',
            'lun_prefetch_value': '0',
            'lun_policy': '0',
            'lun_read_cache_policy': '2',
            'lun_write_cache_policy': '5',
            'storage_pools': ['OpenStack_Pool'],
            'iscsi_default_target_ip': ['192.0.2.68'],
            'metro_san_address':
                ['https://192.0.2.240:8088/deviceManager/rest/'],
            'metro_storage_pools': 'OpenStack_Pool',
            'metro_san_user': 'admin',
            'metro_san_password': 'Admin@storage1',
            'metro_domain_name': 'hypermetro_test',
            'iscsi_info': [iscsi_info],
            'replication_device': targets,
            'safe_get': self.safe_get,
        }
        for name, value in settings.items():
            setattr(self.conf, name, value)
class FakeClient(rest_client.RestClient):
    """Fake REST client that serves canned responses instead of HTTP calls.

    do_call() looks the request path up in MAP_COMMAND_TO_FAKE_RESPONSE,
    so tests never touch the network. The test_* flags inject one-shot
    error responses.
    """

    def __init__(self, configuration):
        # Delegate to the real RestClient for session/URL bookkeeping; no
        # I/O happens because do_call is overridden below.
        san_address = configuration.san_address
        san_user = configuration.san_user
        san_password = configuration.san_password
        rest_client.RestClient.__init__(self, configuration,
                                        san_address,
                                        san_user,
                                        san_password)
        # One-shot failure/behavior toggles consumed (and reset) by do_call.
        self.test_fail = False
        self.test_multi_url_flag = False
        self.cache_not_exist = False
        self.partition_not_exist = False

    def _get_snapshotid_by_name(self, snapshot_name):
        # Snapshots always "exist" with a fixed ID in the fake backend.
        return "11"

    def _check_snapshot_exist(self, snapshot_id):
        return True

    def get_partition_id_by_name(self, name):
        # Simulate a missing cache partition when the flag is set.
        if self.partition_not_exist:
            return None
        return "11"

    def get_cache_id_by_name(self, name):
        # Simulate a missing SmartCache when the flag is set.
        if self.cache_not_exist:
            return None
        return "11"

    def add_lun_to_cache(self, lunid, cache_id):
        pass

    def do_call(self, url=False, data=None, method=None, calltimeout=4,
                log_filter_flag=False):
        # Strip the base URL and device ID so the remaining path (plus the
        # HTTP method suffix) matches the MAP_COMMAND_TO_FAKE_RESPONSE keys.
        url = url.replace('http://192.0.2.69:8082/deviceManager/rest', '')
        command = url.replace('/210235G7J20000000000/', '')
        data = json.dumps(data) if data else None
        if method:
            command = command + "/" + method
        for item in MAP_COMMAND_TO_FAKE_RESPONSE.keys():
            if command == item:
                data = MAP_COMMAND_TO_FAKE_RESPONSE[item]
                # One-shot error injection: overrides the canned response
                # once, then clears the flag.
                if self.test_fail:
                    data = FAKE_ERROR_INFO_RESPONSE
                    if command == 'lun/11/GET':
                        data = FAKE_ERROR_LUN_INFO_RESPONSE
                    self.test_fail = False
                if self.test_multi_url_flag:
                    data = FAKE_ERROR_CONNECT_RESPONSE
                    self.test_multi_url_flag = False
        return json.loads(data)
class FakeReplicaPairManager(replication.ReplicaPairManager):
    """ReplicaPairManager whose remote client is the canned-response fake."""

    def _init_rmt_client(self):
        # Replace the real remote REST client with the fake one.
        self.rmt_client = FakeClient(self.conf)
class FakeISCSIStorage(huawei_driver.HuaweiISCSIDriver):
    """Fake iSCSI driver wired to FakeClient so no real array is needed."""

    def __init__(self, configuration):
        self.configuration = configuration
        self.huawei_conf = FakeHuaweiConf(self.configuration, 'iSCSI')
        self.active_backend_id = None
        self.replica = None
        self.support_func = None

    def do_setup(self):
        """Build fake local/remote/replica clients and helper managers."""
        self.metro_flag = True
        self.huawei_conf.update_config_value()
        self.get_local_and_remote_dev_conf()
        # Local, hypermetro-remote and replication clients all talk to the
        # same canned-response fake backend.
        self.client = FakeClient(configuration=self.configuration)
        self.rmt_client = FakeClient(configuration=self.configuration)
        self.replica_client = FakeClient(configuration=self.configuration)
        self.metro = hypermetro.HuaweiHyperMetro(self.client,
                                                 self.rmt_client,
                                                 self.configuration)
        self.replica = FakeReplicaPairManager(self.client,
                                              self.replica_client,
                                              self.configuration)
class FakeFCStorage(huawei_driver.HuaweiFCDriver):
    """Fake FC driver wired to FakeClient so no real array is needed."""

    def __init__(self, configuration):
        self.configuration = configuration
        self.fcsan = None
        # NOTE(review): the protocol passed here is 'iSCSI' even though this
        # is the FC fake — it looks copy-pasted from FakeISCSIStorage.
        # Confirm whether 'FC' was intended before any test relies on the
        # resulting san_protocol value.
        self.huawei_conf = FakeHuaweiConf(self.configuration, 'iSCSI')
        self.active_backend_id = None
        self.replica = None
        self.support_func = None

    def do_setup(self):
        """Build fake local/remote/replica clients and helper managers."""
        self.metro_flag = True
        self.huawei_conf.update_config_value()
        self.get_local_and_remote_dev_conf()
        # Local, hypermetro-remote and replication clients all talk to the
        # same canned-response fake backend.
        self.client = FakeClient(configuration=self.configuration)
        self.rmt_client = FakeClient(configuration=self.configuration)
        self.replica_client = FakeClient(configuration=self.configuration)
        self.metro = hypermetro.HuaweiHyperMetro(self.client,
                                                 self.rmt_client,
                                                 self.configuration)
        self.replica = FakeReplicaPairManager(self.client,
                                              self.replica_client,
                                              self.configuration)
@ddt.ddt
class HuaweiTestBase(test.TestCase):
    """Base class for Huawei driver test cases.

    Builds a fake iSCSI driver plus the volume/snapshot/consistency-group
    fixtures shared by the concrete protocol test classes.
    """

    def setUp(self):
        super(HuaweiTestBase, self).setUp()
        self.configuration = mock.Mock(spec=conf.Configuration)
        self.driver = FakeISCSIStorage(configuration=self.configuration)
        self.driver.do_setup()
        # Plain volume/snapshot fixtures sharing the same fake ID.
        self.volume = fake_volume.fake_volume_obj(
            admin_contex, host=HOST, provider_location=PROVIDER_LOCATION,
            admin_metadata=ADMIN_METADATA, id=ID)
        self.snapshot = fake_snapshot.fake_snapshot_obj(
            admin_contex, provider_location=PROVIDER_LOCATION, id=ID)
        self.snapshot.volume = self.volume
        # Volume carrying replication driver data (replication tests).
        self.replica_volume = fake_volume.fake_volume_obj(
            admin_contex, host=HOST, provider_location=PROVIDER_LOCATION,
            admin_metadata=ADMIN_METADATA, replication_status='disabled',
            replication_driver_data=REPLICA_DRIVER_DATA, id=ID)
        # Volume carrying hypermetro metadata (hypermetro tests).
        self.hyper_volume = fake_volume.fake_volume_obj(
            admin_contex, host=HOST, provider_location=PROVIDER_LOCATION,
            volume_metadata=VOL_METADATA, id=ID)
        # Source/target pair used by migration and update tests.
        self.original_volume = fake_volume.fake_volume_obj(admin_contex,
                                                           id=ID)
        self.current_volume = fake_volume.fake_volume_obj(
            admin_contex, id=ID, provider_location=PROVIDER_LOCATION,
            name_id=ID)
        self.cgsnapshot = fake_cgsnapshot.fake_cgsnapshot_obj(
            admin_contex, id=ID, consistencygroup_id=ID, status='available')
        self.cg = fake_consistencygroup.fake_consistencyobject_obj(
            admin_contex, id=ID, status='available')

    def test_encode_name(self):
        # encode_name hashes the volume ID; either of the two known hash
        # implementations' outputs is accepted.
        lun_name = huawei_utils.encode_name(self.volume.id)
        self.assertIn(lun_name, ('21ec7341-4687000622165227970',
                                 '21ec7341-7953146827712520106'))

    @mock.patch.object(rest_client, 'RestClient')
    def test_create_snapshot_success(self, mock_client):
        lun_info = self.driver.create_snapshot(self.snapshot)
        self.assertEqual(11, lun_info['provider_location'])
        # Re-run with the volume attached via volume_id as well.
        self.snapshot.volume_id = ID
        self.snapshot.volume = self.volume
        lun_info = self.driver.create_snapshot(self.snapshot)
        self.assertEqual(11, lun_info['provider_location'])

    @ddt.data('1', '', '0')
    def test_copy_volume(self, input_speed):
        # The copyspeed from volume metadata must be passed through
        # verbatim to create_luncopy.
        self.driver.configuration.lun_copy_wait_interval = 0
        self.volume.metadata = {'copyspeed': input_speed}
        mocker = self.mock_object(
            self.driver.client, 'create_luncopy',
            mock.Mock(wraps=self.driver.client.create_luncopy))
        self.driver._copy_volume(self.volume,
                                 'fake_copy_name',
                                 'fake_src_lun',
                                 'fake_tgt_lun')
        mocker.assert_called_once_with('fake_copy_name',
                                       'fake_src_lun',
                                       'fake_tgt_lun',
                                       input_speed)

    @ddt.data({'input_speed': '1',
               'actual_speed': '1'},
              {'input_speed': '',
               'actual_speed': '2'},
              {'input_speed': None,
               'actual_speed': '2'},
              {'input_speed': '5',
               'actual_speed': '2'})
    @ddt.unpack
    def test_client_create_luncopy(self, input_speed, actual_speed):
        # Per the data sets above: an absent/empty/out-of-range copyspeed
        # is expected to fall back to '2' in the REST payload.
        mocker = self.mock_object(
            self.driver.client, 'call',
            mock.Mock(wraps=self.driver.client.call))
        self.driver.client.create_luncopy('fake_copy_name',
                                          'fake_src_lun',
                                          'fake_tgt_lun',
                                          input_speed)
        mocker.assert_called_once_with(
            mock.ANY,
            {"TYPE": 219,
             "NAME": 'fake_copy_name',
             "DESCRIPTION": 'fake_copy_name',
             "COPYSPEED": actual_speed,
             "LUNCOPYTYPE": "1",
             "SOURCELUN": "INVALID;fake_src_lun;INVALID;INVALID;INVALID",
             "TARGETLUN": "INVALID;fake_tgt_lun;INVALID;INVALID;INVALID"}
        )
@ddt.ddt
class HuaweiISCSIDriverTestCase(HuaweiTestBase):
    def setUp(self):
        """Create an iSCSI fake driver plus expected target/portgroup data."""
        super(HuaweiISCSIDriverTestCase, self).setUp()
        self.configuration = mock.Mock(spec=conf.Configuration)
        self.configuration.hypermetro_devices = hypermetro_devices
        self.flags(rpc_backend='oslo_messaging._drivers.impl_fake')
        self.driver = FakeISCSIStorage(configuration=self.configuration)
        self.driver.do_setup()
        # Expected iSCSI IQNs/IPs matching the fake REST responses.
        self.portgroup = 'portgroup-test'
        self.iscsi_iqns = ['iqn.2006-08.com.huawei:oceanstor:21000022a:'
                           ':20503:192.0.2.1',
                           'iqn.2006-08.com.huawei:oceanstor:21000022a:'
                           ':20500:192.0.2.2']
        self.target_ips = ['192.0.2.1',
                           '192.0.2.2']
        self.portgroup_id = 11
        self.driver.client.login()
def test_parse_rmt_iscsi_info(self):
rmt_devs = self.driver.huawei_conf.get_replication_devices()
iscsi_info = rmt_devs[0]['iscsi_info']
expected_iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7acxxx',
'TargetIP': '1.1.1.1',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1',
'TargetPortGroup': 'portgroup-test'},
{'Name': 'iqn.1993-08.debian:01:ec2bff7acyyy',
'TargetIP': '2.2.2.2',
'CHAPinfo': 'nn-user;nn-user@storage',
'ALUA': '0',
'TargetPortGroup': 'portgroup-test1'}]
self.assertEqual(expected_iscsi_info, iscsi_info)
def test_parse_rmt_iscsi_info_without_iscsi_configuration(self):
self.configuration.replication_device[0]['iscsi_info'] = ''
rmt_devs = self.driver.huawei_conf.get_replication_devices()
iscsi_info = rmt_devs[0]['iscsi_info']
self.assertEqual([], iscsi_info)
def test_login_success(self):
device_id = self.driver.client.login()
self.assertEqual('210235G7J20000000000', device_id)
@ddt.data(constants.PWD_EXPIRED, constants.PWD_RESET)
def test_login_password_expires_and_reset_fail(self, state):
with mock.patch.object(self.driver.client, 'logout') as mock_logout:
self.mock_object(FakeClient, 'do_call',
return_value={"error": {"code": 0},
"data": {
"username": "admin",
"iBaseToken": "2001031430",
"deviceid": "210235G7J20000000000",
"accountstate": state}})
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.client.login)
mock_logout.assert_called_once_with()
def test_login_logout_fail(self):
login_info = {"error": {"code": 0},
"data": {"username": "admin",
"iBaseToken": "2001031430",
"deviceid": "210235G7J20000000000",
"accountstate": 3}}
logout_info = {"error": {"code": 1}, "data": {}}
self.mock_object(FakeClient, 'do_call',
side_effect=[login_info, logout_info])
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.client.login)
def test_check_volume_exist_on_array(self):
self.mock_object(rest_client.RestClient, 'get_lun_id_by_name',
return_value=None)
self.driver._check_volume_exist_on_array(
self.volume, constants.VOLUME_NOT_EXISTS_WARN)
def test_create_volume_success(self):
self.volume.host = 'ubuntu001@backend001#OpenStack_Pool'
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
self.volume.host = 'ubuntu001@backend001'
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_delete_replication_fail(self, pool_data):
self.driver.support_func = pool_data
self.mock_object(replication.ReplicaCommonDriver, 'split')
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
self.mock_object(rest_client.RestClient,
'delete_lun',
side_effect=exception.VolumeBackendAPIException(
data='err'))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.delete_volume, self.replica_volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_success_no_data(self, pool_data):
self.driver.support_func = pool_data
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "11",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "76",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
moved = False
empty_dict = {}
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
side_effect=[{}, task_info])
moved, model_update = self.driver.migrate_volume(None,
self.volume,
test_host,
None)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_success_with_replication(self, pool_data):
self.driver.support_func = pool_data
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "11",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "76",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
moved = False
empty_dict = {}
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
return_value=task_info)
moved, model_update = self.driver.migrate_volume(None,
self.replica_volume,
test_host,
None)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_fail_migration_fault(self, pool_data):
self.driver.support_func = pool_data
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "11",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "74",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
return_value=task_info)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.migrate_volume,
None, self.volume, test_host, None)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_fail_no_migrate_task(self, pool_data):
self.driver.support_func = pool_data
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "12",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "76",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
return_value=task_info)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.migrate_volume,
None, self.volume, test_host, None)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_with_type_id(self, pool_data):
self.driver.support_func = pool_data
self.volume.volume_type_id = '550c089b-bfdd-4f7f-86e1-3ba88125555c'
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "11",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "76",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
empty_dict = {}
self.mock_object(volume_types, 'get_volume_type',
return_value=test_new_type)
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
return_value=task_info)
moved, model_update = self.driver.migrate_volume(None,
self.volume,
test_host,
None)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_manage_existing_fail(self, pool_data):
self.driver.support_func = pool_data
self.mock_object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152, 'ALLOCTYPE': 1})
self.mock_object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
self.mock_object(rest_client.RestClient, 'rename_lun')
self.mock_object(huawei_driver.HuaweiBaseDriver,
'_get_lun_info_by_ref',
return_value={
'PARENTNAME': 'OpenStack_Pool',
'SNAPSHOTIDS': [],
'ID': 'ID1',
'HEALTHSTATUS': constants.STATUS_HEALTH,
'WWN': '6643e8c1004c5f6723e9f454003'})
self.mock_object(volume_types, 'get_volume_type',
return_value={'extra_specs': test_new_type})
self.mock_object(huawei_driver.HuaweiBaseDriver,
'_check_needed_changes',
return_value={})
external_ref = {'source-name': 'test1',
'source-id': 'ID1'}
self.driver.manage_existing(self.volume, external_ref)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_delete_volume_success(self, pool_data):
self.driver.support_func = pool_data
self.driver.delete_volume(self.volume)
def test_delete_snapshot_success(self):
self.driver.delete_snapshot(self.snapshot)
@unittest.skip("Skip until bug #1578986 is fixed")
def test_create_volume_from_snapsuccess(self):
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
self.mock_object(replication.ReplicaCommonDriver, 'sync')
model_update = self.driver.create_volume_from_snapshot(self.volume,
self.volume)
self.assertEqual('1', model_update['provider_location'])
driver_data = {'pair_id': TEST_PAIR_ID,
'rmt_lun_id': '1'}
driver_data = replication.to_string(driver_data)
self.assertEqual(driver_data, model_update['replication_driver_data'])
self.assertEqual('available', model_update['replication_status'])
@mock.patch.object(huawei_driver.HuaweiISCSIDriver,
'initialize_connection',
return_value={"data": {'target_lun': 1}})
def test_initialize_connection_snapshot_success(self, mock_iscsi_init):
iscsi_properties = self.driver.initialize_connection_snapshot(
self.snapshot, FakeConnector)
volume = Volume(id=self.snapshot.id,
provider_location=self.snapshot.provider_location,
lun_type='27',
metadata=None)
self.assertEqual(1, iscsi_properties['data']['target_lun'])
mock_iscsi_init.assert_called_with(volume, FakeConnector)
def test_initialize_connection_success_multipath_portgroup(self):
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.mock_object(rest_client.RestClient, 'get_tgt_port_group',
return_value = '11')
iscsi_properties = self.driver.initialize_connection(self.volume,
temp_connector)
self.assertEqual([1, 1], iscsi_properties['data']['target_luns'])
def test_initialize_connection_fail_multipath_portgroup(self):
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.mock_object(rest_client.RestClient, 'get_tgt_port_group',
return_value = '12')
self.mock_object(rest_client.RestClient, '_get_tgt_ip_from_portgroup',
return_value = [])
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.volume, temp_connector)
def test_initialize_connection_success_multipath_targetip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'TargetIP': '192.0.2.2',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
configuration = mock.Mock(spec = conf.Configuration)
configuration.hypermetro_devices = hypermetro_devices
driver = FakeISCSIStorage(configuration = self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
iscsi_properties = driver.initialize_connection(self.volume,
temp_connector)
self.assertEqual([1], iscsi_properties['data']['target_luns'])
def test_initialize_connection_fail_multipath_targetip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'TargetIP': '192.0.2.6',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
configuration = mock.Mock(spec = conf.Configuration)
configuration.hypermetro_devices = hypermetro_devices
driver = FakeISCSIStorage(configuration = self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.assertRaises(exception.VolumeBackendAPIException,
driver.initialize_connection,
self.volume, temp_connector)
def test_initialize_connection_success_multipath_defaultip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
default_target_ip = ['192.0.2.2']
configuration = mock.Mock(spec = conf.Configuration)
configuration.hypermetro_devices = hypermetro_devices
driver = FakeISCSIStorage(configuration = self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
driver.configuration.iscsi_default_target_ip = default_target_ip
driver.client.iscsi_default_target_ip = default_target_ip
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
iscsi_properties = driver.initialize_connection(self.volume,
temp_connector)
self.assertEqual([1], iscsi_properties['data']['target_luns'])
def test_initialize_connection_fail_multipath_defaultip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
default_target_ip = ['192.0.2.6']
configuration = mock.Mock(spec = conf.Configuration)
configuration.hypermetro_devices = hypermetro_devices
driver = FakeISCSIStorage(configuration = self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
driver.configuration.iscsi_default_target_ip = default_target_ip
driver.client.iscsi_default_target_ip = default_target_ip
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.assertRaises(exception.VolumeBackendAPIException,
driver.initialize_connection,
self.volume, temp_connector)
def test_initialize_connection_fail_no_port_in_portgroup(self):
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.mock_object(rest_client.RestClient, 'get_tgt_port_group',
return_value='11')
self.mock_object(rest_client.RestClient, '_get_tgt_ip_from_portgroup',
return_value=[])
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.volume, temp_connector)
def test_initialize_connection_fail_multipath_no_ip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
configuration = mock.Mock(spec = conf.Configuration)
configuration.hypermetro_devices = hypermetro_devices
driver = FakeISCSIStorage(configuration = self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
driver.configuration.iscsi_default_target_ip = None
driver.client.iscsi_default_target_ip = None
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.assertRaises(exception.VolumeBackendAPIException,
driver.initialize_connection,
self.volume, temp_connector)
@mock.patch.object(huawei_driver.HuaweiISCSIDriver,
'terminate_connection')
def test_terminate_connection_snapshot_success(self, mock_iscsi_term):
self.driver.terminate_connection_snapshot(self.snapshot,
FakeConnector)
volume = Volume(id=self.snapshot.id,
provider_location=self.snapshot.provider_location,
lun_type='27',
metadata=None)
mock_iscsi_term.assert_called_with(volume, FakeConnector)
def test_terminate_connection_success(self):
self.driver.terminate_connection(self.volume, FakeConnector)
def test_get_volume_status(self):
data = self.driver.get_volume_stats()
self.assertEqual(self.driver.VERSION, data['driver_version'])
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={"CAPACITY": 6291456})
@mock.patch.object(rest_client.RestClient, 'extend_lun')
def test_extend_volume_size_equal(self, mock_extend, mock_lun_info):
self.driver.extend_volume(self.volume, 3)
self.assertEqual(0, mock_extend.call_count)
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={"CAPACITY": 5291456})
@mock.patch.object(rest_client.RestClient, 'extend_lun')
def test_extend_volume_success(self, mock_extend, mock_lun_info):
self.driver.extend_volume(self.volume, 3)
self.assertEqual(1, mock_extend.call_count)
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
                   return_value={"CAPACITY": 7291456})
def test_extend_volume_fail(self, mock_lun_info):
    """Extending to a size below the LUN's current capacity raises."""
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.extend_volume, self.volume, 3)
def test_extend_nonexistent_volume(self):
    """Extending a volume whose backend LUN cannot be found raises."""
    self.volume = fake_volume.fake_volume_obj(admin_contex)
    # Simulate a LUN that no longer exists on the array.
    self.mock_object(rest_client.RestClient,
                     'get_lun_id_by_name',
                     return_value=None)
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.extend_volume,
                      self.volume, 3)
def test_get_admin_metadata(self):
    """get_admin_metadata handles both list- and dict-shaped metadata."""
    expected_value = {'huawei_lun_wwn': '1'}

    # Metadata supplied as a list of key/value records.
    kv_list = [{'key': 'huawei_lun_wwn', 'value': '1'}]
    tmp_volume = fake_volume.fake_volume_obj(
        admin_contex, volume_admin_metadata=kv_list)
    self.assertEqual(expected_value,
                     huawei_utils.get_admin_metadata(tmp_volume))

    # Metadata supplied directly as a dict attribute.
    tmp_volume = fake_volume.fake_volume_obj(admin_contex)
    tmp_volume.admin_metadata = {'huawei_lun_wwn': '1'}
    self.assertEqual(expected_value,
                     huawei_utils.get_admin_metadata(tmp_volume))
def test_login_fail(self):
    """A failing client login surfaces as VolumeBackendAPIException."""
    client = self.driver.client
    client.test_fail = True
    self.assertRaises(exception.VolumeBackendAPIException, client.login)
def test_create_snapshot_fail(self):
    """Snapshot creation propagates backend failures."""
    self.driver.client.test_fail = True
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.create_snapshot,
                      self.snapshot)
def test_create_volume_fail(self):
    """Volume creation propagates backend failures."""
    self.driver.client.test_fail = True
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.create_volume,
                      self.volume)
def test_delete_volume_fail(self):
    """Volume deletion must not raise even when the client fails."""
    self.driver.client.test_fail = True
    # A backend failure during delete is tolerated (no exception).
    self.driver.delete_volume(self.volume)
def test_delete_snapshot_fail(self):
    """Snapshot deletion must not raise even when the client fails."""
    self.driver.client.test_fail = True
    # A backend failure during delete is tolerated (no exception).
    self.driver.delete_snapshot(self.snapshot)
def test_delete_snapshot_with_snapshot_nonexistent(self):
    """Deleting a snapshot without provider_location must not raise."""
    self.snapshot.provider_location = None
    self.driver.delete_snapshot(self.snapshot)
def test_initialize_connection_fail(self):
    """Attach errors from the backend surface as backend API exceptions."""
    self.driver.client.test_fail = True
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.initialize_connection,
                      self.volume,
                      FakeConnector)
def test_lun_is_associated_to_lungroup(self):
    """Association query returns True after associating a LUN."""
    client = self.driver.client
    client.associate_lun_to_lungroup('11', '11')
    self.assertTrue(client._is_lun_associated_to_lungroup('11', '11'))
def test_lun_is_not_associated_to_lun_group(self):
    """Association query returns False once the LUN is removed again."""
    client = self.driver.client
    client.associate_lun_to_lungroup('12', '12')
    client.remove_lun_from_lungroup('12', '12')
    self.assertFalse(client._is_lun_associated_to_lungroup('12', '12'))
def test_get_tgtip(self):
    """Target IPs are resolved from the configured port group."""
    client = self.driver.client
    portg_id = client.get_tgt_port_group(self.portgroup)
    self.assertEqual(self.target_ips,
                     client._get_tgt_ip_from_portgroup(portg_id))
def test_find_chap_info(self):
    """CHAP credentials are looked up by initiator name."""
    iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
                   'CHAPinfo': 'mm-user;mm-user@storage'}]
    chapinfo = self.driver.client.find_chap_info(
        iscsi_info, FakeConnector['initiator'])
    # CHAP info is stored as 'user;password'.
    chap_username, chap_password = chapinfo.split(';')
    self.assertEqual('mm-user', chap_username)
    self.assertEqual('mm-user@storage', chap_password)
def test_find_alua_info(self):
    """The ALUA mode is looked up by initiator name.

    Fix: the original bound the result to a local named ``type``,
    shadowing the builtin; renamed to ``alua_type``.
    """
    iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
                   'ALUA': '1'}]
    initiator_name = FakeConnector['initiator']
    alua_type = self.driver.client._find_alua_info(iscsi_info,
                                                   initiator_name)
    self.assertEqual('1', alua_type)
def test_get_pool_info(self):
    """get_pool_info filters by pool name and block-storage usage type.

    When a pool carries a DATASPACE field, that value is reported as
    free CAPACITY instead of USERFREECAPACITY (see the 'test003' case).
    """
    pools = [{"NAME": "test001",
              "ID": "0",
              "USERFREECAPACITY": "36",
              "USERTOTALCAPACITY": "48",
              "USAGETYPE": constants.BLOCK_STORAGE_POOL_TYPE,
              "TIER0CAPACITY": "48",
              "TIER1CAPACITY": "0",
              "TIER2CAPACITY": "0"},
             {"NAME": "test002",
              "ID": "1",
              "USERFREECAPACITY": "37",
              "USERTOTALCAPACITY": "49",
              "USAGETYPE": constants.FILE_SYSTEM_POOL_TYPE,
              "TIER0CAPACITY": "0",
              "TIER1CAPACITY": "49",
              "TIER2CAPACITY": "0"},
             {"NAME": "test003",
              "ID": "0",
              "USERFREECAPACITY": "36",
              "DATASPACE": "35",
              "USERTOTALCAPACITY": "48",
              "USAGETYPE": constants.BLOCK_STORAGE_POOL_TYPE,
              "TIER0CAPACITY": "0",
              "TIER1CAPACITY": "0",
              "TIER2CAPACITY": "48"}]
    # Block-storage pool found by name.
    pool_name = 'test001'
    test_info = {'CAPACITY': '36', 'ID': '0', 'TOTALCAPACITY': '48',
                 'TIER0CAPACITY': '48', 'TIER1CAPACITY': '0',
                 'TIER2CAPACITY': '0'}
    pool_info = self.driver.client.get_pool_info(pool_name, pools)
    self.assertEqual(test_info, pool_info)
    # File-system pool: not eligible, empty result.
    pool_name = 'test002'
    test_info = {}
    pool_info = self.driver.client.get_pool_info(pool_name, pools)
    self.assertEqual(test_info, pool_info)
    # Unknown pool name: empty result.
    pool_name = 'test000'
    test_info = {}
    pool_info = self.driver.client.get_pool_info(pool_name, pools)
    self.assertEqual(test_info, pool_info)
    # DATASPACE takes precedence over USERFREECAPACITY when present.
    pool_name = 'test003'
    test_info = {'CAPACITY': '35', 'ID': '0', 'TOTALCAPACITY': '48',
                 'TIER0CAPACITY': '0', 'TIER1CAPACITY': '0',
                 'TIER2CAPACITY': '48'}
    pool_info = self.driver.client.get_pool_info(pool_name, pools)
    self.assertEqual(test_info, pool_info)
def test_get_smartx_specs_opts(self):
    """SmartX spec parsing extracts the smarttier policy value."""
    opts = smartx.SmartX().get_smartx_specs_opts(smarttier_opts)
    self.assertEqual('3', opts['policy'])
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
                   return_value={'MAXIOPS': '100',
                                 'IOType': '2'})
def test_create_smartqos(self, mock_qos_value, pool_data):
    """Volume creation succeeds with a SmartQoS spec on the volume type."""
    self.driver.support_func = pool_data
    lun_info = self.driver.create_volume(self.volume)
    self.assertEqual('1', lun_info['provider_location'])
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                   return_value={'smarttier': 'true',
                                 'smartcache': 'true',
                                 'smartpartition': 'true',
                                 'thin_provisioning_support': 'true',
                                 'thick_provisioning_support': 'false',
                                 'policy': '2',
                                 'cachename': 'cache-test',
                                 'partitionname': 'partition-test'})
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_type',
                   return_value={'qos_specs_id': u'025ce295-15e9-41a7'})
@mock.patch.object(qos_specs, 'get_qos_specs',
                   return_value={'specs': {'maxBandWidth': '100',
                                           'IOType': '0'},
                                 'consumer': 'back-end'})
def test_create_smartqos_success(self,
                                 mock_qos_specs,
                                 mock_value_type,
                                 mock_volume_params):
    """Creation succeeds when QoS specs are consumed by the back end."""
    self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
    lun_info = self.driver.create_volume(self.volume)
    self.assertEqual('1', lun_info['provider_location'])
@ddt.data([{'specs': {'maxBandWidth': '100', 'IOType': '3'}},
           FAKE_POOLS_UNSUPPORT_REPORT],
          [{'specs': {'maxBandWidth': '100', 'IOType': '3'}},
           FAKE_POOLS_SUPPORT_REPORT],
          [{'specs': {'minBandWidth': '0', 'IOType': '2'}},
           FAKE_POOLS_UNSUPPORT_REPORT],
          [{'specs': {'minBandWidth': '0', 'IOType': '2'}},
           FAKE_POOLS_SUPPORT_REPORT])
@ddt.unpack
def test_create_smartqos_failed(self, qos_specs_value, pool_data):
    """Invalid QoS specs (IOType '3', zero minBandWidth) abort creation."""
    self.driver.support_func = pool_data
    self.mock_object(
        huawei_driver.HuaweiBaseDriver,
        '_get_volume_params',
        return_value={'smarttier': 'true',
                      'smartcache': 'true',
                      'smartpartition': 'true',
                      'thin_provisioning_support': 'true',
                      'thick_provisioning_support': 'false',
                      'policy': '2',
                      'cachename': 'cache-test',
                      'partitionname': 'partition-test'})
    self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_type',
                     return_value={'qos_specs_id': u'025ce295-15e9-41a7'})
    self.mock_object(qos_specs, 'get_qos_specs',
                     return_value=qos_specs_value)
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_create_smartqos_without_huawei_type(self, pool_data):
    """QoS specs containing no recognized Huawei key abort creation."""
    self.driver.support_func = pool_data
    self.mock_object(
        huawei_driver.HuaweiBaseDriver,
        '_get_volume_params',
        return_value={'smarttier': 'true',
                      'smartcache': 'true',
                      'smartpartition': 'true',
                      'thin_provisioning_support': 'true',
                      'thick_provisioning_support': 'false',
                      'policy': '2',
                      'cachename': 'cache-test',
                      'partitionname': 'partition-test'})
    self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_type',
                     return_value={'qos_specs_id': u'025ce295-15e9-41a7'})
    # 'fake_qos_type' is not a Huawei QoS key, so creation must fail.
    self.mock_object(qos_specs, 'get_qos_specs',
                     return_value={'specs': {'fake_qos_type': '100',
                                             'IOType': '2'}})
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.create_volume, self.volume)
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
                   return_value={'MAXIOPS': '100',
                                 'IOType': '2'})
@mock.patch.object(rest_client.RestClient, 'find_array_version',
                   return_value='V300R003C00')
@mock.patch.object(rest_client.RestClient, 'find_available_qos',
                   return_value=(None, []))
def test_create_smartqos_on_v3r3_with_no_qos(self,
                                             mock_find_available_qos,
                                             mock_qos_value,
                                             mock_array_version):
    """On V300R003C00, creation succeeds when no QoS policy is reusable."""
    self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
    lun_info = self.driver.create_volume(self.volume)
    self.assertEqual('1', lun_info['provider_location'])
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
                   return_value={'MINIOPS': '100',
                                 'IOType': '2'})
@mock.patch.object(rest_client.RestClient, 'find_array_version',
                   return_value='V300R003C00')
@mock.patch.object(rest_client.RestClient, 'find_available_qos',
                   return_value=('11', u'["0", "2", "3"]'))
def test_create_smartqos_on_v3r3_with_qos(self,
                                          mock_find_available_qos,
                                          mock_qos_value,
                                          mock_array_version):
    """On V300R003C00, creation succeeds reusing an existing QoS policy."""
    self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
    lun_info = self.driver.create_volume(self.volume)
    self.assertEqual('1', lun_info['provider_location'])
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
                   return_value={'MINIOPS': '100',
                                 'IOType': '2'})
@mock.patch.object(rest_client.RestClient, 'find_array_version',
                   return_value='V300R003C00')
@mock.patch.object(rest_client.RestClient, 'find_available_qos',
                   return_value=('11', u'["0", "2", "3"]'))
def test_create_smartqos_on_v3r3_with_unsupport_qos(
        self, mock_find_available_qos,
        mock_qos_value, mock_array_version):
    """Creating a QoS volume on a pool lacking QoS support raises."""
    self.driver.support_func = FAKE_POOLS_UNSUPPORT_REPORT
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
                   return_value={'MINIOPS': '100',
                                 'IOType': '2'})
@mock.patch.object(rest_client.RestClient, 'find_array_version',
                   return_value='V300R003C00')
@mock.patch.object(rest_client.RestClient, 'find_available_qos',
                   return_value=(None, []))
@mock.patch.object(rest_client.RestClient, 'activate_deactivate_qos')
def test_create_smartqos_on_v3r3_active_failed(self,
                                               pool_data,
                                               mock_activate_qos,
                                               mock_find_available_qos,
                                               mock_qos_value,
                                               mock_array_version):
    """A failure to activate the QoS policy aborts volume creation."""
    self.driver.support_func = pool_data
    mock_activate_qos.side_effect = (
        exception.VolumeBackendAPIException(data='Activate or deactivate '
                                                 'QoS error. '))
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
                   return_value={'MINIOPS': '100',
                                 'IOType': '2'})
@mock.patch.object(rest_client.RestClient, 'find_array_version',
                   return_value='V300R003C00')
@mock.patch.object(rest_client.RestClient, 'find_available_qos',
                   return_value=(None, []))
@mock.patch.object(rest_client.RestClient, 'create_qos_policy')
def test_create_smartqos_on_v3r3_qos_failed(self,
                                            pool_data,
                                            mock_create_qos,
                                            mock_find_available_qos,
                                            mock_qos_value,
                                            mock_array_version):
    """A failure to create the QoS policy aborts volume creation."""
    self.driver.support_func = pool_data
    mock_create_qos.side_effect = (
        exception.VolumeBackendAPIException(data='Create QoS policy '
                                                 'error.'))
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'get_qos_info',
                   return_value={"LUNLIST": u'["1", "2", "3"]',
                                 "RUNNINGSTATUS": "2"})
def test_delete_smartqos_with_lun_left(self, mock_qos_info, pool_data):
    """Deletion succeeds while other LUNs still reference the QoS policy."""
    self.driver.support_func = pool_data
    self.driver.delete_volume(self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'get_qos_info',
                   return_value={"LUNLIST": u'["1"]',
                                 "RUNNINGSTATUS": "2"})
def test_delete_smartqos_with_no_lun_left(self, mock_qos_info, pool_data):
    """Deletion succeeds when this was the last LUN in the QoS policy."""
    self.driver.support_func = pool_data
    self.driver.delete_volume(self.volume)
@mock.patch.object(rest_client.RestClient, 'add_lun_to_partition')
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                   return_value={'smarttier': 'true',
                                 'smartcache': 'true',
                                 'smartpartition': 'true',
                                 'thin_provisioning_support': 'true',
                                 'thick_provisioning_support': 'false',
                                 'policy': '2',
                                 'cachename': 'cache-test',
                                 'partitionname': 'partition-test'})
def test_create_smartx(self, mock_volume_types, mock_add_lun_to_partition):
    """Creation succeeds with SmartTier/Cache/Partition options enabled."""
    lun_info = self.driver.create_volume(self.volume)
    self.assertEqual('1', lun_info['provider_location'])
@ddt.data([{'smarttier': 'true', 'smartcache': 'true',
            'smartpartition': 'true',
            'thin_provisioning_support': 'true',
            'thick_provisioning_support': 'false',
            'policy': '2', 'cachename': None,
            'partitionname': 'partition-test'},
           FAKE_POOLS_UNSUPPORT_REPORT],
          [{'smarttier': 'true', 'smartcache': 'true',
            'smartpartition': 'true',
            'thin_provisioning_support': 'true',
            'thick_provisioning_support': 'false',
            'policy': '2', 'cachename': 'cache-test',
            'partitionname': None},
           FAKE_POOLS_SUPPORT_REPORT],
          [{'smarttier': 'true', 'smartcache': 'true',
            'smartpartition': 'true',
            'thin_provisioning_support': 'true',
            'thick_provisioning_support': 'false',
            'policy': '2', 'cachename': None,
            'partitionname': 'partition-test'},
           FAKE_POOLS_SUPPORT_REPORT],
          [{'smarttier': 'true', 'smartcache': 'true',
            'smartpartition': 'true',
            'thin_provisioning_support': 'true',
            'thick_provisioning_support': 'false',
            'policy': '2', 'cachename': 'cache-test',
            'partitionname': None},
           FAKE_POOLS_UNSUPPORT_REPORT])
@ddt.unpack
def test_create_smartCache_failed(self, opts, pool_data):
    """A missing cachename/partitionname with the feature on aborts."""
    self.driver.support_func = pool_data
    self.mock_object(
        huawei_driver.HuaweiBaseDriver,
        '_get_volume_params',
        return_value=opts)
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                   return_value={'smarttier': 'true',
                                 'smartcache': 'true',
                                 'smartpartition': 'true',
                                 'thin_provisioning_support': 'true',
                                 'thick_provisioning_support': 'false',
                                 'policy': '2',
                                 'cachename': 'cache-test',
                                 'partitionname': 'partition-test'})
def test_create_smartCache_failed_with_no_cacheid(self,
                                                  mock_volume_type,
                                                  pool_data):
    """A configured cache name that doesn't exist on the array aborts."""
    self.driver.client.cache_not_exist = True
    self.driver.support_func = pool_data
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                   return_value={'smarttier': 'true',
                                 'smartcache': 'true',
                                 'smartpartition': 'true',
                                 'thin_provisioning_support': 'true',
                                 'thick_provisioning_support': 'false',
                                 'policy': '2',
                                 'cachename': 'cache-test',
                                 'partitionname': 'partition-test'})
def test_create_smartPartition_failed_with_no_partid(self,
                                                     mock_volume_type,
                                                     pool_data):
    """A configured partition name absent from the array aborts."""
    self.driver.client.partition_not_exist = True
    self.driver.support_func = pool_data
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.create_volume, self.volume)
def test_find_available_qos(self):
    """find_available_qos reuses a policy only if it has free LUN slots.

    A matching policy already holding 64 LUNs yields (None, []); one with
    only a few LUNs is returned for reuse.
    NOTE(review): the stray ", ," sequences in the fixture below are part
    of a fake JSON string; presumably only the entry count matters to the
    driver -- confirm before relying on its exact contents.
    """
    qos = {'MAXIOPS': '100', 'IOType': '2'}
    fake_qos_info_response_equal = {
        "error": {
            "code": 0
        },
        "data": [{
            "ID": "11",
            "MAXIOPS": "100",
            "LATENCY": "0",
            "IOType": "2",
            "FSLIST": u'[""]',
            'RUNNINGSTATUS': "2",
            "NAME": "OpenStack_57_20151225102851",
            "LUNLIST": u'["1", "2", "3", "4", "5", "6", "7", "8", "9",\
"10", ,"11", "12", "13", "14", "15", "16", "17", "18", "19",\
"20", ,"21", "22", "23", "24", "25", "26", "27", "28", "29",\
"30", ,"31", "32", "33", "34", "35", "36", "37", "38", "39",\
"40", ,"41", "42", "43", "44", "45", "46", "47", "48", "49",\
"50", ,"51", "52", "53", "54", "55", "56", "57", "58", "59",\
"60", ,"61", "62", "63", "64"]'
        }]
    }
    # Policy is full: no policy id and an empty LUN list are returned.
    with mock.patch.object(rest_client.RestClient, 'get_qos',
                           return_value=fake_qos_info_response_equal):
        (qos_id, lun_list) = self.driver.client.find_available_qos(qos)
        self.assertEqual((None, []), (qos_id, lun_list))
    fake_qos_info_response_less = {
        "error": {
            "code": 0
        },
        "data": [{
            "ID": "11",
            "MAXIOPS": "100",
            "LATENCY": "0",
            "IOType": "2",
            "FSLIST": u'[""]',
            'RUNNINGSTATUS': "2",
            "NAME": "OpenStack_57_20151225102851",
            "LUNLIST": u'["0", "1", "2"]'
        }]
    }
    # Policy has room: its id and current LUN list are returned.
    with mock.patch.object(rest_client.RestClient, 'get_qos',
                           return_value=fake_qos_info_response_less):
        (qos_id, lun_list) = self.driver.client.find_available_qos(qos)
        self.assertEqual(("11", u'["0", "1", "2"]'), (qos_id, lun_list))
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                   return_value=fake_hypermetro_opts)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
                   return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
                   return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
                   return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
                   return_value=True)
def test_create_hypermetro_success(self,
                                   mock_volume_ready,
                                   mock_hyper_domain,
                                   mock_pool_info,
                                   mock_all_pool_info,
                                   mock_login_return):
    """Creating a hypermetro volume records pair/remote-LUN ids."""
    metadata = {"hypermetro_id": '11',
                "remote_lun_id": '1'}
    lun_info = self.driver.create_volume(self.hyper_volume)
    self.assertEqual(metadata, lun_info['metadata'])
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                   return_value=fake_hypermetro_opts)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
                   return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
                   return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
                   return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
                   return_value=True)
@mock.patch.object(hypermetro.HuaweiHyperMetro,
                   '_create_hypermetro_pair')
@mock.patch.object(rest_client.RestClient, 'delete_lun')
def test_create_hypermetro_fail(self,
                                pool_data,
                                mock_delete_lun,
                                mock_hyper_pair_info,
                                mock_volume_ready,
                                mock_hyper_domain,
                                mock_pool_info,
                                mock_all_pool_info,
                                mock_hypermetro_opts
                                ):
    """A pair-creation failure rolls back by deleting local LUN '1'."""
    self.driver.client.login()
    self.driver.support_func = pool_data
    mock_hyper_pair_info.side_effect = exception.VolumeBackendAPIException(
        data='Create hypermetro error.')
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.create_volume, self.hyper_volume)
    mock_delete_lun.assert_called_with('1')
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
                   return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
                   return_value={})
def test_create_hypermetro_remote_pool_none_fail(self,
                                                 mock_pool_info,
                                                 mock_all_pool_info):
    """Hypermetro creation fails when no remote pool can be found."""
    param = {'TYPE': '11',
             'PARENTID': ''}
    self.driver.client.login()
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.metro.create_hypermetro,
                      '2', param)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
                   return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
                   return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'create_lun',
                   return_value={'CAPACITY': '2097152',
                                 'DESCRIPTION': '2f0635',
                                 'HEALTHSTATUS': '1',
                                 'ALLOCTYPE': '1',
                                 'WWN': '6643e8c1004c5f6723e9f454003',
                                 'ID': '1',
                                 'RUNNINGSTATUS': '27',
                                 'NAME': '5mFHcBv4RkCcD'})
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
                   return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
                   return_value=True)
def test_create_hypermetro_remote_pool_parentid(self,
                                                mock_volume_ready,
                                                mock_hyper_domain,
                                                mock_create_lun,
                                                mock_pool_info,
                                                mock_all_pool_info):
    """The remote LUN's PARENTID is set to the discovered remote pool."""
    param = {'TYPE': '11',
             'PARENTID': ''}
    self.driver.metro.create_hypermetro('2', param)
    # Inspect the payload actually sent to create_lun.
    lun_PARENTID = mock_create_lun.call_args[0][0]['PARENTID']
    self.assertEqual(FAKE_FIND_POOL_RESPONSE['ID'], lun_PARENTID)
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
                   return_value={'hypermetro_id': '3400a30d844d0007',
                                 'remote_lun_id': '1'})
def test_hypermetro_none_map_info_fail(self, mock_metadata):
    """FC connect of a hypermetro volume fails without usable map info."""
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.metro.connect_volume_fc,
                      self.volume,
                      FakeConnector)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'check_lun_exist',
                   return_value=True)
@mock.patch.object(rest_client.RestClient, 'check_hypermetro_exist',
                   return_value=True)
@mock.patch.object(rest_client.RestClient, 'delete_hypermetro',
                   return_value=FAKE_COMMON_SUCCESS_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'delete_lun',
                   return_value=None)
def test_delete_hypermetro_success(self,
                                   mock_delete_lun,
                                   mock_delete_hypermetro,
                                   mock_check_hyermetro,
                                   mock_lun_exit,
                                   pool_data):
    """Deleting a hypermetro volume (pair + LUN) must not raise."""
    self.driver.support_func = pool_data
    self.driver.delete_volume(self.hyper_volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'check_lun_exist',
                   return_value=True)
@mock.patch.object(rest_client.RestClient, 'check_hypermetro_exist',
                   return_value=True)
@mock.patch.object(rest_client.RestClient, 'get_hypermetro_by_id',
                   return_value=FAKE_METRO_INFO_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'delete_hypermetro')
@mock.patch.object(rest_client.RestClient, 'delete_lun',
                   return_value=None)
def test_delete_hypermetro_fail(self,
                                pool_data,
                                mock_delete_lun,
                                mock_delete_hypermetro,
                                mock_metro_info,
                                mock_check_hyermetro,
                                mock_lun_exit):
    """Pair-deletion failure surfaces; local LUN '11' is still deleted."""
    self.driver.support_func = pool_data
    mock_delete_hypermetro.side_effect = (
        exception.VolumeBackendAPIException(data='Delete hypermetro '
                                                 'error.'))
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.driver.delete_volume, self.hyper_volume)
    mock_delete_lun.assert_called_with('11')
def test_manage_existing_get_size_invalid_reference(self):
    """manage_existing_get_size rejects references resolving to no LUN.

    Covers two failure modes: a source-name that matches no LUN, and a
    source-id whose backend info lookup raises.
    """
    # Can't find LUN by source-name.
    external_ref = {'source-name': 'LUN1'}
    with mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                           return_value=None):
        ex = self.assertRaises(exception.ManageExistingInvalidReference,
                               self.driver.manage_existing_get_size,
                               self.volume, external_ref)
        self.assertIsNotNone(re.search('please check the source-name '
                                       'or source-id', ex.msg))

    # Can't find LUN by source-id.
    external_ref = {'source-id': 'ID1'}
    with mock.patch.object(rest_client.RestClient, 'get_lun_info') as m_gt:
        m_gt.side_effect = exception.VolumeBackendAPIException(
            data='Error')
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.manage_existing_get_size,
                          self.volume, external_ref)
    # Fix: the original re-checked ``ex.msg`` here, but ``ex`` still held
    # the exception from the FIRST case (the second assertRaises result
    # was never captured), so that check asserted nothing; removed.
@ddt.data({'source-id': 'ID1'}, {'source-name': 'LUN1'},
          {'source-name': 'LUN1', 'source-id': 'ID1'})
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
                   return_value={'CAPACITY': 3097152})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                   return_value='ID1')
def test_manage_existing_get_size_success(self, mock_get_lun_id_by_name,
                                          mock_get_lun_info,
                                          external_ref):
    """Backend CAPACITY 3097152 is reported as a 2 GB volume.

    Accepts source-id, source-name, or both in the external reference.
    """
    size = self.driver.manage_existing_get_size(self.volume,
                                                external_ref)
    self.assertEqual(2, size)
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
                   return_value={'CAPACITY': 2097152,
                                 'ID': 'ID1',
                                 'PARENTNAME': 'OpenStack_Pool'})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                   return_value='ID1')
def test_manage_existing_pool_mismatch(self, mock_get_by_name,
                                       mock_get_info):
    """A LUN living in a different pool than configured is rejected."""
    with mock.patch.object(huawei_driver.HuaweiBaseDriver,
                           '_get_lun_info_by_ref',
                           return_value={'PARENTNAME': 'StoragePool'}):
        external_ref = {'source-name': 'LUN1'}
        ex = self.assertRaises(exception.ManageExistingInvalidReference,
                               self.driver.manage_existing,
                               self.volume, external_ref)
        self.assertIsNotNone(re.search('The specified LUN does not belong'
                                       ' to the given pool', ex.msg))
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
                   return_value={'CAPACITY': 2097152,
                                 'ID': 'ID1',
                                 'PARENTNAME': 'OpenStack_Pool'})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                   return_value='ID1')
def test_manage_existing_lun_abnormal(self, mock_get_by_name,
                                      mock_get_info):
    """A LUN with an abnormal health status ('2') cannot be managed."""
    ret = {'PARENTNAME': "OpenStack_Pool",
           'HEALTHSTATUS': '2'}
    with mock.patch.object(huawei_driver.HuaweiBaseDriver,
                           '_get_lun_info_by_ref',
                           return_value=ret):
        external_ref = {'source-name': 'LUN1'}
        ex = self.assertRaises(exception.ManageExistingInvalidReference,
                               self.driver.manage_existing,
                               self.volume, external_ref)
        self.assertIsNotNone(re.search('LUN status is not normal', ex.msg))
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'get_hypermetro_pairs',
                   return_value=[{'LOCALOBJID': 'ID1'}])
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
                   return_value={'CAPACITY': 2097152,
                                 'ID': 'ID1',
                                 'PARENTNAME': 'OpenStack_Pool',
                                 'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                   return_value='ID1')
def test_manage_existing_with_hypermetro(self, mock_get_by_name,
                                         mock_get_info,
                                         mock_get_hyper_pairs,
                                         pool_data):
    """A LUN participating in a HyperMetro pair cannot be managed."""
    self.driver.support_func = pool_data
    # NOTE(review): this inner patch duplicates the decorator mock of
    # get_hypermetro_pairs with identical data -- one of the two looks
    # redundant; confirm before simplifying.
    with mock.patch.object(rest_client.RestClient,
                           'get_hypermetro_pairs',
                           return_value=[{'LOCALOBJID': 'ID1'}]):
        external_ref = {'source-name': 'LUN1'}
        ex = self.assertRaises(exception.ManageExistingInvalidReference,
                               self.driver.manage_existing,
                               self.volume, external_ref)
        self.assertIsNotNone(re.search('HyperMetroPair', ex.msg))
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'get_hypermetro_pairs')
@mock.patch.object(rest_client.RestClient, 'rename_lun')
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
                   return_value={'CAPACITY': 2097152,
                                 'ID': 'ID1',
                                 'PARENTNAME': 'OpenStack_Pool',
                                 'HEALTHSTATUS': constants.STATUS_HEALTH,
                                 'WWN': '6643e8c1004c5f6723e9f454003'})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                   return_value='ID1')
def test_manage_existing_with_lower_version(self, pool_data,
                                            mock_get_by_name,
                                            mock_get_info, mock_rename,
                                            mock_get_hyper_pairs):
    """Managing succeeds even when the hypermetro query itself fails.

    On arrays where get_hypermetro_pairs raises (lower firmware), the
    LUN is still managed and its WWN recorded in admin metadata.
    """
    self.driver.support_func = pool_data
    mock_get_hyper_pairs.side_effect = (
        exception.VolumeBackendAPIException(data='err'))
    external_ref = {'source-name': 'LUN1'}
    model_update = self.driver.manage_existing(self.volume,
                                               external_ref)
    expected_val = {
        'admin_metadata': {
            'huawei_lun_wwn': '6643e8c1004c5f6723e9f454003'
        },
        'provider_location': 'ID1'}
    self.assertEqual(expected_val, model_update)
@ddt.data([[{'PRILUNID': 'ID1'}], []],
          [[{'PRILUNID': 'ID2'}], ['ID1', 'ID2']])
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
                   return_value={'CAPACITY': 2097152,
                                 'ID': 'ID1',
                                 'PARENTNAME': 'OpenStack_Pool',
                                 'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                   return_value='ID1')
def test_manage_existing_with_splitmirror(self, ddt_data,
                                          mock_get_by_name,
                                          mock_get_info):
    """A LUN in a SplitMirror (as primary or target) cannot be managed."""
    self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
    # ddt_data[0]: split mirrors; ddt_data[1]: their target LUN ids.
    with mock.patch.object(rest_client.RestClient, 'get_split_mirrors',
                           return_value=ddt_data[0]), \
        mock.patch.object(rest_client.RestClient, 'get_target_luns',
                          return_value=ddt_data[1]):
        external_ref = {'source-name': 'LUN1'}
        ex = self.assertRaises(exception.ManageExistingInvalidReference,
                               self.driver.manage_existing,
                               self.volume, external_ref)
        self.assertIsNotNone(re.search('SplitMirror', ex.msg))
@ddt.data([[{'PARENTID': 'ID1'}], FAKE_POOLS_UNSUPPORT_REPORT],
          [[{'TARGETLUNID': 'ID1'}], FAKE_POOLS_UNSUPPORT_REPORT],
          [[{'PARENTID': 'ID1'}], FAKE_POOLS_SUPPORT_REPORT],
          [[{'TARGETLUNID': 'ID1'}], FAKE_POOLS_SUPPORT_REPORT])
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
                   return_value={'CAPACITY': 2097152,
                                 'ID': 'ID1',
                                 'PARENTNAME': 'OpenStack_Pool',
                                 'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                   return_value='ID1')
@ddt.unpack
def test_manage_existing_under_migration(self, ddt_data, pool_data,
                                         mock_get_by_name,
                                         mock_get_info):
    """A LUN under migration (as source or target) cannot be managed."""
    self.driver.support_func = pool_data
    with mock.patch.object(rest_client.RestClient, 'get_migration_task',
                           return_value=ddt_data):
        external_ref = {'source-name': 'LUN1'}
        ex = self.assertRaises(exception.ManageExistingInvalidReference,
                               self.driver.manage_existing,
                               self.volume, external_ref)
        self.assertIsNotNone(re.search('migration', ex.msg))
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
                   return_value={'CAPACITY': 2097152,
                                 'ID': 'ID1',
                                 'PARENTNAME': 'OpenStack_Pool',
                                 'SNAPSHOTIDS': [],
                                 'ISADD2LUNGROUP': 'true',
                                 'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                   return_value='ID1')
def test_manage_existing_with_lungroup(self, mock_get_by_name,
                                       mock_get_info):
    """A LUN already added to a LUN group cannot be managed."""
    external_ref = {'source-name': 'LUN1'}
    ex = self.assertRaises(exception.ManageExistingInvalidReference,
                           self.driver.manage_existing,
                           self.volume, external_ref)
    self.assertIsNotNone(re.search('Already exists in a LUN group',
                                   ex.msg))
@ddt.data([{'source-name': 'LUN1'}, FAKE_POOLS_UNSUPPORT_REPORT],
          [{'source-name': 'LUN1'}, FAKE_POOLS_SUPPORT_REPORT],
          [{'source-id': 'ID1'}, FAKE_POOLS_UNSUPPORT_REPORT],
          [{'source-id': 'ID1'}, FAKE_POOLS_SUPPORT_REPORT])
@mock.patch.object(rest_client.RestClient, 'rename_lun')
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
                   '_get_lun_info_by_ref',
                   return_value={'PARENTNAME': 'OpenStack_Pool',
                                 'SNAPSHOTIDS': [],
                                 'ID': 'ID1',
                                 'HEALTHSTATUS': constants.STATUS_HEALTH,
                                 'WWN': '6643e8c1004c5f6723e9f454003'})
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
                   return_value={'CAPACITY': 2097152,
                                 'ALLOCTYPE': 1})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
                   return_value='ID1')
@ddt.unpack
def test_manage_existing_success(self, mock_get_by_name, mock_get_info,
                                 mock_check_lun, mock_rename,
                                 external_ref, pool_data):
    """Managing a healthy, unattached LUN records its WWN and location."""
    self.driver.support_func = pool_data
    model_update = self.driver.manage_existing(self.volume,
                                               external_ref)
    expected_val = {
        'admin_metadata': {
            'huawei_lun_wwn': '6643e8c1004c5f6723e9f454003'
        },
        'provider_location': 'ID1'}
    self.assertEqual(expected_val, model_update)
def test_unmanage(self):
    """Unmanaging a volume must complete without raising."""
    self.driver.unmanage(self.volume)
def test_manage_existing_snapshot_abnormal(self):
    """An unhealthy snapshot (HEALTHSTATUS '2') cannot be managed."""
    with mock.patch.object(huawei_driver.HuaweiBaseDriver,
                           '_get_snapshot_info_by_ref',
                           return_value={'HEALTHSTATUS': '2',
                                         'PARENTID': '11'}):
        external_ref = {'source-name': 'test1'}
        ex = self.assertRaises(exception.ManageExistingInvalidReference,
                               self.driver.manage_existing_snapshot,
                               self.snapshot, external_ref)
        self.assertIsNotNone(re.search('Snapshot status is not normal',
                                       ex.msg))
@mock.patch.object(rest_client.RestClient, 'get_snapshot_info',
                   return_value={'ID': 'ID1',
                                 'EXPOSEDTOINITIATOR': 'true',
                                 'NAME': 'test1',
                                 'PARENTID': '11',
                                 'USERCAPACITY': 2097152,
                                 'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_snapshot_id_by_name',
                   return_value='ID1')
def test_manage_existing_snapshot_with_lungroup(self, mock_get_by_name,
                                                mock_get_info):
    """A snapshot exposed to an initiator cannot be managed."""
    external_ref = {'source-name': 'test1'}
    ex = self.assertRaises(exception.ManageExistingInvalidReference,
                           self.driver.manage_existing_snapshot,
                           self.snapshot, external_ref)
    self.assertIsNotNone(re.search('Snapshot is exposed to initiator',
                                   ex.msg))
@mock.patch.object(rest_client.RestClient, 'rename_snapshot')
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
                   '_get_snapshot_info_by_ref',
                   return_value={'ID': 'ID1',
                                 'EXPOSEDTOINITIATOR': 'false',
                                 'NAME': 'test1',
                                 'PARENTID': '11',
                                 'USERCAPACITY': 2097152,
                                 'HEALTHSTATUS': constants.STATUS_HEALTH})
def test_manage_existing_snapshot_success(self, mock_get_info,
                                          mock_rename):
    """Managing by source-name or source-id yields provider_location ID1."""
    external_ref = {'source-name': 'test1'}
    model_update = self.driver.manage_existing_snapshot(self.snapshot,
                                                        external_ref)
    self.assertEqual({'provider_location': 'ID1'}, model_update)
    external_ref = {'source-id': 'ID1'}
    model_update = self.driver.manage_existing_snapshot(self.snapshot,
                                                        external_ref)
    self.assertEqual({'provider_location': 'ID1'}, model_update)
    @mock.patch.object(rest_client.RestClient, 'get_snapshot_info',
                       return_value={'ID': 'ID1',
                                     'EXPOSEDTOINITIATOR': 'false',
                                     'NAME': 'test1',
                                     'USERCAPACITY': 2097152,
                                     'PARENTID': '12',
                                     'HEALTHSTATUS': constants.STATUS_HEALTH})
    @mock.patch.object(rest_client.RestClient, 'get_snapshot_id_by_name',
                       return_value='ID1')
    def test_manage_existing_snapshot_mismatch_lun(self, mock_get_by_name,
                                                   mock_get_info):
        """A snapshot whose PARENTID is not the volume's LUN is rejected."""
        external_ref = {'source-name': 'test1'}
        ex = self.assertRaises(exception.ManageExistingInvalidReference,
                               self.driver.manage_existing_snapshot,
                               self.snapshot, external_ref)
        self.assertIsNotNone(re.search("Snapshot doesn't belong to volume",
                                       ex.msg))
    @mock.patch.object(rest_client.RestClient, 'get_snapshot_info',
                       return_value={'USERCAPACITY': 3097152})
    @mock.patch.object(rest_client.RestClient, 'get_snapshot_id_by_name',
                       return_value='ID1')
    def test_manage_existing_snapshot_get_size_success(self,
                                                       mock_get_id_by_name,
                                                       mock_get_info):
        """Size is rounded up to whole GiB for all reference styles."""
        # USERCAPACITY 3097152 sectors rounds up to 2 GiB.
        external_ref = {'source-name': 'test1',
                        'source-id': 'ID1'}
        size = self.driver.manage_existing_snapshot_get_size(self.snapshot,
                                                             external_ref)
        self.assertEqual(2, size)
        external_ref = {'source-name': 'test1'}
        size = self.driver.manage_existing_snapshot_get_size(self.snapshot,
                                                             external_ref)
        self.assertEqual(2, size)
        external_ref = {'source-id': 'ID1'}
        size = self.driver.manage_existing_snapshot_get_size(self.snapshot,
                                                             external_ref)
        self.assertEqual(2, size)
    def test_unmanage_snapshot(self):
        """Unmanaging a managed snapshot completes without raising."""
        self.driver.unmanage_snapshot(self.snapshot)
    @ddt.data(sync_replica_specs, async_replica_specs)
    def test_create_replication_success(self, mock_type):
        """Creating a replicated volume records pair info and status."""
        self.mock_object(replication.ReplicaCommonDriver, 'sync')
        self.mock_object(
            huawei_driver.HuaweiBaseDriver,
            '_get_volume_type',
            return_value={'extra_specs': mock_type})
        model_update = self.driver.create_volume(self.replica_volume)
        # Driver data must carry the pair id and the remote LUN id.
        driver_data = {'pair_id': TEST_PAIR_ID,
                       'rmt_lun_id': '1'}
        driver_data = replication.to_string(driver_data)
        self.assertEqual(driver_data, model_update['replication_driver_data'])
        self.assertEqual('available', model_update['replication_status'])
    # Failure matrix: (module to patch, function name, failing mock,
    # pool-capability report). Each entry simulates one step of replica
    # creation failing, under both unsupported and supported pool reports.
    @ddt.data(
        [
            rest_client.RestClient,
            'get_array_info',
            mock.Mock(
                side_effect=exception.VolumeBackendAPIException(data='err')),
            FAKE_POOLS_UNSUPPORT_REPORT
        ],
        [
            rest_client.RestClient,
            'get_remote_devices',
            mock.Mock(
                side_effect=exception.VolumeBackendAPIException(data='err')),
            FAKE_POOLS_UNSUPPORT_REPORT
        ],
        [
            rest_client.RestClient,
            'get_remote_devices',
            mock.Mock(return_value={}),
            FAKE_POOLS_UNSUPPORT_REPORT
        ],
        [
            replication.ReplicaPairManager,
            'wait_volume_online',
            mock.Mock(side_effect=[
                None,
                exception.VolumeBackendAPIException(data='err')]),
            FAKE_POOLS_UNSUPPORT_REPORT
        ],
        [
            rest_client.RestClient,
            'create_pair',
            mock.Mock(
                side_effect=exception.VolumeBackendAPIException(data='err')),
            FAKE_POOLS_UNSUPPORT_REPORT
        ],
        [
            replication.ReplicaCommonDriver,
            'sync',
            mock.Mock(
                side_effect=exception.VolumeBackendAPIException(data='err')),
            FAKE_POOLS_UNSUPPORT_REPORT
        ],
        [
            rest_client.RestClient,
            'get_array_info',
            mock.Mock(
                side_effect=exception.VolumeBackendAPIException(data='err')),
            FAKE_POOLS_SUPPORT_REPORT
        ],
        [
            rest_client.RestClient,
            'get_remote_devices',
            mock.Mock(
                side_effect=exception.VolumeBackendAPIException(data='err')),
            FAKE_POOLS_SUPPORT_REPORT
        ],
        [
            rest_client.RestClient,
            'get_remote_devices',
            mock.Mock(return_value={}),
            FAKE_POOLS_SUPPORT_REPORT
        ],
        [
            replication.ReplicaPairManager,
            'wait_volume_online',
            mock.Mock(side_effect=[
                None,
                exception.VolumeBackendAPIException(data='err')]),
            FAKE_POOLS_SUPPORT_REPORT
        ],
        [
            rest_client.RestClient,
            'create_pair',
            mock.Mock(
                side_effect=exception.VolumeBackendAPIException(data='err')),
            FAKE_POOLS_SUPPORT_REPORT
        ],
        [
            replication.ReplicaCommonDriver,
            'sync',
            mock.Mock(
                side_effect=exception.VolumeBackendAPIException(data='err')),
            FAKE_POOLS_SUPPORT_REPORT
        ],
    )
    @ddt.unpack
    def test_create_replication_fail(self, mock_module, mock_func,
                                     mock_value, pool_data):
        """Any failing step of replica creation propagates the exception."""
        self.driver.support_func = pool_data
        self.mock_object(
            huawei_driver.HuaweiBaseDriver,
            '_get_volume_type',
            return_value={'extra_specs': sync_replica_specs})
        # Stub pair cleanup so the failure path does not hit the array.
        self.mock_object(replication.ReplicaPairManager, '_delete_pair')
        self.mock_object(mock_module, mock_func, mock_value)
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.driver.create_volume, self.replica_volume)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    def test_delete_replication_success(self, pool_data):
        """Deleting a replicated volume splits the pair; missing LUN is OK."""
        self.driver.support_func = pool_data
        self.mock_object(replication.ReplicaCommonDriver, 'split')
        self.mock_object(
            huawei_driver.HuaweiBaseDriver,
            '_get_volume_type',
            return_value={'extra_specs': sync_replica_specs})
        self.driver.delete_volume(self.replica_volume)
        # A second delete when the LUN no longer exists must not raise.
        self.mock_object(rest_client.RestClient, 'check_lun_exist',
                         return_value=False)
        self.driver.delete_volume(self.replica_volume)
    @unittest.skip("Skip until bug #1578986 is fixed")
    def test_wait_volume_online(self):
        """wait_volume_online returns for online LUNs, raises when offline."""
        replica = FakeReplicaPairManager(self.driver.client,
                                         self.driver.replica_client,
                                         self.configuration)
        lun_info = {'ID': '11'}
        replica.wait_volume_online(self.driver.client, lun_info)
        # RUNNINGSTATUS '28' is an offline state.
        offline_status = {'RUNNINGSTATUS': '28'}
        replica.wait_volume_online(self.driver.client, lun_info)
        with mock.patch.object(rest_client.RestClient, 'get_lun_info',
                               offline_status):
            self.assertRaises(exception.VolumeBackendAPIException,
                              replica.wait_volume_online,
                              self.driver.client,
                              lun_info)
    @unittest.skip("Skip until bug #1578986 is fixed")
    def test_wait_second_access(self):
        """wait_second_access returns on match and times out on mismatch."""
        pair_id = '1'
        access_ro = constants.REPLICA_SECOND_RO
        access_rw = constants.REPLICA_SECOND_RW
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)
        self.mock_object(replication.PairOp, 'get_replica_info',
                         return_value={'SECRESACCESS': access_ro})
        # Drive the wait loop's clock past the timeout deterministically.
        self.mock_object(huawei_utils.time, 'time',
                         side_effect=utils.generate_timeout_series(
                             constants.DEFAULT_REPLICA_WAIT_TIMEOUT))
        common_driver.wait_second_access(pair_id, access_ro)
        self.assertRaises(exception.VolumeBackendAPIException,
                          common_driver.wait_second_access, pair_id, access_rw)
    @unittest.skip("Skip until bug #1578986 is fixed")
    def test_wait_replica_ready(self):
        """wait_replica_ready handles normal, syncing and split states."""
        normal_status = {
            'RUNNINGSTATUS': constants.REPLICA_RUNNING_STATUS_NORMAL,
            'HEALTHSTATUS': constants.REPLICA_HEALTH_STATUS_NORMAL
        }
        split_status = {
            'RUNNINGSTATUS': constants.REPLICA_RUNNING_STATUS_SPLIT,
            'HEALTHSTATUS': constants.REPLICA_HEALTH_STATUS_NORMAL
        }
        sync_status = {
            'RUNNINGSTATUS': constants.REPLICA_RUNNING_STATUS_SYNC,
            'HEALTHSTATUS': constants.REPLICA_HEALTH_STATUS_NORMAL
        }
        pair_id = '1'
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)
        # Already normal: returns immediately.
        with mock.patch.object(replication.PairOp, 'get_replica_info',
                               return_value=normal_status):
            common_driver.wait_replica_ready(pair_id)
        # Syncing then normal: returns after one retry.
        with mock.patch.object(
                replication.PairOp,
                'get_replica_info',
                side_effect=[sync_status, normal_status]):
            common_driver.wait_replica_ready(pair_id)
        # Split pair never becomes ready: must raise.
        with mock.patch.object(replication.PairOp, 'get_replica_info',
                               return_value=split_status):
            self.assertRaises(exception.VolumeBackendAPIException,
                              common_driver.wait_replica_ready, pair_id)
    def test_failover_to_current(self):
        """Failing over to the already-active backend is a no-op."""
        driver = FakeISCSIStorage(configuration=self.configuration)
        driver.do_setup()
        old_client = driver.client
        old_replica_client = driver.replica_client
        old_replica = driver.replica
        secondary_id, volumes_update = driver.failover_host(
            None, [self.volume], 'default')
        # Clients and replica manager must be untouched; no volume updates.
        self.assertIn(driver.active_backend_id, ('', None))
        self.assertEqual(old_client, driver.client)
        self.assertEqual(old_replica_client, driver.replica_client)
        self.assertEqual(old_replica, driver.replica)
        self.assertEqual('default', secondary_id)
        self.assertEqual(0, len(volumes_update))
    def test_failover_normal_volumes(self):
        """Non-replicated volumes become 'error' after failover."""
        driver = FakeISCSIStorage(configuration=self.configuration)
        driver.do_setup()
        old_client = driver.client
        old_replica_client = driver.replica_client
        old_replica = driver.replica
        secondary_id, volumes_update = driver.failover_host(
            None, [self.volume], REPLICA_BACKEND_ID)
        # The driver swaps its primary and replica clients on failover.
        self.assertEqual(REPLICA_BACKEND_ID, driver.active_backend_id)
        self.assertEqual(old_client, driver.replica_client)
        self.assertEqual(old_replica_client, driver.client)
        self.assertNotEqual(old_replica, driver.replica)
        self.assertEqual(REPLICA_BACKEND_ID, secondary_id)
        self.assertEqual(1, len(volumes_update))
        v_id = volumes_update[0]['volume_id']
        v_update = volumes_update[0]['updates']
        self.assertEqual(self.volume.id, v_id)
        self.assertEqual('error', v_update['status'])
        # Prior status is preserved in metadata for failback.
        self.assertEqual(self.volume['status'],
                         v_update['metadata']['old_status'])
    def test_failback_to_current(self):
        """Failing over to the currently-active secondary is a no-op."""
        driver = FakeISCSIStorage(configuration=self.configuration)
        driver.active_backend_id = REPLICA_BACKEND_ID
        driver.do_setup()
        old_client = driver.client
        old_replica_client = driver.replica_client
        old_replica = driver.replica
        secondary_id, volumes_update = driver.failover_host(
            None, [self.volume], REPLICA_BACKEND_ID)
        # Nothing changes: clients, replica and volume list stay the same.
        self.assertEqual(REPLICA_BACKEND_ID, driver.active_backend_id)
        self.assertEqual(old_client, driver.client)
        self.assertEqual(old_replica_client, driver.replica_client)
        self.assertEqual(old_replica, driver.replica)
        self.assertEqual(REPLICA_BACKEND_ID, secondary_id)
        self.assertEqual(0, len(volumes_update))
    def test_failback_normal_volumes(self):
        """Failback restores a normal volume's pre-failover status."""
        self.volume.status = 'error'
        self.volume.metadata = {'old_status': 'available'}
        driver = FakeISCSIStorage(configuration=self.configuration)
        driver.active_backend_id = REPLICA_BACKEND_ID
        driver.do_setup()
        old_client = driver.client
        old_replica_client = driver.replica_client
        old_replica = driver.replica
        secondary_id, volumes_update = driver.failover_host(
            None, [self.volume], 'default')
        # Clients swap back and the saved old_status is restored and removed.
        self.assertIn(driver.active_backend_id, ('', None))
        self.assertEqual(old_client, driver.replica_client)
        self.assertEqual(old_replica_client, driver.client)
        self.assertNotEqual(old_replica, driver.replica)
        self.assertEqual('default', secondary_id)
        self.assertEqual(1, len(volumes_update))
        v_id = volumes_update[0]['volume_id']
        v_update = volumes_update[0]['updates']
        self.assertEqual(self.volume.id, v_id)
        self.assertEqual('available', v_update['status'])
        self.assertNotIn('old_status', v_update['metadata'])
    def test_failover_replica_volumes(self):
        """Replicated volumes fail over and flip to the remote LUN."""
        driver = FakeISCSIStorage(configuration=self.configuration)
        driver.do_setup()
        old_client = driver.client
        old_replica_client = driver.replica_client
        old_replica = driver.replica
        self.mock_object(replication.ReplicaCommonDriver, 'failover')
        self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                         return_value={'replication_enabled': 'true'})
        secondary_id, volumes_update = driver.failover_host(
            None, [self.replica_volume], REPLICA_BACKEND_ID)
        self.assertEqual(REPLICA_BACKEND_ID, driver.active_backend_id)
        self.assertEqual(old_client, driver.replica_client)
        self.assertEqual(old_replica_client, driver.client)
        self.assertNotEqual(old_replica, driver.replica)
        self.assertEqual(REPLICA_BACKEND_ID, secondary_id)
        self.assertEqual(1, len(volumes_update))
        v_id = volumes_update[0]['volume_id']
        v_update = volumes_update[0]['updates']
        self.assertEqual(self.replica_volume.id, v_id)
        self.assertEqual('1', v_update['provider_location'])
        self.assertEqual('failed-over', v_update['replication_status'])
        # After failover the driver data points back at the old local LUN.
        new_drv_data = {'pair_id': TEST_PAIR_ID,
                        'rmt_lun_id': self.replica_volume.provider_location}
        new_drv_data = replication.to_string(new_drv_data)
        self.assertEqual(new_drv_data, v_update['replication_driver_data'])
    @ddt.data({}, {'pair_id': TEST_PAIR_ID})
    def test_failover_replica_volumes_invalid_drv_data(self, mock_drv_data):
        """Incomplete replication driver data marks the volume 'error'."""
        volume = self.replica_volume
        volume['replication_driver_data'] = replication.to_string(
            mock_drv_data)
        driver = FakeISCSIStorage(configuration=self.configuration)
        driver.do_setup()
        old_client = driver.client
        old_replica_client = driver.replica_client
        old_replica = driver.replica
        self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                         return_value={'replication_enabled': 'true'})
        secondary_id, volumes_update = driver.failover_host(
            None, [volume], REPLICA_BACKEND_ID)
        self.assertEqual(driver.active_backend_id, REPLICA_BACKEND_ID)
        self.assertEqual(old_client, driver.replica_client)
        self.assertEqual(old_replica_client, driver.client)
        self.assertNotEqual(old_replica, driver.replica)
        self.assertEqual(REPLICA_BACKEND_ID, secondary_id)
        self.assertEqual(1, len(volumes_update))
        v_id = volumes_update[0]['volume_id']
        v_update = volumes_update[0]['updates']
        self.assertEqual(volume.id, v_id)
        self.assertEqual('error', v_update['replication_status'])
    def test_failback_replica_volumes(self):
        """Replicated volumes fail back and become 'available' again."""
        self.mock_object(replication.ReplicaCommonDriver, 'enable')
        self.mock_object(replication.ReplicaCommonDriver, 'wait_replica_ready')
        self.mock_object(replication.ReplicaCommonDriver, 'failover')
        self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                         return_value={'replication_enabled': 'true'})
        volume = self.replica_volume
        driver = FakeISCSIStorage(configuration=self.configuration)
        driver.active_backend_id = REPLICA_BACKEND_ID
        driver.do_setup()
        old_client = driver.client
        old_replica_client = driver.replica_client
        old_replica = driver.replica
        secondary_id, volumes_update = driver.failover_host(
            None, [volume], 'default')
        self.assertIn(driver.active_backend_id, ('', None))
        self.assertEqual(old_client, driver.replica_client)
        self.assertEqual(old_replica_client, driver.client)
        self.assertNotEqual(old_replica, driver.replica)
        self.assertEqual('default', secondary_id)
        self.assertEqual(1, len(volumes_update))
        v_id = volumes_update[0]['volume_id']
        v_update = volumes_update[0]['updates']
        self.assertEqual(self.replica_volume.id, v_id)
        self.assertEqual('1', v_update['provider_location'])
        self.assertEqual('available', v_update['replication_status'])
        # Driver data again references the (now remote) LUN of the pair.
        new_drv_data = {'pair_id': TEST_PAIR_ID,
                        'rmt_lun_id': self.replica_volume.provider_location}
        new_drv_data = replication.to_string(new_drv_data)
        self.assertEqual(new_drv_data, v_update['replication_driver_data'])
    @ddt.data({}, {'pair_id': TEST_PAIR_ID})
    def test_failback_replica_volumes_invalid_drv_data(self, mock_drv_data):
        """Failback with incomplete driver data marks the volume 'error'."""
        self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
                         return_value={'replication_enabled': 'true'})
        volume = self.replica_volume
        volume['replication_driver_data'] = replication.to_string(
            mock_drv_data)
        driver = FakeISCSIStorage(configuration=self.configuration)
        driver.active_backend_id = REPLICA_BACKEND_ID
        driver.do_setup()
        old_client = driver.client
        old_replica_client = driver.replica_client
        old_replica = driver.replica
        secondary_id, volumes_update = driver.failover_host(
            None, [volume], 'default')
        self.assertIn(driver.active_backend_id, ('', None))
        self.assertEqual(old_client, driver.replica_client)
        self.assertEqual(old_replica_client, driver.client)
        self.assertNotEqual(old_replica, driver.replica)
        self.assertEqual('default', secondary_id)
        self.assertEqual(1, len(volumes_update))
        v_id = volumes_update[0]['volume_id']
        v_update = volumes_update[0]['updates']
        self.assertEqual(self.replica_volume.id, v_id)
        self.assertEqual('error', v_update['replication_status'])
    @unittest.skip("Skip until bug #1578986 is fixed")
    @mock.patch('oslo_service.loopingcall.FixedIntervalLoopingCall',
                new=utils.ZeroIntervalLoopingCall)
    @mock.patch.object(replication.PairOp, 'is_primary',
                       side_effect=[False, True])
    @mock.patch.object(replication.ReplicaCommonDriver, 'split')
    @mock.patch.object(replication.ReplicaCommonDriver, 'unprotect_second')
    def test_replication_driver_enable_success(self,
                                               mock_unprotect,
                                               mock_split,
                                               mock_is_primary):
        """Enabling a pair unprotects the secondary, splits and switches."""
        replica_id = TEST_PAIR_ID
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)
        common_driver.enable(replica_id)
        self.assertTrue(mock_unprotect.called)
        self.assertTrue(mock_split.called)
        self.assertTrue(mock_is_primary.called)
    @mock.patch.object(replication.PairOp, 'is_primary', return_value=False)
    @mock.patch.object(replication.ReplicaCommonDriver, 'split')
    def test_replication_driver_failover_success(self,
                                                 mock_split,
                                                 mock_is_primary):
        """Failover on the secondary side splits the pair."""
        replica_id = TEST_PAIR_ID
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)
        common_driver.failover(replica_id)
        self.assertTrue(mock_split.called)
        self.assertTrue(mock_is_primary.called)
    @mock.patch.object(replication.PairOp, 'is_primary', return_value=True)
    def test_replication_driver_failover_fail(self, mock_is_primary):
        """Failover must raise when invoked on the primary side."""
        replica_id = TEST_PAIR_ID
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)
        self.assertRaises(
            exception.VolumeBackendAPIException,
            common_driver.failover,
            replica_id)
    @ddt.data(constants.REPLICA_SECOND_RW, constants.REPLICA_SECOND_RO)
    def test_replication_driver_protect_second(self, mock_access):
        """protect/unprotect of the secondary succeed for RW and RO modes."""
        replica_id = TEST_PAIR_ID
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)
        self.mock_object(replication.ReplicaCommonDriver, 'wait_second_access')
        self.mock_object(
            replication.PairOp,
            'get_replica_info',
            return_value={'SECRESACCESS': mock_access})
        common_driver.protect_second(replica_id)
        common_driver.unprotect_second(replica_id)
    @unittest.skip("Skip until bug #1578986 is fixed")
    def test_replication_driver_sync(self):
        """sync works on an async-model pair whether waiting or not."""
        replica_id = TEST_PAIR_ID
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)
        async_normal_status = {
            'REPLICATIONMODEL': constants.REPLICA_ASYNC_MODEL,
            'RUNNINGSTATUS': constants.REPLICA_RUNNING_STATUS_NORMAL,
            'HEALTHSTATUS': constants.REPLICA_HEALTH_STATUS_NORMAL
        }
        self.mock_object(replication.ReplicaCommonDriver, 'protect_second')
        self.mock_object(replication.PairOp, 'get_replica_info',
                         return_value=async_normal_status)
        # Second argument toggles whether sync waits for completion.
        common_driver.sync(replica_id, True)
        common_driver.sync(replica_id, False)
    def test_replication_driver_split(self):
        """split swallows backend errors from the underlying PairOp."""
        replica_id = TEST_PAIR_ID
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)
        self.mock_object(replication.ReplicaCommonDriver, 'wait_expect_state')
        self.mock_object(
            replication.PairOp, 'split',
            side_effect=exception.VolumeBackendAPIException(data='err'))
        # Must not propagate the backend exception.
        common_driver.split(replica_id)
    @mock.patch.object(replication.PairOp, 'split')
    @ddt.data(constants.REPLICA_RUNNING_STATUS_SPLIT,
              constants.REPLICA_RUNNING_STATUS_INVALID,
              constants.REPLICA_RUNNING_STATUS_ERRUPTED)
    def test_replication_driver_split_already_disabled(self, mock_status,
                                                       mock_op_split):
        """split is a no-op when the pair is already split/invalid/errupted."""
        replica_id = TEST_PAIR_ID
        op = replication.PairOp(self.driver.client)
        common_driver = replication.ReplicaCommonDriver(self.configuration, op)
        pair_info = json.loads(FAKE_GET_PAIR_NORMAL_RESPONSE)['data']
        pair_info['RUNNINGSTATUS'] = mock_status
        self.mock_object(rest_client.RestClient, 'get_pair_by_id',
                         return_value=pair_info)
        common_driver.split(replica_id)
        self.assertFalse(mock_op_split.called)
    def test_replication_base_op(self):
        """The abstract replica-op base class methods are all callable."""
        replica_id = '1'
        op = replication.AbsReplicaOp(None)
        op.create()
        op.delete(replica_id)
        op.protect_second(replica_id)
        op.unprotect_second(replica_id)
        op.sync(replica_id)
        op.split(replica_id)
        op.switch(replica_id)
        op.is_primary({})
        op._is_status(None, {'key': 'volue'}, None)
    @mock.patch.object(rest_client.RestClient, 'call',
                       return_value={"error": {"code": 0}})
    def test_get_tgt_port_group_no_portg_exist(self, mock_call):
        """Looking up a nonexistent port group returns None."""
        portg = self.driver.client.get_tgt_port_group('test_portg')
        self.assertIsNone(portg)
    def test_get_tgt_iqn_from_rest_match(self):
        """The exact-IP entry is picked even when a longer IP also matches."""
        match_res = {
            'data': [{
                'TYPE': 249,
                'ID': '0+iqn.2006-08.com: 210048cee9d: 111.111.111.19,t,0x01'
            }, {
                'TYPE': 249,
                'ID': '0+iqn.2006-08.com: 210048cee9d: 111.111.111.191,t,0x01'
            }],
            'error': {
                'code': 0
            }
        }
        ip = '111.111.111.19'
        expected_iqn = 'iqn.2006-08.com: 210048cee9d: 111.111.111.19'
        self.mock_object(rest_client.RestClient, 'call',
                         return_value=match_res)
        iqn = self.driver.client._get_tgt_iqn_from_rest(ip)
        self.assertEqual(expected_iqn, iqn)
    def test_get_tgt_iqn_from_rest_mismatch(self):
        """No IQN is returned when no entry contains the requested IP."""
        match_res = {
            'data': [{
                'TYPE': 249,
                'ID': '0+iqn.2006-08.com: 210048cee9d: 192.0.2.191,t,0x01'
            }, {
                'TYPE': 249,
                'ID': '0+iqn.2006-08.com: 210048cee9d: 192.0.2.192,t,0x01'
            }],
            'error': {
                'code': 0
            }
        }
        ip = '192.0.2.19'
        self.mock_object(rest_client.RestClient, 'call',
                         return_value=match_res)
        iqn = self.driver.client._get_tgt_iqn_from_rest(ip)
        self.assertIsNone(iqn)
    def test_create_cgsnapshot(self):
        """CG snapshot creation returns per-snapshot updates and status."""
        test_snapshots = [self.snapshot]
        ctxt = context.get_admin_context()
        model, snapshots = self.driver.create_cgsnapshot(ctxt,
                                                         self.cgsnapshot,
                                                         test_snapshots)
        snapshots_model_update = [{'id': '21ec7341-9256-497b-97d9'
                                         '-ef48edcf0635',
                                   'status': 'available',
                                   'provider_location': 11}]
        self.assertEqual(snapshots_model_update, snapshots)
        self.assertEqual('available', model['status'])
    def test_create_cgsnapshot_create_snapshot_fail(self):
        """A snapshot-creation backend error aborts the CG snapshot."""
        test_snapshots = [self.snapshot]
        ctxt = context.get_admin_context()
        self.mock_object(rest_client.RestClient, 'create_snapshot',
                         side_effect=(
                             exception.VolumeBackendAPIException(data='err')))
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_cgsnapshot,
                          ctxt,
                          self.cgsnapshot,
                          test_snapshots)
    def test_create_cgsnapshot_active_snapshot_fail(self):
        """A snapshot-activation backend error aborts the CG snapshot."""
        test_snapshots = [self.snapshot]
        ctxt = context.get_admin_context()
        self.mock_object(rest_client.RestClient, 'activate_snapshot',
                         side_effect=(
                             exception.VolumeBackendAPIException(data='err')))
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_cgsnapshot,
                          ctxt,
                          self.cgsnapshot,
                          test_snapshots)
    def test_delete_cgsnapshot(self):
        """Deleting a CG snapshot completes without raising."""
        test_snapshots = [self.snapshot]
        ctxt = context.get_admin_context()
        self.driver.delete_cgsnapshot(ctxt,
                                      self.cgsnapshot,
                                      test_snapshots)
class FCSanLookupService(object):
    """Fake FC SAN lookup service returning a canned fabric mapping."""
    def get_device_mapping_from_network(self, initiator_list,
                                        target_list):
        """Return the module-level fake mapping regardless of inputs."""
        return fake_fabric_mapping
@ddt.ddt
class HuaweiFCDriverTestCase(HuaweiTestBase):
    def setUp(self):
        """Build an FC-flavored fake driver and log its client in."""
        super(HuaweiFCDriverTestCase, self).setUp()
        self.configuration = mock.Mock(spec=conf.Configuration)
        self.flags(rpc_backend='oslo_messaging._drivers.impl_fake')
        self.huawei_conf = FakeHuaweiConf(self.configuration, 'FC')
        self.configuration.hypermetro_devices = hypermetro_devices
        driver = FakeFCStorage(configuration=self.configuration)
        self.driver = driver
        self.driver.do_setup()
        self.driver.client.login()
    def test_login_success(self):
        """Login returns the fake array's device serial number."""
        device_id = self.driver.client.login()
        self.assertEqual('210235G7J20000000000', device_id)
    def test_create_volume_success(self):
        """Creating a volume returns the new LUN id as provider_location."""
        lun_info = self.driver.create_volume(self.volume)
        self.assertEqual('1', lun_info['provider_location'])
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    def test_delete_volume_success(self, pool_data):
        """Volume deletion succeeds for both pool capability reports."""
        self.driver.support_func = pool_data
        self.driver.delete_volume(self.volume)
    def test_delete_snapshot_success(self):
        """Deleting a snapshot completes without raising."""
        self.driver.delete_snapshot(self.snapshot)
    @unittest.skip("Skip until bug #1578986 is fixed")
    def test_create_volume_from_snapsuccess(self):
        """Creating from a snapshot yields the cloned LUN id."""
        lun_info = self.driver.create_volume_from_snapshot(self.volume,
                                                           self.volume)
        self.assertEqual('1', lun_info['provider_location'])
    @mock.patch.object(huawei_driver.HuaweiFCDriver,
                       'initialize_connection',
                       return_value={"data": {'target_lun': 1}})
    def test_initialize_connection_snapshot_success(self, mock_fc_init):
        """Snapshot attach wraps the snapshot as a LUN-type-27 volume."""
        iscsi_properties = self.driver.initialize_connection_snapshot(
            self.snapshot, FakeConnector)
        volume = Volume(id=self.snapshot.id,
                        provider_location=self.snapshot.provider_location,
                        lun_type='27',
                        metadata=None)
        self.assertEqual(1, iscsi_properties['data']['target_lun'])
        mock_fc_init.assert_called_with(volume, FakeConnector)
    def test_initialize_connection_success(self):
        """A plain FC attach maps the volume to target LUN 1."""
        iscsi_properties = self.driver.initialize_connection(self.volume,
                                                             FakeConnector)
        self.assertEqual(1, iscsi_properties['data']['target_lun'])
    def test_initialize_connection_fail_no_online_wwns_in_host(self):
        """Attach fails when the host has no online free FC WWNs."""
        self.mock_object(rest_client.RestClient, 'get_online_free_wwns',
                         return_value=[])
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.initialize_connection,
                          self.volume, FakeConnector)
    def test_initialize_connection_no_local_ini_tgt_map(self):
        """Hypermetro attach proceeds with an empty local init/target map."""
        self.mock_object(rest_client.RestClient, 'get_init_targ_map',
                         return_value=('', ''))
        self.mock_object(huawei_driver.HuaweiFCDriver, '_get_same_hostid',
                         return_value='')
        self.mock_object(rest_client.RestClient, 'change_hostlun_id',
                         return_value=None)
        self.mock_object(rest_client.RestClient, 'do_mapping',
                         return_value={'lun_id': '1',
                                       'view_id': '1',
                                       'aval_luns': '[1]'})
        self.driver.initialize_connection(self.hyper_volume, FakeConnector)
    def test_hypermetro_connection_success(self):
        """Hypermetro FC attach succeeds on a V300R003C00 array."""
        self.mock_object(rest_client.RestClient, 'find_array_version',
                         return_value='V300R003C00')
        fc_properties = self.driver.initialize_connection(self.hyper_volume,
                                                          FakeConnector)
        self.assertEqual(1, fc_properties['data']['target_lun'])
    @mock.patch.object(huawei_driver.HuaweiFCDriver,
                       'terminate_connection')
    def test_terminate_connection_snapshot_success(self, mock_fc_term):
        """Snapshot detach delegates via a LUN-type-27 wrapper volume."""
        self.driver.terminate_connection_snapshot(self.snapshot,
                                                  FakeConnector)
        volume = Volume(id=self.snapshot.id,
                        provider_location=self.snapshot.provider_location,
                        lun_type='27',
                        metadata=None)
        mock_fc_term.assert_called_with(volume, FakeConnector)
    def test_terminate_connection_success(self):
        """A plain FC detach completes with the fake client's flag set."""
        self.driver.client.terminateFlag = True
        self.driver.terminate_connection(self.volume, FakeConnector)
        self.assertTrue(self.driver.client.terminateFlag)
    def test_terminate_connection_portgroup_associated(self):
        """Detach succeeds when the port group is still bound to a view."""
        self.mock_object(rest_client.RestClient,
                         'is_portgroup_associated_to_view',
                         return_value=True)
        self.mock_object(huawei_driver.HuaweiFCDriver,
                         '_delete_zone_and_remove_fc_initiators',
                         return_value=({}, 1))
        self.driver.terminate_connection(self.volume, FakeConnector)
    def test_terminate_connection_fc_initiators_exist_in_host(self):
        """Detach succeeds when FC initiators remain attached to the host."""
        self.mock_object(rest_client.RestClient,
                         'check_fc_initiators_exist_in_host',
                         return_value=True)
        self.driver.terminate_connection(self.volume, FakeConnector)
    def test_terminate_connection_hypermetro_in_metadata(self):
        """Detaching a hypermetro volume completes without raising."""
        self.driver.terminate_connection(self.hyper_volume, FakeConnector)
    def test_get_volume_status(self):
        """Replication capability is reported only with a remote device."""
        remote_device_info = {"ARRAYTYPE": "1",
                              "HEALTHSTATUS": "1",
                              "RUNNINGSTATUS": "10"}
        self.mock_object(
            replication.ReplicaPairManager,
            'get_remote_device_by_wwn',
            return_value=remote_device_info)
        data = self.driver.get_volume_stats()
        self.assertEqual(self.driver.VERSION, data['driver_version'])
        self.assertTrue(data['pools'][0]['replication_enabled'])
        self.assertListEqual(['sync', 'async'],
                             data['pools'][0]['replication_type'])
        # Remote device lookup returns nothing: capability disappears.
        self.mock_object(
            replication.ReplicaPairManager,
            'get_remote_device_by_wwn',
            return_value={})
        data = self.driver.get_volume_stats()
        self.assertNotIn('replication_enabled', data['pools'][0])
        # Remote WWN cannot be determined either: same result.
        self.mock_object(
            replication.ReplicaPairManager,
            'try_get_remote_wwn',
            return_value={})
        data = self.driver.get_volume_stats()
        self.assertEqual(self.driver.VERSION, data['driver_version'])
        self.assertNotIn('replication_enabled', data['pools'][0])
    # Tier capacities map to disk types: tier0=ssd, tier1=sas, tier2=nl_sas;
    # multiple non-zero tiers report 'mix'; all-zero reports no disk_type.
    @ddt.data({'TIER0CAPACITY': '100',
               'TIER1CAPACITY': '0',
               'TIER2CAPACITY': '0',
               'disktype': 'ssd'},
              {'TIER0CAPACITY': '0',
               'TIER1CAPACITY': '100',
               'TIER2CAPACITY': '0',
               'disktype': 'sas'},
              {'TIER0CAPACITY': '0',
               'TIER1CAPACITY': '0',
               'TIER2CAPACITY': '100',
               'disktype': 'nl_sas'},
              {'TIER0CAPACITY': '100',
               'TIER1CAPACITY': '100',
               'TIER2CAPACITY': '100',
               'disktype': 'mix'},
              {'TIER0CAPACITY': '0',
               'TIER1CAPACITY': '0',
               'TIER2CAPACITY': '0',
               'disktype': ''})
    def test_get_volume_disk_type(self, disk_type_value):
        """Pool tier capacities determine the reported disk_type."""
        response_dict = json.loads(FAKE_STORAGE_POOL_RESPONSE)
        storage_pool_sas = copy.deepcopy(response_dict)
        storage_pool_sas['data'][0]['TIER0CAPACITY'] = (
            disk_type_value['TIER0CAPACITY'])
        storage_pool_sas['data'][0]['TIER1CAPACITY'] = (
            disk_type_value['TIER1CAPACITY'])
        storage_pool_sas['data'][0]['TIER2CAPACITY'] = (
            disk_type_value['TIER2CAPACITY'])
        driver = FakeISCSIStorage(configuration=self.configuration)
        driver.do_setup()
        driver.replica = None
        self.mock_object(rest_client.RestClient, 'get_all_pools',
                         return_value=storage_pool_sas['data'])
        data = driver.get_volume_stats()
        if disk_type_value['disktype']:
            self.assertEqual(disk_type_value['disktype'],
                             data['pools'][0]['disk_type'])
        else:
            self.assertIsNone(data['pools'][0].get('disk_type'))
    def test_get_disk_type_pool_info_none(self):
        """No pool info from the array means no disk_type in stats."""
        driver = FakeISCSIStorage(configuration=self.configuration)
        driver.do_setup()
        driver.replica = None
        self.mock_object(rest_client.RestClient, 'get_pool_info',
                         return_value=None)
        data = driver.get_volume_stats()
        self.assertIsNone(data['pools'][0].get('disk_type'))
    def test_extend_volume(self):
        """Extending a volume to 3 GiB completes without raising."""
        self.driver.extend_volume(self.volume, 3)
    def test_login_fail(self):
        """A failing login raises VolumeBackendAPIException."""
        self.driver.client.test_fail = True
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.client.login)
    def test_create_snapshot_fail(self):
        """Snapshot creation propagates backend failures."""
        self.driver.client.test_fail = True
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_snapshot, self.snapshot)
    def test_create_volume_fail(self):
        """Volume creation propagates backend failures."""
        self.driver.client.test_fail = True
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_volume, self.volume)
    def test_delete_volume_fail(self):
        """Volume deletion swallows backend failures (best effort)."""
        self.driver.client.test_fail = True
        self.driver.delete_volume(self.volume)
    def test_delete_snapshot_fail(self):
        """Snapshot deletion swallows backend failures (best effort)."""
        self.driver.client.test_fail = True
        self.driver.delete_snapshot(self.snapshot)
    def test_initialize_connection_fail(self):
        """Attach propagates backend failures."""
        self.driver.client.test_fail = True
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.initialize_connection,
                          self.volume, FakeConnector)
    def test_lun_is_associated_to_lungroup(self):
        """After association, the LUN is reported as in the LUN group."""
        self.driver.client.associate_lun_to_lungroup('11', '11')
        result = self.driver.client._is_lun_associated_to_lungroup('11',
                                                                   '11')
        self.assertTrue(result)
    def test_lun_is_not_associated_to_lun_group(self):
        """After removal, the LUN is no longer reported in the group."""
        self.driver.client.associate_lun_to_lungroup('12', '12')
        self.driver.client.remove_lun_from_lungroup('12', '12')
        result = self.driver.client._is_lun_associated_to_lungroup('12',
                                                                   '12')
        self.assertFalse(result)
    @unittest.skip("Skip until bug #1578986 is fixed")
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    @mock.patch.object(rest_client, 'RestClient')
    def test_migrate_volume_success(self, mock_add_lun_to_partition,
                                    pool_data):
        """Migration succeeds with and without a new volume type."""
        # Migrate volume without new type.
        empty_dict = {}
        self.driver.support_func = pool_data
        moved, model_update = self.driver.migrate_volume(None,
                                                         self.volume,
                                                         test_host,
                                                         None)
        self.assertTrue(moved)
        self.assertEqual(empty_dict, model_update)
        # Migrate volume with new type.
        empty_dict = {}
        new_type = {'extra_specs':
                    {'smarttier': '<is> true',
                     'smartcache': '<is> true',
                     'smartpartition': '<is> true',
                     'thin_provisioning_support': '<is> true',
                     'thick_provisioning_support': '<is> False',
                     'policy': '2',
                     'smartcache:cachename': 'cache-test',
                     'smartpartition:partitionname': 'partition-test'}}
        moved, model_update = self.driver.migrate_volume(None,
                                                         self.volume,
                                                         test_host,
                                                         new_type)
        self.assertTrue(moved)
        self.assertEqual(empty_dict, model_update)
    def test_migrate_volume_fail(self):
        """Migration propagates backend failures for both call forms."""
        self.driver.client.test_fail = True
        # Migrate volume without new type.
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.migrate_volume, None,
                          self.volume, test_host, None)
        # Migrate volume with new type.
        new_type = {'extra_specs':
                    {'smarttier': '<is> true',
                     'smartcache': '<is> true',
                     'thin_provisioning_support': '<is> true',
                     'thick_provisioning_support': '<is> False',
                     'policy': '2',
                     'smartcache:cachename': 'cache-test',
                     'partitionname': 'partition-test'}}
        self.driver.client.test_fail = True
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.migrate_volume, None,
                          self.volume, test_host, new_type)
def test_check_migration_valid(self):
is_valid = self.driver._check_migration_valid(test_host,
self.volume)
self.assertTrue(is_valid)
# No pool_name in capabilities.
invalid_host1 = {'host': 'ubuntu001@backend002
'capabilities':
{'location_info': '210235G7J20000000000',
'allocated_capacity_gb': 0,
'volume_backend_name': 'HuaweiFCDriver',
'storage_protocol': 'FC'}}
is_valid = self.driver._check_migration_valid(invalid_host1,
self.volume)
self.assertFalse(is_valid)
# location_info in capabilities is not matched.
invalid_host2 = {'host': 'ubuntu001@backend002
'capabilities':
{'location_info': '210235G7J20000000001',
'allocated_capacity_gb': 0,
'pool_name': 'OpenStack_Pool',
'volume_backend_name': 'HuaweiFCDriver',
'storage_protocol': 'FC'}}
is_valid = self.driver._check_migration_valid(invalid_host2,
self.volume)
self.assertFalse(is_valid)
# storage_protocol is not match current protocol and volume status is
# 'in-use'.
volume_in_use = {'name': 'volume-21ec7341-9256-497b-97d9-ef48edcf0635',
'size': 2,
'volume_name': 'vol1',
'id': ID,
'volume_id': '21ec7341-9256-497b-97d9-ef48edcf0635',
'volume_attachment': 'in-use',
'provider_location': '11'}
invalid_host2 = {'host': 'ubuntu001@backend002
'capabilities':
{'location_info': '210235G7J20000000001',
'allocated_capacity_gb': 0,
'pool_name': 'OpenStack_Pool',
'volume_backend_name': 'HuaweiFCDriver',
'storage_protocol': 'iSCSI'}}
is_valid = self.driver._check_migration_valid(invalid_host2,
volume_in_use)
self.assertFalse(is_valid)
# pool_name is empty.
invalid_host3 = {'host': 'ubuntu001@backend002
'capabilities':
{'location_info': '210235G7J20000000001',
'allocated_capacity_gb': 0,
'pool_name': '',
'volume_backend_name': 'HuaweiFCDriver',
'storage_protocol': 'iSCSI'}}
is_valid = self.driver._check_migration_valid(invalid_host3,
self.volume)
self.assertFalse(is_valid)
    @mock.patch.object(rest_client.RestClient, 'rename_lun')
    def test_update_migrated_volume_success(self, mock_rename_lun):
        """On a successful rename the driver clears the volume's _name_id."""
        model_update = self.driver.update_migrated_volume(None,
                                                          self.original_volume,
                                                          self.current_volume,
                                                          'available')
        self.assertEqual({'_name_id': None}, model_update)
    @mock.patch.object(rest_client.RestClient, 'rename_lun')
    def test_update_migrated_volume_fail(self, mock_rename_lun):
        """If renaming the LUN fails, the current volume's name_id is kept."""
        mock_rename_lun.side_effect = exception.VolumeBackendAPIException(
            data='Error occurred.')
        model_update = self.driver.update_migrated_volume(None,
                                                          self.original_volume,
                                                          self.current_volume,
                                                          'available')
        self.assertEqual(self.current_volume.name_id,
                         model_update['_name_id'])
    @mock.patch.object(rest_client.RestClient, 'add_lun_to_partition')
    def test_retype_volume_success(self, mock_add_lun_to_partition):
        """Retype succeeds when the pool reports support for the features."""
        self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
        retype = self.driver.retype(None, self.volume,
                                    test_new_type, None, test_host)
        self.assertTrue(retype)
@unittest.skip("Skip until bug #1578986 is fixed")
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client, 'RestClient')
@mock.patch.object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
def test_retype_replication_volume_success(self, mock_get_type,
mock_add_lun_to_partition,
pool_data):
self.driver.support_func = pool_data
retype = self.driver.retype(None, self.volume,
test_new_replication_type, None, test_host)
self.assertTrue(retype)
@ddt.data(
[
replication.ReplicaPairManager,
'create_replica',
exception.VolumeBackendAPIException(
data='Can\'t support smarttier on the array.'),
FAKE_POOLS_UNSUPPORT_REPORT
],
[
replication.ReplicaPairManager,
'create_replica',
exception.VolumeBackendAPIException(
data='Can\'t support smarttier on the array.'),
FAKE_POOLS_SUPPORT_REPORT
],
[
replication.ReplicaPairManager,
'delete_replica',
exception.VolumeBackendAPIException(
data='Can\'t support smarttier on the array.'),
FAKE_POOLS_SUPPORT_REPORT
],
[
replication.ReplicaPairManager,
'delete_replica',
exception.VolumeBackendAPIException(
data='Can\'t support smarttier on the array.'),
FAKE_POOLS_UNSUPPORT_REPORT
],
)
@ddt.unpack
def test_retype_replication_volume_fail(self,
mock_module,
mock_func,
side_effect,
pool_data):
self.driver.support_func = pool_data
self.mock_object(mock_module, mock_func, side_effect=side_effect)
self.mock_object(rest_client.RestClient, 'add_lun_to_partition')
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
retype = self.driver.retype(None, self.volume,
test_new_replication_type, None, test_host)
self.assertFalse(retype)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    def test_retype_volume_cache_fail(self, pool_data):
        """Retype raises when the requested smartcache does not exist."""
        self.driver.client.cache_not_exist = True
        self.driver.support_func = pool_data
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.retype, None,
                          self.volume, test_new_type, None, test_host)
    @ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
    def test_retype_volume_partition_fail(self, pool_data):
        """Retype raises when the requested smartpartition does not exist."""
        self.driver.support_func = pool_data
        self.driver.client.partition_not_exist = True
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.retype, None,
                          self.volume, test_new_type, None, test_host)
    @mock.patch.object(rest_client.RestClient, 'add_lun_to_partition')
    def test_retype_volume_fail(self, mock_add_lun_to_partition):
        """Retype returns False when adding the LUN to a partition fails."""
        self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
        mock_add_lun_to_partition.side_effect = (
            exception.VolumeBackendAPIException(data='Error occurred.'))
        retype = self.driver.retype(None, self.volume,
                                    test_new_type, None, test_host)
        self.assertFalse(retype)
@mock.patch.object(rest_client.RestClient, 'get_all_engines',
return_value=[{'NODELIST': '["0A","0B"]', 'ID': '0'}])
def test_build_ini_targ_map_engie_recorded(self, mock_engines):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
(tgt_wwns, portg_id, init_targ_map) = zone_helper.build_ini_targ_map(
['10000090fa0d6754'], '1', '11')
target_port_wwns = ['2000643e8c4c5f66']
self.assertEqual(target_port_wwns, tgt_wwns)
self.assertEqual({}, init_targ_map)
@ddt.data(fake_fabric_mapping_no_ports, fake_fabric_mapping_no_wwn)
def test_filter_by_fabric_fail(self, ddt_map):
self.mock_object(
FCSanLookupService, 'get_device_mapping_from_network',
return_value=ddt_map)
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
self.assertRaises(exception.VolumeBackendAPIException,
zone_helper._filter_by_fabric, ['10000090fa0d6754'],
None)
@mock.patch.object(rest_client.RestClient, 'get_all_engines',
return_value=[{'NODELIST': '["0A"]', 'ID': '0'},
{'NODELIST': '["0B"]', 'ID': '1'}])
@mock.patch.object(fc_zone_helper.FCZoneHelper, '_build_contr_port_map',
return_value={'0B': ['2000643e8c4c5f67']})
def test_build_ini_targ_map_engie_not_recorded(self, mock_engines, map):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
(tgt_wwns, portg_id, init_targ_map) = zone_helper.build_ini_targ_map(
['10000090fa0d6754'], '1', '11')
expected_wwns = ['2000643e8c4c5f67', '2000643e8c4c5f66']
expected_map = {'10000090fa0d6754': expected_wwns}
self.assertEqual(expected_wwns, tgt_wwns)
self.assertEqual(expected_map, init_targ_map)
@mock.patch.object(rest_client.RestClient, 'get_all_engines',
return_value=[{'NODELIST': '["0A", "0B"]', 'ID': '0'}])
def test_build_ini_targ_map_no_map(self, mock_engines):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
# Host with id '5' has no map on the array.
(tgt_wwns, portg_id, init_targ_map) = zone_helper.build_ini_targ_map(
['10000090fa0d6754'], '5', '11')
expected_wwns = ['2000643e8c4c5f66']
expected_map = {'10000090fa0d6754': ['2000643e8c4c5f66']}
self.assertEqual(expected_wwns, tgt_wwns)
self.assertEqual(expected_map, init_targ_map)
@mock.patch.object(rest_client.RestClient, 'get_all_engines',
return_value=[{'NODELIST': '["0A", "0B"]', 'ID': '0'}])
@mock.patch.object(rest_client.RestClient, 'get_tgt_port_group',
return_value='0')
@mock.patch.object(rest_client.RestClient, 'delete_portgroup')
def test_build_ini_targ_map_exist_portg(self, delete, engines, portg):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
# Host with id '5' has no map on the array.
(tgt_wwns, portg_id, init_targ_map) = zone_helper.build_ini_targ_map(
['10000090fa0d6754'], '5', '11')
expected_wwns = ['2000643e8c4c5f66']
expected_map = {'10000090fa0d6754': ['2000643e8c4c5f66']}
self.assertEqual(expected_wwns, tgt_wwns)
self.assertEqual(expected_map, init_targ_map)
self.assertEqual(1, delete.call_count)
    def test_get_init_targ_map(self):
        """get_init_targ_map maps each initiator WWN to the target WWNs."""
        fake_lookup_service = FCSanLookupService()
        zone_helper = fc_zone_helper.FCZoneHelper(
            fake_lookup_service, self.driver.client)
        (tgt_wwns, portg_id, init_targ_map) = zone_helper.get_init_targ_map(
            ['10000090fa0d6754'], '1')
        expected_wwns = ['2000643e8c4c5f66']
        expected_map = {'10000090fa0d6754': ['2000643e8c4c5f66']}
        self.assertEqual(expected_wwns, tgt_wwns)
        self.assertEqual(expected_map, init_targ_map)
    def test_get_init_targ_map_no_host(self):
        """Without a host id the helper returns empty WWNs, no portgroup
        and an empty initiator/target map."""
        fake_lookup_service = FCSanLookupService()
        zone_helper = fc_zone_helper.FCZoneHelper(
            fake_lookup_service, self.driver.client)
        ret = zone_helper.get_init_targ_map(
            ['10000090fa0d6754'], None)
        expected_ret = ([], None, {})
        self.assertEqual(expected_ret, ret)
    def test_multi_resturls_success(self):
        """Volume creation succeeds with the fake client's multi-URL flag
        set (presumably exercising REST URL failover -- see fake client)."""
        self.driver.client.test_multi_url_flag = True
        lun_info = self.driver.create_volume(self.volume)
        self.assertEqual('1', lun_info['provider_location'])
    def test_get_id_from_result(self):
        """_get_id_from_result returns the ID of the entry whose `key`
        field equals `name`, or None when no entry matches."""
        # Empty result: no match.
        result = {}
        name = 'test_name'
        key = 'NAME'
        re = self.driver.client._get_id_from_result(result, name, key)
        self.assertIsNone(re)
        # 'data' present but empty: no match.
        result = {'data': {}}
        re = self.driver.client._get_id_from_result(result, name, key)
        self.assertIsNone(re)
        # Entries lack the 'NAME' key entirely: no match.
        result = {'data': [{'COUNT': 1, 'ID': '1'},
                           {'COUNT': 2, 'ID': '2'}]}
        re = self.driver.client._get_id_from_result(result, name, key)
        self.assertIsNone(re)
        # Names present but none equal: no match.
        result = {'data': [{'NAME': 'test_name1', 'ID': '1'},
                           {'NAME': 'test_name2', 'ID': '2'}]}
        re = self.driver.client._get_id_from_result(result, name, key)
        self.assertIsNone(re)
        # Exact name match: the matching entry's ID is returned.
        result = {'data': [{'NAME': 'test_name', 'ID': '1'},
                           {'NAME': 'test_name2', 'ID': '2'}]}
        re = self.driver.client._get_id_from_result(result, name, key)
        self.assertEqual('1', re)
    @mock.patch.object(rest_client.RestClient, 'get_pool_info',
                       return_value={'ID': 1,
                                     'CAPACITY': 110362624,
                                     'TOTALCAPACITY': 209715200})
    def test_get_capacity(self, mock_get_pool_info):
        """Raw pool capacity counts are converted to GiB (209715200,
        presumably 512-byte sectors, == 100 GiB -- TODO confirm units)."""
        expected_pool_capacity = {'total_capacity': 100.0,
                                  'free_capacity': 52.625}
        pool_capacity = self.driver.client._get_capacity(None,
                                                         None)
        self.assertEqual(expected_pool_capacity, pool_capacity)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value=fake_hypermetro_opts)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
return_value=True)
@mock.patch.object(hypermetro.HuaweiHyperMetro,
'_create_hypermetro_pair',
return_value={"ID": '11',
"NAME": 'hypermetro-pair'})
@mock.patch.object(rest_client.RestClient, 'logout',
return_value=None)
def test_create_hypermetro_success(self, mock_hypermetro_opts,
mock_login_return,
mock_all_pool_info,
mock_pool_info,
mock_hyper_domain,
mock_volume_ready,
mock_logout):
metadata = {"hypermetro_id": '11',
"remote_lun_id": '1'}
lun_info = self.driver.create_volume(self.hyper_volume)
self.assertEqual(metadata, lun_info['metadata'])
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value=fake_hypermetro_opts)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
return_value=True)
@mock.patch.object(rest_client.RestClient, 'create_hypermetro')
def test_create_hypermetro_fail(self,
pool_data,
mock_pair_info,
mock_hypermetro_opts,
mock_all_pool_info,
mock_pool_info,
mock_hyper_domain,
mock_volume_ready
):
self.driver.support_func = pool_data
mock_pair_info.side_effect = (
exception.VolumeBackendAPIException(data='Error occurred.'))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.metro.create_hypermetro, "11", {})
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '1'})
@mock.patch.object(rest_client.RestClient, 'do_mapping',
return_value={'lun_id': '1',
'view_id': '1',
'aval_luns': '[1]'})
def test_hypermetro_connection_success_2(self, mock_map, mock_metadata):
fc_properties = self.driver.metro.connect_volume_fc(self.volume,
FakeConnector)
self.assertEqual(1, fc_properties['data']['target_lun'])
    @mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
                       return_value={'hypermetro_id': '3400a30d844d0007',
                                     'remote_lun_id': '1'})
    def test_terminate_hypermetro_connection_success(self, mock_metradata):
        """Disconnecting an FC hypermetro volume completes without error."""
        self.driver.metro.disconnect_volume_fc(self.volume, FakeConnector)
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': None})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value=None)
def test_hypermetroid_none_fail(self, mock_metadata, moke_metro_name):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.metro.connect_volume_fc,
self.volume,
FakeConnector)
    @unittest.skip("Skip until bug #1578986 is fixed")
    def test_wait_volume_ready_success(self):
        """_wait_volume_ready returns None once the volume becomes ready."""
        flag = self.driver.metro._wait_volume_ready("11")
        self.assertIsNone(flag)
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '1'})
@mock.patch.object(rest_client.RestClient, 'get_online_free_wwns',
return_value=[])
@mock.patch.object(rest_client.RestClient, 'get_host_iscsi_initiators',
return_value=[])
def test_hypermetro_connection_fail(self, mock_metadata,
mock_fc_initiator,
mock_host_initiators):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.metro.connect_volume_fc,
self.volume,
FakeConnector)
    def test_create_snapshot_fail_hypermetro(self):
        """Creating a volume from snapshot raises for a volume type that
        combines replication with hypermetro."""
        self.mock_object(
            huawei_driver.HuaweiBaseDriver,
            '_get_volume_type',
            return_value={'extra_specs': replica_hypermetro_specs})
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_volume_from_snapshot,
                          self.volume, self.snapshot)
    def test_create_snapshot_fail_no_snapshot_id(self):
        """Raises when the snapshot id cannot be resolved: provider_location
        is unset and the lookup by name returns None."""
        self.snapshot.provider_location = None
        self.mock_object(rest_client.RestClient, 'get_snapshot_id_by_name',
                         return_value=None)
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_volume_from_snapshot,
                          self.volume, self.snapshot)
    @mock.patch.object(rest_client.RestClient, 'call',
                       return_value={"data": [{"RUNNINGSTATUS": "27",
                                               "ID": '1'},
                                              {"RUNNINGSTATUS": "26",
                                               "ID": '2'}],
                                     "error": {"code": 0}})
    def test_get_online_free_wwns(self, mock_call):
        """Only initiators with RUNNINGSTATUS '27' (per the test name:
        online and free) are returned; status '26' is filtered out."""
        wwns = self.driver.client.get_online_free_wwns()
        self.assertEqual(['1'], wwns)
    @mock.patch.object(rest_client.RestClient, 'call',
                       return_value={"data": {"ID": 1}, "error": {"code": 0}})
    def test_rename_lun(self, mock_call):
        """rename_lun issues exactly one PUT to /lun/<id> carrying the new
        NAME and DESCRIPTION."""
        des = 'This LUN is renamed.'
        new_name = 'test_name'
        self.driver.client.rename_lun('1', new_name, des)
        self.assertEqual(1, mock_call.call_count)
        url = "/lun/1"
        data = {"NAME": new_name, "DESCRIPTION": des}
        mock_call.assert_called_once_with(url, data, "PUT")
    @mock.patch.object(rest_client.RestClient, 'call',
                       return_value={"data": {}})
    def test_is_host_associated_to_hostgroup_no_data(self, mock_call):
        """A reply without the ISADD2HOSTGROUP field means not associated."""
        res = self.driver.client.is_host_associated_to_hostgroup('1')
        self.assertFalse(res)
    @mock.patch.object(rest_client.RestClient, 'call',
                       return_value={"data": {'ISADD2HOSTGROUP': 'true'}})
    def test_is_host_associated_to_hostgroup_true(self, mock_call):
        """ISADD2HOSTGROUP == 'true' is reported as associated."""
        res = self.driver.client.is_host_associated_to_hostgroup('1')
        self.assertTrue(res)
    @mock.patch.object(rest_client.RestClient, 'call',
                       return_value={"data": {'ISADD2HOSTGROUP': 'false'}})
    def test_is_host_associated_to_hostgroup_false(self, mock_call):
        """ISADD2HOSTGROUP == 'false' is reported as not associated."""
        res = self.driver.client.is_host_associated_to_hostgroup('1')
        self.assertFalse(res)
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "true"})
def test_create_hypermetro_consistencygroup_success(self, mock_grouptype):
ctxt = context.get_admin_context()
# Create consistency group
model_update = self.driver.create_consistencygroup(ctxt, self.cg)
self.assertEqual('available',
model_update['status'],
"Consistency Group created failed")
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "false"})
def test_create_normal_consistencygroup_success(self,
mock_grouptype):
ctxt = context.get_admin_context()
# Create consistency group
model_update = self.driver.create_consistencygroup(ctxt, self.cg)
self.assertEqual('available',
model_update['status'],
"Consistency Group created failed")
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "true"})
def test_delete_hypermetro_consistencygroup_success(self, mock_grouptype):
test_volumes = [self.volume]
ctxt = context.get_admin_context()
# Create consistency group
model, volumes = self.driver.delete_consistencygroup(ctxt,
self.cg,
test_volumes)
self.assertEqual('available',
model['status'],
"Consistency Group created failed")
def test_delete_normal_consistencygroup_success(self):
ctxt = context.get_admin_context()
test_volumes = [self.volume]
self.mock_object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "false"})
model, volumes = self.driver.delete_consistencygroup(ctxt,
self.cg,
test_volumes)
self.assertEqual('available',
model['status'],
"Consistency Group created failed")
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "true"})
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '59'})
def test_update_consistencygroup_success(self,
mock_grouptype,
mock_metadata):
ctxt = context.get_admin_context()
add_volumes = [self.volume]
remove_volumes = [self.volume]
# Create consistency group
model_update = self.driver.update_consistencygroup(ctxt,
self.cg,
add_volumes,
remove_volumes)
self.assertEqual('available',
model_update[0]['status'],
"Consistency Group update failed")
def test_create_hypermetro_consistencygroup_success_2(self):
ctxt = context.get_admin_context()
# Create consistency group
temp_cg = copy.deepcopy(self.cg)
temp_cg['volume_type_id'] = '550c089b-bfdd-4f7f-86e1-3ba88125555c,'
self.mock_object(volume_types, 'get_volume_type',
return_value=test_hypermetro_type)
model_update = self.driver.create_consistencygroup(ctxt, temp_cg)
self.assertEqual('available',
model_update['status'],
"Consistency Group created failed")
    def test_is_initiator_associated_to_host_raise(self):
        """Querying an initiator/host pair the fake backend rejects raises
        VolumeBackendAPIException."""
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.client.is_initiator_associated_to_host,
                          'ini-2', '1')
    def test_is_initiator_associated_to_host_true(self):
        """The association check reflects whether the initiator belongs to
        the given host ('ini-1'/'1' -> False, 'ini-2'/'2' -> True)."""
        ret = self.driver.client.is_initiator_associated_to_host('ini-1', '1')
        self.assertFalse(ret)
        ret = self.driver.client.is_initiator_associated_to_host('ini-2', '2')
        self.assertTrue(ret)
class HuaweiConfTestCase(test.TestCase):
    """Tests for huawei_conf.HuaweiConf driven by a generated XML file."""
    def setUp(self):
        """Creates a temp dir and points HuaweiConf at a fake XML path.

        NOTE(review): tmp_dir is never removed in the code visible here --
        consider registering a cleanup (e.g. addCleanup + shutil.rmtree).
        """
        super(HuaweiConfTestCase, self).setUp()
        self.tmp_dir = tempfile.mkdtemp()
        self.fake_xml_file = self.tmp_dir + '/cinder_huawei_conf.xml'
        self.conf = mock.Mock()
        self.conf.cinder_huawei_conf_file = self.fake_xml_file
        self.huawei_conf = huawei_conf.HuaweiConf(self.conf)
    def _create_fake_conf_file(self):
        """Writes a minimal cinder_huawei_conf.xml to the fake path.

        The document contains the <Storage>, <LUN> and <iSCSI> sections
        the driver configuration parser reads.
        """
        doc = minidom.Document()
        config = doc.createElement('config')
        doc.appendChild(config)
        # <Storage>: REST endpoint, credentials, product and protocol.
        storage = doc.createElement('Storage')
        config.appendChild(storage)
        url = doc.createElement('RestURL')
        url_text = doc.createTextNode('http://192.0.2.69:8082/'
                                      'deviceManager/rest/')
        url.appendChild(url_text)
        storage.appendChild(url)
        username = doc.createElement('UserName')
        username_text = doc.createTextNode('admin')
        username.appendChild(username_text)
        storage.appendChild(username)
        password = doc.createElement('UserPassword')
        password_text = doc.createTextNode('Admin@storage')
        password.appendChild(password_text)
        storage.appendChild(password)
        product = doc.createElement('Product')
        product_text = doc.createTextNode('V3')
        product.appendChild(product_text)
        storage.appendChild(product)
        protocol = doc.createElement('Protocol')
        protocol_text = doc.createTextNode('iSCSI')
        protocol.appendChild(protocol_text)
        storage.appendChild(protocol)
        # <LUN>: provisioning type, wait intervals, copy settings and pool.
        lun = doc.createElement('LUN')
        config.appendChild(lun)
        luntype = doc.createElement('LUNType')
        luntype_text = doc.createTextNode('Thick')
        luntype.appendChild(luntype_text)
        lun.appendChild(luntype)
        lun_ready_wait_interval = doc.createElement('LUNReadyWaitInterval')
        lun_ready_wait_interval_text = doc.createTextNode('2')
        lun_ready_wait_interval.appendChild(lun_ready_wait_interval_text)
        lun.appendChild(lun_ready_wait_interval)
        lun_copy_wait_interval = doc.createElement('LUNcopyWaitInterval')
        lun_copy_wait_interval_text = doc.createTextNode('2')
        lun_copy_wait_interval.appendChild(lun_copy_wait_interval_text)
        lun.appendChild(lun_copy_wait_interval)
        timeout = doc.createElement('Timeout')
        timeout_text = doc.createTextNode('43200')
        timeout.appendChild(timeout_text)
        lun.appendChild(timeout)
        write_type = doc.createElement('WriteType')
        write_type_text = doc.createTextNode('1')
        write_type.appendChild(write_type_text)
        lun.appendChild(write_type)
        mirror_switch = doc.createElement('MirrorSwitch')
        mirror_switch_text = doc.createTextNode('1')
        mirror_switch.appendChild(mirror_switch_text)
        lun.appendChild(mirror_switch)
        prefetch = doc.createElement('Prefetch')
        prefetch.setAttribute('Type', '1')
        prefetch.setAttribute('Value', '0')
        lun.appendChild(prefetch)
        pool = doc.createElement('StoragePool')
        pool_text = doc.createTextNode('OpenStack_Pool')
        pool.appendChild(pool_text)
        lun.appendChild(pool)
        # <iSCSI>: default target IP plus one initiator with CHAP/ALUA.
        iscsi = doc.createElement('iSCSI')
        config.appendChild(iscsi)
        defaulttargetip = doc.createElement('DefaultTargetIP')
        defaulttargetip_text = doc.createTextNode('192.0.2.68')
        defaulttargetip.appendChild(defaulttargetip_text)
        iscsi.appendChild(defaulttargetip)
        initiator = doc.createElement('Initiator')
        initiator.setAttribute('Name', 'iqn.1993-08.debian:01:ec2bff7ac3a3')
        initiator.setAttribute('TargetIP', '192.0.2.2')
        initiator.setAttribute('CHAPinfo', 'mm-user;mm-user@storage')
        initiator.setAttribute('ALUA', '1')
        initiator.setAttribute('TargetPortGroup', 'PortGroup001')
        iscsi.appendChild(initiator)
        fakefile = open(self.conf.cinder_huawei_conf_file, 'w')
        fakefile.write(doc.toprettyxml(indent=''))
        fakefile.close()
| true
| true
|
f71a9b18087b06ded86cb08a3f7326b177d08f3e
| 60,291
|
py
|
Python
|
presubmit_support.py
|
AndersonYangOh/depot_tools
|
7d9d9233cb657e968831238346917b0b64abb4c1
|
[
"BSD-3-Clause"
] | null | null | null |
presubmit_support.py
|
AndersonYangOh/depot_tools
|
7d9d9233cb657e968831238346917b0b64abb4c1
|
[
"BSD-3-Clause"
] | null | null | null |
presubmit_support.py
|
AndersonYangOh/depot_tools
|
7d9d9233cb657e968831238346917b0b64abb4c1
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Enables directory-specific presubmit checks to run at upload and/or commit.
"""
__version__ = '1.8.0'
# TODO(joi) Add caching where appropriate/needed. The API is designed to allow
# caching (between all different invocations of presubmit scripts for a given
# change). We should add it as our presubmit scripts start feeling slow.
import ast # Exposed through the API.
import cpplint
import cPickle # Exposed through the API.
import cStringIO # Exposed through the API.
import contextlib
import fnmatch # Exposed through the API.
import glob
import inspect
import itertools
import json # Exposed through the API.
import logging
import marshal # Exposed through the API.
import multiprocessing
import optparse
import os # Somewhat exposed through the API.
import pickle # Exposed through the API.
import random
import re # Exposed through the API.
import sys # Parts exposed through API.
import tempfile # Exposed through the API.
import time
import traceback # Exposed through the API.
import types
import unittest # Exposed through the API.
import urllib2 # Exposed through the API.
import urlparse
from warnings import warn
# Local imports.
import auth
import fix_encoding
import gclient_utils
import git_footers
import gerrit_util
import owners
import owners_finder
import presubmit_canned_checks
import rietveld
import scm
import subprocess2 as subprocess # Exposed through the API.
# Ask for feedback only once in program lifetime.
_ASKED_FOR_FEEDBACK = False
class PresubmitFailure(Exception):
  """Exception raised for presubmit failures."""
  pass
class CommandData(object):
  """Plain value object describing one external command to run.

  Attributes:
    name: Human-readable label for the command.
    cmd: Argument list to execute.
    kwargs: Extra keyword arguments for the process launcher.
    message: Message/result type to emit if the command fails.
    info: Filled in later by the runner; starts out as None.
  """

  def __init__(self, name, cmd, kwargs, message):
    self.name = name
    self.cmd = cmd
    self.kwargs = kwargs
    self.message = message
    self.info = None
def normpath(path):
  """Normalizes *path*, converting OS-specific separators to '/'.

  Like os.path.normpath, except backslashes are first rewritten to
  forward slashes when not running on Windows.
  """
  # Unconditional replacement is safe: on Windows os.path.normpath
  # re-introduces backslashes anyway.
  return os.path.normpath(path.replace(os.sep, '/'))
def _RightHandSideLinesImpl(affected_files):
"""Implements RightHandSideLines for InputApi and GclChange."""
for af in affected_files:
lines = af.ChangedContents()
for line in lines:
yield (af, line[0], line[1])
class PresubmitOutput(object):
  """Accumulates presubmit output and tracks how many checks failed."""

  def __init__(self, input_stream=None, output_stream=None):
    self.input_stream = input_stream
    self.output_stream = output_stream
    self.reviewers = []
    self.more_cc = []
    self.written_output = []
    self.error_count = 0

  def prompt_yes_no(self, prompt_string):
    """Shows *prompt_string*; records a failure unless the user answers yes.

    With no input stream attached, the prompt always counts as a failure.
    """
    self.write(prompt_string)
    if not self.input_stream:
      self.fail()
      return
    answer = self.input_stream.readline().strip().lower()
    if answer not in ('y', 'yes'):
      self.fail()

  def fail(self):
    """Records one failed check."""
    self.error_count += 1

  def should_continue(self):
    """True while no check has failed."""
    return self.error_count == 0

  def write(self, s):
    """Appends *s* to the captured output and echoes it downstream."""
    self.written_output.append(s)
    if self.output_stream:
      self.output_stream.write(s)

  def getvalue(self):
    """Returns everything written so far as one string."""
    return ''.join(self.written_output)
# Top level object so multiprocessing can pickle
# Public access through OutputApi object.
class _PresubmitResult(object):
"""Base class for result objects."""
fatal = False
should_prompt = False
def __init__(self, message, items=None, long_text=''):
"""
message: A short one-line message to indicate errors.
items: A list of short strings to indicate where errors occurred.
long_text: multi-line text output, e.g. from another tool
"""
self._message = message
self._items = items or []
if items:
self._items = items
self._long_text = long_text.rstrip()
def handle(self, output):
output.write(self._message)
output.write('\n')
for index, item in enumerate(self._items):
output.write(' ')
# Write separately in case it's unicode.
output.write(str(item))
if index < len(self._items) - 1:
output.write(' \\')
output.write('\n')
if self._long_text:
output.write('\n***************\n')
# Write separately in case it's unicode.
output.write(self._long_text)
output.write('\n***************\n')
if self.fatal:
output.fail()
# Top level object so multiprocessing can pickle
# Public access through OutputApi object.
class _PresubmitError(_PresubmitResult):
  """A hard presubmit error."""
  # fatal=True makes _PresubmitResult.handle() call output.fail().
  fatal = True
# Top level object so multiprocessing can pickle
# Public access through OutputApi object.
class _PresubmitPromptWarning(_PresubmitResult):
  """A warning that prompts the user if they want to continue."""
  should_prompt = True
# Top level object so multiprocessing can pickle
# Public access through OutputApi object.
class _PresubmitNotifyResult(_PresubmitResult):
  """Just print something to the screen -- but it's not even a warning."""
  # Inherits fatal=False and should_prompt=False: purely informational.
  pass
# Top level object so multiprocessing can pickle
# Public access through OutputApi object.
class _MailTextResult(_PresubmitResult):
  """A warning that should be included in the review request email."""
  def __init__(self, *args, **kwargs):
    # Forward the arguments so that instantiation raises the intended
    # NotImplementedError. The original no-arg super() call hit a
    # TypeError first, because the base __init__ requires 'message'.
    super(_MailTextResult, self).__init__(*args, **kwargs)
    raise NotImplementedError()
class GerritAccessor(object):
  """Limited Gerrit functionality for canned presubmit checks to work.

  To avoid excessive Gerrit calls, caches the results.
  """

  def __init__(self, host):
    # Gerrit host name; per-issue details are cached in self.cache
    # keyed by int(issue).
    self.host = host
    self.cache = {}

  def _FetchChangeDetail(self, issue):
    """Fetches change detail from Gerrit (404 becomes a friendlier error)."""
    # Separate function to be easily mocked in tests.
    try:
      return gerrit_util.GetChangeDetail(
          self.host, str(issue),
          ['ALL_REVISIONS', 'DETAILED_LABELS', 'ALL_COMMITS'])
    except gerrit_util.GerritError as e:
      if e.http_status == 404:
        raise Exception('Either Gerrit issue %s doesn\'t exist, or '
                        'no credentials to fetch issue details' % issue)
      raise

  def GetChangeInfo(self, issue):
    """Returns labels and all revisions (patchsets) for this issue.

    The result is a dictionary according to Gerrit REST Api.
    https://gerrit-review.googlesource.com/Documentation/rest-api.html

    However, API isn't very clear what's inside, so see tests for example.
    """
    assert issue
    cache_key = int(issue)
    if cache_key not in self.cache:
      self.cache[cache_key] = self._FetchChangeDetail(issue)
    return self.cache[cache_key]

  def GetChangeDescription(self, issue, patchset=None):
    """Returns the commit message of a patchset.

    If patchset is none, fetches current patchset.
    """
    info = self.GetChangeInfo(issue)
    # info is a reference to cache. We'll modify it here adding description to
    # it to the right patchset, if it is not yet there.
    # Find revision info for the patchset we want.
    # NOTE(review): .iteritems() is Python 2 only.
    if patchset is not None:
      for rev, rev_info in info['revisions'].iteritems():
        if str(rev_info['_number']) == str(patchset):
          break
      else:
        raise Exception('patchset %s doesn\'t exist in issue %s' % (
            patchset, issue))
    else:
      rev = info['current_revision']
      rev_info = info['revisions'][rev]
    return rev_info['commit']['message']

  def GetChangeOwner(self, issue):
    """Returns the email address of the change owner."""
    return self.GetChangeInfo(issue)['owner']['email']

  def GetChangeReviewers(self, issue, approving_only=True):
    """Returns reviewer emails; only the top Code-Review voters when
    approving_only is True, otherwise everyone listed as REVIEWER."""
    changeinfo = self.GetChangeInfo(issue)
    if approving_only:
      labelinfo = changeinfo.get('labels', {}).get('Code-Review', {})
      values = labelinfo.get('values', {}).keys()
      try:
        max_value = max(int(v) for v in values)
        reviewers = [r for r in labelinfo.get('all', [])
                     if r.get('value', 0) == max_value]
      except ValueError:  # values is the empty list
        reviewers = []
    else:
      reviewers = changeinfo.get('reviewers', {}).get('REVIEWER', [])
    return [r.get('email') for r in reviewers]
class OutputApi(object):
  """An instance of OutputApi gets passed to presubmit scripts so that they
  can output various types of results.
  """
  PresubmitResult = _PresubmitResult
  PresubmitError = _PresubmitError
  PresubmitPromptWarning = _PresubmitPromptWarning
  PresubmitNotifyResult = _PresubmitNotifyResult
  MailTextResult = _MailTextResult

  def __init__(self, is_committing):
    # True when the presubmit runs at commit time (vs. upload time).
    self.is_committing = is_committing
    # Extra addresses to cc on the change.
    self.more_cc = []

  def AppendCC(self, cc):
    """Appends a user to cc for this change."""
    self.more_cc.append(cc)

  def PresubmitPromptOrNotify(self, *args, **kwargs):
    """Warn the user when uploading, but only notify if committing."""
    if self.is_committing:
      return self.PresubmitNotifyResult(*args, **kwargs)
    return self.PresubmitPromptWarning(*args, **kwargs)

  def EnsureCQIncludeTrybotsAreAdded(self, cl, bots_to_include, message):
    """Helper for any PostUploadHook wishing to add CQ_INCLUDE_TRYBOTS.

    Merges the bots_to_include into the current CQ_INCLUDE_TRYBOTS list,
    keeping it alphabetically sorted. Returns the results that should be
    returned from the PostUploadHook.

    Args:
      cl: The git_cl.Changelist object.
      bots_to_include: A list of strings of bots to include, in the form
        "master:slave".
      message: A message to be printed in the case that
        CQ_INCLUDE_TRYBOTS was updated.
    """
    description = cl.GetDescription(force=True)
    include_re = re.compile(r'^CQ_INCLUDE_TRYBOTS=(.*)$', re.M | re.I)
    # Collect the bots already requested: Gerrit footers for Gerrit CLs,
    # Rietveld-style CQ_INCLUDE_TRYBOTS= description lines otherwise.
    prior_bots = []
    if cl.IsGerrit():
      trybot_footers = git_footers.parse_footers(description).get(
          git_footers.normalize_name('Cq-Include-Trybots'), [])
      for f in trybot_footers:
        prior_bots += [b.strip() for b in f.split(';') if b.strip()]
    else:
      trybot_tags = include_re.finditer(description)
      for t in trybot_tags:
        prior_bots += [b.strip() for b in t.group(1).split(';') if b.strip()]
    # Nothing to do when every requested bot is already listed.
    if set(prior_bots) >= set(bots_to_include):
      return []
    all_bots = ';'.join(sorted(set(prior_bots) | set(bots_to_include)))
    if cl.IsGerrit():
      description = git_footers.remove_footer(
          description, 'Cq-Include-Trybots')
      description = git_footers.add_footer(
          description, 'Cq-Include-Trybots', all_bots,
          before_keys=['Change-Id'])
    else:
      new_include_trybots = 'CQ_INCLUDE_TRYBOTS=%s' % all_bots
      m = include_re.search(description)
      if m:
        description = include_re.sub(new_include_trybots, description)
      else:
        description = '%s\n%s\n' % (description, new_include_trybots)
    cl.UpdateDescription(description, force=True)
    return [self.PresubmitNotifyResult(message)]
class InputApi(object):
  """An instance of this object is passed to presubmit scripts so they can
  know stuff about the change they're looking at.
  """
  # Method could be a function
  # pylint: disable=no-self-use

  # File extensions that are considered source files from a style guide
  # perspective. Don't modify this list from a presubmit script!
  #
  # Files without an extension aren't included in the list. If you want to
  # filter them as source files, add r"(^|.*?[\\\/])[^.]+$" to the white list.
  # Note that ALL CAPS files are black listed in DEFAULT_BLACK_LIST below.
  DEFAULT_WHITE_LIST = (
      # C++ and friends
      r".+\.c$", r".+\.cc$", r".+\.cpp$", r".+\.h$", r".+\.m$", r".+\.mm$",
      r".+\.inl$", r".+\.asm$", r".+\.hxx$", r".+\.hpp$", r".+\.s$", r".+\.S$",
      # Scripts
      r".+\.js$", r".+\.py$", r".+\.sh$", r".+\.rb$", r".+\.pl$", r".+\.pm$",
      # Other
      r".+\.java$", r".+\.mk$", r".+\.am$", r".+\.css$"
  )

  # Path regexp that should be excluded from being considered containing source
  # files. Don't modify this list from a presubmit script!
  DEFAULT_BLACK_LIST = (
      r"testing_support[\\\/]google_appengine[\\\/].*",
      r".*\bexperimental[\\\/].*",
      # Exclude third_party/.* but NOT third_party/WebKit (crbug.com/539768).
      r".*\bthird_party[\\\/](?!WebKit[\\\/]).*",
      # Output directories (just in case)
      r".*\bDebug[\\\/].*",
      r".*\bRelease[\\\/].*",
      r".*\bxcodebuild[\\\/].*",
      r".*\bout[\\\/].*",
      # All caps files like README and LICENCE.
      r".*\b[A-Z0-9_]{2,}$",
      # SCM (can happen in dual SCM configuration). (Slightly over aggressive)
      r"(|.*[\\\/])\.git[\\\/].*",
      r"(|.*[\\\/])\.svn[\\\/].*",
      # There is no point in processing a patch file.
      r".+\.diff$",
      r".+\.patch$",
  )

  def __init__(self, change, presubmit_path, is_committing,
               rietveld_obj, verbose, gerrit_obj=None, dry_run=None):
    """Builds an InputApi object.

    Args:
      change: A presubmit.Change object.
      presubmit_path: The path to the presubmit script being processed.
      is_committing: True if the change is about to be committed.
      rietveld_obj: rietveld.Rietveld client object
      gerrit_obj: provides basic Gerrit codereview functionality.
      dry_run: if true, some Checks will be skipped.
    """
    # Version number of the presubmit_support script.
    self.version = [int(x) for x in __version__.split('.')]
    self.change = change
    self.is_committing = is_committing
    self.rietveld = rietveld_obj
    self.gerrit = gerrit_obj
    self.dry_run = dry_run

    # TBD
    self.host_url = 'http://codereview.chromium.org'
    if self.rietveld:
      self.host_url = self.rietveld.url

    # We expose various modules and functions as attributes of the input_api
    # so that presubmit scripts don't have to import them.
    self.ast = ast
    self.basename = os.path.basename
    self.cPickle = cPickle
    self.cpplint = cpplint
    self.cStringIO = cStringIO
    self.fnmatch = fnmatch
    self.glob = glob.glob
    self.json = json
    self.logging = logging.getLogger('PRESUBMIT')
    self.os_listdir = os.listdir
    self.os_walk = os.walk
    self.os_path = os.path
    self.os_stat = os.stat
    self.pickle = pickle
    self.marshal = marshal
    self.re = re
    self.subprocess = subprocess
    self.tempfile = tempfile
    self.time = time
    self.traceback = traceback
    self.unittest = unittest
    self.urllib2 = urllib2

    # To easily fork python.
    self.python_executable = sys.executable
    self.environ = os.environ

    # InputApi.platform is the platform you're currently running on.
    self.platform = sys.platform

    self.cpu_count = multiprocessing.cpu_count()

    # this is done here because in RunTests, the current working directory has
    # changed, which causes Pool() to explode fantastically when run on windows
    # (because it tries to load the __main__ module, which imports lots of
    # things relative to the current working directory).
    self._run_tests_pool = multiprocessing.Pool(self.cpu_count)

    # The local path of the currently-being-processed presubmit script.
    self._current_presubmit_path = os.path.dirname(presubmit_path)

    # We carry the canned checks so presubmit scripts can easily use them.
    self.canned_checks = presubmit_canned_checks

    # Temporary files we must manually remove at the end of a run.
    self._named_temporary_files = []

    # TODO(dpranke): figure out a list of all approved owners for a repo
    # in order to be able to handle wildcard OWNERS files?
    self.owners_db = owners.Database(change.RepositoryRoot(),
                                     fopen=file, os_path=self.os_path)
    self.owners_finder = owners_finder.OwnersFinder
    self.verbose = verbose
    self.is_windows = sys.platform == 'win32'

    # Type of command data accepted by RunTests().
    self.Command = CommandData

    # Replace <hash_map> and <hash_set> as headers that need to be included
    # with "base/containers/hash_tables.h" instead.
    # Access to a protected member _XX of a client class
    # pylint: disable=protected-access
    self.cpplint._re_pattern_templates = [
        (a, b, 'base/containers/hash_tables.h')
        if header in ('<hash_map>', '<hash_set>') else (a, b, header)
        for (a, b, header) in cpplint._re_pattern_templates
    ]

  def PresubmitLocalPath(self):
    """Returns the local path of the presubmit script currently being run.

    This is useful if you don't want to hard-code absolute paths in the
    presubmit script. For example, It can be used to find another file
    relative to the PRESUBMIT.py script, so the whole tree can be branched and
    the presubmit script still works, without editing its content.
    """
    return self._current_presubmit_path

  def AffectedFiles(self, include_deletes=True, file_filter=None):
    """Same as input_api.change.AffectedFiles() except only lists files
    (and optionally directories) in the same directory as the current presubmit
    script, or subdirectories thereof.
    """
    dir_with_slash = normpath("%s/" % self.PresubmitLocalPath())
    # A single character here is just the path separator, i.e. the presubmit
    # script presumably sits at the repository root; match every file.
    if len(dir_with_slash) == 1:
      dir_with_slash = ''

    return filter(
        lambda x: normpath(x.AbsoluteLocalPath()).startswith(dir_with_slash),
        self.change.AffectedFiles(include_deletes, file_filter))

  def LocalPaths(self):
    """Returns local paths of input_api.AffectedFiles()."""
    paths = [af.LocalPath() for af in self.AffectedFiles()]
    logging.debug("LocalPaths: %s", paths)
    return paths

  def AbsoluteLocalPaths(self):
    """Returns absolute local paths of input_api.AffectedFiles()."""
    return [af.AbsoluteLocalPath() for af in self.AffectedFiles()]

  def AffectedTestableFiles(self, include_deletes=None):
    """Same as input_api.change.AffectedTestableFiles() except only lists files
    in the same directory as the current presubmit script, or subdirectories
    thereof.
    """
    if include_deletes is not None:
      warn("AffectedTestableFiles(include_deletes=%s)"
               " is deprecated and ignored" % str(include_deletes),
           category=DeprecationWarning,
           stacklevel=2)
    return filter(lambda x: x.IsTestableFile(),
                  self.AffectedFiles(include_deletes=False))

  def AffectedTextFiles(self, include_deletes=None):
    """An alias to AffectedTestableFiles for backwards compatibility."""
    return self.AffectedTestableFiles(include_deletes=include_deletes)

  def FilterSourceFile(self, affected_file, white_list=None, black_list=None):
    """Filters out files that aren't considered "source file".

    If white_list or black_list is None, InputApi.DEFAULT_WHITE_LIST
    and InputApi.DEFAULT_BLACK_LIST is used respectively.

    The lists will be compiled as regular expression and
    AffectedFile.LocalPath() needs to pass both list.

    Note: Copy-paste this function to suit your needs or use a lambda function.
    """
    def Find(affected_file, items):
      # True when any regexp in |items| matches the file's local path.
      local_path = affected_file.LocalPath()
      for item in items:
        if self.re.match(item, local_path):
          return True
      return False
    return (Find(affected_file, white_list or self.DEFAULT_WHITE_LIST) and
            not Find(affected_file, black_list or self.DEFAULT_BLACK_LIST))

  def AffectedSourceFiles(self, source_file):
    """Filter the list of AffectedTestableFiles by the function source_file.

    If source_file is None, InputApi.FilterSourceFile() is used.
    """
    if not source_file:
      source_file = self.FilterSourceFile
    return filter(source_file, self.AffectedTestableFiles())

  def RightHandSideLines(self, source_file_filter=None):
    """An iterator over all text lines in "new" version of changed files.

    Only lists lines from new or modified text files in the change that are
    contained by the directory of the currently executing presubmit script.

    This is useful for doing line-by-line regex checks, like checking for
    trailing whitespace.

    Yields:
      a 3 tuple:
        the AffectedFile instance of the current file;
        integer line number (1-based); and
        the contents of the line as a string.

    Note: The carriage return (LF or CR) is stripped off.
    """
    files = self.AffectedSourceFiles(source_file_filter)
    return _RightHandSideLinesImpl(files)

  def ReadFile(self, file_item, mode='r'):
    """Reads an arbitrary file.

    Deny reading anything outside the repository.
    """
    if isinstance(file_item, AffectedFile):
      file_item = file_item.AbsoluteLocalPath()
    if not file_item.startswith(self.change.RepositoryRoot()):
      raise IOError('Access outside the repository root is denied.')
    return gclient_utils.FileRead(file_item, mode)

  def CreateTemporaryFile(self, **kwargs):
    """Returns a named temporary file that must be removed with a call to
    RemoveTemporaryFiles().

    All keyword arguments are forwarded to tempfile.NamedTemporaryFile(),
    except for |delete|, which is always set to False.

    Presubmit checks that need to create a temporary file and pass it for
    reading should use this function instead of NamedTemporaryFile(), as
    Windows fails to open a file that is already open for writing.

      with input_api.CreateTemporaryFile() as f:
        f.write('xyz')
        f.close()
        input_api.subprocess.check_output(['script-that', '--reads-from',
                                           f.name])


    Note that callers of CreateTemporaryFile() should not worry about removing
    any temporary file; this is done transparently by the presubmit handling
    code.
    """
    if 'delete' in kwargs:
      # Prevent users from passing |delete|; we take care of file deletion
      # ourselves and this prevents unintuitive error messages when we pass
      # delete=False and 'delete' is also in kwargs.
      raise TypeError('CreateTemporaryFile() does not take a "delete" '
                      'argument, file deletion is handled automatically by '
                      'the same presubmit_support code that creates InputApi '
                      'objects.')
    temp_file = self.tempfile.NamedTemporaryFile(delete=False, **kwargs)
    self._named_temporary_files.append(temp_file.name)
    return temp_file

  @property
  def tbr(self):
    """Returns if a change is TBR'ed."""
    return 'TBR' in self.change.tags or self.change.TBRsFromDescription()

  def RunTests(self, tests_mix, parallel=True):
    """Runs the given commands, returning the non-empty result messages.

    |tests_mix| may contain ready-made OutputApi.PresubmitResult objects
    (passed through as-is) and command objects, which are executed via
    CallCommand -- in the worker pool when there is more than one and
    |parallel| is True.
    """
    tests = []
    msgs = []
    for t in tests_mix:
      if isinstance(t, OutputApi.PresubmitResult):
        msgs.append(t)
      else:
        assert issubclass(t.message, _PresubmitResult)
        tests.append(t)
        if self.verbose:
          t.info = _PresubmitNotifyResult
    if len(tests) > 1 and parallel:
      # async recipe works around multiprocessing bug handling Ctrl-C
      msgs.extend(self._run_tests_pool.map_async(CallCommand, tests).get(99999))
    else:
      msgs.extend(map(CallCommand, tests))
    return [m for m in msgs if m]

  def ShutdownPool(self):
    """Tears down the worker pool; call once after all RunTests() calls."""
    self._run_tests_pool.close()
    self._run_tests_pool.join()
    self._run_tests_pool = None
class _DiffCache(object):
"""Caches diffs retrieved from a particular SCM."""
def __init__(self, upstream=None):
"""Stores the upstream revision against which all diffs will be computed."""
self._upstream = upstream
def GetDiff(self, path, local_root):
"""Get the diff for a particular path."""
raise NotImplementedError()
def GetOldContents(self, path, local_root):
"""Get the old version for a particular path."""
raise NotImplementedError()
class _GitDiffCache(_DiffCache):
  """DiffCache implementation for git; gets all file diffs at once."""

  def __init__(self, upstream):
    """Caches diffs of all changed files against |upstream|, lazily."""
    super(_GitDiffCache, self).__init__(upstream=upstream)
    # Maps normalized path -> full diff text; populated on first GetDiff().
    self._diffs_by_file = None

  def GetDiff(self, path, local_root):
    """Returns the unified diff for |path|, computing all diffs on first use.

    Raises PresubmitFailure if |path| has no entry in the combined diff or
    if the diff output contains an unparseable 'diff --git' line.
    """
    if not self._diffs_by_file:
      # Compute a single diff for all files and parse the output; should
      # with git this is much faster than computing one diff for each file.
      diffs = {}

      # Don't specify any filenames below, because there are command line length
      # limits on some platforms and GenerateDiff would fail.
      unified_diff = scm.GIT.GenerateDiff(local_root, files=[], full_move=True,
                                          branch=self._upstream)

      # This regex matches the path twice, separated by a space. Note that
      # filename itself may contain spaces.
      file_marker = re.compile('^diff --git (?P<filename>.*) (?P=filename)$')
      current_diff = []
      keep_line_endings = True
      for x in unified_diff.splitlines(keep_line_endings):
        match = file_marker.match(x)
        if match:
          # Marks the start of a new per-file section.
          diffs[match.group('filename')] = current_diff = [x]
        elif x.startswith('diff --git'):
          raise PresubmitFailure('Unexpected diff line: %s' % x)
        else:
          current_diff.append(x)

      self._diffs_by_file = dict(
        (normpath(path), ''.join(diff)) for path, diff in diffs.items())

    if path not in self._diffs_by_file:
      raise PresubmitFailure(
          'Unified diff did not contain entry for file %s' % path)

    return self._diffs_by_file[path]

  def GetOldContents(self, path, local_root):
    """Returns |path|'s contents at the upstream revision."""
    return scm.GIT.GetOldContents(local_root, path, branch=self._upstream)
class AffectedFile(object):
  """Representation of a file in a change."""

  # DiffCache class used by GenerateScmDiff(); subclasses override with an
  # SCM-specific implementation.
  DIFF_CACHE = _DiffCache

  # Method could be a function
  # pylint: disable=no-self-use
  def __init__(self, path, action, repository_root, diff_cache):
    self._path = path
    # One-letter SCM action code (e.g. A, M, D).
    self._action = action
    self._local_root = repository_root
    self._is_directory = None
    # Lazy caches for ChangedContents() / NewContents().
    self._cached_changed_contents = None
    self._cached_new_contents = None
    self._diff_cache = diff_cache
    logging.debug('%s(%s)', self.__class__.__name__, self._path)

  def LocalPath(self):
    """Returns the path of this file on the local disk relative to client root.

    This should be used for error messages but not for accessing files,
    because presubmit checks are run with CWD=PresubmitLocalPath() (which is
    often != client root).
    """
    return normpath(self._path)

  def AbsoluteLocalPath(self):
    """Returns the absolute path of this file on the local disk.
    """
    return os.path.abspath(os.path.join(self._local_root, self.LocalPath()))

  def Action(self):
    """Returns the action on this opened file, e.g. A, M, D, etc."""
    return self._action

  def IsTestableFile(self):
    """Returns True if the file is a text file and not a binary file.

    Deleted files are not text file."""
    raise NotImplementedError()  # Implement when needed

  def IsTextFile(self):
    """An alias to IsTestableFile for backwards compatibility."""
    return self.IsTestableFile()

  def OldContents(self):
    """Returns an iterator over the lines in the old version of file.

    The old version is the file before any modifications in the user's
    workspace, i.e. the "left hand side".

    Contents will be empty if the file is a directory or does not exist.
    Note: The carriage returns (LF or CR) are stripped off.
    """
    return self._diff_cache.GetOldContents(self.LocalPath(),
                                           self._local_root).splitlines()

  def NewContents(self):
    """Returns an iterator over the lines in the new version of file.

    The new version is the file in the user's workspace, i.e. the "right hand
    side".

    Contents will be empty if the file is a directory or does not exist.
    Note: The carriage returns (LF or CR) are stripped off.
    """
    if self._cached_new_contents is None:
      self._cached_new_contents = []
      try:
        self._cached_new_contents = gclient_utils.FileRead(
            self.AbsoluteLocalPath(), 'rU').splitlines()
      except IOError:
        pass  # File not found? That's fine; maybe it was deleted.
    # Return a copy so callers cannot corrupt the cache.
    return self._cached_new_contents[:]

  def ChangedContents(self):
    """Returns a list of tuples (line number, line text) of all new lines.

     This relies on the scm diff output describing each changed code section
     with a line of the form

     ^@@ <old line num>,<old size> <new line num>,<new size> @@$
    """
    if self._cached_changed_contents is not None:
      return self._cached_changed_contents[:]
    self._cached_changed_contents = []
    line_num = 0

    for line in self.GenerateScmDiff().splitlines():
      # Hunk headers reset the running line number to the new-file position.
      m = re.match(r'^@@ [0-9\,\+\-]+ \+([0-9]+)\,[0-9]+ @@', line)
      if m:
        line_num = int(m.groups(1)[0])
        continue
      # '+' lines (but not the '+++' file header) are added content.
      if line.startswith('+') and not line.startswith('++'):
        self._cached_changed_contents.append((line_num, line[1:]))
      # Removed ('-') lines do not advance the new-file line counter.
      if not line.startswith('-'):
        line_num += 1
    return self._cached_changed_contents[:]

  def __str__(self):
    return self.LocalPath()

  def GenerateScmDiff(self):
    """Returns the SCM diff text for this file via the shared diff cache."""
    return self._diff_cache.GetDiff(self.LocalPath(), self._local_root)
class GitAffectedFile(AffectedFile):
  """A file changed within a git checkout."""
  # Method 'NNN' is abstract in class 'NNN' but is not overridden
  # pylint: disable=abstract-method
  DIFF_CACHE = _GitDiffCache

  def __init__(self, *args, **kwargs):
    AffectedFile.__init__(self, *args, **kwargs)
    self._server_path = None
    # Lazily computed by IsTestableFile(); None means "not yet determined".
    self._is_testable_file = None

  def IsTestableFile(self):
    """True when the file still exists on disk; deletions are untestable."""
    if self._is_testable_file is None:
      self._is_testable_file = (
          self.Action() != 'D' and
          os.path.isfile(self.AbsoluteLocalPath()))
    return self._is_testable_file
class Change(object):
  """Describe a change.

  Used directly by the presubmit scripts to query the current change being
  tested.

  Instance members:
    tags: Dictionary of KEY=VALUE pairs found in the change description.
    self.KEY: equivalent to tags['KEY']
  """
  # AffectedFile class instantiated for each changed file; subclasses
  # override with an SCM-specific class.
  _AFFECTED_FILES = AffectedFile

  # Matches key/value (or "tag") lines in changelist descriptions.
  TAG_LINE_RE = re.compile(
      '^[ \t]*(?P<key>[A-Z][A-Z_0-9]*)[ \t]*=[ \t]*(?P<value>.*?)[ \t]*$')
  # SCM name; empty in the base class, set by subclasses (e.g. 'git').
  scm = ''

  def __init__(
      self, name, description, local_root, files, issue, patchset, author,
      upstream=None):
    # |files| is a list of (action, path) two-tuples, validated below.
    if files is None:
      files = []
    self._name = name
    # Convert root into an absolute path.
    self._local_root = os.path.abspath(local_root)
    self._upstream = upstream
    self.issue = issue
    self.patchset = patchset
    self.author_email = author

    self._full_description = ''
    self.tags = {}
    self._description_without_tags = ''
    self.SetDescriptionText(description)

    assert all(
        (isinstance(f, (list, tuple)) and len(f) == 2) for f in files), files

    # One shared diff cache for all affected files of this change.
    diff_cache = self._AFFECTED_FILES.DIFF_CACHE(self._upstream)
    self._affected_files = [
        self._AFFECTED_FILES(path, action.strip(), self._local_root, diff_cache)
        for action, path in files
    ]

  def Name(self):
    """Returns the change name."""
    return self._name

  def DescriptionText(self):
    """Returns the user-entered changelist description, minus tags.

    Any line in the user-provided description starting with e.g. "FOO="
    (whitespace permitted before and around) is considered a tag line. Such
    lines are stripped out of the description this function returns.
    """
    return self._description_without_tags

  def FullDescriptionText(self):
    """Returns the complete changelist description including tags."""
    return self._full_description

  def SetDescriptionText(self, description):
    """Sets the full description text (including tags) to |description|.

    Also updates the list of tags."""
    self._full_description = description

    # From the description text, build up a dictionary of key/value pairs
    # plus the description minus all key/value or "tag" lines.
    description_without_tags = []
    self.tags = {}
    for line in self._full_description.splitlines():
      m = self.TAG_LINE_RE.match(line)
      if m:
        self.tags[m.group('key')] = m.group('value')
      else:
        description_without_tags.append(line)

    # Change back to text and remove whitespace at end.
    self._description_without_tags = (
        '\n'.join(description_without_tags).rstrip())

  def RepositoryRoot(self):
    """Returns the repository (checkout) root directory for this change,
    as an absolute path.
    """
    return self._local_root

  def __getattr__(self, attr):
    """Return tags directly as attributes on the object."""
    # Only ALL_CAPS names are treated as tag lookups; anything else is a
    # genuine missing attribute.
    if not re.match(r"^[A-Z_]*$", attr):
      raise AttributeError(self, attr)
    return self.tags.get(attr)

  def BugsFromDescription(self):
    """Returns all bugs referenced in the commit description."""
    tags = [b.strip() for b in self.tags.get('BUG', '').split(',') if b.strip()]
    footers = git_footers.parse_footers(self._full_description).get('Bug', [])
    return sorted(set(tags + footers))

  def ReviewersFromDescription(self):
    """Returns all reviewers listed in the commit description."""
    # We don't support a "R:" git-footer for reviewers; that is in metadata.
    tags = [r.strip() for r in self.tags.get('R', '').split(',') if r.strip()]
    return sorted(set(tags))

  def TBRsFromDescription(self):
    """Returns all TBR reviewers listed in the commit description."""
    tags = [r.strip() for r in self.tags.get('TBR', '').split(',') if r.strip()]
    # TODO(agable): Remove support for 'Tbr:' when TBRs are programmatically
    # determined by self-CR+1s.
    footers = git_footers.parse_footers(self._full_description).get('Tbr', [])
    return sorted(set(tags + footers))

  # TODO(agable): Delete these once we're sure they're unused.
  @property
  def BUG(self):
    return ','.join(self.BugsFromDescription())
  @property
  def R(self):
    return ','.join(self.ReviewersFromDescription())
  @property
  def TBR(self):
    return ','.join(self.TBRsFromDescription())

  def AllFiles(self, root=None):
    """List all files under source control in the repo."""
    raise NotImplementedError()

  def AffectedFiles(self, include_deletes=True, file_filter=None):
    """Returns a list of AffectedFile instances for all files in the change.

    Args:
      include_deletes: If false, deleted files will be filtered out.
      file_filter: An additional filter to apply.

    Returns:
      [AffectedFile(path, action), AffectedFile(path, action)]
    """
    affected = filter(file_filter, self._affected_files)

    if include_deletes:
      return affected
    return filter(lambda x: x.Action() != 'D', affected)

  def AffectedTestableFiles(self, include_deletes=None):
    """Return a list of the existing text files in a change."""
    if include_deletes is not None:
      warn("AffectedTeestableFiles(include_deletes=%s)"
               " is deprecated and ignored" % str(include_deletes),
           category=DeprecationWarning,
           stacklevel=2)
    return filter(lambda x: x.IsTestableFile(),
                  self.AffectedFiles(include_deletes=False))

  def AffectedTextFiles(self, include_deletes=None):
    """An alias to AffectedTestableFiles for backwards compatibility."""
    return self.AffectedTestableFiles(include_deletes=include_deletes)

  def LocalPaths(self):
    """Convenience function."""
    return [af.LocalPath() for af in self.AffectedFiles()]

  def AbsoluteLocalPaths(self):
    """Convenience function."""
    return [af.AbsoluteLocalPath() for af in self.AffectedFiles()]

  def RightHandSideLines(self):
    """An iterator over all text lines in "new" version of changed files.

    Lists lines from new or modified text files in the change.

    This is useful for doing line-by-line regex checks, like checking for
    trailing whitespace.

    Yields:
      a 3 tuple:
        the AffectedFile instance of the current file;
        integer line number (1-based); and
        the contents of the line as a string.
    """
    return _RightHandSideLinesImpl(
        x for x in self.AffectedFiles(include_deletes=False)
        if x.IsTestableFile())

  def OriginalOwnersFiles(self):
    """A map from path names of affected OWNERS files to their old content."""
    def owners_file_filter(f):
      return 'OWNERS' in os.path.split(f.LocalPath())[1]
    files = self.AffectedFiles(file_filter=owners_file_filter)
    return dict([(f.LocalPath(), f.OldContents()) for f in files])
class GitChange(Change):
  """A Change backed by a git checkout."""
  _AFFECTED_FILES = GitAffectedFile
  scm = 'git'

  def AllFiles(self, root=None):
    """List all files under source control in the repo."""
    root = root or self.RepositoryRoot()
    cmd = ['git', 'ls-files', '--', '.']
    return subprocess.check_output(cmd, cwd=root).splitlines()
def ListRelevantPresubmitFiles(files, root):
  """Finds all presubmit files that apply to a given set of source files.

  If inherit-review-settings-ok is present right under root, looks for
  PRESUBMIT.py in directories enclosing root.

  Args:
    files: An iterable container containing file paths.
    root: Path where to stop searching.

  Return:
    List of absolute paths of the existing PRESUBMIT.py scripts.
  """
  # Normalize each file against |root| and keep only the set of directories
  # that contain at least one affected file.
  directories = set(
      os.path.dirname(normpath(os.path.join(root, f))) for f in files)

  # When inherit-review-settings-ok exists, clear |root| so the walk below
  # continues past it into enclosing directories.
  if os.path.isfile(os.path.join(root, 'inherit-review-settings-ok')):
    root = None

  # Walk each directory upward toward |root| (or the filesystem root),
  # recording every unique directory that may hold a PRESUBMIT.py.
  candidates = set()
  for directory in directories:
    while directory not in candidates:
      candidates.add(directory)
      if directory == root:
        break
      parent = os.path.dirname(directory)
      if parent == directory:
        # We hit the system root directory.
        break
      directory = parent

  # Look for PRESUBMIT*.py scripts in every candidate directory, skipping
  # the unit-test files (PRESUBMIT_test*).
  results = []
  for directory in sorted(candidates):
    try:
      entries = os.listdir(directory)
    except OSError:
      continue
    for entry in entries:
      full_path = os.path.join(directory, entry)
      if (os.path.isfile(full_path) and
          re.match(r'PRESUBMIT.*\.py$', entry) and
          not entry.startswith('PRESUBMIT_test')):
        results.append(full_path)

  logging.debug('Presubmit files: %s', ','.join(results))
  return results
class GetTryMastersExecuter(object):
  @staticmethod
  def ExecPresubmitScript(script_text, presubmit_path, project, change):
    """Executes GetPreferredTryMasters() from a single presubmit script.

    Args:
      script_text: The text of the presubmit script.
      presubmit_path: Project script to run.
      project: Project name to pass to presubmit script for bot selection.

    Return:
      A map of try masters to map of builders to set of tests.
    """
    # Run the script in a fresh namespace; any exception it raises during
    # definition is reported as a PresubmitFailure.
    context = {}
    try:
      exec script_text in context
    except Exception, e:
      raise PresubmitFailure('"%s" had an exception.\n%s'
                             % (presubmit_path, e))

    function_name = 'GetPreferredTryMasters'
    if function_name not in context:
      # Script doesn't define the hook; nothing to contribute.
      return {}
    get_preferred_try_masters = context[function_name]
    # Enforce the expected (project, change) signature before calling.
    if not len(inspect.getargspec(get_preferred_try_masters)[0]) == 2:
      raise PresubmitFailure(
          'Expected function "GetPreferredTryMasters" to take two arguments.')
    return get_preferred_try_masters(project, change)
class GetPostUploadExecuter(object):
  @staticmethod
  def ExecPresubmitScript(script_text, presubmit_path, cl, change):
    """Executes PostUploadHook() from a single presubmit script.

    Args:
      script_text: The text of the presubmit script.
      presubmit_path: Project script to run.
      cl: The Changelist object.
      change: The Change object.

    Return:
      A list of results objects.
    """
    # Run the script in a fresh namespace; any exception it raises during
    # definition is reported as a PresubmitFailure.
    context = {}
    try:
      exec script_text in context
    except Exception, e:
      raise PresubmitFailure('"%s" had an exception.\n%s'
                             % (presubmit_path, e))

    function_name = 'PostUploadHook'
    if function_name not in context:
      # NOTE(review): returns {} although the docstring promises a list;
      # callers extend() with it, so an empty dict happens to behave like
      # an empty list here -- confirm before changing.
      return {}
    post_upload_hook = context[function_name]
    # Enforce the expected (cl, change, output_api) signature before calling.
    if not len(inspect.getargspec(post_upload_hook)[0]) == 3:
      raise PresubmitFailure(
          'Expected function "PostUploadHook" to take three arguments.')
    return post_upload_hook(cl, change, OutputApi(False))
def _MergeMasters(masters1, masters2):
"""Merges two master maps. Merges also the tests of each builder."""
result = {}
for (master, builders) in itertools.chain(masters1.iteritems(),
masters2.iteritems()):
new_builders = result.setdefault(master, {})
for (builder, tests) in builders.iteritems():
new_builders.setdefault(builder, set([])).update(tests)
return result
def DoGetTryMasters(change,
                    changed_files,
                    repository_root,
                    default_presubmit,
                    project,
                    verbose,
                    output_stream):
  """Get the list of try masters from the presubmit scripts.

  Args:
    changed_files: List of modified files.
    repository_root: The repository root.
    default_presubmit: A default presubmit script to execute in any case.
    project: Optional name of a project used in selecting trybots.
    verbose: Prints debug info.
    output_stream: A stream to write debug output to.

  Return:
    Map of try masters to map of builders to set of tests.
  """
  presubmit_files = ListRelevantPresubmitFiles(changed_files, repository_root)
  if not presubmit_files and verbose:
    output_stream.write("Warning, no PRESUBMIT.py found.\n")
  results = {}
  executer = GetTryMastersExecuter()

  # The default presubmit, if any, runs first under a synthetic path at the
  # repository root.
  if default_presubmit:
    if verbose:
      output_stream.write("Running default presubmit script.\n")
    fake_path = os.path.join(repository_root, 'PRESUBMIT.py')
    results = _MergeMasters(results, executer.ExecPresubmitScript(
        default_presubmit, fake_path, project, change))
  # Then every discovered PRESUBMIT.py contributes its masters, merged in.
  for filename in presubmit_files:
    filename = os.path.abspath(filename)
    if verbose:
      output_stream.write("Running %s\n" % filename)
    # Accept CRLF presubmit script.
    presubmit_script = gclient_utils.FileRead(filename, 'rU')
    results = _MergeMasters(results, executer.ExecPresubmitScript(
        presubmit_script, filename, project, change))

  # Make sets to lists again for later JSON serialization.
  for builders in results.itervalues():
    for builder in builders:
      builders[builder] = list(builders[builder])

  if results and verbose:
    output_stream.write('%s\n' % str(results))
  return results
def DoPostUploadExecuter(change,
                         cl,
                         repository_root,
                         verbose,
                         output_stream):
  """Execute the post upload hook.

  Args:
    change: The Change object.
    cl: The Changelist object.
    repository_root: The repository root.
    verbose: Prints debug info.
    output_stream: A stream to write debug output to.

  Returns:
    The list of result objects produced by the hooks.
  """
  presubmit_files = ListRelevantPresubmitFiles(
      change.LocalPaths(), repository_root)
  if not presubmit_files and verbose:
    output_stream.write("Warning, no PRESUBMIT.py found.\n")
  results = []
  executer = GetPostUploadExecuter()
  # The root presubmit file should be executed after the ones in subdirectories.
  # i.e. the specific post upload hooks should run before the general ones.
  # Thus, reverse the order provided by ListRelevantPresubmitFiles.
  presubmit_files.reverse()

  for filename in presubmit_files:
    filename = os.path.abspath(filename)
    if verbose:
      output_stream.write("Running %s\n" % filename)
    # Accept CRLF presubmit script.
    presubmit_script = gclient_utils.FileRead(filename, 'rU')
    results.extend(executer.ExecPresubmitScript(
        presubmit_script, filename, cl, change))
  output_stream.write('\n')
  if results:
    output_stream.write('** Post Upload Hook Messages **\n')
  # Each result renders itself onto the stream.
  for result in results:
    result.handle(output_stream)
    output_stream.write('\n')

  return results
class PresubmitExecuter(object):
  def __init__(self, change, committing, rietveld_obj, verbose,
               gerrit_obj=None, dry_run=None):
    """
    Args:
      change: The Change object.
      committing: True if 'git cl land' is running, False if 'git cl upload' is.
      rietveld_obj: rietveld.Rietveld client object.
      gerrit_obj: provides basic Gerrit codereview functionality.
      dry_run: if true, some Checks will be skipped.
    """
    self.change = change
    self.committing = committing
    self.rietveld = rietveld_obj
    self.gerrit = gerrit_obj
    self.verbose = verbose
    self.dry_run = dry_run
    # Accumulated extra CC addresses from the scripts' OutputApi objects.
    self.more_cc = []

  def ExecPresubmitScript(self, script_text, presubmit_path):
    """Executes a single presubmit script.

    Args:
      script_text: The text of the presubmit script.
      presubmit_path: The path to the presubmit file (this will be reported via
        input_api.PresubmitLocalPath()).

    Return:
      A list of result objects, empty if no problems.
    """

    # Change to the presubmit file's directory to support local imports.
    main_path = os.getcwd()
    os.chdir(os.path.dirname(presubmit_path))

    # Load the presubmit script into context.
    input_api = InputApi(self.change, presubmit_path, self.committing,
                         self.rietveld, self.verbose,
                         gerrit_obj=self.gerrit, dry_run=self.dry_run)
    output_api = OutputApi(self.committing)
    context = {}
    try:
      exec script_text in context
    except Exception, e:
      raise PresubmitFailure('"%s" had an exception.\n%s' % (presubmit_path, e))

    # These function names must change if we make substantial changes to
    # the presubmit API that are not backwards compatible.
    if self.committing:
      function_name = 'CheckChangeOnCommit'
    else:
      function_name = 'CheckChangeOnUpload'
    if function_name in context:
      try:
        context['__args'] = (input_api, output_api)
        logging.debug('Running %s in %s', function_name, presubmit_path)
        result = eval(function_name + '(*__args)', context)
        logging.debug('Running %s done.', function_name)
        self.more_cc = output_api.more_cc
      finally:
        # Clean up the temporary files the script created via
        # input_api.CreateTemporaryFile(), even on failure.
        map(os.remove, input_api._named_temporary_files)
      if not (isinstance(result, types.TupleType) or
              isinstance(result, types.ListType)):
        raise PresubmitFailure(
          'Presubmit functions must return a tuple or list')
      for item in result:
        if not isinstance(item, OutputApi.PresubmitResult):
          raise PresubmitFailure(
            'All presubmit results must be of types derived from '
            'output_api.PresubmitResult')
    else:
      result = ()  # no error since the script doesn't care about current event.

    input_api.ShutdownPool()

    # Return the process to the original working directory.
    os.chdir(main_path)
    return result
def DoPresubmitChecks(change,
                      committing,
                      verbose,
                      output_stream,
                      input_stream,
                      default_presubmit,
                      may_prompt,
                      rietveld_obj,
                      gerrit_obj=None,
                      dry_run=None):
  """Runs all presubmit checks that apply to the files in the change.

  This finds all PRESUBMIT.py files in directories enclosing the files in the
  change (up to the repository root) and calls the relevant entrypoint function
  depending on whether the change is being committed or uploaded.

  Prints errors, warnings and notifications.  Prompts the user for warnings
  when needed.

  Args:
    change: The Change object.
    committing: True if 'git cl land' is running, False if 'git cl upload' is.
    verbose: Prints debug info.
    output_stream: A stream to write output from presubmit tests to.
    input_stream: A stream to read input from the user.
    default_presubmit: A default presubmit script to execute in any case.
    may_prompt: Enable (y/n) questions on warning or error. If False,
                any questions are answered with yes by default.
    rietveld_obj: rietveld.Rietveld object.
    gerrit_obj: provides basic Gerrit codereview functionality.
    dry_run: if true, some Checks will be skipped.

  Warning:
    If may_prompt is true, output_stream SHOULD be sys.stdout and input_stream
    SHOULD be sys.stdin.

  Return:
    A PresubmitOutput object. Use output.should_continue() to figure out
    if there were errors or warnings and the caller should abort.
  """
  old_environ = os.environ
  try:
    # Make sure python subprocesses won't generate .pyc files.
    os.environ = os.environ.copy()
    os.environ['PYTHONDONTWRITEBYTECODE'] = '1'
    output = PresubmitOutput(input_stream, output_stream)
    if committing:
      output.write("Running presubmit commit checks ...\n")
    else:
      output.write("Running presubmit upload checks ...\n")
    start_time = time.time()
    presubmit_files = ListRelevantPresubmitFiles(
        change.AbsoluteLocalPaths(), change.RepositoryRoot())
    if not presubmit_files and verbose:
      output.write("Warning, no PRESUBMIT.py found.\n")
    results = []
    executer = PresubmitExecuter(change, committing, rietveld_obj, verbose,
                                 gerrit_obj, dry_run)
    if default_presubmit:
      if verbose:
        output.write("Running default presubmit script.\n")
      fake_path = os.path.join(change.RepositoryRoot(), 'PRESUBMIT.py')
      results += executer.ExecPresubmitScript(default_presubmit, fake_path)
    for filename in presubmit_files:
      filename = os.path.abspath(filename)
      if verbose:
        output.write("Running %s\n" % filename)
      # Accept CRLF presubmit script.
      presubmit_script = gclient_utils.FileRead(filename, 'rU')
      results += executer.ExecPresubmitScript(presubmit_script, filename)
    output.more_cc.extend(executer.more_cc)
    # Partition the results by severity so they can be reported grouped.
    errors = []
    notifications = []
    warnings = []
    for result in results:
      if result.fatal:
        errors.append(result)
      elif result.should_prompt:
        warnings.append(result)
      else:
        notifications.append(result)
    output.write('\n')
    for name, items in (('Messages', notifications),
                        ('Warnings', warnings),
                        ('ERRORS', errors)):
      if items:
        output.write('** Presubmit %s **\n' % name)
        for item in items:
          item.handle(output)
          output.write('\n')
    total_time = time.time() - start_time
    if total_time > 1.0:
      output.write("Presubmit checks took %.1fs to calculate.\n\n" % total_time)
    if errors:
      output.fail()
    elif warnings:
      output.write('There were presubmit warnings. ')
      if may_prompt:
        output.prompt_yes_no('Are you sure you wish to continue? (y/N): ')
    else:
      output.write('Presubmit checks passed.\n')
    global _ASKED_FOR_FEEDBACK
    # Ask for feedback one time out of 5.
    if (len(results) and random.randint(0, 4) == 0 and not _ASKED_FOR_FEEDBACK):
      output.write(
          'Was the presubmit check useful? If not, run "git cl presubmit -v"\n'
          'to figure out which PRESUBMIT.py was run, then run git blame\n'
          'on the file to figure out who to ask for help.\n')
      _ASKED_FOR_FEEDBACK = True
    return output
  finally:
    # Always restore the original environment, even when a check blows up.
    os.environ = old_environ
def ScanSubDirs(mask, recursive):
  """Returns paths matching |mask|, either shallowly or via a full tree walk.

  SCM metadata directories ('.svn', '.git') are never returned and, in
  recursive mode, never descended into.
  """
  if not recursive:
    # Shallow scan: let glob expand the mask relative to the current dir.
    return [match for match in glob.glob(mask)
            if match not in ('.svn', '.git')]
  found = []
  for root, dirs, files in os.walk('.'):
    # Prune SCM metadata directories in place so os.walk skips them.
    for scm_dir in ('.svn', '.git'):
      if scm_dir in dirs:
        dirs.remove(scm_dir)
    found.extend(os.path.join(root, name)
                 for name in files if fnmatch.fnmatch(name, mask))
  return found
def ParseFiles(args, recursive):
  """Expands each mask in |args| into ('M', path) status tuples."""
  logging.debug('Searching for %s', args)
  found = []
  for mask in args:
    found.extend(('M', match) for match in ScanSubDirs(mask, recursive))
  return found
def load_files(options, args):
  """Tries to determine the SCM and collects the affected files.

  Returns a (change_class, files) tuple, or (None, None) when the
  directory is unversioned and no files were given on the command line.
  """
  files = ParseFiles(args, options.recursive) if args else []
  if scm.determine_scm(options.root) == 'git':
    if not files:
      files = scm.GIT.CaptureStatus([], options.root,
                                    options.upstream or None)
    return GitChange, files
  logging.info('Doesn\'t seem under source control. Got %d files', len(args))
  if not files:
    return None, None
  return Change, files
class NonexistantCannedCheckFilter(Exception):
  """Raised when --skip_canned names a check that presubmit_canned_checks
  does not define.  (Note: historical misspelling of "nonexistent".)"""
  pass
@contextlib.contextmanager
def canned_check_filter(method_names):
  """Temporarily replaces the named canned checks with no-op stubs.

  Implements --skip_canned: inside the |with| block each named
  presubmit_canned_checks function returns [] (no results).  The real
  functions are restored on exit, even if an exception was raised.

  Raises:
    NonexistantCannedCheckFilter: if a name is not a canned check.
  """
  filtered = {}
  try:
    for method_name in method_names:
      if not hasattr(presubmit_canned_checks, method_name):
        raise NonexistantCannedCheckFilter(method_name)
      # Remember the real implementation, then stub it out.
      filtered[method_name] = getattr(presubmit_canned_checks, method_name)
      setattr(presubmit_canned_checks, method_name, lambda *_a, **_kw: [])
    yield
  finally:
    # Restore the original implementations.
    for name, method in filtered.iteritems():
      setattr(presubmit_canned_checks, name, method)
def CallCommand(cmd_data):
  """Runs an external program, potentially from a child process created by the
  multiprocessing module.

  multiprocessing needs a top level function with a single argument.

  Returns a cmd_data.message result on exec failure or non-zero exit, a
  cmd_data.info result on success when info is set (verbose mode), and
  None for a silent success.
  """
  # Capture both streams merged together so failures show full output.
  cmd_data.kwargs['stdout'] = subprocess.PIPE
  cmd_data.kwargs['stderr'] = subprocess.STDOUT
  try:
    start = time.time()
    (out, _), code = subprocess.communicate(cmd_data.cmd, **cmd_data.kwargs)
    duration = time.time() - start
  except OSError as e:
    duration = time.time() - start
    return cmd_data.message(
        '%s exec failure (%4.2fs)\n %s' % (cmd_data.name, duration, e))
  if code != 0:
    return cmd_data.message(
        '%s (%4.2fs) failed\n%s' % (cmd_data.name, duration, out))
  if cmd_data.info:
    return cmd_data.info('%s (%4.2fs)' % (cmd_data.name, duration))
def main(argv=None):
  """Runs presubmit checks standalone from the command line.

  Returns a falsy value when all checks pass, and a truthy value (True
  or 2) on failure, suitable for use as the process exit code.
  """
  parser = optparse.OptionParser(usage="%prog [options] <files...>",
                                 version="%prog " + str(__version__))
  parser.add_option("-c", "--commit", action="store_true", default=False,
                    help="Use commit instead of upload checks")
  parser.add_option("-u", "--upload", action="store_false", dest='commit',
                    help="Use upload instead of commit checks")
  parser.add_option("-r", "--recursive", action="store_true",
                    help="Act recursively")
  parser.add_option("-v", "--verbose", action="count", default=0,
                    help="Use 2 times for more debug info")
  parser.add_option("--name", default='no name')
  parser.add_option("--author")
  parser.add_option("--description", default='')
  parser.add_option("--issue", type='int', default=0)
  parser.add_option("--patchset", type='int', default=0)
  parser.add_option("--root", default=os.getcwd(),
                    help="Search for PRESUBMIT.py up to this directory. "
                    "If inherit-review-settings-ok is present in this "
                    "directory, parent directories up to the root file "
                    "system directories will also be searched.")
  parser.add_option("--upstream",
                    help="Git only: the base ref or upstream branch against "
                    "which the diff should be computed.")
  parser.add_option("--default_presubmit")
  parser.add_option("--may_prompt", action='store_true', default=False)
  parser.add_option("--skip_canned", action='append', default=[],
                    help="A list of checks to skip which appear in "
                    "presubmit_canned_checks. Can be provided multiple times "
                    "to skip multiple canned checks.")
  parser.add_option("--dry_run", action='store_true',
                    help=optparse.SUPPRESS_HELP)
  parser.add_option("--gerrit_url", help=optparse.SUPPRESS_HELP)
  parser.add_option("--gerrit_fetch", action='store_true',
                    help=optparse.SUPPRESS_HELP)
  parser.add_option("--rietveld_url", help=optparse.SUPPRESS_HELP)
  parser.add_option("--rietveld_email", help=optparse.SUPPRESS_HELP)
  parser.add_option("--rietveld_fetch", action='store_true', default=False,
                    help=optparse.SUPPRESS_HELP)
  # These are for OAuth2 authentication for bots. See also apply_issue.py
  parser.add_option("--rietveld_email_file", help=optparse.SUPPRESS_HELP)
  parser.add_option("--rietveld_private_key_file", help=optparse.SUPPRESS_HELP)
  auth.add_auth_options(parser)
  options, args = parser.parse_args(argv)
  auth_config = auth.extract_auth_config_from_options(options)
  if options.verbose >= 2:
    logging.basicConfig(level=logging.DEBUG)
  elif options.verbose:
    logging.basicConfig(level=logging.INFO)
  else:
    logging.basicConfig(level=logging.ERROR)
  # Rietveld and Gerrit options are mutually exclusive.
  if (any((options.rietveld_url, options.rietveld_email_file,
           options.rietveld_fetch, options.rietveld_private_key_file))
      and any((options.gerrit_url, options.gerrit_fetch))):
    parser.error('Options for only codereview --rietveld_* or --gerrit_* '
                 'allowed')
  if options.rietveld_email and options.rietveld_email_file:
    parser.error("Only one of --rietveld_email or --rietveld_email_file "
                 "can be passed to this program.")
  if options.rietveld_email_file:
    with open(options.rietveld_email_file, "rb") as f:
      options.rietveld_email = f.read().strip()
  change_class, files = load_files(options, args)
  if not change_class:
    parser.error('For unversioned directory, <files> is not optional.')
  logging.info('Found %d file(s).', len(files))
  rietveld_obj, gerrit_obj = None, None
  if options.rietveld_url:
    # The empty password is permitted: '' is not None.
    if options.rietveld_private_key_file:
      rietveld_obj = rietveld.JwtOAuth2Rietveld(
          options.rietveld_url,
          options.rietveld_email,
          options.rietveld_private_key_file)
    else:
      rietveld_obj = rietveld.CachingRietveld(
          options.rietveld_url,
          auth_config,
          options.rietveld_email)
    if options.rietveld_fetch:
      # Pull author/description from the Rietveld issue.
      assert options.issue
      props = rietveld_obj.get_issue_properties(options.issue, False)
      options.author = props['owner_email']
      options.description = props['description']
      logging.info('Got author: "%s"', options.author)
      logging.info('Got description: """\n%s\n"""', options.description)
  if options.gerrit_url and options.gerrit_fetch:
    # Pull author/description from the Gerrit change.
    assert options.issue and options.patchset
    rietveld_obj = None
    gerrit_obj = GerritAccessor(urlparse.urlparse(options.gerrit_url).netloc)
    options.author = gerrit_obj.GetChangeOwner(options.issue)
    options.description = gerrit_obj.GetChangeDescription(options.issue,
                                                          options.patchset)
    logging.info('Got author: "%s"', options.author)
    logging.info('Got description: """\n%s\n"""', options.description)
  try:
    with canned_check_filter(options.skip_canned):
      results = DoPresubmitChecks(
          change_class(options.name,
                       options.description,
                       options.root,
                       files,
                       options.issue,
                       options.patchset,
                       options.author,
                       upstream=options.upstream),
          options.commit,
          options.verbose,
          sys.stdout,
          sys.stdin,
          options.default_presubmit,
          options.may_prompt,
          rietveld_obj,
          gerrit_obj,
          options.dry_run)
    return not results.should_continue()
  except NonexistantCannedCheckFilter, e:
    print >> sys.stderr, (
      'Attempted to skip nonexistent canned presubmit check: %s' % e.message)
    return 2
  except PresubmitFailure, e:
    print >> sys.stderr, e
    print >> sys.stderr, 'Maybe your depot_tools is out of date?'
    print >> sys.stderr, 'If all fails, contact maruel@'
    return 2
if __name__ == '__main__':
  fix_encoding.fix_encoding()
  try:
    sys.exit(main())
  except KeyboardInterrupt:
    sys.stderr.write('interrupted\n')
    # Exit code 2 mirrors the error paths in main().
    sys.exit(2)
| 35.675148
| 80
| 0.675209
|
"""Enables directory-specific presubmit checks to run at upload and/or commit.
"""
__version__ = '1.8.0'
import ast
import cpplint
import cPickle
import cStringIO
import contextlib
import fnmatch
import glob
import inspect
import itertools
import json
import logging
import marshal
import multiprocessing
import optparse
import os
import pickle
import random
import re
import sys
import tempfile
import time
import traceback
import types
import unittest
import urllib2
import urlparse
from warnings import warn
import auth
import fix_encoding
import gclient_utils
import git_footers
import gerrit_util
import owners
import owners_finder
import presubmit_canned_checks
import rietveld
import scm
import subprocess2 as subprocess
# Module-level flag ensuring the "was this useful?" feedback nag in
# DoPresubmitChecks() is shown at most once per process.
_ASKED_FOR_FEEDBACK = False
class PresubmitFailure(Exception):
  """Raised when a PRESUBMIT.py script fails to load, run, or misbehaves."""
  pass
class CommandData(object):
  """Value object describing one external command for RunTests/CallCommand.

  |message| is the result class used when the command fails; |info| is
  filled in later (by InputApi.RunTests in verbose mode) with the result
  class used for success output, and starts out as None.
  """

  def __init__(self, name, cmd, kwargs, message):
    self.name, self.cmd = name, cmd
    self.kwargs, self.message = kwargs, message
    self.info = None
def normpath(path):
  """os.path.normpath variant that first maps the platform separator to '/'.

  On POSIX the replace is a no-op; on Windows it turns backslashes into
  forward slashes before normalizing, so paths compare consistently.
  """
  return os.path.normpath(path.replace(os.sep, '/'))
def _RightHandSideLinesImpl(affected_files):
"""Implements RightHandSideLines for InputApi and GclChange."""
for af in affected_files:
lines = af.ChangedContents()
for line in lines:
yield (af, line[0], line[1])
class PresubmitOutput(object):
  """Buffers presubmit output and tracks whether any check failed."""

  def __init__(self, input_stream=None, output_stream=None):
    self.input_stream = input_stream
    self.output_stream = output_stream
    self.reviewers = []
    self.more_cc = []
    self.written_output = []
    self.error_count = 0

  def prompt_yes_no(self, prompt_string):
    """Asks the user to confirm; anything but y/yes counts as a failure."""
    self.write(prompt_string)
    if not self.input_stream:
      # Non-interactive: treat an unanswerable prompt as a refusal.
      self.fail()
      return
    answer = self.input_stream.readline().strip().lower()
    if answer not in ('y', 'yes'):
      self.fail()

  def fail(self):
    """Records one more failed check."""
    self.error_count += 1

  def should_continue(self):
    """True while no check has failed."""
    return self.error_count == 0

  def write(self, s):
    """Buffers |s| and forwards it to the output stream, if any."""
    self.written_output.append(s)
    if self.output_stream:
      self.output_stream.write(s)

  def getvalue(self):
    """Returns everything written so far as one string."""
    return ''.join(self.written_output)
class _PresubmitResult(object):
"""Base class for result objects."""
fatal = False
should_prompt = False
def __init__(self, message, items=None, long_text=''):
"""
message: A short one-line message to indicate errors.
items: A list of short strings to indicate where errors occurred.
long_text: multi-line text output, e.g. from another tool
"""
self._message = message
self._items = items or []
if items:
self._items = items
self._long_text = long_text.rstrip()
def handle(self, output):
output.write(self._message)
output.write('\n')
for index, item in enumerate(self._items):
output.write(' ')
output.write(str(item))
if index < len(self._items) - 1:
output.write(' \\')
output.write('\n')
if self._long_text:
output.write('\n***************\n')
# Write separately in case it's unicode.
output.write(self._long_text)
output.write('\n***************\n')
if self.fatal:
output.fail()
class _PresubmitError(_PresubmitResult):
  """A hard presubmit error; fatal results make DoPresubmitChecks fail."""
  fatal = True
class _PresubmitPromptWarning(_PresubmitResult):
  """A warning that prompts the user if they want to continue."""
  should_prompt = True
class _PresubmitNotifyResult(_PresubmitResult):
  """Just print something to the screen -- but it's not even a warning.

  Neither fatal nor prompting: purely informational output.
  """
  pass
# Top level object so multiprocessing can pickle
# Public access through OutputApi object.
class _MailTextResult(_PresubmitResult):
  """A warning that should be included in the review request email.

  Deprecated: constructing one always raises NotImplementedError.
  """
  def __init__(self, *args, **kwargs):
    # Raise immediately.  The previous code first called the base __init__
    # with no arguments, which raised TypeError (|message| is required)
    # and masked the intended NotImplementedError.
    raise NotImplementedError()
class GerritAccessor(object):
  """Limited Gerrit functionality for canned presubmit checks to work.

  To avoid excessive Gerrit calls, caches the results.
  """

  def __init__(self, host):
    # host: Gerrit host netloc, e.g. 'chromium-review.googlesource.com'.
    self.host = host
    # Maps int issue number -> change detail dict fetched from Gerrit.
    self.cache = {}

  def _FetchChangeDetail(self, issue):
    # Separate function to be easily mocked in tests.
    try:
      return gerrit_util.GetChangeDetail(
          self.host, str(issue),
          ['ALL_REVISIONS', 'DETAILED_LABELS', 'ALL_COMMITS'])
    except gerrit_util.GerritError as e:
      if e.http_status == 404:
        raise Exception('Either Gerrit issue %s doesn\'t exist, or '
                        'no credentials to fetch issue details' % issue)
      raise

  def GetChangeInfo(self, issue):
    """Returns labels and all revisions (patchsets) for this issue.

    The result is a dictionary according to Gerrit REST Api.
    https://gerrit-review.googlesource.com/Documentation/rest-api.html

    However, API isn't very clear what's inside, so see tests for example.
    """
    assert issue
    cache_key = int(issue)
    if cache_key not in self.cache:
      self.cache[cache_key] = self._FetchChangeDetail(issue)
    return self.cache[cache_key]

  def GetChangeDescription(self, issue, patchset=None):
    """If patchset is none, fetches current patchset."""
    info = self.GetChangeInfo(issue)
    # Find revision info for the patchset we want.
    if patchset is not None:
      for rev, rev_info in info['revisions'].iteritems():
        if str(rev_info['_number']) == str(patchset):
          break
      else:
        raise Exception('patchset %s doesn\'t exist in issue %s' % (
            patchset, issue))
    else:
      rev = info['current_revision']
      rev_info = info['revisions'][rev]
    return rev_info['commit']['message']

  def GetChangeOwner(self, issue):
    """Returns the email address of the change owner."""
    return self.GetChangeInfo(issue)['owner']['email']

  def GetChangeReviewers(self, issue, approving_only=True):
    """Returns reviewer emails; with approving_only, only those who voted
    the maximum Code-Review value."""
    changeinfo = self.GetChangeInfo(issue)
    if approving_only:
      labelinfo = changeinfo.get('labels', {}).get('Code-Review', {})
      values = labelinfo.get('values', {}).keys()
      try:
        max_value = max(int(v) for v in values)
        reviewers = [r for r in labelinfo.get('all', [])
                     if r.get('value', 0) == max_value]
      except ValueError:  # values is the empty list
        reviewers = []
    else:
      reviewers = changeinfo.get('reviewers', {}).get('REVIEWER', [])
    return [r.get('email') for r in reviewers]
class OutputApi(object):
  """An instance of OutputApi gets passed to presubmit scripts so that they
  can output various types of results.
  """
  # Aliases so presubmit scripts can reach the result classes without
  # importing this module.
  PresubmitResult = _PresubmitResult
  PresubmitError = _PresubmitError
  PresubmitPromptWarning = _PresubmitPromptWarning
  PresubmitNotifyResult = _PresubmitNotifyResult
  MailTextResult = _MailTextResult

  def __init__(self, is_committing):
    self.is_committing = is_committing
    # Extra addresses to CC on the review, collected by AppendCC().
    self.more_cc = []

  def AppendCC(self, cc):
    """Appends a user to cc for this change."""
    self.more_cc.append(cc)

  def PresubmitPromptOrNotify(self, *args, **kwargs):
    """Warn the user when uploading, but only notify if committing."""
    if self.is_committing:
      return self.PresubmitNotifyResult(*args, **kwargs)
    return self.PresubmitPromptWarning(*args, **kwargs)

  def EnsureCQIncludeTrybotsAreAdded(self, cl, bots_to_include, message):
    """Helper for any PostUploadHook wishing to add CQ_INCLUDE_TRYBOTS.

    Merges the bots_to_include into the current CQ_INCLUDE_TRYBOTS list,
    keeping it alphabetically sorted. Returns the results that should be
    returned from the PostUploadHook.

    Args:
      cl: The git_cl.Changelist object.
      bots_to_include: A list of strings of bots to include, in the form
        "master:slave".
      message: A message to be printed in the case that
        CQ_INCLUDE_TRYBOTS was updated.
    """
    description = cl.GetDescription(force=True)
    include_re = re.compile(r'^CQ_INCLUDE_TRYBOTS=(.*)$', re.M | re.I)
    # Gather the bots already listed: Gerrit stores them as commit-message
    # footers, Rietveld as CQ_INCLUDE_TRYBOTS= lines in the description.
    prior_bots = []
    if cl.IsGerrit():
      trybot_footers = git_footers.parse_footers(description).get(
          git_footers.normalize_name('Cq-Include-Trybots'), [])
      for f in trybot_footers:
        prior_bots += [b.strip() for b in f.split(';') if b.strip()]
    else:
      trybot_tags = include_re.finditer(description)
      for t in trybot_tags:
        prior_bots += [b.strip() for b in t.group(1).split(';') if b.strip()]
    # Nothing to do when every requested bot is already present.
    if set(prior_bots) >= set(bots_to_include):
      return []
    all_bots = ';'.join(sorted(set(prior_bots) | set(bots_to_include)))
    if cl.IsGerrit():
      description = git_footers.remove_footer(
          description, 'Cq-Include-Trybots')
      description = git_footers.add_footer(
          description, 'Cq-Include-Trybots', all_bots,
          before_keys=['Change-Id'])
    else:
      new_include_trybots = 'CQ_INCLUDE_TRYBOTS=%s' % all_bots
      m = include_re.search(description)
      if m:
        description = include_re.sub(new_include_trybots, description)
      else:
        description = '%s\n%s\n' % (description, new_include_trybots)
    cl.UpdateDescription(description, force=True)
    return [self.PresubmitNotifyResult(message)]
class InputApi(object):
  """An instance of this object is passed to presubmit scripts so they can
  know stuff about the change they're looking at.
  """
  # Method could be a function
  # pylint: disable=no-self-use

  # File extensions that are considered source files from a style guide
  # perspective. Don't modify this list from a presubmit script!
  #
  # Files without an extension aren't included in the list. If you want to
  # filter them as source files, add r"(^|.*?[\\\/])[^.]+$" to the white list.
  # Note that ALL CAPS files are black listed in DEFAULT_BLACK_LIST below.
  DEFAULT_WHITE_LIST = (
      # C++ and friends
      r".+\.c$", r".+\.cc$", r".+\.cpp$", r".+\.h$", r".+\.m$", r".+\.mm$",
      r".+\.inl$", r".+\.asm$", r".+\.hxx$", r".+\.hpp$", r".+\.s$", r".+\.S$",
      # Scripts
      r".+\.js$", r".+\.py$", r".+\.sh$", r".+\.rb$", r".+\.pl$", r".+\.pm$",
      # Other
      r".+\.java$", r".+\.mk$", r".+\.am$", r".+\.css$"
  )

  # Path regexp that should be excluded from being considered containing source
  # files. Don't modify this list from a presubmit script!
  DEFAULT_BLACK_LIST = (
      r"testing_support[\\\/]google_appengine[\\\/].*",
      r".*\bexperimental[\\\/].*",
      r".*\bthird_party[\\\/](?!WebKit[\\\/]).*",
      # Output directories.
      r".*\bDebug[\\\/].*",
      r".*\bRelease[\\\/].*",
      r".*\bxcodebuild[\\\/].*",
      r".*\bout[\\\/].*",
      # All-caps files such as README and LICENSE.
      r".*\b[A-Z0-9_]{2,}$",
      # SCM directories.
      r"(|.*[\\\/])\.git[\\\/].*",
      r"(|.*[\\\/])\.svn[\\\/].*",
      # There is no point in processing a patch file.
      r".+\.diff$",
      r".+\.patch$",
  )

  def __init__(self, change, presubmit_path, is_committing,
               rietveld_obj, verbose, gerrit_obj=None, dry_run=None):
    """Builds an InputApi object.

    Args:
      change: A presubmit.Change object.
      presubmit_path: The path to the presubmit script being processed.
      is_committing: True if the change is about to be committed.
      rietveld_obj: rietveld.Rietveld client object
      gerrit_obj: provides basic Gerrit codereview functionality.
      dry_run: if true, some Checks will be skipped.
    """
    # Version number of the presubmit_support script, as [major, minor, patch].
    self.version = [int(x) for x in __version__.split('.')]
    self.change = change
    self.is_committing = is_committing
    self.rietveld = rietveld_obj
    self.gerrit = gerrit_obj
    self.dry_run = dry_run
    self.host_url = 'http://codereview.chromium.org'
    if self.rietveld:
      self.host_url = self.rietveld.url
    # We expose various modules and functions as attributes of the input_api
    # so that presubmit scripts don't have to import them.
    self.ast = ast
    self.basename = os.path.basename
    self.cPickle = cPickle
    self.cpplint = cpplint
    self.cStringIO = cStringIO
    self.fnmatch = fnmatch
    self.glob = glob.glob
    self.json = json
    self.logging = logging.getLogger('PRESUBMIT')
    self.os_listdir = os.listdir
    self.os_walk = os.walk
    self.os_path = os.path
    self.os_stat = os.stat
    self.pickle = pickle
    self.marshal = marshal
    self.re = re
    self.subprocess = subprocess
    self.tempfile = tempfile
    self.time = time
    self.traceback = traceback
    self.unittest = unittest
    self.urllib2 = urllib2
    # To easily fork python.
    self.python_executable = sys.executable
    self.environ = os.environ
    # InputApi.platform is the platform you're currently running on.
    self.platform = sys.platform
    self.cpu_count = multiprocessing.cpu_count()
    # Worker pool used by RunTests() to run checks in parallel; shut down
    # by ShutdownPool() once the presubmit script has finished.
    self._run_tests_pool = multiprocessing.Pool(self.cpu_count)
    # The local path of the currently-being-processed presubmit script.
    self._current_presubmit_path = os.path.dirname(presubmit_path)
    # We carry the canned checks so presubmit scripts can easily use them.
    self.canned_checks = presubmit_canned_checks
    # Files created via CreateTemporaryFile(); deleted by the executer
    # after the hook has run.
    self._named_temporary_files = []
    self.owners_db = owners.Database(change.RepositoryRoot(),
        fopen=file, os_path=self.os_path)
    self.owners_finder = owners_finder.OwnersFinder
    self.verbose = verbose
    self.is_windows = sys.platform == 'win32'
    self.Command = CommandData
    # Replace <hash_map> and <hash_set> as headers that need to be included
    # with "base/containers/hash_tables.h" instead.
    # NOTE(review): this rewrites module-level cpplint state, not just this
    # instance's view of it -- TODO confirm that is intended.
    self.cpplint._re_pattern_templates = [
      (a, b, 'base/containers/hash_tables.h')
      if header in ('<hash_map>', '<hash_set>') else (a, b, header)
      for (a, b, header) in cpplint._re_pattern_templates
    ]

  def PresubmitLocalPath(self):
    """Returns the local path of the presubmit script currently being run.

    This is useful if you don't want to hard-code absolute paths in the
    presubmit script.  For example, It can be used to find another file
    relative to the PRESUBMIT.py script, so the whole tree can be branched and
    the presubmit script still works, without editing its content.
    """
    return self._current_presubmit_path

  def AffectedFiles(self, include_deletes=True, file_filter=None):
    """Same as input_api.change.AffectedFiles() except only lists files
    (and optionally directories) in the same directory as the current presubmit
    script, or subdirectories thereof.
    """
    dir_with_slash = normpath("%s/" % self.PresubmitLocalPath())
    # A one-character result means the script lives at the root; don't
    # restrict the file list in that case.
    if len(dir_with_slash) == 1:
      dir_with_slash = ''
    return filter(
        lambda x: normpath(x.AbsoluteLocalPath()).startswith(dir_with_slash),
        self.change.AffectedFiles(include_deletes, file_filter))

  def LocalPaths(self):
    """Returns local paths of input_api.AffectedFiles()."""
    paths = [af.LocalPath() for af in self.AffectedFiles()]
    logging.debug("LocalPaths: %s", paths)
    return paths

  def AbsoluteLocalPaths(self):
    """Returns absolute local paths of input_api.AffectedFiles()."""
    return [af.AbsoluteLocalPath() for af in self.AffectedFiles()]

  def AffectedTestableFiles(self, include_deletes=None):
    """Same as input_api.change.AffectedTestableFiles() except only lists files
    in the same directory as the current presubmit script, or subdirectories
    thereof.
    """
    if include_deletes is not None:
      warn("AffectedTestableFiles(include_deletes=%s)"
               " is deprecated and ignored" % str(include_deletes),
           category=DeprecationWarning,
           stacklevel=2)
    return filter(lambda x: x.IsTestableFile(),
                  self.AffectedFiles(include_deletes=False))

  def AffectedTextFiles(self, include_deletes=None):
    """An alias to AffectedTestableFiles for backwards compatibility."""
    return self.AffectedTestableFiles(include_deletes=include_deletes)

  def FilterSourceFile(self, affected_file, white_list=None, black_list=None):
    """Filters out files that aren't considered "source file".

    If white_list or black_list is None, InputApi.DEFAULT_WHITE_LIST
    and InputApi.DEFAULT_BLACK_LIST is used respectively.

    The lists will be compiled as regular expression and
    AffectedFile.LocalPath() needs to pass both list.

    Note: Copy-paste this function to suit your needs or use a lambda function.
    """
    def Find(affected_file, items):
      # True when any regexp in |items| matches the file's local path.
      local_path = affected_file.LocalPath()
      for item in items:
        if self.re.match(item, local_path):
          return True
      return False
    return (Find(affected_file, white_list or self.DEFAULT_WHITE_LIST) and
            not Find(affected_file, black_list or self.DEFAULT_BLACK_LIST))

  def AffectedSourceFiles(self, source_file):
    """Filter the list of AffectedTestableFiles by the function source_file.

    If source_file is None, InputApi.FilterSourceFile() is used.
    """
    if not source_file:
      source_file = self.FilterSourceFile
    return filter(source_file, self.AffectedTestableFiles())

  def RightHandSideLines(self, source_file_filter=None):
    """An iterator over all text lines in "new" version of changed files.

    Only lists lines from new or modified text files in the change that are
    contained by the directory of the currently executing presubmit script.

    This is useful for doing line-by-line regex checks, like checking for
    trailing whitespace.

    Yields:
      a 3 tuple:
        the AffectedFile instance of the current file;
        integer line number (1-based); and
        the contents of the line as a string.

    Note: The carriage return (LF or CR) is stripped off.
    """
    files = self.AffectedSourceFiles(source_file_filter)
    return _RightHandSideLinesImpl(files)

  def ReadFile(self, file_item, mode='r'):
    """Reads an arbitrary file.

    Deny reading anything outside the repository.
    """
    if isinstance(file_item, AffectedFile):
      file_item = file_item.AbsoluteLocalPath()
    if not file_item.startswith(self.change.RepositoryRoot()):
      raise IOError('Access outside the repository root is denied.')
    return gclient_utils.FileRead(file_item, mode)

  def CreateTemporaryFile(self, **kwargs):
    """Returns a named temporary file that must be removed with a call to
    RemoveTemporaryFiles().

    All keyword arguments are forwarded to tempfile.NamedTemporaryFile(),
    except for |delete|, which is always set to False.

    Presubmit checks that need to create a temporary file and pass it for
    reading should use this function instead of NamedTemporaryFile(), as
    Windows fails to open a file that is already open for writing.

      with input_api.CreateTemporaryFile() as f:
        f.write('xyz')
        f.close()
        input_api.subprocess.check_output(['script-that', '--reads-from',
                                           f.name])

    Note that callers of CreateTemporaryFile() should not worry about removing
    any temporary file; this is done transparently by the presubmit handling
    code.
    """
    if 'delete' in kwargs:
      raise TypeError('CreateTemporaryFile() does not take a "delete" '
                      'argument, file deletion is handled automatically by '
                      'the same presubmit_support code that creates InputApi '
                      'objects.')
    temp_file = self.tempfile.NamedTemporaryFile(delete=False, **kwargs)
    self._named_temporary_files.append(temp_file.name)
    return temp_file

  @property
  def tbr(self):
    """Returns if a change is TBR'ed."""
    return 'TBR' in self.change.tags or self.change.TBRsFromDescription()

  def RunTests(self, tests_mix, parallel=True):
    """Runs a list of CommandData commands, in parallel when possible.

    Pre-made result messages found in |tests_mix| are passed through
    unchanged; falsy results (silent successes) are dropped.
    """
    tests = []
    msgs = []
    for t in tests_mix:
      if isinstance(t, OutputApi.PresubmitResult):
        msgs.append(t)
      else:
        assert issubclass(t.message, _PresubmitResult)
        tests.append(t)
        if self.verbose:
          t.info = _PresubmitNotifyResult
    if len(tests) > 1 and parallel:
      # async recipe works around multiprocessing bug handling Ctrl-C
      msgs.extend(self._run_tests_pool.map_async(CallCommand, tests).get(99999))
    else:
      msgs.extend(map(CallCommand, tests))
    return [m for m in msgs if m]

  def ShutdownPool(self):
    """Closes the worker pool; the InputApi is unusable for RunTests after."""
    self._run_tests_pool.close()
    self._run_tests_pool.join()
    self._run_tests_pool = None
class _DiffCache(object):
  """Caches diffs retrieved from a particular SCM.

  Abstract base: subclasses implement GetDiff()/GetOldContents() for a
  specific SCM, memoising against the upstream revision given at init.
  """
  def __init__(self, upstream=None):
    """Stores the upstream revision against which all diffs will be computed."""
    self._upstream = upstream

  def GetDiff(self, path, local_root):
    """Get the diff for a particular path."""
    raise NotImplementedError()

  def GetOldContents(self, path, local_root):
    """Get the old version for a particular path."""
    raise NotImplementedError()
class _GitDiffCache(_DiffCache):
  """DiffCache implementation for git; gets all file diffs at once."""
  def __init__(self, upstream):
    super(_GitDiffCache, self).__init__(upstream=upstream)
    # Maps normalized file path -> that file's portion of the unified diff.
    # Populated lazily by the first GetDiff() call.
    self._diffs_by_file = None

  def GetDiff(self, path, local_root):
    """Returns the diff text for |path|, computing all diffs on first use."""
    if not self._diffs_by_file:
      # Compute a single diff for all files and parse the output; should
      # with git this is much faster than computing one diff for each file.
      diffs = {}
      # Don't specify any filenames below, because there are command line length
      # limits on some platforms and doing it in one shot avoids them.
      unified_diff = scm.GIT.GenerateDiff(local_root, files=[], full_move=True,
                                          branch=self._upstream)
      # A per-file section starts with a 'diff --git' line that repeats the
      # filename twice, separated by a space.
      file_marker = re.compile('^diff --git (?P<filename>.*) (?P=filename)$')
      current_diff = []
      keep_line_endings = True
      for x in unified_diff.splitlines(keep_line_endings):
        match = file_marker.match(x)
        if match:
          # Marks the start of a new per-file section.
          diffs[match.group('filename')] = current_diff = [x]
        elif x.startswith('diff --git'):
          raise PresubmitFailure('Unexpected diff line: %s' % x)
        else:
          current_diff.append(x)
      self._diffs_by_file = dict(
        (normpath(path), ''.join(diff)) for path, diff in diffs.items())
    if path not in self._diffs_by_file:
      raise PresubmitFailure(
          'Unified diff did not contain entry for file %s' % path)
    return self._diffs_by_file[path]

  def GetOldContents(self, path, local_root):
    """Returns the upstream version of |path| as a single string."""
    return scm.GIT.GetOldContents(local_root, path, branch=self._upstream)
class AffectedFile(object):
  """Representation of a file in a change."""

  # Subclasses override with the _DiffCache subclass matching their SCM.
  DIFF_CACHE = _DiffCache

  def __init__(self, path, action, repository_root, diff_cache):
    self._path = path
    # Single-letter SCM action code, e.g. 'A', 'M', 'D'.
    self._action = action
    self._local_root = repository_root
    self._is_directory = None
    self._cached_changed_contents = None
    self._cached_new_contents = None
    self._diff_cache = diff_cache
    logging.debug('%s(%s)', self.__class__.__name__, self._path)

  def LocalPath(self):
    """Returns the path of this file on the local disk relative to client root.

    This should be used for error messages but not for accessing files,
    because presubmit checks are run with CWD=PresubmitLocalPath() (which is
    often != client root).
    """
    return normpath(self._path)

  def AbsoluteLocalPath(self):
    """Returns the absolute path of this file on the local disk.
    """
    return os.path.abspath(os.path.join(self._local_root, self.LocalPath()))

  def Action(self):
    """Returns the action on this opened file, e.g. A, M, D, etc."""
    return self._action

  def IsTestableFile(self):
    """Returns True if the file is a text file and not a binary file.

    Deleted files are not text file."""
    raise NotImplementedError()

  def IsTextFile(self):
    """An alias to IsTestableFile for backwards compatibility."""
    return self.IsTestableFile()

  def OldContents(self):
    """Returns an iterator over the lines in the old version of file.

    The old version is the file before any modifications in the user's
    workspace, i.e. the "left hand side".

    Contents will be empty if the file is a directory or does not exist.
    Note: The carriage returns (LF or CR) are stripped off.
    """
    return self._diff_cache.GetOldContents(self.LocalPath(),
                                           self._local_root).splitlines()

  def NewContents(self):
    """Returns an iterator over the lines in the new version of file.

    The new version is the file in the user's workspace, i.e. the "right hand
    side".

    Contents will be empty if the file is a directory or does not exist.
    Note: The carriage returns (LF or CR) are stripped off.
    """
    if self._cached_new_contents is None:
      self._cached_new_contents = []
      try:
        self._cached_new_contents = gclient_utils.FileRead(
            self.AbsoluteLocalPath(), 'rU').splitlines()
      except IOError:
        pass  # File not found / unreadable: treated as empty contents.
    # Return a copy so callers cannot mutate the cache.
    return self._cached_new_contents[:]

  def ChangedContents(self):
    """Returns a list of tuples (line number, line text) of all new lines.

     This relies on the scm diff output describing each changed code section
     with a line of the form

     ^@@ <old line num>,<old size> <new line num>,<new size> @@$
    """
    if self._cached_changed_contents is not None:
      return self._cached_changed_contents[:]
    self._cached_changed_contents = []
    line_num = 0

    for line in self.GenerateScmDiff().splitlines():
      # Hunk header: reset the running counter to the new-file start line.
      m = re.match(r'^@@ [0-9\,\+\-]+ \+([0-9]+)\,[0-9]+ @@', line)
      if m:
        line_num = int(m.groups(1)[0])
        continue
      # '+' lines (but not the '+++' file header) are added lines.
      if line.startswith('+') and not line.startswith('++'):
        self._cached_changed_contents.append((line_num, line[1:]))
      # Context and added lines advance the new-file counter; removed ('-')
      # lines exist only in the old file and do not.
      if not line.startswith('-'):
        line_num += 1
    return self._cached_changed_contents[:]

  def __str__(self):
    return self.LocalPath()

  def GenerateScmDiff(self):
    # Delegates to the per-SCM diff cache shared by all files in the change.
    return self._diff_cache.GetDiff(self.LocalPath(), self._local_root)
class GitAffectedFile(AffectedFile):
  """A file touched by a change inside a git checkout."""
  # Some abstract methods of AffectedFile are intentionally not overridden.
  # pylint: disable=abstract-method

  DIFF_CACHE = _GitDiffCache

  def __init__(self, *args, **kwargs):
    AffectedFile.__init__(self, *args, **kwargs)
    self._server_path = None
    self._is_testable_file = None

  def IsTestableFile(self):
    """Lazily computes and caches testability: deleted files are never
    testable; otherwise the file must exist as a regular file on disk."""
    if self._is_testable_file is None:
      self._is_testable_file = (
          self.Action() != 'D' and os.path.isfile(self.AbsoluteLocalPath()))
    return self._is_testable_file
class Change(object):
  """Describe a change.

  Used directly by the presubmit scripts to query the current change being
  tested.

  Instance members:
    tags: Dictionary of KEY=VALUE pairs found in the change description.
    self.KEY: equivalent to tags['KEY']
  """

  # Subclasses override with their SCM-specific AffectedFile type.
  _AFFECTED_FILES = AffectedFile

  # Matches key/value (or "tag") lines in changelist descriptions.
  TAG_LINE_RE = re.compile(
      '^[ \t]*(?P<key>[A-Z][A-Z_0-9]*)[ \t]*=[ \t]*(?P<value>.*?)[ \t]*$')
  scm = ''

  def __init__(
      self, name, description, local_root, files, issue, patchset, author,
      upstream=None):
    if files is None:
      files = []
    self._name = name
    # Convert root into an absolute path.
    self._local_root = os.path.abspath(local_root)
    self._upstream = upstream
    self.issue = issue
    self.patchset = patchset
    self.author_email = author

    self._full_description = ''
    self.tags = {}
    self._description_without_tags = ''
    self.SetDescriptionText(description)

    # |files| must be an iterable of (action, path) two-element pairs.
    assert all(
        (isinstance(f, (list, tuple)) and len(f) == 2) for f in files), files

    # One diff cache is shared by all affected files of this change.
    diff_cache = self._AFFECTED_FILES.DIFF_CACHE(self._upstream)
    self._affected_files = [
        self._AFFECTED_FILES(path, action.strip(), self._local_root, diff_cache)
        for action, path in files
    ]

  def Name(self):
    """Returns the change name."""
    return self._name

  def DescriptionText(self):
    """Returns the user-entered changelist description, minus tags.

    Any line in the user-provided description starting with e.g. "FOO="
    (whitespace permitted before and around) is considered a tag line. Such
    lines are stripped out of the description this function returns.
    """
    return self._description_without_tags

  def FullDescriptionText(self):
    """Returns the complete changelist description including tags."""
    return self._full_description

  def SetDescriptionText(self, description):
    """Sets the full description text (including tags) to |description|.

    Also updates the list of tags."""
    self._full_description = description

    # From the description text, build up a dictionary of key/value pairs
    # plus the description minus all key/value or "tag" lines.
    description_without_tags = []
    self.tags = {}
    for line in self._full_description.splitlines():
      m = self.TAG_LINE_RE.match(line)
      if m:
        self.tags[m.group('key')] = m.group('value')
      else:
        description_without_tags.append(line)

    # Change back to text and remove whitespace at end.
    self._description_without_tags = (
        '\n'.join(description_without_tags).rstrip())

  def RepositoryRoot(self):
    """Returns the repository (checkout) root directory for this change,
    as an absolute path.
    """
    return self._local_root

  def __getattr__(self, attr):
    """Return tags directly as attributes on the object."""
    if not re.match(r"^[A-Z_]*$", attr):
      raise AttributeError(self, attr)
    return self.tags.get(attr)

  def BugsFromDescription(self):
    """Returns all bugs referenced in the commit description."""
    tags = [b.strip() for b in self.tags.get('BUG', '').split(',') if b.strip()]
    footers = git_footers.parse_footers(self._full_description).get('Bug', [])
    return sorted(set(tags + footers))

  def ReviewersFromDescription(self):
    """Returns all reviewers listed in the commit description."""
    # We don't support a "R:" git-footer for reviewers; that is in metadata.
    tags = [r.strip() for r in self.tags.get('R', '').split(',') if r.strip()]
    return sorted(set(tags))

  def TBRsFromDescription(self):
    """Returns all TBR reviewers listed in the commit description."""
    tags = [r.strip() for r in self.tags.get('TBR', '').split(',') if r.strip()]
    footers = git_footers.parse_footers(self._full_description).get('Tbr', [])
    return sorted(set(tags + footers))

  @property
  def BUG(self):
    return ','.join(self.BugsFromDescription())

  @property
  def R(self):
    return ','.join(self.ReviewersFromDescription())

  @property
  def TBR(self):
    return ','.join(self.TBRsFromDescription())

  def AllFiles(self, root=None):
    """List all files under source control in the repo."""
    raise NotImplementedError()

  def AffectedFiles(self, include_deletes=True, file_filter=None):
    """Returns a list of AffectedFile instances for all files in the change.

    Args:
      include_deletes: If false, deleted files will be filtered out.
      file_filter: An additional filter to apply.

    Returns:
      [AffectedFile(path, action), AffectedFile(path, action)]
    """
    affected = filter(file_filter, self._affected_files)
    if include_deletes:
      return affected
    return filter(lambda x: x.Action() != 'D', affected)

  def AffectedTestableFiles(self, include_deletes=None):
    """Return a list of the existing text files in a change."""
    if include_deletes is not None:
      # Bug fix: the warning previously misspelled the method name as
      # 'AffectedTeestableFiles'.
      warn("AffectedTestableFiles(include_deletes=%s)"
           " is deprecated and ignored" % str(include_deletes),
           category=DeprecationWarning,
           stacklevel=2)
    return filter(lambda x: x.IsTestableFile(),
                  self.AffectedFiles(include_deletes=False))

  def AffectedTextFiles(self, include_deletes=None):
    """An alias to AffectedTestableFiles for backwards compatibility."""
    return self.AffectedTestableFiles(include_deletes=include_deletes)

  def LocalPaths(self):
    """Convenience function."""
    return [af.LocalPath() for af in self.AffectedFiles()]

  def AbsoluteLocalPaths(self):
    """Convenience function."""
    return [af.AbsoluteLocalPath() for af in self.AffectedFiles()]

  def RightHandSideLines(self):
    """An iterator over all text lines in "new" version of changed files.

    Lists lines from new or modified text files in the change.

    This is useful for doing line-by-line regex checks, like checking for
    trailing whitespace.

    Yields:
      a 3 tuple:
        the AffectedFile instance of the current file;
        integer line number (1-based); and
        the contents of the line as a string.
    """
    return _RightHandSideLinesImpl(
        x for x in self.AffectedFiles(include_deletes=False)
        if x.IsTestableFile())

  def OriginalOwnersFiles(self):
    """A map from path names of affected OWNERS files to their old content."""
    def owners_file_filter(f):
      return 'OWNERS' in os.path.split(f.LocalPath())[1]
    files = self.AffectedFiles(file_filter=owners_file_filter)
    return dict([(f.LocalPath(), f.OldContents()) for f in files])
class GitChange(Change):
  """A Change backed by a git checkout."""
  _AFFECTED_FILES = GitAffectedFile
  scm = 'git'

  def AllFiles(self, root=None):
    """List all files under source control in the repo."""
    if not root:
      root = self.RepositoryRoot()
    listing = subprocess.check_output(
        ['git', 'ls-files', '--', '.'], cwd=root)
    return listing.splitlines()
def ListRelevantPresubmitFiles(files, root):
  """Finds all presubmit files that apply to a given set of source files.

  If inherit-review-settings-ok is present right under root, looks for
  PRESUBMIT.py in directories enclosing root.

  Args:
    files: An iterable container containing file paths.
    root: Path where to stop searching.

  Return:
    List of absolute paths of the existing PRESUBMIT.py scripts.
  """
  start_dirs = set(
      os.path.dirname(normpath(os.path.join(root, f))) for f in files)

  # With the marker file present, keep climbing past |root| all the way up.
  if os.path.isfile(os.path.join(root, 'inherit-review-settings-ok')):
    root = None

  # Collect every ancestor directory of each file, stopping at |root| (or at
  # the filesystem root when |root| is None).
  candidates = set()
  for current in start_dirs:
    while current not in candidates:
      candidates.add(current)
      if current == root:
        break
      parent = os.path.dirname(current)
      if parent == current:
        # Reached the filesystem root.
        break
      current = parent

  # Keep any existing PRESUBMIT*.py (excluding PRESUBMIT_test*) scripts found
  # in the candidate directories, in sorted-directory order.
  found = []
  for directory in sorted(candidates):
    try:
      entries = os.listdir(directory)
    except OSError:
      continue
    for entry in entries:
      full_path = os.path.join(directory, entry)
      if (os.path.isfile(full_path) and
          re.match(r'PRESUBMIT.*\.py$', entry) and
          not entry.startswith('PRESUBMIT_test')):
        found.append(full_path)
  logging.debug('Presubmit files: %s', ','.join(found))
  return found
class GetTryMastersExecuter(object):
  @staticmethod
  def ExecPresubmitScript(script_text, presubmit_path, project, change):
    """Executes GetPreferredTryMasters() from a single presubmit script.

    Args:
      script_text: The text of the presubmit script.
      presubmit_path: Project script to run.
      project: Project name to pass to presubmit script for bot selection.

    Return:
      A map of try masters to map of builders to set of tests.
    """
    context = {}
    try:
      # Python 2 exec statement: the script runs with |context| as its
      # global namespace, so its top-level names land in |context|.
      exec script_text in context
    except Exception, e:
      raise PresubmitFailure('"%s" had an exception.\n%s'
                             % (presubmit_path, e))

    function_name = 'GetPreferredTryMasters'
    # A script that defines no hook simply contributes nothing.
    if function_name not in context:
      return {}
    get_preferred_try_masters = context[function_name]
    # Enforce the expected (project, change) two-argument signature.
    if not len(inspect.getargspec(get_preferred_try_masters)[0]) == 2:
      raise PresubmitFailure(
          'Expected function "GetPreferredTryMasters" to take two arguments.')
    return get_preferred_try_masters(project, change)
class GetPostUploadExecuter(object):
  @staticmethod
  def ExecPresubmitScript(script_text, presubmit_path, cl, change):
    """Executes PostUploadHook() from a single presubmit script.

    Args:
      script_text: The text of the presubmit script.
      presubmit_path: Project script to run.
      cl: The Changelist object.
      change: The Change object.

    Return:
      A list of results objects.
    """
    context = {}
    try:
      # Python 2 exec statement: runs the script with |context| as globals.
      exec script_text in context
    except Exception, e:
      raise PresubmitFailure('"%s" had an exception.\n%s'
                             % (presubmit_path, e))

    function_name = 'PostUploadHook'
    # Scripts without the hook contribute nothing.
    if function_name not in context:
      return {}
    post_upload_hook = context[function_name]
    # Enforce the expected (cl, change, output_api) three-argument signature.
    if not len(inspect.getargspec(post_upload_hook)[0]) == 3:
      raise PresubmitFailure(
          'Expected function "PostUploadHook" to take three arguments.')
    # Post-upload hooks always run with a non-committing OutputApi.
    return post_upload_hook(cl, change, OutputApi(False))
def _MergeMasters(masters1, masters2):
  """Merges two master maps. Merges also the tests of each builder."""
  merged = {}
  # Fold both maps into |merged|; builders' test sets are unioned.
  for source in (masters1, masters2):
    for master, builders in source.iteritems():
      merged_builders = merged.setdefault(master, {})
      for builder, tests in builders.iteritems():
        merged_builders.setdefault(builder, set()).update(tests)
  return merged
def DoGetTryMasters(change,
                    changed_files,
                    repository_root,
                    default_presubmit,
                    project,
                    verbose,
                    output_stream):
  """Get the list of try masters from the presubmit scripts.

  Args:
    changed_files: List of modified files.
    repository_root: The repository root.
    default_presubmit: A default presubmit script to execute in any case.
    project: Optional name of a project used in selecting trybots.
    verbose: Prints debug info.
    output_stream: A stream to write debug output to.

  Return:
    Map of try masters to map of builders to set of tests.
  """
  presubmit_files = ListRelevantPresubmitFiles(changed_files, repository_root)
  if not presubmit_files and verbose:
    output_stream.write("Warning, no PRESUBMIT.py found.\n")
  results = {}
  executer = GetTryMastersExecuter()

  # The default script (if any) runs first, attributed to a fake path at the
  # repository root.
  if default_presubmit:
    if verbose:
      output_stream.write("Running default presubmit script.\n")
    fake_path = os.path.join(repository_root, 'PRESUBMIT.py')
    results = _MergeMasters(results, executer.ExecPresubmitScript(
        default_presubmit, fake_path, project, change))
  for filename in presubmit_files:
    filename = os.path.abspath(filename)
    if verbose:
      output_stream.write("Running %s\n" % filename)
    # Accept CRLF presubmit script ('rU' universal-newline read).
    presubmit_script = gclient_utils.FileRead(filename, 'rU')
    results = _MergeMasters(results, executer.ExecPresubmitScript(
        presubmit_script, filename, project, change))

  # Convert the set of tests to lists, because json cannot encode sets.
  for builders in results.itervalues():
    for builder in builders:
      builders[builder] = list(builders[builder])

  if results and verbose:
    output_stream.write('%s\n' % str(results))
  return results
def DoPostUploadExecuter(change,
                         cl,
                         repository_root,
                         verbose,
                         output_stream):
  """Execute the post upload hook.

  Args:
    change: The Change object.
    cl: The Changelist object.
    repository_root: The repository root.
    verbose: Prints debug info.
    output_stream: A stream to write debug output to.
  """
  presubmit_files = ListRelevantPresubmitFiles(
      change.LocalPaths(), repository_root)
  if not presubmit_files and verbose:
    output_stream.write("Warning, no PRESUBMIT.py found.\n")
  results = []
  executer = GetPostUploadExecuter()
  # The root presubmit file should be executed after the ones in subdirectories.
  # i.e. the specific post upload hooks should run before the general ones.
  # Thus, reverse the order provided by ListRelevantPresubmitFiles.
  presubmit_files.reverse()

  for filename in presubmit_files:
    filename = os.path.abspath(filename)
    if verbose:
      output_stream.write("Running %s\n" % filename)
    # Accept CRLF presubmit script ('rU' universal-newline read).
    presubmit_script = gclient_utils.FileRead(filename, 'rU')
    results.extend(executer.ExecPresubmitScript(
        presubmit_script, filename, cl, change))
  output_stream.write('\n')
  if results:
    output_stream.write('** Post Upload Hook Messages **\n')
  for result in results:
    result.handle(output_stream)
    output_stream.write('\n')

  return results
class PresubmitExecuter(object):
  def __init__(self, change, committing, rietveld_obj, verbose,
               gerrit_obj=None, dry_run=None):
    """
    Args:
      change: The Change object.
      committing: True if 'git cl land' is running, False if 'git cl upload' is.
      rietveld_obj: rietveld.Rietveld client object.
      gerrit_obj: provides basic Gerrit codereview functionality.
      dry_run: if true, some Checks will be skipped.
    """
    self.change = change
    self.committing = committing
    self.rietveld = rietveld_obj
    self.gerrit = gerrit_obj
    self.verbose = verbose
    self.dry_run = dry_run
    # CC addresses accumulated by the last executed script's OutputApi.
    self.more_cc = []

  def ExecPresubmitScript(self, script_text, presubmit_path):
    """Executes a single presubmit script.

    Args:
      script_text: The text of the presubmit script.
      presubmit_path: The path to the presubmit file (this will be reported via
        input_api.PresubmitLocalPath()).

    Return:
      A list of result objects, empty if no problems.
    """
    # Scripts expect to run with CWD set to their own directory; restore the
    # original CWD at the end.
    main_path = os.getcwd()
    os.chdir(os.path.dirname(presubmit_path))
    # Load the presubmit script into context.
    input_api = InputApi(self.change, presubmit_path, self.committing,
                         self.rietveld, self.verbose,
                         gerrit_obj=self.gerrit, dry_run=self.dry_run)
    output_api = OutputApi(self.committing)
    context = {}
    try:
      # Python 2 exec statement: the script's top-level names land in
      # |context|.
      exec script_text in context
    except Exception, e:
      raise PresubmitFailure('"%s" had an exception.\n%s' % (presubmit_path, e))

    # These function names must change if we make substantial changes to
    # the presubmit API that are not backwards compatible.
    if self.committing:
      function_name = 'CheckChangeOnCommit'
    else:
      function_name = 'CheckChangeOnUpload'
    if function_name in context:
      try:
        # Call the hook inside the script's own namespace via eval so it can
        # reference other names the script defined.
        context['__args'] = (input_api, output_api)
        logging.debug('Running %s in %s', function_name, presubmit_path)
        result = eval(function_name + '(*__args)', context)
        logging.debug('Running %s done.', function_name)
        self.more_cc = output_api.more_cc
      finally:
        # Clean up temp files the script created via the input API, even if
        # the hook raised.
        map(os.remove, input_api._named_temporary_files)
      if not (isinstance(result, types.TupleType) or
              isinstance(result, types.ListType)):
        raise PresubmitFailure(
          'Presubmit functions must return a tuple or list')
      for item in result:
        if not isinstance(item, OutputApi.PresubmitResult):
          raise PresubmitFailure(
            'All presubmit results must be of types derived from '
            'output_api.PresubmitResult')
    else:
      result = ()  # no error since the script doesn't care about current event.

    input_api.ShutdownPool()

    # Return the process to the original working directory.
    os.chdir(main_path)
    return result
def DoPresubmitChecks(change,
                      committing,
                      verbose,
                      output_stream,
                      input_stream,
                      default_presubmit,
                      may_prompt,
                      rietveld_obj,
                      gerrit_obj=None,
                      dry_run=None):
  """Runs all presubmit checks that apply to the files in the change.

  This finds all PRESUBMIT.py files in directories enclosing the files in the
  change (up to the repository root) and calls the relevant entrypoint function
  depending on whether the change is being committed or uploaded.

  Prints errors, warnings and notifications.  Prompts the user for warnings
  when needed.

  Args:
    change: The Change object.
    committing: True if 'git cl land' is running, False if 'git cl upload' is.
    verbose: Prints debug info.
    output_stream: A stream to write output from presubmit tests to.
    input_stream: A stream to read input from the user.
    default_presubmit: A default presubmit script to execute in any case.
    may_prompt: Enable (y/n) questions on warning or error. If False,
                any questions are answered with yes by default.
    rietveld_obj: rietveld.Rietveld object.
    gerrit_obj: provides basic Gerrit codereview functionality.
    dry_run: if true, some Checks will be skipped.

  Warning:
    If may_prompt is true, output_stream SHOULD be sys.stdout and input_stream
    SHOULD be sys.stdin.

  Return:
    A PresubmitOutput object. Use output.should_continue() to figure out
    if there were errors or warnings and the caller should abort.
  """
  # Swap in a copy of the environment so presubmit scripts cannot pollute the
  # caller's; the original is restored in the finally clause.
  old_environ = os.environ
  try:
    # Make sure python subprocesses won't generate .pyc files.
    os.environ = os.environ.copy()
    os.environ['PYTHONDONTWRITEBYTECODE'] = '1'

    output = PresubmitOutput(input_stream, output_stream)
    if committing:
      output.write("Running presubmit commit checks ...\n")
    else:
      output.write("Running presubmit upload checks ...\n")
    start_time = time.time()
    presubmit_files = ListRelevantPresubmitFiles(
        change.AbsoluteLocalPaths(), change.RepositoryRoot())
    if not presubmit_files and verbose:
      output.write("Warning, no PRESUBMIT.py found.\n")
    results = []
    executer = PresubmitExecuter(change, committing, rietveld_obj, verbose,
                                 gerrit_obj, dry_run)
    # The default script (if any) runs first, attributed to a fake path at
    # the repository root.
    if default_presubmit:
      if verbose:
        output.write("Running default presubmit script.\n")
      fake_path = os.path.join(change.RepositoryRoot(), 'PRESUBMIT.py')
      results += executer.ExecPresubmitScript(default_presubmit, fake_path)
    for filename in presubmit_files:
      filename = os.path.abspath(filename)
      if verbose:
        output.write("Running %s\n" % filename)
      # Accept CRLF presubmit script.
      presubmit_script = gclient_utils.FileRead(filename, 'rU')
      results += executer.ExecPresubmitScript(presubmit_script, filename)

    output.more_cc.extend(executer.more_cc)

    # Triage results by severity: fatal -> errors, prompt-worthy -> warnings,
    # everything else -> notifications.
    errors = []
    notifications = []
    warnings = []
    for result in results:
      if result.fatal:
        errors.append(result)
      elif result.should_prompt:
        warnings.append(result)
      else:
        notifications.append(result)

    output.write('\n')
    for name, items in (('Messages', notifications),
                        ('Warnings', warnings),
                        ('ERRORS', errors)):
      if items:
        output.write('** Presubmit %s **\n' % name)
        for item in items:
          item.handle(output)
          output.write('\n')

    total_time = time.time() - start_time
    if total_time > 1.0:
      output.write("Presubmit checks took %.1fs to calculate.\n\n" % total_time)

    if errors:
      output.fail()
    elif warnings:
      output.write('There were presubmit warnings. ')
      if may_prompt:
        output.prompt_yes_no('Are you sure you wish to continue? (y/N): ')
    else:
      output.write('Presubmit checks passed.\n')

    global _ASKED_FOR_FEEDBACK
    # Ask for feedback one time out of 5.
    if (len(results) and random.randint(0, 4) == 0 and not _ASKED_FOR_FEEDBACK):
      output.write(
        'Was the presubmit check useful? If not, run "git cl presubmit -v"\n'
        'to figure out which PRESUBMIT.py was run, then run git blame\n'
        'on the file to figure out who to ask for help.\n')
      _ASKED_FOR_FEEDBACK = True
    return output
  finally:
    os.environ = old_environ
def ScanSubDirs(mask, recursive):
  """Returns paths matching |mask| in the current directory, optionally
  recursing into subdirectories while skipping SCM metadata directories."""
  if not recursive:
    return [x for x in glob.glob(mask) if x not in ('.svn', '.git')]

  matches = []
  for root, dirs, files in os.walk('.'):
    # Prune SCM metadata directories in place so os.walk never descends
    # into them.
    for metadata_dir in ('.svn', '.git'):
      if metadata_dir in dirs:
        dirs.remove(metadata_dir)
    matches.extend(
        os.path.join(root, name)
        for name in files if fnmatch.fnmatch(name, mask))
  return matches
def ParseFiles(args, recursive):
  """Expands each mask in |args| into a list of ('M', path) tuples by
  scanning the filesystem via ScanSubDirs."""
  logging.debug('Searching for %s', args)
  found = []
  for mask in args:
    found.extend(('M', path) for path in ScanSubDirs(mask, recursive))
  return found
def load_files(options, args):
  """Tries to determine the SCM.

  Returns a (change_class, files) tuple, or (None, None) when the directory is
  not under source control and no files were supplied on the command line.
  """
  files = ParseFiles(args, options.recursive) if args else []
  if scm.determine_scm(options.root) == 'git':
    change_class = GitChange
    if not files:
      # No explicit file list: ask git for the change's status entries.
      files = scm.GIT.CaptureStatus([], options.root, options.upstream or None)
  else:
    logging.info('Doesn\'t seem under source control. Got %d files', len(args))
    if not files:
      return None, None
    change_class = Change
  return change_class, files
class NonexistantCannedCheckFilter(Exception):
  """Raised by canned_check_filter() when asked to skip a canned check that
  does not exist in presubmit_canned_checks."""
  pass
@contextlib.contextmanager
def canned_check_filter(method_names):
  """Context manager that temporarily replaces the named functions in
  presubmit_canned_checks with no-ops returning no results; the originals are
  restored on exit.

  Raises:
    NonexistantCannedCheckFilter: if a name is not a canned check.
  """
  filtered = {}
  try:
    for method_name in method_names:
      if not hasattr(presubmit_canned_checks, method_name):
        raise NonexistantCannedCheckFilter(method_name)
      # Save the original so it can be restored, then stub it out.
      filtered[method_name] = getattr(presubmit_canned_checks, method_name)
      setattr(presubmit_canned_checks, method_name, lambda *_a, **_kw: [])
    yield
  finally:
    # Restore every check that was successfully replaced, even if the loop
    # above raised part-way through.
    for name, method in filtered.iteritems():
      setattr(presubmit_canned_checks, name, method)
def CallCommand(cmd_data):
  """Runs an external program, potentially from a child process created by the
  multiprocessing module.

  multiprocessing needs a top level function with a single argument.
  """
  cmd_data.kwargs['stdout'] = subprocess.PIPE
  cmd_data.kwargs['stderr'] = subprocess.STDOUT
  try:
    start = time.time()
    # NOTE(review): a module-level subprocess.communicate() is not part of the
    # stdlib subprocess module -- presumably |subprocess| here is a project
    # wrapper (e.g. subprocess2); confirm against the file's imports.
    (out, _), code = subprocess.communicate(cmd_data.cmd, **cmd_data.kwargs)
    duration = time.time() - start
  except OSError as e:
    # The program could not be started at all (e.g. not found).
    duration = time.time() - start
    return cmd_data.message(
        '%s exec failure (%4.2fs)\n   %s' % (cmd_data.name, duration, e))
  if code != 0:
    return cmd_data.message(
        '%s (%4.2fs) failed\n%s' % (cmd_data.name, duration, out))
  if cmd_data.info:
    return cmd_data.info('%s (%4.2fs)' % (cmd_data.name, duration))
def main(argv=None):
  """Command-line entry point: parses flags, determines the SCM and affected
  files, runs the presubmit checks, and returns a process exit code
  (0 = continue, 1 = checks say stop, 2 = internal error)."""
  parser = optparse.OptionParser(usage="%prog [options] <files...>",
                                 version="%prog " + str(__version__))
  parser.add_option("-c", "--commit", action="store_true", default=False,
                    help="Use commit instead of upload checks")
  parser.add_option("-u", "--upload", action="store_false", dest='commit',
                    help="Use upload instead of commit checks")
  parser.add_option("-r", "--recursive", action="store_true",
                    help="Act recursively")
  parser.add_option("-v", "--verbose", action="count", default=0,
                    help="Use 2 times for more debug info")
  parser.add_option("--name", default='no name')
  parser.add_option("--author")
  parser.add_option("--description", default='')
  parser.add_option("--issue", type='int', default=0)
  parser.add_option("--patchset", type='int', default=0)
  parser.add_option("--root", default=os.getcwd(),
                    help="Search for PRESUBMIT.py up to this directory. "
                    "If inherit-review-settings-ok is present in this "
                    "directory, parent directories up to the root file "
                    "system directories will also be searched.")
  parser.add_option("--upstream",
                    help="Git only: the base ref or upstream branch against "
                    "which the diff should be computed.")
  parser.add_option("--default_presubmit")
  parser.add_option("--may_prompt", action='store_true', default=False)
  parser.add_option("--skip_canned", action='append', default=[],
                    help="A list of checks to skip which appear in "
                    "presubmit_canned_checks. Can be provided multiple times "
                    "to skip multiple canned checks.")
  parser.add_option("--dry_run", action='store_true',
                    help=optparse.SUPPRESS_HELP)
  parser.add_option("--gerrit_url", help=optparse.SUPPRESS_HELP)
  parser.add_option("--gerrit_fetch", action='store_true',
                    help=optparse.SUPPRESS_HELP)
  parser.add_option("--rietveld_url", help=optparse.SUPPRESS_HELP)
  parser.add_option("--rietveld_email", help=optparse.SUPPRESS_HELP)
  parser.add_option("--rietveld_fetch", action='store_true', default=False,
                    help=optparse.SUPPRESS_HELP)
  parser.add_option("--rietveld_email_file", help=optparse.SUPPRESS_HELP)
  parser.add_option("--rietveld_private_key_file", help=optparse.SUPPRESS_HELP)
  auth.add_auth_options(parser)
  options, args = parser.parse_args(argv)
  auth_config = auth.extract_auth_config_from_options(options)

  # -v selects INFO, -vv (or more) selects DEBUG; default is ERROR only.
  if options.verbose >= 2:
    logging.basicConfig(level=logging.DEBUG)
  elif options.verbose:
    logging.basicConfig(level=logging.INFO)
  else:
    logging.basicConfig(level=logging.ERROR)

  # Rietveld and Gerrit flag families are mutually exclusive.
  if (any((options.rietveld_url, options.rietveld_email_file,
           options.rietveld_fetch, options.rietveld_private_key_file))
      and any((options.gerrit_url, options.gerrit_fetch))):
    parser.error('Options for only codereview --rietveld_* or --gerrit_* '
                 'allowed')

  if options.rietveld_email and options.rietveld_email_file:
    parser.error("Only one of --rietveld_email or --rietveld_email_file "
                 "can be passed to this program.")
  if options.rietveld_email_file:
    with open(options.rietveld_email_file, "rb") as f:
      options.rietveld_email = f.read().strip()

  change_class, files = load_files(options, args)
  if not change_class:
    parser.error('For unversioned directory, <files> is not optional.')
  logging.info('Found %d file(s).', len(files))

  rietveld_obj, gerrit_obj = None, None

  if options.rietveld_url:
    # A private key file selects JWT OAuth2 auth; otherwise use the cached
    # auth-config based client.
    if options.rietveld_private_key_file:
      rietveld_obj = rietveld.JwtOAuth2Rietveld(
        options.rietveld_url,
        options.rietveld_email,
        options.rietveld_private_key_file)
    else:
      rietveld_obj = rietveld.CachingRietveld(
        options.rietveld_url,
        auth_config,
        options.rietveld_email)
    if options.rietveld_fetch:
      # Pull author/description from the issue instead of the command line.
      assert options.issue
      props = rietveld_obj.get_issue_properties(options.issue, False)
      options.author = props['owner_email']
      options.description = props['description']
      logging.info('Got author: "%s"', options.author)
      logging.info('Got description: """\n%s\n"""', options.description)

  if options.gerrit_url and options.gerrit_fetch:
    assert options.issue and options.patchset
    rietveld_obj = None
    gerrit_obj = GerritAccessor(urlparse.urlparse(options.gerrit_url).netloc)
    options.author = gerrit_obj.GetChangeOwner(options.issue)
    options.description = gerrit_obj.GetChangeDescription(options.issue,
                                                          options.patchset)
    logging.info('Got author: "%s"', options.author)
    logging.info('Got description: """\n%s\n"""', options.description)

  try:
    with canned_check_filter(options.skip_canned):
      results = DoPresubmitChecks(
          change_class(options.name,
                       options.description,
                       options.root,
                       files,
                       options.issue,
                       options.patchset,
                       options.author,
                       upstream=options.upstream),
          options.commit,
          options.verbose,
          sys.stdout,
          sys.stdin,
          options.default_presubmit,
          options.may_prompt,
          rietveld_obj,
          gerrit_obj,
          options.dry_run)
    return not results.should_continue()
  except NonexistantCannedCheckFilter, e:
    print >> sys.stderr, (
      'Attempted to skip nonexistent canned presubmit check: %s' % e.message)
    return 2
  except PresubmitFailure, e:
    print >> sys.stderr, e
    print >> sys.stderr, 'Maybe your depot_tools is out of date?'
    print >> sys.stderr, 'If all fails, contact maruel@'
    return 2
if __name__ == '__main__':
  # Normalize stdout/stderr encodings before emitting any output.
  fix_encoding.fix_encoding()
  try:
    sys.exit(main())
  except KeyboardInterrupt:
    # Ctrl-C: exit quietly with a distinct non-zero code.
    sys.stderr.write('interrupted\n')
    sys.exit(2)
| false
| true
|
f71a9b3881862c5eb958a16f5a70f95f5060726c
| 5,616
|
py
|
Python
|
Briefly/api/Punc/punctuator/tests.py
|
q815101630/Briefly2.0
|
d92ba52308ef8c644fe8fb453169d0bee1a7f47e
|
[
"MIT"
] | 20
|
2019-12-03T06:06:58.000Z
|
2022-02-23T21:49:03.000Z
|
Briefly/api/Punc/punctuator/tests.py
|
q815101630/Briefly2.0
|
d92ba52308ef8c644fe8fb453169d0bee1a7f47e
|
[
"MIT"
] | 9
|
2020-06-15T14:56:38.000Z
|
2022-02-12T13:09:38.000Z
|
Briefly/api/Punc/punctuator/tests.py
|
q815101630/Briefly2.0
|
d92ba52308ef8c644fe8fb453169d0bee1a7f47e
|
[
"MIT"
] | 8
|
2020-07-27T14:00:37.000Z
|
2022-02-20T17:59:04.000Z
|
from __future__ import absolute_import
import time
import os
import unittest
import tempfile
import shutil
from io import StringIO
from . import punc
from .punc import Punctuator, download_model
class Tests(unittest.TestCase):
    """Tests exercising the Punctuator model wrapper."""

    # (raw_input_text, expected_punctuated_output) pairs used by the tests
    # below to validate Punctuator.punctuate() against the pre-trained
    # Demo-Europarl-EN model.
    samples = [
        (
            'mary had a little lamb its fleece was white as snow and anywhere that mary went the lamb was sure to go',
            'Mary had a little lamb, its fleece was white as snow and anywhere that mary went, the lamb was sure to go.'
        ),
        (
            "they say it's only as cold as it feels in your mind i don't buy into that theory much what do you think",
            "They say it's only as cold as it feels in your mind. I don't buy into that theory much. What do you think."
        ),
        (
            "he's a do me a favor go home to your wife",
            "He's a do me: a favor go home to your wife.",
        ),
        (
            "they'll even negotiate your rate with the insurance company",
            "They'll even negotiate your rate with the insurance company.",
        ),
        (
            "for me i wanted to get into commentary some sort of way i didn't know how to do that so i left the firm and i started a business",
            "For me, I wanted to get into commentary some sort of way. I didn't know how to do that. So I left the firm and I started a business."
        ),
    ]
    def test_punctuate(self):
        """End-to-end check: load the pre-trained model from a path, punctuate
        the sample inputs, round-trip the Punctuator through save()/load(),
        and confirm the reloaded instance produces identical output."""
        # Create temp directory for downloading data.
        d = tempfile.mkdtemp()
        os.makedirs(punc.PUNCTUATOR_DATA_DIR, exist_ok=True)
        model_file = os.path.join(punc.PUNCTUATOR_DATA_DIR, 'Demo-Europarl-EN.pcl')
        print('Temp dir:', d)
        os.chdir(d)
        try:
            # Download pre-trained model.
            if not os.path.isfile(model_file):
                model_file = download_model()
            print('Model file:', model_file)

            # Create punctuator.
            t0 = time.time()
            p = Punctuator(model_file=model_file)
            td = time.time() - t0
            print('Loaded in %s seconds from path.' % td)

            # Add punctuation.
            for input_text, expect_output_text in self.samples:
                # NOTE(review): fout is never used -- looks like leftover
                # scaffolding; confirm before removing.
                fout = StringIO()
                actual_output_text = p.punctuate(input_text)
                print('expect_output_text:', expect_output_text)
                print('actual_output_text:', actual_output_text)
                self.assertEqual(actual_output_text, expect_output_text)

            # Serialize the entire punctuator, not just the model.
            print('Writing...')
            t0 = time.time()
            fn = 'data.pickle'
            p.save(fn)
            td = time.time() - t0
            print('Wrote in %s seconds.' % td)

            # Load puncutator.
            print('Loading...')
            t0 = time.time()
            p2 = Punctuator.load(fn)
            td = time.time() - t0
            print('Loaded in %s seconds.' % td)

            # Confirm punctuations match previous.
            for input_text, expect_output_text in self.samples:
                # NOTE(review): fout is unused here as well.
                fout = StringIO()
                actual_output_text = p2.punctuate(input_text)
                print('expect_output_text:', expect_output_text)
                print('actual_output_text:', actual_output_text)
                self.assertEqual(actual_output_text, expect_output_text)
        finally:
            # Always remove the temp directory, even on assertion failure.
            shutil.rmtree(d)
def test_punctuate_stream(self):
# Create temp directory for downloading data.
d = tempfile.mkdtemp()
os.makedirs(punc.PUNCTUATOR_DATA_DIR, exist_ok=True)
model_file = os.path.join(punc.PUNCTUATOR_DATA_DIR, 'Demo-Europarl-EN.pcl')
print('Temp dir:', d)
os.chdir(d)
try:
# Download pre-trained model.
if not os.path.isfile(model_file):
model_file = download_model()
print('Model file:', model_file)
# Check if file can be read in as bytes
infile = open(model_file, 'rb')
data = infile.read()
t0 = time.time()
p = Punctuator(data)
td = time.time() - t0
print('Loaded in %s seconds as bytes.' % td)
# Add punctuation.
for input_text, expect_output_text in self.samples:
fout = StringIO()
actual_output_text = p.punctuate(input_text)
print('expect_output_text:', expect_output_text)
print('actual_output_text:', actual_output_text)
self.assertEqual(actual_output_text, expect_output_text)
# Serialize the entire punctuator, not just the model.
print('Writing...')
t0 = time.time()
fn = 'data.pickle'
p.save(fn)
td = time.time() - t0
print('Wrote in %s seconds.' % td)
# Load puncutator.
print('Loading...')
t0 = time.time()
p2 = Punctuator.load(fn)
td = time.time() - t0
print('Loaded in %s seconds.' % td)
# Confirm punctuations match previous.
for input_text, expect_output_text in self.samples:
fout = StringIO()
actual_output_text = p2.punctuate(input_text)
print('expect_output_text:', expect_output_text)
print('actual_output_text:', actual_output_text)
self.assertEqual(actual_output_text, expect_output_text)
finally:
shutil.rmtree(d)
# Allow running this test module directly, in addition to via a test runner.
if __name__ == '__main__':
    unittest.main()
| 36.705882
| 146
| 0.570691
|
from __future__ import absolute_import
import time
import os
import unittest
import tempfile
import shutil
from io import StringIO
from . import punc
from .punc import Punctuator, download_model
class Tests(unittest.TestCase):
    """Integration tests for the punctuator.

    Downloads the pre-trained Demo-Europarl-EN model on first run, then
    checks that punctuation restoration matches known-good outputs, both
    when the model is loaded from a file path and from raw bytes, and that
    the behavior survives a save/load round trip.
    """

    # (raw_text, expected_punctuated_text) pairs.
    samples = [
        (
            'mary had a little lamb its fleece was white as snow and anywhere that mary went the lamb was sure to go',
            'Mary had a little lamb, its fleece was white as snow and anywhere that mary went, the lamb was sure to go.'
        ),
        (
            "they say it's only as cold as it feels in your mind i don't buy into that theory much what do you think",
            "They say it's only as cold as it feels in your mind. I don't buy into that theory much. What do you think."
        ),
        (
            "he's a do me a favor go home to your wife",
            "He's a do me: a favor go home to your wife.",
        ),
        (
            "they'll even negotiate your rate with the insurance company",
            "They'll even negotiate your rate with the insurance company.",
        ),
        (
            "for me i wanted to get into commentary some sort of way i didn't know how to do that so i left the firm and i started a business",
            "For me, I wanted to get into commentary some sort of way. I didn't know how to do that. So I left the firm and I started a business."
        ),
    ]

    def _check_samples(self, punctuator):
        """Assert that the given punctuator reproduces every expected sample."""
        for input_text, expect_output_text in self.samples:
            actual_output_text = punctuator.punctuate(input_text)
            print('expect_output_text:', expect_output_text)
            print('actual_output_text:', actual_output_text)
            self.assertEqual(actual_output_text, expect_output_text)

    def _check_round_trip(self, punctuator):
        """Serialize the entire punctuator, reload it, and re-check all samples."""
        print('Writing...')
        t0 = time.time()
        fn = 'data.pickle'
        punctuator.save(fn)
        td = time.time() - t0
        print('Wrote in %s seconds.' % td)
        # Load punctuator back from the pickle and confirm identical output.
        print('Loading...')
        t0 = time.time()
        p2 = Punctuator.load(fn)
        td = time.time() - t0
        print('Loaded in %s seconds.' % td)
        self._check_samples(p2)

    def test_punctuate(self):
        """Load the model from a file path and verify punctuation output."""
        # Work inside a temp directory so serialized artifacts are cleaned up.
        d = tempfile.mkdtemp()
        os.makedirs(punc.PUNCTUATOR_DATA_DIR, exist_ok=True)
        model_file = os.path.join(punc.PUNCTUATOR_DATA_DIR, 'Demo-Europarl-EN.pcl')
        print('Temp dir:', d)
        os.chdir(d)
        try:
            # Download pre-trained model on first run.
            if not os.path.isfile(model_file):
                model_file = download_model()
            print('Model file:', model_file)

            # Create punctuator from a path on disk.
            t0 = time.time()
            p = Punctuator(model_file=model_file)
            td = time.time() - t0
            print('Loaded in %s seconds from path.' % td)

            self._check_samples(p)
            self._check_round_trip(p)
        finally:
            shutil.rmtree(d)

    def test_punctuate_stream(self):
        """Load the model from raw bytes and verify punctuation output."""
        d = tempfile.mkdtemp()
        os.makedirs(punc.PUNCTUATOR_DATA_DIR, exist_ok=True)
        model_file = os.path.join(punc.PUNCTUATOR_DATA_DIR, 'Demo-Europarl-EN.pcl')
        print('Temp dir:', d)
        os.chdir(d)
        try:
            # Download pre-trained model on first run.
            if not os.path.isfile(model_file):
                model_file = download_model()
            print('Model file:', model_file)

            # Fix: close the model file deterministically instead of leaking
            # the handle.
            with open(model_file, 'rb') as infile:
                data = infile.read()

            t0 = time.time()
            p = Punctuator(data)
            td = time.time() - t0
            print('Loaded in %s seconds as bytes.' % td)

            self._check_samples(p)
            self._check_round_trip(p)
        finally:
            shutil.rmtree(d)
# Allow running this test module directly, in addition to via a test runner.
if __name__ == '__main__':
    unittest.main()
| true
| true
|
f71a9c2559fc2833e574b56aa245554739a58e09
| 8,913
|
py
|
Python
|
sleekxmpp/features/feature_mechanisms/mechanisms.py
|
RedbackThomson/LoLShadow
|
c47dd2826b43f47663eed55bb3f8a6866609c5b4
|
[
"MIT"
] | 1
|
2015-09-04T05:52:45.000Z
|
2015-09-04T05:52:45.000Z
|
sleekxmpp/features/feature_mechanisms/mechanisms.py
|
RedbackThomson/LoLShadow
|
c47dd2826b43f47663eed55bb3f8a6866609c5b4
|
[
"MIT"
] | null | null | null |
sleekxmpp/features/feature_mechanisms/mechanisms.py
|
RedbackThomson/LoLShadow
|
c47dd2826b43f47663eed55bb3f8a6866609c5b4
|
[
"MIT"
] | null | null | null |
"""
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2011 Nathanael C. Fritz
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
import ssl
import logging
from sleekxmpp.util import sasl
from sleekxmpp.util.stringprep_profiles import StringPrepError
from sleekxmpp.stanza import StreamFeatures
from sleekxmpp.xmlstream import RestartStream, register_stanza_plugin
from sleekxmpp.plugins import BasePlugin
from sleekxmpp.xmlstream.matcher import MatchXPath
from sleekxmpp.xmlstream.handler import Callback
from sleekxmpp.features.feature_mechanisms import stanza
log = logging.getLogger(__name__)
class FeatureMechanisms(BasePlugin):
    """Stream feature plugin implementing SASL authentication (RFC 6120 §6)."""

    name = 'feature_mechanisms'
    description = 'RFC 6120: Stream Feature: SASL'
    dependencies = set()
    stanza = stanza

    default_config = {
        'use_mech': None,
        'use_mechs': None,
        'min_mech': None,
        'sasl_callback': None,
        'security_callback': None,
        'encrypted_plain': True,
        'unencrypted_plain': False,
        'unencrypted_digest': False,
        'unencrypted_cram': False,
        'unencrypted_scram': True,
        'order': 100
    }

    def plugin_init(self):
        """Register SASL stanzas, stream handlers, and the stream feature."""
        if self.sasl_callback is None:
            self.sasl_callback = self._default_credentials

        if self.security_callback is None:
            self.security_callback = self._default_security

        # With no username available, fall back to ANONYMOUS authentication.
        creds = self.sasl_callback(set(['username']), set())
        if not self.use_mech and not creds['username']:
            self.use_mech = 'ANONYMOUS'

        self.mech = None
        self.mech_list = set()
        self.attempted_mechs = set()

        register_stanza_plugin(StreamFeatures, stanza.Mechanisms)

        self.xmpp.register_stanza(stanza.Success)
        self.xmpp.register_stanza(stanza.Failure)
        self.xmpp.register_stanza(stanza.Auth)
        self.xmpp.register_stanza(stanza.Challenge)
        self.xmpp.register_stanza(stanza.Response)
        self.xmpp.register_stanza(stanza.Abort)

        self.xmpp.register_handler(
                Callback('SASL Success',
                         MatchXPath(stanza.Success.tag_name()),
                         self._handle_success,
                         instream=True))
        self.xmpp.register_handler(
                Callback('SASL Failure',
                         MatchXPath(stanza.Failure.tag_name()),
                         self._handle_fail,
                         instream=True))
        self.xmpp.register_handler(
                Callback('SASL Challenge',
                         MatchXPath(stanza.Challenge.tag_name()),
                         self._handle_challenge))

        self.xmpp.register_feature('mechanisms',
                self._handle_sasl_auth,
                restart=True,
                order=self.order)

    def _default_credentials(self, required_values, optional_values):
        """Build the credential dict requested by the chosen SASL mechanism.

        Values not explicitly configured fall back to fields of the
        requested JID (user, domain) or to sensible defaults.
        """
        creds = self.xmpp.credentials
        result = {}
        values = required_values.union(optional_values)
        for value in values:
            if value == 'username':
                result[value] = creds.get('username', self.xmpp.requested_jid.user)
            elif value == 'email':
                jid = self.xmpp.requested_jid.bare
                result[value] = creds.get('email', jid)
            elif value == 'channel_binding':
                if hasattr(self.xmpp.socket, 'get_channel_binding'):
                    result[value] = self.xmpp.socket.get_channel_binding()
                else:
                    log.debug("Channel binding not supported.")
                    log.debug("Use Python 3.3+ for channel binding and " + \
                              "SCRAM-SHA-1-PLUS support")
                    result[value] = None
            elif value == 'host':
                result[value] = creds.get('host', self.xmpp.requested_jid.domain)
            elif value == 'realm':
                result[value] = creds.get('realm', self.xmpp.requested_jid.domain)
            elif value == 'service-name':
                result[value] = creds.get('service-name', self.xmpp._service_name)
            elif value == 'service':
                result[value] = creds.get('service', 'xmpp')
            elif value in creds:
                result[value] = creds[value]
        return result

    def _default_security(self, values):
        """Report security facts (e.g. link encryption) to the SASL layer."""
        result = {}
        for value in values:
            if value == 'encrypted':
                # The link counts as encrypted after STARTTLS, or when the
                # underlying socket is already an SSL socket (direct TLS).
                if 'starttls' in self.xmpp.features:
                    result[value] = True
                elif isinstance(self.xmpp.socket, ssl.SSLSocket):
                    result[value] = True
                else:
                    result[value] = False
            else:
                result[value] = self.config.get(value, False)
        return result

    def _handle_sasl_auth(self, features):
        """
        Handle authenticating using SASL.

        Arguments:
            features -- The stream features stanza.
        """
        if 'mechanisms' in self.xmpp.features:
            # SASL authentication has already succeeded, but the
            # server has incorrectly offered it again.
            return False

        enforce_limit = False
        limited_mechs = self.use_mechs

        if limited_mechs is None:
            limited_mechs = set()
        elif limited_mechs and not isinstance(limited_mechs, set):
            limited_mechs = set(limited_mechs)
            enforce_limit = True

        if self.use_mech:
            limited_mechs.add(self.use_mech)
            enforce_limit = True

        if enforce_limit:
            self.use_mechs = limited_mechs

        self.mech_list = set(features['mechanisms'])
        return self._send_auth()

    def _send_auth(self):
        """Choose the next untried SASL mechanism and send the <auth/> request."""
        mech_list = self.mech_list - self.attempted_mechs
        try:
            self.mech = sasl.choose(mech_list,
                                    self.sasl_callback,
                                    self.security_callback,
                                    limit=self.use_mechs,
                                    min_mech=self.min_mech)
        except sasl.SASLNoAppropriateMechanism:
            log.error("No appropriate login method.")
            self.xmpp.event("no_auth", direct=True)
            self.xmpp.event("failed_auth", direct=True)
            self.attempted_mechs = set()
            return self.xmpp.disconnect()
        except StringPrepError:
            log.exception("A credential value did not pass SASLprep.")
            # Fix: abort here instead of falling through and building an
            # <auth/> stanza from a stale (or still-None) self.mech.
            return self.xmpp.disconnect()

        resp = stanza.Auth(self.xmpp)
        resp['mechanism'] = self.mech.name
        try:
            resp['value'] = self.mech.process()
        except sasl.SASLCancelled:
            self.attempted_mechs.add(self.mech.name)
            self._send_auth()
        except sasl.SASLFailed:
            self.attempted_mechs.add(self.mech.name)
            self._send_auth()
        except sasl.SASLMutualAuthFailed:
            log.error("Mutual authentication failed! " + \
                      "A security breach is possible.")
            self.attempted_mechs.add(self.mech.name)
            self.xmpp.disconnect()
        else:
            resp.send(now=True)

        return True

    def _handle_challenge(self, stanza):
        """SASL challenge received. Process and send response."""
        resp = self.stanza.Response(self.xmpp)
        try:
            resp['value'] = self.mech.process(stanza['value'])
        except sasl.SASLCancelled:
            self.stanza.Abort(self.xmpp).send()
        except sasl.SASLFailed:
            self.stanza.Abort(self.xmpp).send()
        except sasl.SASLMutualAuthFailed:
            log.error("Mutual authentication failed! " + \
                      "A security breach is possible.")
            self.attempted_mechs.add(self.mech.name)
            self.xmpp.disconnect()
        else:
            resp.send(now=True)

    def _handle_success(self, stanza):
        """SASL authentication succeeded. Restart the stream."""
        try:
            final = self.mech.process(stanza['value'])
        except sasl.SASLMutualAuthFailed:
            log.error("Mutual authentication failed! " + \
                      "A security breach is possible.")
            self.attempted_mechs.add(self.mech.name)
            self.xmpp.disconnect()
        else:
            self.attempted_mechs = set()
            self.xmpp.authenticated = True
            self.xmpp.features.add('mechanisms')
            self.xmpp.event('auth_success', stanza, direct=True)
            raise RestartStream()

    def _handle_fail(self, stanza):
        """SASL authentication failed. Record the mechanism and retry."""
        self.attempted_mechs.add(self.mech.name)
        log.info("Authentication failed: %s", stanza['condition'])
        self.xmpp.event("failed_auth", stanza, direct=True)
        self._send_auth()
        return True
| 36.679012
| 83
| 0.58151
|
import ssl
import logging
from sleekxmpp.util import sasl
from sleekxmpp.util.stringprep_profiles import StringPrepError
from sleekxmpp.stanza import StreamFeatures
from sleekxmpp.xmlstream import RestartStream, register_stanza_plugin
from sleekxmpp.plugins import BasePlugin
from sleekxmpp.xmlstream.matcher import MatchXPath
from sleekxmpp.xmlstream.handler import Callback
from sleekxmpp.features.feature_mechanisms import stanza
log = logging.getLogger(__name__)
class FeatureMechanisms(BasePlugin):
    """Stream feature plugin implementing SASL authentication (RFC 6120 §6)."""

    name = 'feature_mechanisms'
    description = 'RFC 6120: Stream Feature: SASL'
    dependencies = set()
    stanza = stanza

    default_config = {
        'use_mech': None,
        'use_mechs': None,
        'min_mech': None,
        'sasl_callback': None,
        'security_callback': None,
        'encrypted_plain': True,
        'unencrypted_plain': False,
        'unencrypted_digest': False,
        'unencrypted_cram': False,
        'unencrypted_scram': True,
        'order': 100
    }

    def plugin_init(self):
        """Register SASL stanzas, stream handlers, and the stream feature."""
        if self.sasl_callback is None:
            self.sasl_callback = self._default_credentials

        if self.security_callback is None:
            self.security_callback = self._default_security

        # With no username available, fall back to ANONYMOUS authentication.
        creds = self.sasl_callback(set(['username']), set())
        if not self.use_mech and not creds['username']:
            self.use_mech = 'ANONYMOUS'

        self.mech = None
        self.mech_list = set()
        self.attempted_mechs = set()

        register_stanza_plugin(StreamFeatures, stanza.Mechanisms)

        self.xmpp.register_stanza(stanza.Success)
        self.xmpp.register_stanza(stanza.Failure)
        self.xmpp.register_stanza(stanza.Auth)
        self.xmpp.register_stanza(stanza.Challenge)
        self.xmpp.register_stanza(stanza.Response)
        self.xmpp.register_stanza(stanza.Abort)

        self.xmpp.register_handler(
                Callback('SASL Success',
                         MatchXPath(stanza.Success.tag_name()),
                         self._handle_success,
                         instream=True))
        self.xmpp.register_handler(
                Callback('SASL Failure',
                         MatchXPath(stanza.Failure.tag_name()),
                         self._handle_fail,
                         instream=True))
        self.xmpp.register_handler(
                Callback('SASL Challenge',
                         MatchXPath(stanza.Challenge.tag_name()),
                         self._handle_challenge))

        self.xmpp.register_feature('mechanisms',
                self._handle_sasl_auth,
                restart=True,
                order=self.order)

    def _default_credentials(self, required_values, optional_values):
        """Build the credential dict requested by the chosen SASL mechanism.

        Values not explicitly configured fall back to fields of the
        requested JID (user, domain) or to sensible defaults.
        """
        creds = self.xmpp.credentials
        result = {}
        values = required_values.union(optional_values)
        for value in values:
            if value == 'username':
                result[value] = creds.get('username', self.xmpp.requested_jid.user)
            elif value == 'email':
                jid = self.xmpp.requested_jid.bare
                result[value] = creds.get('email', jid)
            elif value == 'channel_binding':
                if hasattr(self.xmpp.socket, 'get_channel_binding'):
                    result[value] = self.xmpp.socket.get_channel_binding()
                else:
                    log.debug("Channel binding not supported.")
                    log.debug("Use Python 3.3+ for channel binding and " + \
                              "SCRAM-SHA-1-PLUS support")
                    result[value] = None
            elif value == 'host':
                result[value] = creds.get('host', self.xmpp.requested_jid.domain)
            elif value == 'realm':
                result[value] = creds.get('realm', self.xmpp.requested_jid.domain)
            elif value == 'service-name':
                result[value] = creds.get('service-name', self.xmpp._service_name)
            elif value == 'service':
                result[value] = creds.get('service', 'xmpp')
            elif value in creds:
                result[value] = creds[value]
        return result

    def _default_security(self, values):
        """Report security facts (e.g. link encryption) to the SASL layer."""
        result = {}
        for value in values:
            if value == 'encrypted':
                # The link counts as encrypted after STARTTLS, or when the
                # underlying socket is already an SSL socket (direct TLS).
                if 'starttls' in self.xmpp.features:
                    result[value] = True
                elif isinstance(self.xmpp.socket, ssl.SSLSocket):
                    result[value] = True
                else:
                    result[value] = False
            else:
                result[value] = self.config.get(value, False)
        return result

    def _handle_sasl_auth(self, features):
        """Handle the SASL stream feature; returns the result of _send_auth."""
        if 'mechanisms' in self.xmpp.features:
            # SASL authentication has already succeeded, but the
            # server has incorrectly offered it again.
            return False

        enforce_limit = False
        limited_mechs = self.use_mechs

        if limited_mechs is None:
            limited_mechs = set()
        elif limited_mechs and not isinstance(limited_mechs, set):
            limited_mechs = set(limited_mechs)
            enforce_limit = True

        if self.use_mech:
            limited_mechs.add(self.use_mech)
            enforce_limit = True

        if enforce_limit:
            self.use_mechs = limited_mechs

        self.mech_list = set(features['mechanisms'])
        return self._send_auth()

    def _send_auth(self):
        """Choose the next untried SASL mechanism and send the <auth/> request."""
        mech_list = self.mech_list - self.attempted_mechs
        try:
            self.mech = sasl.choose(mech_list,
                                    self.sasl_callback,
                                    self.security_callback,
                                    limit=self.use_mechs,
                                    min_mech=self.min_mech)
        except sasl.SASLNoAppropriateMechanism:
            log.error("No appropriate login method.")
            self.xmpp.event("no_auth", direct=True)
            self.xmpp.event("failed_auth", direct=True)
            self.attempted_mechs = set()
            return self.xmpp.disconnect()
        except StringPrepError:
            log.exception("A credential value did not pass SASLprep.")
            # Fix: abort here instead of falling through and building an
            # <auth/> stanza from a stale (or still-None) self.mech.
            return self.xmpp.disconnect()

        resp = stanza.Auth(self.xmpp)
        resp['mechanism'] = self.mech.name
        try:
            resp['value'] = self.mech.process()
        except sasl.SASLCancelled:
            self.attempted_mechs.add(self.mech.name)
            self._send_auth()
        except sasl.SASLFailed:
            self.attempted_mechs.add(self.mech.name)
            self._send_auth()
        except sasl.SASLMutualAuthFailed:
            log.error("Mutual authentication failed! " + \
                      "A security breach is possible.")
            self.attempted_mechs.add(self.mech.name)
            self.xmpp.disconnect()
        else:
            resp.send(now=True)

        return True

    def _handle_challenge(self, stanza):
        """SASL challenge received. Process and send response."""
        resp = self.stanza.Response(self.xmpp)
        try:
            resp['value'] = self.mech.process(stanza['value'])
        except sasl.SASLCancelled:
            self.stanza.Abort(self.xmpp).send()
        except sasl.SASLFailed:
            self.stanza.Abort(self.xmpp).send()
        except sasl.SASLMutualAuthFailed:
            log.error("Mutual authentication failed! " + \
                      "A security breach is possible.")
            self.attempted_mechs.add(self.mech.name)
            self.xmpp.disconnect()
        else:
            resp.send(now=True)

    def _handle_success(self, stanza):
        """SASL authentication succeeded. Restart the stream."""
        try:
            final = self.mech.process(stanza['value'])
        except sasl.SASLMutualAuthFailed:
            log.error("Mutual authentication failed! " + \
                      "A security breach is possible.")
            self.attempted_mechs.add(self.mech.name)
            self.xmpp.disconnect()
        else:
            self.attempted_mechs = set()
            self.xmpp.authenticated = True
            self.xmpp.features.add('mechanisms')
            self.xmpp.event('auth_success', stanza, direct=True)
            raise RestartStream()

    def _handle_fail(self, stanza):
        """SASL authentication failed. Record the mechanism and retry."""
        self.attempted_mechs.add(self.mech.name)
        log.info("Authentication failed: %s", stanza['condition'])
        self.xmpp.event("failed_auth", stanza, direct=True)
        self._send_auth()
        return True
| true
| true
|
f71a9c42ba701b954c3fcb36fd4b72ea81d1eb78
| 7,255
|
py
|
Python
|
duke-cs671-fall21-coupon-recommendation/outputs/rules/RF/20_features/numtrees_8/rule_1.py
|
apcarrik/kaggle
|
6e2d4db58017323e7ba5510bcc2598e01a4ee7bf
|
[
"MIT"
] | null | null | null |
duke-cs671-fall21-coupon-recommendation/outputs/rules/RF/20_features/numtrees_8/rule_1.py
|
apcarrik/kaggle
|
6e2d4db58017323e7ba5510bcc2598e01a4ee7bf
|
[
"MIT"
] | null | null | null |
duke-cs671-fall21-coupon-recommendation/outputs/rules/RF/20_features/numtrees_8/rule_1.py
|
apcarrik/kaggle
|
6e2d4db58017323e7ba5510bcc2598e01a4ee7bf
|
[
"MIT"
] | null | null | null |
def findDecision(obj): #obj[0]: Driving_to, obj[1]: Passanger, obj[2]: Weather, obj[3]: Temperature, obj[4]: Time, obj[5]: Coupon, obj[6]: Coupon_validity, obj[7]: Gender, obj[8]: Age, obj[9]: Maritalstatus, obj[10]: Children, obj[11]: Education, obj[12]: Occupation, obj[13]: Income, obj[14]: Bar, obj[15]: Coffeehouse, obj[16]: Restaurantlessthan20, obj[17]: Restaurant20to50, obj[18]: Direction_same, obj[19]: Distance
    """Auto-generated decision-tree rule (single tree of a random forest).

    Walks nested feature-threshold tests over the numerically encoded
    feature vector ``obj`` (index meanings listed in the header comment)
    and returns the class label as the string 'True' or 'False'.  The
    JSON comments record the split chosen at each node.

    NOTE(review): the feature encodings/units are defined by the tree
    generator, not visible here -- confirm against the training pipeline.
    """
    # {"feature": "Age", "instances": 127, "metric_value": 0.9978, "depth": 1}
    if obj[8]>1:
        # {"feature": "Education", "instances": 88, "metric_value": 0.9865, "depth": 2}
        if obj[11]<=3:
            # {"feature": "Coupon", "instances": 84, "metric_value": 0.9737, "depth": 3}
            if obj[5]>0:
                # {"feature": "Direction_same", "instances": 73, "metric_value": 0.9934, "depth": 4}
                if obj[18]<=0:
                    # {"feature": "Occupation", "instances": 63, "metric_value": 0.9691, "depth": 5}
                    if obj[12]>1:
                        # {"feature": "Bar", "instances": 57, "metric_value": 0.9348, "depth": 6}
                        if obj[14]<=2.0:
                            # {"feature": "Restaurantlessthan20", "instances": 52, "metric_value": 0.8905, "depth": 7}
                            if obj[16]>1.0:
                                # {"feature": "Income", "instances": 46, "metric_value": 0.8281, "depth": 8}
                                if obj[13]<=6:
                                    # {"feature": "Restaurant20to50", "instances": 43, "metric_value": 0.8542, "depth": 9}
                                    if obj[17]<=1.0:
                                        # {"feature": "Driving_to", "instances": 28, "metric_value": 0.7496, "depth": 10}
                                        if obj[0]<=1:
                                            # {"feature": "Maritalstatus", "instances": 21, "metric_value": 0.5917, "depth": 11}
                                            if obj[9]>0:
                                                return 'False'
                                            elif obj[9]<=0:
                                                # {"feature": "Passanger", "instances": 8, "metric_value": 0.9544, "depth": 12}
                                                if obj[1]>0:
                                                    # {"feature": "Coupon_validity", "instances": 7, "metric_value": 0.8631, "depth": 13}
                                                    if obj[6]>0:
                                                        # {"feature": "Temperature", "instances": 4, "metric_value": 1.0, "depth": 14}
                                                        if obj[3]>55:
                                                            # {"feature": "Coffeehouse", "instances": 3, "metric_value": 0.9183, "depth": 15}
                                                            if obj[15]>1.0:
                                                                return 'False'
                                                            elif obj[15]<=1.0:
                                                                return 'True'
                                                            else: return 'True'
                                                        elif obj[3]<=55:
                                                            return 'True'
                                                        else: return 'True'
                                                    elif obj[6]<=0:
                                                        return 'False'
                                                    else: return 'False'
                                                elif obj[1]<=0:
                                                    return 'True'
                                                else: return 'True'
                                            else: return 'False'
                                        elif obj[0]>1:
                                            # {"feature": "Coupon_validity", "instances": 7, "metric_value": 0.9852, "depth": 11}
                                            if obj[6]>0:
                                                return 'False'
                                            elif obj[6]<=0:
                                                return 'True'
                                            else: return 'True'
                                        else: return 'False'
                                    elif obj[17]>1.0:
                                        # {"feature": "Time", "instances": 15, "metric_value": 0.971, "depth": 10}
                                        if obj[4]<=1:
                                            # {"feature": "Maritalstatus", "instances": 8, "metric_value": 0.5436, "depth": 11}
                                            if obj[9]<=1:
                                                return 'False'
                                            elif obj[9]>1:
                                                # {"feature": "Weather", "instances": 2, "metric_value": 1.0, "depth": 12}
                                                if obj[2]<=0:
                                                    return 'False'
                                                elif obj[2]>0:
                                                    return 'True'
                                                else: return 'True'
                                            else: return 'False'
                                        elif obj[4]>1:
                                            # {"feature": "Coffeehouse", "instances": 7, "metric_value": 0.8631, "depth": 11}
                                            if obj[15]>0.0:
                                                return 'True'
                                            elif obj[15]<=0.0:
                                                # {"feature": "Coupon_validity", "instances": 3, "metric_value": 0.9183, "depth": 12}
                                                if obj[6]>0:
                                                    return 'False'
                                                elif obj[6]<=0:
                                                    return 'True'
                                                else: return 'True'
                                            else: return 'False'
                                        else: return 'True'
                                    else: return 'False'
                                elif obj[13]>6:
                                    return 'False'
                                else: return 'False'
                            elif obj[16]<=1.0:
                                # {"feature": "Maritalstatus", "instances": 6, "metric_value": 0.9183, "depth": 8}
                                if obj[9]<=0:
                                    return 'True'
                                elif obj[9]>0:
                                    # {"feature": "Temperature", "instances": 3, "metric_value": 0.9183, "depth": 9}
                                    if obj[3]>30:
                                        return 'False'
                                    elif obj[3]<=30:
                                        return 'True'
                                    else: return 'True'
                                else: return 'False'
                            else: return 'True'
                        elif obj[14]>2.0:
                            # {"feature": "Time", "instances": 5, "metric_value": 0.7219, "depth": 7}
                            if obj[4]<=2:
                                return 'True'
                            elif obj[4]>2:
                                return 'False'
                            else: return 'False'
                        else: return 'True'
                    elif obj[12]<=1:
                        # {"feature": "Children", "instances": 6, "metric_value": 0.65, "depth": 6}
                        if obj[10]>0:
                            return 'True'
                        elif obj[10]<=0:
                            return 'False'
                        else: return 'False'
                    else: return 'True'
                elif obj[18]>0:
                    # {"feature": "Occupation", "instances": 10, "metric_value": 0.7219, "depth": 5}
                    if obj[12]>5:
                        return 'True'
                    elif obj[12]<=5:
                        # {"feature": "Driving_to", "instances": 4, "metric_value": 1.0, "depth": 6}
                        if obj[0]<=1:
                            # {"feature": "Maritalstatus", "instances": 3, "metric_value": 0.9183, "depth": 7}
                            if obj[9]<=1:
                                return 'False'
                            elif obj[9]>1:
                                return 'True'
                            else: return 'True'
                        elif obj[0]>1:
                            return 'True'
                        else: return 'True'
                    else: return 'True'
                else: return 'True'
            elif obj[5]<=0:
                # {"feature": "Passanger", "instances": 11, "metric_value": 0.4395, "depth": 4}
                if obj[1]>0:
                    return 'False'
                elif obj[1]<=0:
                    return 'True'
                else: return 'True'
            else: return 'False'
        elif obj[11]>3:
            return 'True'
        else: return 'True'
    elif obj[8]<=1:
        # {"feature": "Restaurant20to50", "instances": 39, "metric_value": 0.8213, "depth": 2}
        if obj[17]<=1.0:
            # {"feature": "Occupation", "instances": 25, "metric_value": 0.5294, "depth": 3}
            if obj[12]<=20:
                # {"feature": "Income", "instances": 22, "metric_value": 0.2668, "depth": 4}
                if obj[13]<=6:
                    return 'True'
                elif obj[13]>6:
                    return 'False'
                else: return 'False'
            elif obj[12]>20:
                # {"feature": "Time", "instances": 3, "metric_value": 0.9183, "depth": 4}
                if obj[4]>0:
                    return 'False'
                elif obj[4]<=0:
                    return 'True'
                else: return 'True'
            else: return 'False'
        elif obj[17]>1.0:
            # {"feature": "Passanger", "instances": 14, "metric_value": 1.0, "depth": 3}
            if obj[1]<=2:
                # {"feature": "Income", "instances": 11, "metric_value": 0.9457, "depth": 4}
                if obj[13]>2:
                    # {"feature": "Coupon", "instances": 6, "metric_value": 0.9183, "depth": 5}
                    if obj[5]>2:
                        return 'False'
                    elif obj[5]<=2:
                        # {"feature": "Weather", "instances": 3, "metric_value": 0.9183, "depth": 6}
                        if obj[2]<=1:
                            return 'True'
                        elif obj[2]>1:
                            return 'False'
                        else: return 'False'
                    else: return 'True'
                elif obj[13]<=2:
                    return 'True'
                else: return 'True'
            elif obj[1]>2:
                return 'False'
            else: return 'False'
        else: return 'True'
    else: return 'True'
| 38.590426
| 421
| 0.513853
|
def findDecision(obj):
    """Auto-generated decision-tree rule (single tree of a random forest).

    Walks nested feature-threshold tests over the numerically encoded
    feature vector ``obj`` and returns the class label as the string
    'True' or 'False'.

    NOTE(review): index meanings (obj[0]=Driving_to ... obj[19]=Distance)
    and their encodings are defined by the tree generator, not visible
    here -- confirm against the training pipeline.
    """
    if obj[8]>1:
        if obj[11]<=3:
            if obj[5]>0:
                if obj[18]<=0:
                    if obj[12]>1:
                        if obj[14]<=2.0:
                            if obj[16]>1.0:
                                if obj[13]<=6:
                                    if obj[17]<=1.0:
                                        if obj[0]<=1:
                                            if obj[9]>0:
                                                return 'False'
                                            elif obj[9]<=0:
                                                if obj[1]>0:
                                                    if obj[6]>0:
                                                        if obj[3]>55:
                                                            if obj[15]>1.0:
                                                                return 'False'
                                                            elif obj[15]<=1.0:
                                                                return 'True'
                                                            else: return 'True'
                                                        elif obj[3]<=55:
                                                            return 'True'
                                                        else: return 'True'
                                                    elif obj[6]<=0:
                                                        return 'False'
                                                    else: return 'False'
                                                elif obj[1]<=0:
                                                    return 'True'
                                                else: return 'True'
                                            else: return 'False'
                                        elif obj[0]>1:
                                            if obj[6]>0:
                                                return 'False'
                                            elif obj[6]<=0:
                                                return 'True'
                                            else: return 'True'
                                        else: return 'False'
                                    elif obj[17]>1.0:
                                        if obj[4]<=1:
                                            if obj[9]<=1:
                                                return 'False'
                                            elif obj[9]>1:
                                                if obj[2]<=0:
                                                    return 'False'
                                                elif obj[2]>0:
                                                    return 'True'
                                                else: return 'True'
                                            else: return 'False'
                                        elif obj[4]>1:
                                            if obj[15]>0.0:
                                                return 'True'
                                            elif obj[15]<=0.0:
                                                if obj[6]>0:
                                                    return 'False'
                                                elif obj[6]<=0:
                                                    return 'True'
                                                else: return 'True'
                                            else: return 'False'
                                        else: return 'True'
                                    else: return 'False'
                                elif obj[13]>6:
                                    return 'False'
                                else: return 'False'
                            elif obj[16]<=1.0:
                                if obj[9]<=0:
                                    return 'True'
                                elif obj[9]>0:
                                    if obj[3]>30:
                                        return 'False'
                                    elif obj[3]<=30:
                                        return 'True'
                                    else: return 'True'
                                else: return 'False'
                            else: return 'True'
                        elif obj[14]>2.0:
                            if obj[4]<=2:
                                return 'True'
                            elif obj[4]>2:
                                return 'False'
                            else: return 'False'
                        else: return 'True'
                    elif obj[12]<=1:
                        if obj[10]>0:
                            return 'True'
                        elif obj[10]<=0:
                            return 'False'
                        else: return 'False'
                    else: return 'True'
                elif obj[18]>0:
                    if obj[12]>5:
                        return 'True'
                    elif obj[12]<=5:
                        if obj[0]<=1:
                            if obj[9]<=1:
                                return 'False'
                            elif obj[9]>1:
                                return 'True'
                            else: return 'True'
                        elif obj[0]>1:
                            return 'True'
                        else: return 'True'
                    else: return 'True'
                else: return 'True'
            elif obj[5]<=0:
                if obj[1]>0:
                    return 'False'
                elif obj[1]<=0:
                    return 'True'
                else: return 'True'
            else: return 'False'
        elif obj[11]>3:
            return 'True'
        else: return 'True'
    elif obj[8]<=1:
        if obj[17]<=1.0:
            if obj[12]<=20:
                if obj[13]<=6:
                    return 'True'
                elif obj[13]>6:
                    return 'False'
                else: return 'False'
            elif obj[12]>20:
                if obj[4]>0:
                    return 'False'
                elif obj[4]<=0:
                    return 'True'
                else: return 'True'
            else: return 'False'
        elif obj[17]>1.0:
            if obj[1]<=2:
                if obj[13]>2:
                    if obj[5]>2:
                        return 'False'
                    elif obj[5]<=2:
                        if obj[2]<=1:
                            return 'True'
                        elif obj[2]>1:
                            return 'False'
                        else: return 'False'
                    else: return 'True'
                elif obj[13]<=2:
                    return 'True'
                else: return 'True'
            elif obj[1]>2:
                return 'False'
            else: return 'False'
        else: return 'True'
    else: return 'True'
| true
| true
|
f71a9c674644f0d53c2687dddfa077e5ece93d13
| 62
|
py
|
Python
|
acq4/modules/TaskRunner/analysisModules/Photostim/__init__.py
|
aleonlein/acq4
|
4b1fcb9ad2c5e8d4595a2b9cf99d50ece0c0f555
|
[
"MIT"
] | 47
|
2015-01-05T16:18:10.000Z
|
2022-03-16T13:09:30.000Z
|
acq4/modules/TaskRunner/analysisModules/Photostim/__init__.py
|
aleonlein/acq4
|
4b1fcb9ad2c5e8d4595a2b9cf99d50ece0c0f555
|
[
"MIT"
] | 48
|
2015-04-19T16:51:41.000Z
|
2022-03-31T14:48:16.000Z
|
acq4/modules/TaskRunner/analysisModules/Photostim/__init__.py
|
sensapex/acq4
|
9561ba73caff42c609bd02270527858433862ad8
|
[
"MIT"
] | 32
|
2015-01-15T14:11:49.000Z
|
2021-07-15T13:44:52.000Z
|
from __future__ import print_function
from .Photostim import *
| 31
| 37
| 0.854839
|
from __future__ import print_function
from .Photostim import *
| true
| true
|
f71a9cbf524b1e94c7bb76e86a3a25344ade1dab
| 21,169
|
py
|
Python
|
cages/.shared/protocol_xmlrpc.py
|
targeted/pythomnic3k
|
c59f8c11302c0a568f45ec626ec6a0065527aa79
|
[
"BSD-3-Clause"
] | null | null | null |
cages/.shared/protocol_xmlrpc.py
|
targeted/pythomnic3k
|
c59f8c11302c0a568f45ec626ec6a0065527aa79
|
[
"BSD-3-Clause"
] | 7
|
2019-06-06T15:47:56.000Z
|
2019-06-15T18:09:30.000Z
|
cages/.shared/protocol_xmlrpc.py
|
targeted/pythomnic3k
|
c59f8c11302c0a568f45ec626ec6a0065527aa79
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
#-*- coding: iso-8859-1 -*-
################################################################################
#
# This module contains an implementation of XMLRPC interface/resource.
#
# Sample XMLRPC interface configuration (config_interface_xmlrpc_1.py):
#
# config = dict \
# (
# protocol = "xmlrpc", # meta
# request_timeout = None, # meta, optional
# listener_address = ("127.0.0.1", 8000), # tcp
# max_connections = 100, # tcp
# ssl_key_cert_file = None, # ssl, optional filename
# ssl_ca_cert_file = None, # ssl, optional filename
# ssl_ciphers = None, # ssl, optional str
# ssl_protocol = None, # ssl, optional "SSLv23", "TLSv1", "TLSv1_1", "TLSv1_2" or "TLS"
# response_encoding = "windows-1251", # http
# original_ip_header_fields = ("X-Forwarded-For", ), # http
# keep_alive_support = True, # http
# keep_alive_idle_timeout = 120.0, # http
# keep_alive_max_requests = 10, # http
# allow_none = False, # xmlrpc, Python-specific, optional
# )
#
# Sample processing module (interface_xmlrpc_1.py):
#
# def process_request(request, response):
# module, method = request["method"].split(".")
# args = request["args"]
# result = pmnc.__getattr__(module).__getattr__(method)(*args)
# response["result"] = result
#
# Sample XMLRPC resource configuration (config_resource_xmlrpc_1.py)
#
# config = dict \
# (
# protocol = "xmlrpc", # meta
# server_address = ("127.0.0.1", 8000), # tcp
# connect_timeout = 3.0, # tcp
# ssl_key_cert_file = None, # ssl, optional filename
# ssl_ca_cert_file = None, # ssl, optional filename
# ssl_ciphers = None, # ssl, optional str
# ssl_protocol = None, # ssl, optional "SSLv23", "TLSv1", "TLSv1_1", "TLSv1_2" or "TLS"
# ssl_server_hostname = None, # ssl, optional str
# ssl_ignore_hostname = False, # ssl, ignore certificate common/alt name name mismatch
# extra_headers = { "Authorization": "Basic dXNlcjpwYXNz" }, # http
# http_version = "HTTP/1.1", # http
# server_uri = "/xmlrpc", # xmlrpc
# request_encoding = "windows-1251", # xmlrpc
# allow_none = False, # xmlrpc, Python-specific, optional
# )
#
# Sample resource usage (anywhere):
#
# xa = pmnc.transaction.create()
# xa.xmlrpc_1.Module.Method(*args)
# result = xa.execute()[0]
#
# or if the only transaction participant:
#
# result = pmnc.transaction.xmlrpc_1.Module.Method(*args)
#
# Pythomnic3k project
# (c) 2005-2019, Dmitry Dvoinikov <dmitry@targeted.org>
# Distributed under BSD license
#
###############################################################################
__all__ = [ "Interface", "Resource", "process_http_request" ]
###############################################################################
import os; from os import path as os_path
import xmlrpc.client; from xmlrpc.client import loads, dumps, Fault
if __name__ == "__main__": # add pythomnic/lib to sys.path
import os; import sys
main_module_dir = os.path.dirname(sys.modules["__main__"].__file__) or os.getcwd()
sys.path.insert(0, os.path.normpath(os.path.join(main_module_dir, "..", "..", "lib")))
import typecheck; from typecheck import typecheck, typecheck_with_exceptions, \
optional, tuple_of, dict_of, callable, one_of
import exc_string; from exc_string import exc_string
import pmnc.resource_pool; from pmnc.resource_pool import TransactionalResource, ResourceError
###############################################################################
class Interface: # XMLRPC interface built on top of HTTP interface
    """Pmnc interface that serves XMLRPC calls over HTTP.

    All transport concerns (TCP listening, SSL, keep-alive) are delegated to
    an underlying pmnc.protocol_http.Interface; this class only installs the
    XMLRPC marshaling layer (process_http_request) on top of it.
    """

    @typecheck
    def __init__(self, name: str, *,
                 listener_address: (str, int),
                 max_connections: int,
                 ssl_key_cert_file: optional(os_path.isfile),
                 ssl_ca_cert_file: optional(os_path.isfile),
                 ssl_ciphers: optional(str) = None,
                 ssl_protocol: optional(one_of("SSLv23", "TLSv1", "TLSv1_1", "TLSv1_2", "TLS")) = None,
                 response_encoding: str,
                 original_ip_header_fields: tuple_of(str),
                 keep_alive_support: bool,
                 keep_alive_idle_timeout: float,
                 keep_alive_max_requests: int,
                 request_timeout: optional(float) = None,
                 allow_none: optional(bool) = False,
                 **kwargs): # this kwargs allows for extra application-specific
                            # settings in config_interface_xmlrpc_X.py

        # create an instance of underlying HTTP interface
        request_timeout = request_timeout or \
                          pmnc.config_interfaces.get("request_timeout") # this is now static

        self._http_interface = \
            pmnc.protocol_http.Interface(name,
                                         listener_address = listener_address,
                                         max_connections = max_connections,
                                         ssl_key_cert_file = ssl_key_cert_file,
                                         ssl_ca_cert_file = ssl_ca_cert_file,
                                         ssl_ciphers = ssl_ciphers,
                                         ssl_protocol = ssl_protocol,
                                         response_encoding = response_encoding,
                                         original_ip_header_fields = original_ip_header_fields,
                                         allowed_methods = ("POST", ), # XMLRPC is POST-only
                                         keep_alive_support = keep_alive_support,
                                         keep_alive_idle_timeout = keep_alive_idle_timeout,
                                         keep_alive_max_requests = keep_alive_max_requests,
                                         gzip_content_types = (), # no response compression for XMLRPC
                                         request_timeout = request_timeout)

        # override the default process_http_request method of the created HTTP interface,
        # having the HTTP handler method to be called through a pmnc call allows
        # online modifications to this module, when it is reloaded
        if pmnc.request.self_test == __name__: # self-test
            # in self-test mode the handler is injected through kwargs
            self.process_xmlrpc_request = kwargs["process_xmlrpc_request"]
        self._http_interface.process_http_request = \
            lambda http_request, http_response: \
                pmnc.__getattr__(__name__).process_http_request(http_request, http_response,
                                                                self.process_xmlrpc_request,
                                                                response_encoding = response_encoding,
                                                                allow_none = allow_none or False)

    # read-only views of the wrapped HTTP interface's identity
    name = property(lambda self: self._http_interface.name)
    listener_address = property(lambda self: self._http_interface.listener_address)

    ###################################

    def start(self):
        """Start the underlying HTTP listener."""
        self._http_interface.start()

    def cease(self):
        """Stop accepting new connections; existing ones are drained."""
        self._http_interface.cease()

    def stop(self):
        """Shut down the underlying HTTP listener completely."""
        self._http_interface.stop()

    ###################################

    def process_xmlrpc_request(self, request, response):
        """Dispatch a parsed XMLRPC request to interface_<name>.process_request."""
        handler_module_name = "interface_{0:s}".format(self.name)
        pmnc.__getattr__(handler_module_name).process_request(request, response)
###############################################################################
def process_http_request(http_request: dict, http_response: dict,
                         process_xmlrpc_request: callable, *,
                         response_encoding: str, allow_none: bool):
    """Translate an HTTP POST into an XMLRPC call and marshal the reply.

    Parses the XMLRPC packet from the HTTP body, invokes the application
    handler, and writes either a methodResponse or a Fault(500, ...) back
    into http_response. Any handler exception is converted to a fault
    rather than propagated.
    """
    assert http_request["method"] == "POST"
    headers = http_request["headers"]
    content = http_request["content"]

    content_type = headers.get("content-type", "application/octet-stream")
    if not content_type.startswith("text/xml"):
        http_response["status_code"] = 415 # unsupported media type
        return

    # extract xmlrpc request from http request content, the parser
    # will deduce the bytes encoding from the <?xml encoding attribute
    try:
        args, method = loads(content)
    except:
        raise Exception("invalid XMLRPC request: {0:s}".format(exc_string()))

    # now we know more about the request
    auth_tokens = pmnc.request.parameters["auth_tokens"]
    pmnc.request.describe("XMLRPC{0:s} request {1:s} from {2:s}".\
                          format(auth_tokens["encrypted"] and "S" or "",
                                 method, auth_tokens["peer_ip"]))

    # the request contained a valid xmlrpc packet,
    # it would be polite to respond with one as well
    try:
        # populate the request parameters with XMLRPC-specific values
        pmnc.request.protocol = "xmlrpc"
        xmlrpc_request = dict(method = method, args = args)
        xmlrpc_response = dict(result = None)
        # invoke the application handler
        process_xmlrpc_request(xmlrpc_request, xmlrpc_response)
        # fetch the XMLRPC call result
        result = xmlrpc_response["result"]
        if result is None:
            result = () # XMLRPC cannot marshal a bare None unless allow_none
        # marshal the result in an XMLRPC packet
        content = dumps((result, ), methodresponse = True,
                        encoding = response_encoding, allow_none = allow_none)
    except:
        error = exc_string()
        content = dumps(Fault(500, error), methodresponse = True, # 500 as in "Internal Server Error"
                        encoding = response_encoding, allow_none = allow_none)
        pmnc.log.error("returning XMLRPC fault: {0:s}".format(error))
    else:
        if pmnc.log.debug:
            pmnc.log.debug("returning XMLRPC response")

    # both the success and the fault path deliver a text/xml body
    http_response["headers"]["content-type"] = "text/xml"
    http_response["content"] = content
###############################################################################
class Resource(TransactionalResource): # XMLRPC resource
    """Transactional pmnc resource for making outgoing XMLRPC calls.

    Wraps a pmnc.protocol_http.Resource for transport. The dotted remote
    method name is collected attribute-by-attribute via __getattr__ and the
    actual call happens in __call__, e.g. xa.xmlrpc_1.Module.Method(*args).
    """

    @typecheck
    def __init__(self, name, *,
                 server_address: (str, int),
                 connect_timeout: float,
                 ssl_key_cert_file: optional(os_path.isfile),
                 ssl_ca_cert_file: optional(os_path.isfile),
                 ssl_ciphers: optional(str) = None,
                 ssl_protocol: optional(one_of("SSLv23", "TLSv1", "TLSv1_1", "TLSv1_2", "TLS")) = None,
                 ssl_server_hostname: optional(str) = None,
                 ssl_ignore_hostname: optional(bool) = False,
                 extra_headers: dict_of(str, str),
                 http_version: str,
                 server_uri: str,
                 request_encoding: str,
                 allow_none: optional(bool) = False):

        TransactionalResource.__init__(self, name)

        self._server_uri = server_uri
        self._request_encoding = request_encoding
        self._allow_none = allow_none

        self._http_resource = \
            pmnc.protocol_http.Resource(name,
                                        server_address = server_address,
                                        connect_timeout = connect_timeout,
                                        ssl_key_cert_file = ssl_key_cert_file,
                                        ssl_ca_cert_file = ssl_ca_cert_file,
                                        ssl_ciphers = ssl_ciphers,
                                        ssl_protocol = ssl_protocol,
                                        ssl_server_hostname = ssl_server_hostname,
                                        ssl_ignore_hostname = ssl_ignore_hostname,
                                        extra_headers = extra_headers,
                                        http_version = http_version)

    ###################################

    def connect(self):
        """Open the underlying HTTP connection and reset the name accumulator."""
        TransactionalResource.connect(self)
        self._attrs = []
        self._http_resource.connect()

    def disconnect(self):
        """Close the HTTP connection, then the transactional wrapper."""
        try:
            self._http_resource.disconnect()
        finally:
            TransactionalResource.disconnect(self)

    ###################################

    # overriding the following methods allows the contained HTTP
    # resource to time out at the same time with this resource

    def set_idle_timeout(self, idle_timeout):
        self._http_resource.set_idle_timeout(idle_timeout)
        TransactionalResource.set_idle_timeout(self, idle_timeout)

    def reset_idle_timeout(self):
        self._http_resource.reset_idle_timeout()
        TransactionalResource.reset_idle_timeout(self)

    def set_max_age(self, max_age):
        self._http_resource.set_max_age(max_age)
        TransactionalResource.set_max_age(self, max_age)

    def _expired(self):
        # expired if either the HTTP resource or this wrapper has expired
        return self._http_resource.expired or \
               TransactionalResource._expired(self)

    ###################################

    def __getattr__(self, name):
        # accumulate one segment of the dotted remote method name;
        # returning self allows chaining, e.g. resource.Module.Method
        self._attrs.append(name)
        return self

    ###################################

    def __call__(self, *args):
        """Execute the accumulated Module.Method remote call with *args.

        Raises ResourceError for marshaling failures and remote faults;
        transport errors propagate as-is.
        """
        try:
            # consume and reset the accumulated dotted method name
            method, self._attrs = ".".join(self._attrs), []
            request = dumps(args, methodname = method,
                            encoding = self._request_encoding, allow_none = self._allow_none)
            request_description = "XMLRPC request {0:s} to {1:s}".\
                                  format(method, self._http_resource.server_info)
        except:
            # marshaling failed before anything was sent - safe to retry
            ResourceError.rethrow(recoverable = True)

        pmnc.log.info("sending {0:s}".format(request_description))
        try:
            status_code, headers, content = \
                self._http_resource.post(self._server_uri, request.encode(self._request_encoding),
                                         { "Content-Type": "text/xml" })
            if status_code != 200:
                raise Exception("HTTP request returned code {0:d}".format(status_code))
            result = loads(content)[0][0]
        except Fault as e:
            # the server returned a well-formed XMLRPC fault
            pmnc.log.warning("{0:s} returned fault {1:d}: {2:s}".\
                             format(request_description, e.faultCode, e.faultString))
            ResourceError.rethrow(code = e.faultCode,
                                  description = e.faultString, terminal = False)
        except:
            pmnc.log.warning("{0:s} failed: {1:s}".\
                             format(request_description, exc_string()))
            raise
        else:
            pmnc.log.info("XMLRPC request returned successfully")
            return result
###############################################################################
def self_test():
    """End-to-end self-test: spins up a real XMLRPC interface on localhost
    and exercises it over raw sockets and through the Resource class."""

    from socket import socket, AF_INET, SOCK_STREAM
    from pmnc.request import fake_request
    from pmnc.self_test import active_interface

    def sendall(ifc, data):
        # open a raw TCP connection to the interface and push the bytes
        s = socket(AF_INET, SOCK_STREAM)
        s.connect(ifc.listener_address)
        s.sendall(data)
        return s

    def recvall(s):
        # read until the server closes the connection
        result = b""
        data = s.recv(1024)
        while data:
            result += data
            data = s.recv(1024)
        return result

    # full Russian alphabet, used to test non-ASCII marshaling round-trips
    rus = "\u0410\u0411\u0412\u0413\u0414\u0415\u0401\u0416\u0417\u0418\u0419" \
          "\u041a\u041b\u041c\u041d\u041e\u041f\u0420\u0421\u0422\u0423\u0424" \
          "\u0425\u0426\u0427\u0428\u0429\u042c\u042b\u042a\u042d\u042e\u042f" \
          "\u0430\u0431\u0432\u0433\u0434\u0435\u0451\u0436\u0437\u0438\u0439" \
          "\u043a\u043b\u043c\u043d\u043e\u043f\u0440\u0441\u0442\u0443\u0444" \
          "\u0445\u0446\u0447\u0448\u0449\u044c\u044b\u044a\u044d\u044e\u044f"

    def post_string(ifc, method, s, request_encoding):
        # hand-build an XMLRPC POST with a single string param and return
        # the first value of the decoded methodResponse
        req = "<?xml version=\"1.0\" encoding=\"{0:s}\"?>" \
              "<methodCall><methodName>{1:s}</methodName>" \
              "<params><param><value><string>{2:s}</string>" \
              "</value></param></params></methodCall>".format(request_encoding, method, s).encode(request_encoding)
        hdr = "POST / HTTP/1.0\nContent-Type: text/xml\nContent-Length: {0:d}\n\n".format(len(req))
        s = sendall(ifc, hdr.encode(request_encoding) + req)
        resp = recvall(s)
        assert resp.startswith(b"HTTP/1.1 200 OK\r\n")
        resp = resp.split(b"\r\n\r\n", 1)[1]
        return loads(resp)[0][0]

    ###################################

    # baseline interface configuration; individual tests override via kwargs
    test_interface_config = dict \
    (
    protocol = "xmlrpc",
    listener_address = ("127.0.0.1", 23673),
    max_connections = 100,
    ssl_key_cert_file = None,
    ssl_ca_cert_file = None,
    ssl_ciphers = None,
    ssl_protocol = None,
    response_encoding = "windows-1251",
    original_ip_header_fields = ("X-Forwarded-For", ),
    keep_alive_support = True,
    keep_alive_idle_timeout = 3.0,
    keep_alive_max_requests = 3,
    allow_none = True
    )

    def interface_config(**kwargs):
        result = test_interface_config.copy()
        result.update(kwargs)
        return result

    ###################################

    def test_interface_start_stop():
        def process_xmlrpc_request(request, response):
            pass
        with active_interface("xmlrpc_1", **interface_config(process_xmlrpc_request = process_xmlrpc_request)):
            pass

    test_interface_start_stop()

    ###################################

    def test_interface_broken_requests():
        def process_xmlrpc_request(request, response):
            pass
        with active_interface("xmlrpc_1", **interface_config(process_xmlrpc_request = process_xmlrpc_request)) as ifc:
            # wrong content type is rejected with 415
            s = sendall(ifc, b"POST / HTTP/1.0\nContent-Type: text/plain\n\n")
            resp = recvall(s)
            assert resp.startswith(b"HTTP/1.1 415 Unsupported Media Type\r\n")
            # unparseable XMLRPC body yields 500
            s = sendall(ifc, b"POST / HTTP/1.0\nContent-Type: text/xml\nContent-Length: 3\n\nfoo")
            resp = recvall(s)
            assert resp.startswith(b"HTTP/1.1 500 Internal Server Error\r\n")
            assert b"invalid XMLRPC request" in resp

    test_interface_broken_requests()

    ###################################

    def test_interface_marshaling():
        def process_xmlrpc_request(request, response):
            if request["method"] == "raise":
                raise Exception(request["args"][0])
            response["result"] = [request["method"], request["args"]]
        with active_interface("xmlrpc_1", **interface_config(process_xmlrpc_request = process_xmlrpc_request)) as ifc:
            # round-trip in different request encodings, incl. non-ASCII
            assert post_string(ifc, "MethodName", "foo", "utf-8") == ["MethodName", ["foo"]]
            assert post_string(ifc, rus, rus, "cp866") == [rus, [rus]]
            # handler exceptions surface as Fault(500, ...)
            try:
                post_string(ifc, "raise", "foo", "iso-8859-5")
            except Fault as e:
                assert e.faultCode == 500 and e.faultString.startswith("Exception(\"foo\")")
            else:
                assert False
            try:
                post_string(ifc, "raise", rus, "utf-8")
            except Fault as e:
                assert e.faultCode == 500 and e.faultString.startswith("Exception(\"" + rus + "\")")
            else:
                assert False

    test_interface_marshaling()

    ################################### TESTING RESOURCE

    def test_resource():
        def process_xmlrpc_request(request, response):
            if request["method"] == "ShouldBe.Failing":
                raise Exception(request["args"][0])
            else:
                response["result"] = request, pmnc.request.parameters["auth_tokens"]
        with active_interface("xmlrpc_1", **interface_config(process_xmlrpc_request = process_xmlrpc_request)):
            fake_request(10.0)
            # payloads of exponentially growing size, up to 32K stars
            for i in range(16):
                s = "*" * 2 ** i
                n = "n" + str(i)
                result = pmnc.transaction.xmlrpc_1.Module.Method(i, s, [ s ], { s: i, n: None })
                assert result == [ { "method": "Module.Method", "args": [ i, s, [ s ], { s: i, n: None } ] },
                                   { "username": "user", "peer_ip": "127.0.0.1", "password": "pass", "encrypted": False } ]
            # a remote fault maps to a non-recoverable, non-terminal ResourceError
            try:
                pmnc.transaction.xmlrpc_1.ShouldBe.Failing("some error")
            except ResourceError as e:
                assert e.code == 500 and e.description.startswith("Exception(\"some error\")")
                assert not e.recoverable and not e.terminal

    test_resource()
###################################
if __name__ == "__main__": import pmnc.self_test; pmnc.self_test.run()
###############################################################################
# EOF
| 41.184825
| 125
| 0.544003
| true
| true
|
|
f71a9d6ee5b89554965ee5cfb0da2b1898c17923
| 529
|
py
|
Python
|
examples/ethernet/eth_connection.py
|
ingeniamc/ingenialink-python
|
6011931697e48456f5638c2848303aac2e5bcb75
|
[
"MIT"
] | 15
|
2017-08-30T13:43:14.000Z
|
2022-03-29T07:04:30.000Z
|
examples/ethernet/eth_connection.py
|
ingeniamc/ingenialink-python
|
6011931697e48456f5638c2848303aac2e5bcb75
|
[
"MIT"
] | 11
|
2017-08-28T11:23:18.000Z
|
2022-03-28T23:48:11.000Z
|
examples/ethernet/eth_connection.py
|
ingeniamc/ingenialink-python
|
6011931697e48456f5638c2848303aac2e5bcb75
|
[
"MIT"
] | 9
|
2017-09-30T08:28:42.000Z
|
2022-03-12T19:11:43.000Z
|
import sys
from ingenialink.ethernet.network import EthernetNetwork, NET_TRANS_PROT
def connection_example():
    """Connect to a drive over Ethernet, print its firmware version and
    disconnect.

    NOTE(review): the IP address, dictionary path and port 1061 are
    hard-coded demo values — adjust them for your own setup.
    """
    net = EthernetNetwork()
    # connect_to_slave(target_ip, dictionary_file, port, transport_protocol)
    servo = net.connect_to_slave("192.168.2.22",
                                 "../../resources/dictionaries/eve-net-c_eth_1.8.1.xdf",
                                 1061,
                                 NET_TRANS_PROT.UDP)
    print(servo.read('DRV_ID_SOFTWARE_VERSION'))
    net.disconnect_from_slave(servo)
if __name__ == '__main__':
connection_example()
sys.exit(0)
| 25.190476
| 88
| 0.597353
|
import sys
from ingenialink.ethernet.network import EthernetNetwork, NET_TRANS_PROT
def connection_example():
net = EthernetNetwork()
servo = net.connect_to_slave("192.168.2.22",
"../../resources/dictionaries/eve-net-c_eth_1.8.1.xdf",
1061,
NET_TRANS_PROT.UDP)
print(servo.read('DRV_ID_SOFTWARE_VERSION'))
net.disconnect_from_slave(servo)
if __name__ == '__main__':
connection_example()
sys.exit(0)
| true
| true
|
f71a9e181df7b219ef25d20d2a8f66302f4a6696
| 355
|
py
|
Python
|
experiments/heat-3d/tmp_files/9010.py
|
LoopTilingBenchmark/benchmark
|
52a3d2e70216552a498fd91de02a2fa9cb62122c
|
[
"BSD-2-Clause"
] | null | null | null |
experiments/heat-3d/tmp_files/9010.py
|
LoopTilingBenchmark/benchmark
|
52a3d2e70216552a498fd91de02a2fa9cb62122c
|
[
"BSD-2-Clause"
] | null | null | null |
experiments/heat-3d/tmp_files/9010.py
|
LoopTilingBenchmark/benchmark
|
52a3d2e70216552a498fd91de02a2fa9cb62122c
|
[
"BSD-2-Clause"
] | null | null | null |
from chill import *

# CHiLL loop-transformation recipe: tiles the PolyBench heat-3d stencil
# kernel and writes the transformed C source to the experiments directory.
source('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/polybench/polybench-code/stencils/heat-3d/kernel.c')
destination('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/experiments/heat-3d/tmp_files/9010.c')
procedure('kernel_heat_3d')
loop(0)
known('n>3')  # problem-size assumption required for the tiling to be legal
# two-level rectangular tiling applied to both statements of the stencil:
# tile(statement, loop_level, tile_size, outer_loop_level)
tile(0,2,8,2)
tile(0,4,64,3)
tile(1,2,8,2)
tile(1,4,64,3)
| 25.357143
| 116
| 0.752113
|
from chill import *
source('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/polybench/polybench-code/stencils/heat-3d/kernel.c')
destination('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/experiments/heat-3d/tmp_files/9010.c')
procedure('kernel_heat_3d')
loop(0)
known('n>3')
tile(0,2,8,2)
tile(0,4,64,3)
tile(1,2,8,2)
tile(1,4,64,3)
| true
| true
|
f71aa1575b68c457900ef0939ac431d1293e82a4
| 2,733
|
py
|
Python
|
tensorflow_datasets/testing/starcraft.py
|
haideraltahan/datasets
|
aad5c7ea705949d20817fcc49a892bb2a21532f0
|
[
"Apache-2.0"
] | 14
|
2019-03-30T02:11:29.000Z
|
2021-11-16T12:06:32.000Z
|
tensorflow_datasets/testing/starcraft.py
|
haideraltahan/datasets
|
aad5c7ea705949d20817fcc49a892bb2a21532f0
|
[
"Apache-2.0"
] | 1
|
2019-09-13T15:10:18.000Z
|
2019-09-13T21:05:46.000Z
|
tensorflow_datasets/testing/starcraft.py
|
haideraltahan/datasets
|
aad5c7ea705949d20817fcc49a892bb2a21532f0
|
[
"Apache-2.0"
] | 10
|
2019-03-31T08:35:29.000Z
|
2021-09-01T06:28:43.000Z
|
# coding=utf-8
# Copyright 2019 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool for preparing test example of Starcraft dataset.
./starcraft --resolution=64 --output_file=test.tfrecords
./starcraft --resolution=64 --output_file=train_0.tfrecords
./starcraft --resolution=64 --output_file=train_1.tfrecords
./starcraft --resolution=64 --output_file=valid.tfrecords
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
import numpy as np
import png
import six
import tensorflow as tf
FLAGS = flags.FLAGS
flags.DEFINE_integer("resolution", 64, "Resolution of the video.")
flags.DEFINE_string("output_file", None, "Path to the output file.")
def main(argv):
  """Writes one fake StarCraft video example to FLAGS.output_file.

  The example is a tf.train.SequenceExample holding 20 random RGB frames
  (PNG-encoded, FLAGS.resolution x FLAGS.resolution) in the "rgb_screen"
  feature list, plus the context features the Starcraft dataset expects.

  Args:
    argv: Command-line arguments; only the program name is allowed.

  Raises:
    app.UsageError: If extra command-line arguments are supplied.
  """
  if len(argv) > 1:
    # BUG FIX: UsageError lives in absl's app module (already imported),
    # not in tf.app.
    raise app.UsageError("Too many command-line arguments.")

  writer = tf.io.TFRecordWriter(FLAGS.output_file)

  feature_list = {}
  frame_list = []
  for _ in range(20):
    # generate 20 frames.
    # BUG FIX: the PNG writer emits bytes, so a binary buffer is required;
    # six.StringIO would raise TypeError on Python 3.
    png_image = six.BytesIO()
    png.from_array(
        np.random.randint(
            low=0,
            high=255,
            size=(FLAGS.resolution, FLAGS.resolution, 3),
            dtype=np.uint8), "RGB").save(png_image)
    frame_list.append(
        tf.train.Feature(
            bytes_list=tf.train.BytesList(value=[png_image.getvalue()])))
    png_image.close()
  feature_list["rgb_screen"] = tf.train.FeatureList(feature=frame_list)

  # Context features mirror what the real dataset records per episode.
  context_feature = {}
  context_feature["game_duration_loops"] = tf.train.Feature(
      int64_list=tf.train.Int64List(value=[20]))
  context_feature["game_duration_seconds"] = tf.train.Feature(
      float_list=tf.train.FloatList(value=[20.0]))
  context_feature["n_steps"] = tf.train.Feature(
      int64_list=tf.train.Int64List(value=[20]))
  context_feature["screen_size"] = tf.train.Feature(
      int64_list=tf.train.Int64List(value=[FLAGS.resolution, FLAGS.resolution]))

  example = tf.train.SequenceExample(
      feature_lists=tf.train.FeatureLists(feature_list=feature_list),
      context=tf.train.Features(feature=context_feature))
  writer.write(example.SerializeToString())
  writer.close()
if __name__ == "__main__":
app.run(main)
| 32.152941
| 80
| 0.726674
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
import numpy as np
import png
import six
import tensorflow as tf
FLAGS = flags.FLAGS
flags.DEFINE_integer("resolution", 64, "Resolution of the video.")
flags.DEFINE_string("output_file", None, "Path to the output file.")
def main(argv):
if len(argv) > 1:
raise tf.app.UsageError("Too many command-line arguments.")
writer = tf.io.TFRecordWriter(FLAGS.output_file)
feature_list = {}
frame_list = []
for _ in range(20):
png_image = six.StringIO()
png.from_array(
np.random.randint(
low=0,
high=255,
size=(FLAGS.resolution, FLAGS.resolution, 3),
dtype=np.uint8), "RGB").save(png_image)
frame_list.append(
tf.train.Feature(
bytes_list=tf.train.BytesList(value=[png_image.getvalue()])))
png_image.close()
feature_list["rgb_screen"] = tf.train.FeatureList(feature=frame_list)
context_feature = {}
context_feature["game_duration_loops"] = tf.train.Feature(
int64_list=tf.train.Int64List(value=[20]))
context_feature["game_duration_seconds"] = tf.train.Feature(
float_list=tf.train.FloatList(value=[20.0]))
context_feature["n_steps"] = tf.train.Feature(
int64_list=tf.train.Int64List(value=[20]))
context_feature["screen_size"] = tf.train.Feature(
int64_list=tf.train.Int64List(value=[FLAGS.resolution, FLAGS.resolution]))
example = tf.train.SequenceExample(
feature_lists=tf.train.FeatureLists(feature_list=feature_list),
context=tf.train.Features(feature=context_feature))
writer.write(example.SerializeToString())
writer.close()
if __name__ == "__main__":
app.run(main)
| true
| true
|
f71aa2e9fb55e8ff5df09593abc82b3ea64662a2
| 3,133
|
py
|
Python
|
core/storage/recommendations/gae_models.py
|
kaylahardie/oppia
|
e93ed02dfc7f654ef4fb62268c1a9b9d9ded30ec
|
[
"Apache-2.0"
] | 1
|
2021-06-26T00:31:08.000Z
|
2021-06-26T00:31:08.000Z
|
core/storage/recommendations/gae_models.py
|
kaylahardie/oppia
|
e93ed02dfc7f654ef4fb62268c1a9b9d9ded30ec
|
[
"Apache-2.0"
] | 1
|
2020-03-02T21:05:42.000Z
|
2020-03-03T07:09:51.000Z
|
core/storage/recommendations/gae_models.py
|
kaylahardie/oppia
|
e93ed02dfc7f654ef4fb62268c1a9b9d9ded30ec
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
#
# Copyright 2015 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models for Oppia recommendations."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
from core.platform import models
from google.appengine.ext import ndb
(base_models,) = models.Registry.import_models([models.NAMES.base_model])
TOPIC_SIMILARITIES_ID = 'topics'
class ExplorationRecommendationsModel(
        base_models.BaseMapReduceBatchResultsModel):
    """A list of recommended explorations similar to an exploration.

    Instances of this class are keyed by exploration id.
    """

    # Ids of recommended explorations.
    recommended_exploration_ids = ndb.StringProperty(
        repeated=True, indexed=False)

    @staticmethod
    def get_deletion_policy():
        """Exploration recommendations are deleted only if the corresponding
        exploration is not public.
        """
        return base_models.DELETION_POLICY.KEEP_IF_PUBLIC

    @staticmethod
    def get_export_policy():
        """Model does not contain user data."""
        return base_models.EXPORT_POLICY.NOT_APPLICABLE

    @classmethod
    def has_reference_to_user_id(cls, unused_user_id):
        """ExplorationRecommendationsModel doesn't reference any user_id
        directly.

        Args:
            unused_user_id: str. The (unused) ID of the user whose data
                should be checked.

        Returns:
            bool. Whether any models refer to the given user ID.
        """
        # Always False: this model stores only exploration ids.
        return False
class TopicSimilaritiesModel(base_models.BaseModel):
    """This model stores the similarity between any two topics. The topic
    similarities are stored as a JSON object, representing a 2D dict where the
    keys are topic names and the values are the similarities. The dict should
    be symmetric. A similarity value is a real number between 0.0 and 1.0.

    There should only be one instance of this class, and it is keyed by
    TOPIC_SIMILARITIES_ID.

    Currently, topics are the same as the default categories. However, this may
    change in the future.
    """

    # JSON-encoded 2D dict: {topic_name: {topic_name: similarity}}.
    content = ndb.JsonProperty(required=True)

    @staticmethod
    def get_deletion_policy():
        """There is only a single TopicSimilaritiesModel in the entire
        codebase.
        """
        return base_models.DELETION_POLICY.NOT_APPLICABLE

    @staticmethod
    def get_export_policy():
        """Model does not contain user data."""
        return base_models.EXPORT_POLICY.NOT_APPLICABLE
| 32.978947
| 79
| 0.719757
|
from __future__ import absolute_import
from __future__ import unicode_literals
from core.platform import models
from google.appengine.ext import ndb
(base_models,) = models.Registry.import_models([models.NAMES.base_model])
TOPIC_SIMILARITIES_ID = 'topics'
class ExplorationRecommendationsModel(
base_models.BaseMapReduceBatchResultsModel):
recommended_exploration_ids = ndb.StringProperty(
repeated=True, indexed=False)
@staticmethod
def get_deletion_policy():
return base_models.DELETION_POLICY.KEEP_IF_PUBLIC
@staticmethod
def get_export_policy():
return base_models.EXPORT_POLICY.NOT_APPLICABLE
@classmethod
def has_reference_to_user_id(cls, unused_user_id):
return False
class TopicSimilaritiesModel(base_models.BaseModel):
content = ndb.JsonProperty(required=True)
@staticmethod
def get_deletion_policy():
return base_models.DELETION_POLICY.NOT_APPLICABLE
@staticmethod
def get_export_policy():
return base_models.EXPORT_POLICY.NOT_APPLICABLE
| true
| true
|
f71aa357327a98795cb190e3909dda5f261e7b6a
| 25,206
|
py
|
Python
|
acore/classifier_cov_pow_toy_pvalue.py
|
zhao-david/ACORE-LFI
|
91de88b77f0be110e42ed91bbb7a50b7ca83319a
|
[
"MIT"
] | null | null | null |
acore/classifier_cov_pow_toy_pvalue.py
|
zhao-david/ACORE-LFI
|
91de88b77f0be110e42ed91bbb7a50b7ca83319a
|
[
"MIT"
] | null | null | null |
acore/classifier_cov_pow_toy_pvalue.py
|
zhao-david/ACORE-LFI
|
91de88b77f0be110e42ed91bbb7a50b7ca83319a
|
[
"MIT"
] | null | null | null |
from warnings import simplefilter
simplefilter(action='ignore', category=FutureWarning)
import numpy as np
import argparse
import pandas as pd
from tqdm.auto import tqdm
from datetime import datetime
from sklearn.metrics import log_loss
import seaborn as sns
import matplotlib.pyplot as plt
from utils.functions import train_clf, compute_statistics_single_t0, clf_prob_value, compute_bayesfactor_single_t0, \
odds_ratio_loss, train_pvalue_clf
from models.toy_poisson import ToyPoissonLoader
from models.toy_gmm import ToyGMMLoader
from models.toy_gamma import ToyGammaLoader
from or_classifiers.toy_example_list import classifier_dict, classifier_dict_mlpcomp, classifier_pvalue_dict
# Maps the --run experiment name to the toy-model loader class to use.
model_dict = {
    'poisson': ToyPoissonLoader,
    'gmm': ToyGMMLoader,
    'gamma': ToyGammaLoader
}
def main(run, rep, b, b_prime, alpha, t0_val, sample_size_obs, test_statistic, mlp_comp=False,
         monte_carlo_samples=500, debug=False, seed=7, size_check=1000, verbose=False, marginal=False,
         size_marginal=1000, guided_sim=False, guided_sample=1000, empirical_marginal=True):
    """Toy coverage/power simulation with p-value regression classifiers.

    For each of ``rep`` repetitions:
      1. draw an observed sample of size ``sample_size_obs`` at the true
         parameter ``t0_val``;
      2. train each odds classifier on ``b`` simulations and evaluate the
         chosen ``test_statistic`` (``acore``/``avgacore``/``logavgacore``)
         over the parameter grid;
      3. simulate ``b_prime`` (theta, sample) pairs -- via the standard MSNH
         sampler or, when ``guided_sim`` is set, by resampling thetas
         proportionally to the statistic and fitting a Gaussian proposal --
         and train each p-value classifier on the indicator that the observed
         statistic exceeds the generated one;
      4. record one coverage/power row per theta on the grid.

    Results are written to a CSV under ``sims/classifier_cov_pow_toy/``, a
    coverage summary is printed, and a power plot is saved under
    ``images/classifier_cov_pow_toy/``.

    Raises:
        ValueError: if ``test_statistic`` is not one of the supported values.
    """
    # Shrink all sample sizes for a quick smoke run when debugging.
    b = b if not debug else 100
    b_prime = b_prime if not debug else 100
    size_check = size_check if not debug else 100
    rep = rep if not debug else 2
    model_obj = model_dict[run](marginal=marginal, size_marginal=size_marginal,
                                empirical_marginal=empirical_marginal)
    classifier_dict_run = classifier_dict_mlpcomp if mlp_comp else classifier_dict

    # Bind the model-specific sampling/evaluation functions.
    msnh_sampling_func = model_obj.sample_msnh_algo5
    grid_param = model_obj.grid
    gen_obs_func = model_obj.sample_sim
    gen_sample_func = model_obj.generate_sample
    gen_param_fun = model_obj.sample_param_values
    t0_grid = model_obj.pred_grid
    tp_func = model_obj.compute_exact_prob

    # Held-out sample used to estimate the true (Bayes) cross-entropy.
    np.random.seed(seed)
    sample_check = gen_sample_func(sample_size=size_check, marginal=marginal)
    theta_vec = sample_check[:, :model_obj.d]
    x_vec = sample_check[:, (model_obj.d + 1):]
    bern_vec = sample_check[:, model_obj.d]

    true_prob_vec = tp_func(theta_vec=theta_vec, x_vec=x_vec)
    entropy_est = -np.average([np.log(true_prob_vec[kk]) if el == 1
                               else np.log(1 - true_prob_vec[kk])
                               for kk, el in enumerate(bern_vec)])

    # Loop over repetitions and classifiers. Each time we train the
    # classifiers, build the intervals and record whether each point is in.
    out_val = []
    out_cols = ['test_statistic', 'b_prime', 'b', 'classifier', 'classifier_pvalue', 'run', 'rep', 'sample_size_obs',
                'cross_entropy_loss', 'cross_entropy_loss_pvalue', 't0_true_val', 'theta_0_current', 'on_true_t0',
                'estimated_pvalue', 'in_confint', 'out_confint', 'size_CI', 'true_entropy', 'or_loss_value',
                'monte_carlo_samples', 'guided_sim', 'empirical_marginal', 'guided_sample']
    pbar = tqdm(total=rep, desc='Toy Example for Simulations, n=%s, b=%s' % (sample_size_obs, b))
    rep_counter = 0
    not_update_flag = False
    while rep_counter < rep:
        # Generate the observed sample so coverage and power can be checked.
        x_obs = gen_obs_func(sample_size=sample_size_obs, true_param=t0_val)

        # Train the classifiers for the odds.
        clf_odds_fitted = {}
        clf_pvalue_fitted = {}
        for clf_name, clf_model in sorted(classifier_dict_run.items(), key=lambda x: x[0]):
            clf_odds = train_clf(sample_size=b, clf_model=clf_model, gen_function=gen_sample_func,
                                 clf_name=clf_name, nn_square_root=True)
            if verbose:
                print('----- %s Trained' % clf_name)

            if test_statistic == 'acore':
                tau_obs = np.array([
                    compute_statistics_single_t0(
                        clf=clf_odds, obs_sample=x_obs, t0=theta_0, grid_param_t1=grid_param,
                        d=model_obj.d, d_obs=model_obj.d_obs) for theta_0 in t0_grid])
            elif test_statistic == 'avgacore':
                tau_obs = np.array([
                    compute_bayesfactor_single_t0(
                        clf=clf_odds, obs_sample=x_obs, t0=theta_0, gen_param_fun=gen_param_fun,
                        d=model_obj.d, d_obs=model_obj.d_obs, log_out=False) for theta_0 in t0_grid])
            elif test_statistic == 'logavgacore':
                tau_obs = np.array([
                    compute_bayesfactor_single_t0(
                        clf=clf_odds, obs_sample=x_obs, t0=theta_0, gen_param_fun=gen_param_fun,
                        d=model_obj.d, d_obs=model_obj.d_obs, log_out=True) for theta_0 in t0_grid])
            else:
                raise ValueError('The variable test_statistic needs to be either acore, avgacore, logavgacore.'
                                 ' Currently %s' % test_statistic)

            # Cross-entropy of the trained classifier on the held-out sample.
            est_prob_vec = clf_prob_value(clf=clf_odds, x_vec=x_vec, theta_vec=theta_vec, d=model_obj.d,
                                          d_obs=model_obj.d_obs)
            loss_value = log_loss(y_true=bern_vec, y_pred=est_prob_vec)

            # Odds-ratio loss of the trained classifier.
            or_loss_value = odds_ratio_loss(clf=clf_odds, x_vec=x_vec, theta_vec=theta_vec,
                                            bern_vec=bern_vec, d=1, d_obs=1)
            clf_odds_fitted[clf_name] = (tau_obs, loss_value, or_loss_value)

            # Train the p-value regression algorithm for confidence levels.
            if guided_sim:
                # Sample guided_sample thetas from the parameter budget,
                # resample them according to the statistic values, fit a
                # Gaussian and sample the B' datasets from that proposal.
                theta_mat_sample = gen_param_fun(sample_size=guided_sample)

                if test_statistic == 'acore':
                    stats_sample = np.apply_along_axis(
                        arr=theta_mat_sample.reshape(-1, 1), axis=1,
                        func1d=lambda row: compute_statistics_single_t0(
                            clf=clf_odds, obs_sample=x_obs, t0=row,
                            grid_param_t1=grid_param, d=model_obj.d,
                            d_obs=model_obj.d_obs))
                elif test_statistic == 'avgacore':
                    stats_sample = np.apply_along_axis(
                        arr=theta_mat_sample.reshape(-1, 1), axis=1,
                        func1d=lambda row: compute_bayesfactor_single_t0(
                            clf=clf_odds, obs_sample=x_obs, t0=row,
                            gen_param_fun=gen_param_fun, d=model_obj.d,
                            d_obs=model_obj.d_obs,
                            monte_carlo_samples=monte_carlo_samples))
                elif test_statistic == 'logavgacore':
                    stats_sample = np.apply_along_axis(
                        arr=theta_mat_sample.reshape(-1, 1), axis=1,
                        func1d=lambda row: compute_bayesfactor_single_t0(
                            clf=clf_odds, obs_sample=x_obs, t0=row,
                            gen_param_fun=gen_param_fun, d=model_obj.d,
                            d_obs=model_obj.d_obs,
                            monte_carlo_samples=monte_carlo_samples,
                            log_out=True))
                else:
                    raise ValueError('The variable test_statistic needs to be either acore, avgacore, logavgacore.'
                                     ' Currently %s' % test_statistic)

                # Log-statistics can be negative: exponentiate so that large
                # negative values count as very low probabilities (not as
                # large magnitudes) before normalizing to sampling weights.
                if test_statistic in ['acore', 'logavgacore']:
                    stats_sample = np.exp(stats_sample)
                stats_sample = stats_sample / np.sum(stats_sample)
                theta_mat_gaussian_fit = np.random.choice(a=theta_mat_sample, p=stats_sample.reshape(-1, ),
                                                          size=guided_sample)
                # BUGFIX: the original condition was inverted (`== 0.0`),
                # which used the empirical std only when it was exactly zero
                # (i.e. a degenerate scale=0 proposal) and a constant 1.0 in
                # every normal case. Use the empirical std when positive,
                # falling back to 1.0 for a degenerate resample.
                std_gaussian_fit = np.std(theta_mat_gaussian_fit) \
                    if np.std(theta_mat_gaussian_fit) > 0.0 else 1.0
                theta_mat = np.clip(
                    a=np.random.normal(size=b_prime, loc=np.mean(theta_mat_gaussian_fit),
                                       scale=std_gaussian_fit),
                    a_min=model_obj.low_int, a_max=model_obj.high_int)
                sample_mat = np.apply_along_axis(
                    arr=theta_mat.reshape(-1, 1), axis=1,
                    func1d=lambda row: gen_obs_func(sample_size=sample_size_obs,
                                                    true_param=row))
            else:
                # Generate a matrix with the sampled thetas and their samples.
                theta_mat, sample_mat = msnh_sampling_func(b_prime=b_prime, sample_size=sample_size_obs)

            full_mat = np.hstack((theta_mat.reshape(-1, 1), sample_mat))

            if test_statistic == 'acore':
                stats_mat_generated = np.apply_along_axis(
                    arr=full_mat, axis=1,
                    func1d=lambda row: compute_statistics_single_t0(
                        clf=clf_odds, obs_sample=row[model_obj.d:],
                        t0=row[:model_obj.d], grid_param_t1=grid_param,
                        d=model_obj.d, d_obs=model_obj.d_obs))
                stats_mat_observed = np.apply_along_axis(
                    arr=full_mat, axis=1,
                    func1d=lambda row: compute_statistics_single_t0(
                        clf=clf_odds, obs_sample=x_obs,
                        t0=row[:model_obj.d], grid_param_t1=grid_param,
                        d=model_obj.d, d_obs=model_obj.d_obs))
            elif test_statistic == 'avgacore':
                stats_mat_generated = np.apply_along_axis(
                    arr=full_mat, axis=1,
                    func1d=lambda row: compute_bayesfactor_single_t0(
                        clf=clf_odds, obs_sample=row[model_obj.d:],
                        t0=row[:model_obj.d], gen_param_fun=gen_param_fun,
                        d=model_obj.d, d_obs=model_obj.d_obs,
                        monte_carlo_samples=monte_carlo_samples))
                stats_mat_observed = np.apply_along_axis(
                    arr=full_mat, axis=1,
                    func1d=lambda row: compute_bayesfactor_single_t0(
                        clf=clf_odds, obs_sample=x_obs,
                        t0=row[:model_obj.d], gen_param_fun=gen_param_fun,
                        d=model_obj.d, d_obs=model_obj.d_obs,
                        monte_carlo_samples=monte_carlo_samples))
            elif test_statistic == 'logavgacore':
                stats_mat_generated = np.apply_along_axis(
                    arr=full_mat, axis=1,
                    func1d=lambda row: compute_bayesfactor_single_t0(
                        clf=clf_odds, obs_sample=row[model_obj.d:],
                        t0=row[:model_obj.d], gen_param_fun=gen_param_fun,
                        d=model_obj.d, d_obs=model_obj.d_obs,
                        monte_carlo_samples=monte_carlo_samples,
                        log_out=True))
                stats_mat_observed = np.apply_along_axis(
                    arr=full_mat, axis=1,
                    func1d=lambda row: compute_bayesfactor_single_t0(
                        clf=clf_odds, obs_sample=x_obs,
                        t0=row[:model_obj.d], gen_param_fun=gen_param_fun,
                        d=model_obj.d, d_obs=model_obj.d_obs,
                        monte_carlo_samples=monte_carlo_samples,
                        log_out=True))
            else:
                raise ValueError('The variable test_statistic needs to be either acore, avgacore, logavgacore.'
                                 ' Currently %s' % test_statistic)

            # Discard the whole repetition if any statistic is NaN/inf.
            if np.any(np.isnan(stats_mat_generated)) or not np.all(np.isfinite(stats_mat_generated)) or \
                    np.any(np.isnan(stats_mat_observed)) or not np.all(np.isfinite(stats_mat_observed)):
                not_update_flag = True
                break

            # Compare observed vs. generated statistics and fit the p-value
            # classifiers on the resulting indicator vector.
            clf_pvalue_fitted[clf_name] = {}
            indicator_vec = np.greater(stats_mat_observed, stats_mat_generated).astype(int)
            for clf_name_pvalue, clf_model_pvalue in sorted(classifier_pvalue_dict.items(), key=lambda x: x[0]):

                # If the indicator is (almost) constant, sklearn would raise
                # on a single-class fit: just return the class frequency.
                if sum(indicator_vec) <= 1 or sum(indicator_vec) >= len(indicator_vec) - 1:
                    pval_pred = np.repeat(sum(indicator_vec) / len(indicator_vec), b_prime)
                    loss_value_pval = np.nan
                else:
                    clf_pvalue = train_pvalue_clf(clf_model=clf_model_pvalue, X=theta_mat.reshape(-1, model_obj.d),
                                                  y=indicator_vec.reshape(-1, ), clf_name=clf_name_pvalue,
                                                  nn_square_root=True)
                    pval_pred = clf_pvalue.predict_proba(t0_grid.reshape(-1, model_obj.d))[:, 1]
                    theta_mat_pred = clf_pvalue.predict_proba(theta_mat.reshape(-1, model_obj.d))[:, 1]
                    loss_value_pval = log_loss(y_true=indicator_vec, y_pred=theta_mat_pred)
                clf_pvalue_fitted[clf_name][clf_name_pvalue] = (pval_pred, loss_value_pval)

        # If computing the statistics failed, retry this repetition.
        if not_update_flag:
            not_update_flag = False
            continue

        # Record one row per (odds clf, p-value clf, grid theta).
        for clf_name, (tau_obs_val, cross_ent_loss, or_loss_value) in clf_odds_fitted.items():
            for clf_name_qr, (pvalue_val, pvalue_celoss_val) in clf_pvalue_fitted[clf_name].items():
                size_temp = np.mean((pvalue_val > alpha).astype(int))
                for kk, theta_0_current in enumerate(t0_grid):
                    out_val.append([
                        test_statistic, b_prime, b, clf_name, clf_name_qr, run, rep_counter, sample_size_obs,
                        cross_ent_loss, pvalue_celoss_val, t0_val, theta_0_current, int(t0_val == theta_0_current),
                        pvalue_val[kk], int(pvalue_val[kk] > alpha),
                        int(pvalue_val[kk] <= alpha), size_temp, entropy_est, or_loss_value,
                        monte_carlo_samples, int(guided_sim), int(empirical_marginal), guided_sample
                    ])
        pbar.update(1)
        rep_counter += 1

    # Saving the results
    out_df = pd.DataFrame.from_records(data=out_val, index=range(len(out_val)), columns=out_cols)
    out_dir = 'sims/classifier_cov_pow_toy/'
    out_filename = 'classifier_reps_cov_pow_toy_pvalues_%steststats_%s_%sB_%sBprime_%s_%srep_alpha%s_sampleobs%s_t0val%s%s_%s.csv' % (
        test_statistic, 'mlp_comp' if mlp_comp else 'toyclassifiers', b, b_prime, run, rep,
        str(alpha).replace('.', '-'), sample_size_obs,
        str(t0_val).replace('.', '-'),
        '_empirmarg' if empirical_marginal else '',
        datetime.strftime(datetime.today(), '%Y-%m-%d-%H-%M')
    )
    out_df.to_csv(out_dir + out_filename)

    # Print coverage results at the true parameter value.
    cov_df = out_df[out_df['on_true_t0'] == 1][['classifier', 'classifier_pvalue', 'in_confint',
                                                'cross_entropy_loss', 'cross_entropy_loss_pvalue', 'size_CI']]
    print(cov_df.groupby(['classifier', 'classifier_pvalue']).agg({'in_confint': [np.average],
                                                                   'size_CI': [np.average, np.std],
                                                                   'cross_entropy_loss': [np.average],
                                                                   'cross_entropy_loss_pvalue': [np.average]}))

    # Power plots
    out_df['class_combo'] = out_df[['classifier', 'classifier_pvalue']].apply(lambda x: x[0] + '---' + x[1], axis=1)
    plot_df = out_df[['class_combo', 'theta_0_current', 'out_confint']].groupby(
        ['class_combo', 'theta_0_current']).mean().reset_index()
    plt.figure(figsize=(20, 10))
    sns.lineplot(x='theta_0_current', y='out_confint', hue='class_combo', data=plot_df, palette='cubehelix')
    plt.legend(loc='best', fontsize=25)
    plt.xlabel(r'$\theta$', fontsize=25)
    plt.ylabel('Power', fontsize=25)
    plt.title("Power of Hypothesis Test, B=%s, B'=%s, n=%s, %s" % (
        b, b_prime, sample_size_obs, run.title()), fontsize=25)
    out_dir = 'images/classifier_cov_pow_toy/'
    outfile_name = 'power_classifier_reps_pvalue_%steststats_%sB_%sBprime_%s_%srep_alpha%s_sampleobs%s_t0val%s_%s.pdf' % (
        test_statistic, b, b_prime, run, rep, str(alpha).replace('.', '-'), sample_size_obs,
        str(t0_val).replace('.', '-'),
        datetime.strftime(datetime.today(), '%Y-%m-%d')
    )
    plt.tight_layout()
    plt.savefig(out_dir + outfile_name)
    plt.close()
if __name__ == '__main__':
    # Command-line entry point: parse all simulation settings and run main().
    parser = argparse.ArgumentParser()
    parser.add_argument('--seed', action="store", type=int, default=7,
                        help='Random State')
    parser.add_argument('--rep', action="store", type=int, default=10,
                        help='Number of Repetitions for calculating the Pinball loss')
    parser.add_argument('--b', action="store", type=int, default=5000,
                        help='Sample size to train the classifier for calculating odds')
    parser.add_argument('--b_prime', action="store", type=int, default=1000,
                        help='Sample size to train the quantile regression algorithm')
    parser.add_argument('--marginal', action='store_true', default=False,
                        help='Whether we are using a parametric approximation of the marginal or'
                             'the baseline reference G')
    parser.add_argument('--alpha', action="store", type=float, default=0.1,
                        help='Statistical confidence level')
    parser.add_argument('--run', action="store", type=str, default='poisson',
                        help='Problem to run')
    parser.add_argument('--debug', action='store_true', default=False,
                        help='If true, a very small value for the sample sizes is fit to make sure the'
                             'file can run quickly for debugging purposes')
    parser.add_argument('--verbose', action='store_true', default=False,
                        help='If true, logs are printed to the terminal')
    parser.add_argument('--sample_size_obs', action="store", type=int, default=10,
                        help='Sample size of the actual observed data.')
    parser.add_argument('--t0_val', action="store", type=float, default=10.0,
                        help='True parameter which generates the observed dataset')
    parser.add_argument('--size_marginal', action="store", type=int, default=1000,
                        help='Sample size of the actual marginal distribution, if marginal is True.')
    parser.add_argument('--monte_carlo_samples', action="store", type=int, default=500,
                        help='Sample size for the calculation of the avgacore and logavgacore statistic.')
    parser.add_argument('--test_statistic', action="store", type=str, default='acore',
                        help='Test statistic to compute confidence intervals. Can be acore|avgacore|logavgacore')
    parser.add_argument('--mlp_comp', action='store_true', default=False,
                        help='If true, we compare different MLP training algorithm.')
    parser.add_argument('--empirical_marginal', action='store_true', default=False,
                        help='Whether we are sampling directly from the empirical marginal for G')
    parser.add_argument('--guided_sim', action='store_true', default=False,
                        help='If true, we guided the sampling for the B prime in order to get meaningful results.')
    parser.add_argument('--guided_sample', action="store", type=int, default=2500,
                        help='The sample size to be used for the guided simulation. Only used if guided_sim is True.')
    argument_parsed = parser.parse_args()

    # Forward every parsed flag to the simulation driver.
    main(
        run=argument_parsed.run,
        rep=argument_parsed.rep,
        marginal=argument_parsed.marginal,
        b=argument_parsed.b,
        b_prime=argument_parsed.b_prime,
        alpha=argument_parsed.alpha,
        debug=argument_parsed.debug,
        sample_size_obs=argument_parsed.sample_size_obs,
        t0_val=argument_parsed.t0_val,
        seed=argument_parsed.seed,
        verbose=argument_parsed.verbose,
        size_marginal=argument_parsed.size_marginal,
        monte_carlo_samples=argument_parsed.monte_carlo_samples,
        test_statistic=argument_parsed.test_statistic,
        mlp_comp=argument_parsed.mlp_comp,
        empirical_marginal=argument_parsed.empirical_marginal,
        guided_sim=argument_parsed.guided_sim,
        guided_sample=argument_parsed.guided_sample
    )
| 63.491184
| 134
| 0.519281
|
from warnings import simplefilter
simplefilter(action='ignore', category=FutureWarning)
import numpy as np
import argparse
import pandas as pd
from tqdm.auto import tqdm
from datetime import datetime
from sklearn.metrics import log_loss
import seaborn as sns
import matplotlib.pyplot as plt
from utils.functions import train_clf, compute_statistics_single_t0, clf_prob_value, compute_bayesfactor_single_t0, \
odds_ratio_loss, train_pvalue_clf
from models.toy_poisson import ToyPoissonLoader
from models.toy_gmm import ToyGMMLoader
from models.toy_gamma import ToyGammaLoader
from or_classifiers.toy_example_list import classifier_dict, classifier_dict_mlpcomp, classifier_pvalue_dict
model_dict = {
'poisson': ToyPoissonLoader,
'gmm': ToyGMMLoader,
'gamma': ToyGammaLoader
}
def main(run, rep, b, b_prime, alpha, t0_val, sample_size_obs, test_statistic, mlp_comp=False,
monte_carlo_samples=500, debug=False, seed=7, size_check=1000, verbose=False, marginal=False,
size_marginal=1000, guided_sim=False, guided_sample=1000, empirical_marginal=True):
b = b if not debug else 100
b_prime = b_prime if not debug else 100
size_check = size_check if not debug else 100
rep = rep if not debug else 2
model_obj = model_dict[run](marginal=marginal, size_marginal=size_marginal, empirical_marginal=empirical_marginal)
classifier_dict_run = classifier_dict_mlpcomp if mlp_comp else classifier_dict
msnh_sampling_func = model_obj.sample_msnh_algo5
grid_param = model_obj.grid
gen_obs_func = model_obj.sample_sim
gen_sample_func = model_obj.generate_sample
gen_param_fun = model_obj.sample_param_values
t0_grid = model_obj.pred_grid
tp_func = model_obj.compute_exact_prob
np.random.seed(seed)
sample_check = gen_sample_func(sample_size=size_check, marginal=marginal)
theta_vec = sample_check[:, :model_obj.d]
x_vec = sample_check[:, (model_obj.d + 1):]
bern_vec = sample_check[:, model_obj.d]
true_prob_vec = tp_func(theta_vec=theta_vec, x_vec=x_vec)
entropy_est = -np.average([np.log(true_prob_vec[kk]) if el == 1
else np.log(1 - true_prob_vec[kk])
for kk, el in enumerate(bern_vec)])
out_val = []
out_cols = ['test_statistic', 'b_prime', 'b', 'classifier', 'classifier_pvalue', 'run', 'rep', 'sample_size_obs',
'cross_entropy_loss', 'cross_entropy_loss_pvalue', 't0_true_val', 'theta_0_current', 'on_true_t0',
'estimated_pvalue', 'in_confint', 'out_confint', 'size_CI', 'true_entropy', 'or_loss_value',
'monte_carlo_samples', 'guided_sim', 'empirical_marginal', 'guided_sample']
pbar = tqdm(total=rep, desc='Toy Example for Simulations, n=%s, b=%s' % (sample_size_obs, b))
rep_counter = 0
not_update_flag = False
while rep_counter < rep:
x_obs = gen_obs_func(sample_size=sample_size_obs, true_param=t0_val)
clf_odds_fitted = {}
clf_pvalue_fitted = {}
for clf_name, clf_model in sorted(classifier_dict_run.items(), key=lambda x: x[0]):
clf_odds = train_clf(sample_size=b, clf_model=clf_model, gen_function=gen_sample_func,
clf_name=clf_name, nn_square_root=True)
if verbose:
print('----- %s Trained' % clf_name)
if test_statistic == 'acore':
tau_obs = np.array([
compute_statistics_single_t0(
clf=clf_odds, obs_sample=x_obs, t0=theta_0, grid_param_t1=grid_param,
d=model_obj.d, d_obs=model_obj.d_obs) for theta_0 in t0_grid])
elif test_statistic == 'avgacore':
tau_obs = np.array([
compute_bayesfactor_single_t0(
clf=clf_odds, obs_sample=x_obs, t0=theta_0, gen_param_fun=gen_param_fun,
d=model_obj.d, d_obs=model_obj.d_obs, log_out=False) for theta_0 in t0_grid])
elif test_statistic == 'logavgacore':
tau_obs = np.array([
compute_bayesfactor_single_t0(
clf=clf_odds, obs_sample=x_obs, t0=theta_0, gen_param_fun=gen_param_fun,
d=model_obj.d, d_obs=model_obj.d_obs, log_out=True) for theta_0 in t0_grid])
else:
raise ValueError('The variable test_statistic needs to be either acore, avgacore, logavgacore.'
' Currently %s' % test_statistic)
est_prob_vec = clf_prob_value(clf=clf_odds, x_vec=x_vec, theta_vec=theta_vec, d=model_obj.d,
d_obs=model_obj.d_obs)
loss_value = log_loss(y_true=bern_vec, y_pred=est_prob_vec)
or_loss_value = odds_ratio_loss(clf=clf_odds, x_vec=x_vec, theta_vec=theta_vec,
bern_vec=bern_vec, d=1, d_obs=1)
clf_odds_fitted[clf_name] = (tau_obs, loss_value, or_loss_value)
if guided_sim:
theta_mat_sample = gen_param_fun(sample_size=guided_sample)
if test_statistic == 'acore':
stats_sample = np.apply_along_axis(arr=theta_mat_sample.reshape(-1, 1), axis=1,
func1d=lambda row: compute_statistics_single_t0(
clf=clf_odds,
obs_sample=x_obs,
t0=row,
grid_param_t1=grid_param,
d=model_obj.d,
d_obs=model_obj.d_obs
))
elif test_statistic == 'avgacore':
stats_sample = np.apply_along_axis(arr=theta_mat_sample.reshape(-1, 1), axis=1,
func1d=lambda row: compute_bayesfactor_single_t0(
clf=clf_odds,
obs_sample=x_obs,
t0=row,
gen_param_fun=gen_param_fun,
d=model_obj.d,
d_obs=model_obj.d_obs,
monte_carlo_samples=monte_carlo_samples
))
elif test_statistic == 'logavgacore':
stats_sample = np.apply_along_axis(arr=theta_mat_sample.reshape(-1, 1), axis=1,
func1d=lambda row: compute_bayesfactor_single_t0(
clf=clf_odds,
obs_sample=x_obs,
t0=row,
gen_param_fun=gen_param_fun,
d=model_obj.d,
d_obs=model_obj.d_obs,
monte_carlo_samples=monte_carlo_samples,
log_out=True
))
else:
raise ValueError('The variable test_statistic needs to be either acore, avgacore, logavgacore.'
' Currently %s' % test_statistic)
if test_statistic in ['acore', 'logavgacore']:
stats_sample = np.exp(stats_sample)
stats_sample = stats_sample/np.sum(stats_sample)
theta_mat_gaussian_fit = np.random.choice(a=theta_mat_sample, p=stats_sample.reshape(-1, ),
size=guided_sample)
std_gaussian_fit = np.std(theta_mat_gaussian_fit) if np.std(theta_mat_gaussian_fit) == 0.0 else 1.0
theta_mat = np.clip(
a=np.random.normal(size=b_prime, loc=np.mean(theta_mat_gaussian_fit),
scale=std_gaussian_fit),
a_min=model_obj.low_int, a_max=model_obj.high_int)
sample_mat = np.apply_along_axis(arr=theta_mat.reshape(-1, 1), axis=1,
func1d=lambda row: gen_obs_func(sample_size=sample_size_obs,
true_param=row))
else:
theta_mat, sample_mat = msnh_sampling_func(b_prime=b_prime, sample_size=sample_size_obs)
full_mat = np.hstack((theta_mat.reshape(-1, 1), sample_mat))
if test_statistic == 'acore':
stats_mat_generated = np.apply_along_axis(arr=full_mat, axis=1,
func1d=lambda row: compute_statistics_single_t0(
clf=clf_odds,
obs_sample=row[model_obj.d:],
t0=row[:model_obj.d],
grid_param_t1=grid_param,
d=model_obj.d,
d_obs=model_obj.d_obs
))
stats_mat_observed = np.apply_along_axis(arr=full_mat, axis=1,
func1d=lambda row: compute_statistics_single_t0(
clf=clf_odds,
obs_sample=x_obs,
t0=row[:model_obj.d],
grid_param_t1=grid_param,
d=model_obj.d,
d_obs=model_obj.d_obs
))
elif test_statistic == 'avgacore':
stats_mat_generated = np.apply_along_axis(arr=full_mat, axis=1,
func1d=lambda row: compute_bayesfactor_single_t0(
clf=clf_odds,
obs_sample=row[model_obj.d:],
t0=row[:model_obj.d],
gen_param_fun=gen_param_fun,
d=model_obj.d,
d_obs=model_obj.d_obs,
monte_carlo_samples=monte_carlo_samples
))
stats_mat_observed = np.apply_along_axis(arr=full_mat, axis=1,
func1d=lambda row: compute_bayesfactor_single_t0(
clf=clf_odds,
obs_sample=x_obs,
t0=row[:model_obj.d],
gen_param_fun=gen_param_fun,
d=model_obj.d,
d_obs=model_obj.d_obs,
monte_carlo_samples=monte_carlo_samples
))
elif test_statistic == 'logavgacore':
stats_mat_generated = np.apply_along_axis(arr=full_mat, axis=1,
func1d=lambda row: compute_bayesfactor_single_t0(
clf=clf_odds,
obs_sample=row[model_obj.d:],
t0=row[:model_obj.d],
gen_param_fun=gen_param_fun,
d=model_obj.d,
d_obs=model_obj.d_obs,
monte_carlo_samples=monte_carlo_samples,
log_out=True
))
stats_mat_observed = np.apply_along_axis(arr=full_mat, axis=1,
func1d=lambda row: compute_bayesfactor_single_t0(
clf=clf_odds,
obs_sample=x_obs,
t0=row[:model_obj.d],
gen_param_fun=gen_param_fun,
d=model_obj.d,
d_obs=model_obj.d_obs,
monte_carlo_samples=monte_carlo_samples,
log_out=True
))
else:
raise ValueError('The variable test_statistic needs to be either acore, avgacore, logavgacore.'
' Currently %s' % test_statistic)
if np.any(np.isnan(stats_mat_generated)) or not np.all(np.isfinite(stats_mat_generated)) or \
np.any(np.isnan(stats_mat_observed)) or not np.all(np.isfinite(stats_mat_observed)):
not_update_flag = True
break
clf_pvalue_fitted[clf_name] = {}
indicator_vec = np.greater(stats_mat_observed, stats_mat_generated).astype(int)
for clf_name_pvalue, clf_model_pvalue in sorted(classifier_pvalue_dict.items(), key=lambda x: x[0]):
if sum(indicator_vec) <= 1 or sum(indicator_vec) >= len(indicator_vec) - 1:
pval_pred = np.repeat(sum(indicator_vec) / len(indicator_vec), b_prime)
loss_value_pval = np.nan
else:
clf_pvalue = train_pvalue_clf(clf_model=clf_model_pvalue, X=theta_mat.reshape(-1, model_obj.d),
y=indicator_vec.reshape(-1, ), clf_name=clf_name_pvalue,
nn_square_root=True)
pval_pred = clf_pvalue.predict_proba(t0_grid.reshape(-1, model_obj.d))[:, 1]
theta_mat_pred = clf_pvalue.predict_proba(theta_mat.reshape(-1, model_obj.d))[:, 1]
loss_value_pval = log_loss(y_true=indicator_vec, y_pred=theta_mat_pred)
clf_pvalue_fitted[clf_name][clf_name_pvalue] = (pval_pred, loss_value_pval)
if not_update_flag:
not_update_flag = False
continue
for clf_name, (tau_obs_val, cross_ent_loss, or_loss_value) in clf_odds_fitted.items():
for clf_name_qr, (pvalue_val, pvalue_celoss_val) in clf_pvalue_fitted[clf_name].items():
size_temp = np.mean((pvalue_val > alpha).astype(int))
for kk, theta_0_current in enumerate(t0_grid):
out_val.append([
test_statistic, b_prime, b, clf_name, clf_name_qr, run, rep_counter, sample_size_obs,
cross_ent_loss, pvalue_celoss_val, t0_val, theta_0_current, int(t0_val == theta_0_current),
pvalue_val[kk], int(pvalue_val[kk] > alpha),
int(pvalue_val[kk] <= alpha), size_temp, entropy_est, or_loss_value,
monte_carlo_samples, int(guided_sim), int(empirical_marginal), guided_sample
])
pbar.update(1)
rep_counter += 1
# Saving the results
out_df = pd.DataFrame.from_records(data=out_val, index=range(len(out_val)), columns=out_cols)
out_dir = 'sims/classifier_cov_pow_toy/'
out_filename = 'classifier_reps_cov_pow_toy_pvalues_%steststats_%s_%sB_%sBprime_%s_%srep_alpha%s_sampleobs%s_t0val%s%s_%s.csv' % (
test_statistic, 'mlp_comp' if mlp_comp else 'toyclassifiers', b, b_prime, run, rep,
str(alpha).replace('.', '-'), sample_size_obs,
str(t0_val).replace('.', '-'),
'_empirmarg' if empirical_marginal else '',
datetime.strftime(datetime.today(), '%Y-%m-%d-%H-%M')
)
out_df.to_csv(out_dir + out_filename)
# Print results
cov_df = out_df[out_df['on_true_t0'] == 1][['classifier', 'classifier_pvalue', 'in_confint',
'cross_entropy_loss', 'cross_entropy_loss_pvalue', 'size_CI']]
print(cov_df.groupby(['classifier', 'classifier_pvalue']).agg({'in_confint': [np.average],
'size_CI': [np.average, np.std],
'cross_entropy_loss': [np.average],
'cross_entropy_loss_pvalue': [np.average]}))
# Power plots
out_df['class_combo'] = out_df[['classifier', 'classifier_pvalue']].apply(lambda x: x[0] + '---' + x[1], axis = 1)
plot_df = out_df[['class_combo', 'theta_0_current', 'out_confint']].groupby(
['class_combo', 'theta_0_current']).mean().reset_index()
fig = plt.figure(figsize=(20, 10))
sns.lineplot(x='theta_0_current', y='out_confint', hue='class_combo', data=plot_df, palette='cubehelix')
plt.legend(loc='best', fontsize=25)
plt.xlabel(r'$\theta$', fontsize=25)
plt.ylabel('Power', fontsize=25)
plt.title("Power of Hypothesis Test, B=%s, B'=%s, n=%s, %s" % (
b, b_prime, sample_size_obs, run.title()), fontsize=25)
out_dir = 'images/classifier_cov_pow_toy/'
outfile_name = 'power_classifier_reps_pvalue_%steststats_%sB_%sBprime_%s_%srep_alpha%s_sampleobs%s_t0val%s_%s.pdf' % (
test_statistic, b, b_prime, run, rep, str(alpha).replace('.', '-'), sample_size_obs,
str(t0_val).replace('.', '-'),
datetime.strftime(datetime.today(), '%Y-%m-%d')
)
plt.tight_layout()
plt.savefig(out_dir + outfile_name)
plt.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--seed', action="store", type=int, default=7,
help='Random State')
parser.add_argument('--rep', action="store", type=int, default=10,
help='Number of Repetitions for calculating the Pinball loss')
parser.add_argument('--b', action="store", type=int, default=5000,
help='Sample size to train the classifier for calculating odds')
parser.add_argument('--b_prime', action="store", type=int, default=1000,
help='Sample size to train the quantile regression algorithm')
parser.add_argument('--marginal', action='store_true', default=False,
help='Whether we are using a parametric approximation of the marginal or'
'the baseline reference G')
parser.add_argument('--alpha', action="store", type=float, default=0.1,
help='Statistical confidence level')
parser.add_argument('--run', action="store", type=str, default='poisson',
help='Problem to run')
parser.add_argument('--debug', action='store_true', default=False,
help='If true, a very small value for the sample sizes is fit to make sure the'
'file can run quickly for debugging purposes')
parser.add_argument('--verbose', action='store_true', default=False,
help='If true, logs are printed to the terminal')
parser.add_argument('--sample_size_obs', action="store", type=int, default=10,
help='Sample size of the actual observed data.')
parser.add_argument('--t0_val', action="store", type=float, default=10.0,
help='True parameter which generates the observed dataset')
parser.add_argument('--size_marginal', action="store", type=int, default=1000,
help='Sample size of the actual marginal distribution, if marginal is True.')
parser.add_argument('--monte_carlo_samples', action="store", type=int, default=500,
help='Sample size for the calculation of the avgacore and logavgacore statistic.')
parser.add_argument('--test_statistic', action="store", type=str, default='acore',
help='Test statistic to compute confidence intervals. Can be acore|avgacore|logavgacore')
parser.add_argument('--mlp_comp', action='store_true', default=False,
help='If true, we compare different MLP training algorithm.')
parser.add_argument('--empirical_marginal', action='store_true', default=False,
help='Whether we are sampling directly from the empirical marginal for G')
parser.add_argument('--guided_sim', action='store_true', default=False,
help='If true, we guided the sampling for the B prime in order to get meaningful results.')
parser.add_argument('--guided_sample', action="store", type=int, default=2500,
help='The sample size to be used for the guided simulation. Only used if guided_sim is True.')
argument_parsed = parser.parse_args()
main(
run=argument_parsed.run,
rep=argument_parsed.rep,
marginal=argument_parsed.marginal,
b=argument_parsed.b,
b_prime=argument_parsed.b_prime,
alpha=argument_parsed.alpha,
debug=argument_parsed.debug,
sample_size_obs=argument_parsed.sample_size_obs,
t0_val=argument_parsed.t0_val,
seed=argument_parsed.seed,
verbose=argument_parsed.verbose,
size_marginal=argument_parsed.size_marginal,
monte_carlo_samples=argument_parsed.monte_carlo_samples,
test_statistic=argument_parsed.test_statistic,
mlp_comp=argument_parsed.mlp_comp,
empirical_marginal=argument_parsed.empirical_marginal,
guided_sim=argument_parsed.guided_sim,
guided_sample=argument_parsed.guided_sample
)
| true
| true
|
f71aa447e93126ff1ef79e05d8bb36f39e9bc2a4
| 4,210
|
py
|
Python
|
openshift/test/test_v1_load_balancer_ingress.py
|
flaper87/openshift-restclient-python
|
13d5d86ca89035b9f596032e7a34f3cc33bf8f18
|
[
"Apache-2.0"
] | null | null | null |
openshift/test/test_v1_load_balancer_ingress.py
|
flaper87/openshift-restclient-python
|
13d5d86ca89035b9f596032e7a34f3cc33bf8f18
|
[
"Apache-2.0"
] | null | null | null |
openshift/test/test_v1_load_balancer_ingress.py
|
flaper87/openshift-restclient-python
|
13d5d86ca89035b9f596032e7a34f3cc33bf8f18
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
OpenShift API (with Kubernetes)
OpenShift provides builds, application lifecycle, image content management, and administrative policy on top of Kubernetes. The API allows consistent management of those objects. All API operations are authenticated via an Authorization bearer token that is provided for service accounts as a generated secret (in JWT form) or via the native OAuth endpoint located at /oauth/authorize. Core infrastructure components may use openshift.client certificates that require no authentication. All API operations return a 'resourceVersion' string that represents the version of the object in the underlying storage. The standard LIST operation performs a snapshot read of the underlying objects, returning a resourceVersion representing a consistent version of the listed objects. The WATCH operation allows all updates to a set of objects after the provided resourceVersion to be observed by a openshift.client. By listing and beginning a watch from the returned resourceVersion, openshift.clients may observe a consistent view of the state of one or more objects. Note that WATCH always returns the update after the provided resourceVersion. Watch may be extended a limited time in the past - using etcd 2 the watch window is 1000 events (which on a large cluster may only be a few tens of seconds) so openshift.clients must explicitly handle the \"watch to old error\" by re-listing. Objects are divided into two rough categories - those that have a lifecycle and must reflect the state of the cluster, and those that have no state. Objects with lifecycle typically have three main sections: * 'metadata' common to all objects * a 'spec' that represents the desired state * a 'status' that represents how much of the desired state is reflected on the cluster at the current time Objects that have no state have 'metadata' but may lack a 'spec' or 'status' section. 
Objects are divided into those that are namespace scoped (only exist inside of a namespace) and those that are cluster scoped (exist outside of a namespace). A namespace scoped resource will be deleted when the namespace is deleted and cannot be created if the namespace has not yet been created or is in the process of deletion. Cluster scoped resources are typically only accessible to admins - resources like nodes, persistent volumes, and cluster policy. All objects have a schema that is a combination of the 'kind' and 'apiVersion' fields. This schema is additive only for any given version - no backwards incompatible changes are allowed without incrementing the apiVersion. The server will return and accept a number of standard responses that share a common schema - for instance, the common error type is 'unversioned.Status' (described below) and will be returned on any error from the API server. The API is available in multiple serialization formats - the default is JSON (Accept: application/json and Content-Type: application/json) but openshift.clients may also use YAML (application/yaml) or the native Protobuf schema (application/vnd.kubernetes.protobuf). Note that the format of the WATCH API call is slightly different - for JSON it returns newline delimited objects while for Protobuf it returns length-delimited frames (4 bytes in network-order) that contain a 'versioned.Watch' Protobuf object. See the OpenShift documentation at https://docs.openshift.org for more information.
OpenAPI spec version: v3.6.0-alpha.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import openshift.client
from kubernetes.client.rest import ApiException
from openshift.client.models.v1_load_balancer_ingress import V1LoadBalancerIngress
class TestV1LoadBalancerIngress(unittest.TestCase):
    """Generated unit-test stubs for the V1LoadBalancerIngress model."""

    def setUp(self):
        # No fixtures needed for these generated stubs.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testV1LoadBalancerIngress(self):
        """
        Test V1LoadBalancerIngress
        """
        # Smoke test: the model class can be constructed with no arguments.
        model = openshift.client.models.v1_load_balancer_ingress.V1LoadBalancerIngress()
if __name__ == '__main__':
    # Allow running this stub module directly.
    unittest.main()
| 97.906977
| 3,380
| 0.791211
|
from __future__ import absolute_import
import os
import sys
import unittest
import openshift.client
from kubernetes.client.rest import ApiException
from openshift.client.models.v1_load_balancer_ingress import V1LoadBalancerIngress
class TestV1LoadBalancerIngress(unittest.TestCase):
    """Generated unit-test stubs for the V1LoadBalancerIngress model."""

    def setUp(self):
        # No fixtures needed.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testV1LoadBalancerIngress(self):
        # Smoke test: construction succeeds with no arguments.
        model = openshift.client.models.v1_load_balancer_ingress.V1LoadBalancerIngress()
if __name__ == '__main__':
    # Allow running this stub module directly.
    unittest.main()
| true
| true
|
f71aa5297e2e652741a2be68088de722b87d9713
| 3,419
|
py
|
Python
|
openGaussBase/testcase/TOOLS/INTERNAL_TOOLS/gaussdb/Opengauss_Function_Tools_Gaussdb_Case0014.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
openGaussBase/testcase/TOOLS/INTERNAL_TOOLS/gaussdb/Opengauss_Function_Tools_Gaussdb_Case0014.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
openGaussBase/testcase/TOOLS/INTERNAL_TOOLS/gaussdb/Opengauss_Function_Tools_Gaussdb_Case0014.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
"""
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type : tools
Case Name : 启动gaussdb进程时,使用-e参数把缺省日期风格设置为"European"是否成功
Description :
1.查看当前日期风格
show datestyle;
2.关闭正在运行的数据库
gs_ctl stop -D /opt/openGauss_zl/cluster/dn1
3.查看进程,确定关闭成功
ps -ef|grep zl
4.使用gaussdb工具后台运行进程,缺省日期风格设置为"European"
gaussdb -D /opt/openGauss_zl/cluster/dn1 -p 19701 -e -M primary &
5.查看当前日期风格,是否为European风格
show datestyle;
Expect :
1.查看当前日期风格成功,显示为:ISO, MDY
2.关闭正在运行的数据库成功
3.查看进程,确定关闭成功
查看进程成功,确认数据库已关闭
4.使用gaussdb工具后台运行进程,缺省日期风格设置为"European"成功
5.查看当前日期风格,为European风格,显示为:ISO, DMY
show datestyle;
History :
"""
import unittest
from testcase.utils.ComThread import ComThread
from yat.test import Node
from yat.test import macro
from testcase.utils.Common import Common
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Logger import Logger
class Tools(unittest.TestCase):
    """Verify that launching gaussdb with ``-e`` switches the default
    date style to "European" (DateStyle should report 'ISO, DMY')."""

    def setUp(self):
        # Two SSH handles on the primary DB user: one to stop the server,
        # one to run the blocking `gaussdb` foreground process.
        self.logger = Logger()
        self.logger.info('--Opengauss_Function_Tools_Gaussdb_Case0014 start--')
        self.userNode = Node('PrimaryDbUser')
        self.userNode2 = Node('PrimaryDbUser')
        self.DB_ENV_PATH = macro.DB_ENV_PATH
        self.DB_INSTANCE_PATH = macro.DB_INSTANCE_PATH
        self.sh_primy = CommonSH('PrimaryDbUser')
        self.common = Common()

    def test_systools(self):
        """Stop the database, restart it via ``gaussdb -e`` in a daemon
        thread, then assert the session DateStyle is 'ISO, DMY'."""
        # Step 1: stop the running database instance.
        self.logger.info('--------关闭正在运行的数据库--------')
        excute_cmd1 = f'source {self.DB_ENV_PATH};' \
                      f'gs_ctl stop -D {self.DB_INSTANCE_PATH}'
        self.logger.info(excute_cmd1)
        msg1 = self.userNode.sh(excute_cmd1).result()
        self.logger.info(msg1)
        # Step 2: confirm no process still references the instance path.
        self.logger.info('--------查看进程,确定关闭成功--------')
        excute_cmd2 = f'ps -ef|grep {self.userNode.ssh_user}'
        self.logger.info(excute_cmd2)
        msg2 = self.userNode.sh(excute_cmd2).result()
        self.logger.info(msg2)
        self.assertFalse(self.DB_INSTANCE_PATH in msg2)
        # Step 3: relaunch gaussdb with -e (European date style). The
        # command blocks in the foreground, so run it in a daemon thread
        # and give the server up to 10 seconds to come up.
        self.logger.info('使用gaussdb工具后台运行进程,缺省日期风格设置为European')
        excute_cmd3 = f'source {self.DB_ENV_PATH};' \
                      f'gaussdb -D {self.DB_INSTANCE_PATH} -p ' \
                      f'{self.userNode.db_port} -e -M primary'
        self.logger.info(excute_cmd3)
        thread_2 = ComThread(self.userNode2.sh, args=(excute_cmd3,))
        thread_2.setDaemon(True)
        thread_2.start()
        thread_2.join(10)
        msg_result_2 = thread_2.get_result()
        self.logger.info(msg_result_2)
        # Step 4: DateStyle should now report 'ISO, DMY'.
        self.logger.info('--------查看当前日期风格,是否为European风格--------')
        sql_cmd3 = f'show datestyle;'
        # BUG FIX: the original re-logged `excute_cmd3` here; the command
        # being executed at this point is the SQL statement.
        self.logger.info(sql_cmd3)
        msg3 = self.sh_primy.execut_db_sql(sql_cmd3)
        self.logger.info(msg3)
        self.common.equal_sql_mdg(msg3, 'DateStyle', 'ISO, DMY', '(1 row)',
                                  flag='1')

    def tearDown(self):
        self.logger.info('-Opengauss_Function_Tools_Gaussdb_Case0014 finish-')
| 36.763441
| 84
| 0.664814
|
import unittest
from testcase.utils.ComThread import ComThread
from yat.test import Node
from yat.test import macro
from testcase.utils.Common import Common
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Logger import Logger
class Tools(unittest.TestCase):
    """Check that starting gaussdb with ``-e`` makes the default date
    style "European" (DateStyle = 'ISO, DMY')."""

    def setUp(self):
        # Fixtures: two SSH sessions as the primary DB user plus helpers.
        self.logger = Logger()
        self.logger.info('--Opengauss_Function_Tools_Gaussdb_Case0014 start--')
        self.userNode = Node('PrimaryDbUser')
        self.userNode2 = Node('PrimaryDbUser')
        self.DB_ENV_PATH = macro.DB_ENV_PATH
        self.DB_INSTANCE_PATH = macro.DB_INSTANCE_PATH
        self.sh_primy = CommonSH('PrimaryDbUser')
        self.common = Common()

    def test_systools(self):
        """Stop the DB, restart via ``gaussdb -e`` in a background thread,
        assert DateStyle reports 'ISO, DMY'."""
        # Stop the running database instance.
        self.logger.info('--------关闭正在运行的数据库--------')
        excute_cmd1 = f'source {self.DB_ENV_PATH};' \
                      f'gs_ctl stop -D {self.DB_INSTANCE_PATH}'
        self.logger.info(excute_cmd1)
        msg1 = self.userNode.sh(excute_cmd1).result()
        self.logger.info(msg1)
        # Verify no process still references the instance path.
        self.logger.info('--------查看进程,确定关闭成功--------')
        excute_cmd2 = f'ps -ef|grep {self.userNode.ssh_user}'
        self.logger.info(excute_cmd2)
        msg2 = self.userNode.sh(excute_cmd2).result()
        self.logger.info(msg2)
        self.assertFalse(self.DB_INSTANCE_PATH in msg2)
        # Relaunch gaussdb with -e; the call blocks, so use a daemon thread.
        self.logger.info('使用gaussdb工具后台运行进程,缺省日期风格设置为European')
        excute_cmd3 = f'source {self.DB_ENV_PATH};' \
                      f'gaussdb -D {self.DB_INSTANCE_PATH} -p ' \
                      f'{self.userNode.db_port} -e -M primary'
        self.logger.info(excute_cmd3)
        thread_2 = ComThread(self.userNode2.sh, args=(excute_cmd3,))
        thread_2.setDaemon(True)
        thread_2.start()
        thread_2.join(10)
        msg_result_2 = thread_2.get_result()
        self.logger.info(msg_result_2)
        # DateStyle should now report 'ISO, DMY'.
        self.logger.info('--------查看当前日期风格,是否为European风格--------')
        sql_cmd3 = f'show datestyle;'
        # BUG FIX: originally re-logged `excute_cmd3`; log the SQL instead.
        self.logger.info(sql_cmd3)
        msg3 = self.sh_primy.execut_db_sql(sql_cmd3)
        self.logger.info(msg3)
        self.common.equal_sql_mdg(msg3, 'DateStyle', 'ISO, DMY', '(1 row)',
                                  flag='1')

    def tearDown(self):
        self.logger.info('-Opengauss_Function_Tools_Gaussdb_Case0014 finish-')
| true
| true
|
f71aa56817ca77eba5df4a2dd11cb0c4a9a7ea1c
| 3,699
|
py
|
Python
|
tqdm/_monitor.py
|
insilications/tqdm-clr
|
b09a24af7ffe5c85ed0e8e64b33059b43b1be020
|
[
"MIT"
] | 22,617
|
2015-06-03T20:26:05.000Z
|
2022-03-31T22:25:42.000Z
|
tqdm/_monitor.py
|
insilications/tqdm-clr
|
b09a24af7ffe5c85ed0e8e64b33059b43b1be020
|
[
"MIT"
] | 1,230
|
2015-06-03T13:56:41.000Z
|
2022-03-30T06:03:12.000Z
|
tqdm/_monitor.py
|
insilications/tqdm-clr
|
b09a24af7ffe5c85ed0e8e64b33059b43b1be020
|
[
"MIT"
] | 1,445
|
2015-06-03T14:01:33.000Z
|
2022-03-29T14:41:52.000Z
|
import atexit
from threading import Event, Thread, current_thread
from time import time
from warnings import warn
# Public API of this module.
__all__ = ["TMonitor", "TqdmSynchronisationWarning"]
class TqdmSynchronisationWarning(RuntimeWarning):
    """tqdm multi-thread/-process errors which may cause incorrect nesting
    but otherwise no adverse effects"""
    pass
class TMonitor(Thread):
    """
    Monitoring thread for tqdm bars.
    Monitors if tqdm bars are taking too much time to display
    and readjusts miniters automatically if necessary.

    Parameters
    ----------
    tqdm_cls : class
        tqdm class to use (can be core tqdm or a submodule).
    sleep_interval : float
        Time to sleep between monitoring checks.
    """
    _test = {}  # internal vars for unit testing (may inject fake time/Event)

    def __init__(self, tqdm_cls, sleep_interval):
        Thread.__init__(self)
        self.daemon = True  # kill thread when main killed (KeyboardInterrupt)
        self.woken = 0  # last time woken up, to sync with monitor
        self.tqdm_cls = tqdm_cls
        self.sleep_interval = sleep_interval
        # Unit tests can override `time` and `Event` through `_test`.
        self._time = self._test.get("time", time)
        self.was_killed = self._test.get("Event", Event)()
        atexit.register(self.exit)
        self.start()

    def exit(self):
        """Signal the monitor to stop, join it (unless called from the
        monitor thread itself) and return the final ``report()`` value."""
        self.was_killed.set()
        if self is not current_thread():
            self.join()
        return self.report()

    def get_instances(self):
        # returns a copy of started `tqdm_cls` instances
        return [i for i in self.tqdm_cls._instances.copy()
                # Avoid race by checking that the instance started
                if hasattr(i, 'start_t')]

    def run(self):
        """Monitor loop: wake every `sleep_interval` seconds and force a
        refresh on any bar whose last print exceeded its maxinterval."""
        cur_t = self._time()
        while True:
            # After processing and before sleeping, notify that we woke
            # Need to be done just before sleeping
            self.woken = cur_t
            # Sleep some time...
            self.was_killed.wait(self.sleep_interval)
            # Quit if killed
            if self.was_killed.is_set():
                return
            # Then monitor!
            # Acquire lock (to access _instances)
            with self.tqdm_cls.get_lock():
                cur_t = self._time()
                # Check tqdm instances are waiting too long to print
                instances = self.get_instances()
                for instance in instances:
                    # Check event in loop to reduce blocking time on exit
                    if self.was_killed.is_set():
                        return
                    # Only if mininterval > 1 (else iterations are just slow)
                    # and last refresh exceeded maxinterval
                    if (
                        instance.miniters > 1
                        and (cur_t - instance.last_print_t) >= instance.maxinterval
                    ):
                        # force bypassing miniters on next iteration
                        # (dynamic_miniters adjusts mininterval automatically)
                        instance.miniters = 1
                        # Refresh now! (works only for manual tqdm)
                        instance.refresh(nolock=True)
                    # Remove accidental long-lived strong reference
                    del instance
                if instances != self.get_instances():  # pragma: nocover
                    warn("Set changed size during iteration" +
                         " (see https://github.com/tqdm/tqdm/issues/481)",
                         TqdmSynchronisationWarning, stacklevel=2)
                # Remove accidental long-lived strong references
                del instances

    def report(self):
        """Return True while the monitor has not been killed."""
        return not self.was_killed.is_set()
| 38.53125
| 83
| 0.575561
|
import atexit
from threading import Event, Thread, current_thread
from time import time
from warnings import warn
# Public API of this module.
__all__ = ["TMonitor", "TqdmSynchronisationWarning"]
class TqdmSynchronisationWarning(RuntimeWarning):
    """Warning for tqdm multi-thread/-process errors which may cause
    incorrect bar nesting but otherwise no adverse effects."""
    pass
class TMonitor(Thread):
    """Monitoring thread for tqdm bars: forces a refresh on bars whose
    last print exceeded their maxinterval, by resetting miniters."""
    # Internal overrides for unit testing (fake "time" / "Event").
    _test = {}

    def __init__(self, tqdm_cls, sleep_interval):
        Thread.__init__(self)
        self.daemon = True  # die with the main thread (KeyboardInterrupt)
        self.woken = 0  # last wake-up timestamp, used to sync with monitor
        self.tqdm_cls = tqdm_cls
        self.sleep_interval = sleep_interval
        self._time = self._test.get("time", time)
        self.was_killed = self._test.get("Event", Event)()
        atexit.register(self.exit)
        self.start()

    def exit(self):
        """Stop the monitor, join it (unless called from itself), and
        return the final report() value."""
        self.was_killed.set()
        if self is not current_thread():
            self.join()
        return self.report()

    def get_instances(self):
        # Copy of started tqdm instances; `start_t` check avoids a race
        # with instances that are still initialising.
        return [i for i in self.tqdm_cls._instances.copy()
                if hasattr(i, 'start_t')]

    def run(self):
        """Main loop: sleep, then (under the tqdm lock) refresh stale bars."""
        cur_t = self._time()
        while True:
            # Record the wake-up time just before sleeping.
            self.woken = cur_t
            self.was_killed.wait(self.sleep_interval)
            # Quit promptly when killed.
            if self.was_killed.is_set():
                return
            # Lock guards access to tqdm_cls._instances.
            with self.tqdm_cls.get_lock():
                cur_t = self._time()
                instances = self.get_instances()
                for instance in instances:
                    # Re-check the kill flag to shorten blocking on exit.
                    if self.was_killed.is_set():
                        return
                    # Only act when miniters > 1 (else iterations are just
                    # slow) and the last refresh exceeded maxinterval.
                    if (
                        instance.miniters > 1
                        and (cur_t - instance.last_print_t) >= instance.maxinterval
                    ):
                        # Force bypassing miniters on the next iteration.
                        instance.miniters = 1
                        # Refresh now (effective for manual tqdm only).
                        instance.refresh(nolock=True)
                    # Drop the strong reference each iteration.
                    del instance
                if instances != self.get_instances():
                    warn("Set changed size during iteration" +
                         " (see https://github.com/tqdm/tqdm/issues/481)",
                         TqdmSynchronisationWarning, stacklevel=2)
                # Drop remaining strong references.
                del instances

    def report(self):
        """Return True while the monitor is alive (not killed)."""
        return not self.was_killed.is_set()
| true
| true
|
f71aa6cb7d4d15f3ea5cab82e19b2bc1a59ef4a5
| 3,466
|
py
|
Python
|
tests/get_bst_feature_api_ct.py
|
r-cc-c/ops-broadview
|
b9002fab5fe1f27628bfd403631840ff4a118cf2
|
[
"Apache-2.0"
] | null | null | null |
tests/get_bst_feature_api_ct.py
|
r-cc-c/ops-broadview
|
b9002fab5fe1f27628bfd403631840ff4a118cf2
|
[
"Apache-2.0"
] | null | null | null |
tests/get_bst_feature_api_ct.py
|
r-cc-c/ops-broadview
|
b9002fab5fe1f27628bfd403631840ff4a118cf2
|
[
"Apache-2.0"
] | 1
|
2021-09-10T08:19:29.000Z
|
2021-09-10T08:19:29.000Z
|
'''
*
* (C) Copyright Broadcom Corporation 2015
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
'''
#!/usr/bin/env python
import os
import sys
import ConfigParser
import json
import pprint
from bstUtil import *
from BstRestService import *
import bstRest as rest
class get_bst_feature_api_ct(object):
    """Component test for the BroadView BST "get-bst-feature" REST API.

    NOTE: Python 2 code (``except Exception,e`` syntax).
    """

    def __init__(self,ip,port,params="",debug=False):
        # REST client bound to the BroadView agent at ip:port.
        self.obj = BstRestService(ip,port)
        self.debug = debug
        # Comma-separated list of feature parameter names expected in the reply.
        self.params = params

    def step1(self,jsonData):
        """Get BST Feature Status"""
        # Post the request; resp[0] == "INVALID" signals connection/JSON trouble.
        try:
            resp = self.obj.postResponse(jsonData)
            if resp[0] == "INVALID":
                return "FAIL","Connection refused/Invalid JSON request... Please check the ip address provided in 'ini' file/BroadViewAgent is running or not/JSON data is valid or not ..."
        except Exception,e:
            return "FAIL","Unable to perform the rest call with given JSON data, Occured Exception ... "+str(e)
        # Debug dump of request/response; a failure here means malformed JSON.
        try:
            self.obj.debugJsonPrint(self.debug,jsonData,resp)
        except:
            return "FAIL","Invalid JSON Response data received"
        if returnStatus(resp[0], 200)[0] == "FAIL": return "FAIL","Obtained {0}".format(resp[0])
        if not resp[1]: return "FAIL","Got null response"
        # Strip the embedded header fragment so the remainder parses as JSON.
        resp_ = resp[1].replace('Content-Type: text/json', '')
        data_dict = json.loads(resp_)
        if not "result" in data_dict: return "FAIL","No Result key in Response JSON Data"
        result = data_dict['result']
        # Compare the configured parameter list against the reply's keys.
        plist = self.params.split(",")
        plist = [p.strip() for p in plist]
        return returnStatus(sorted(plist),sorted(result.keys()),"","get_bst_feature params lists contains invalid param keys")

    def getSteps(self):
        # All methods named stepN, sorted numerically by N.
        return sorted([ i for i in dir(self) if i.startswith('step') ], key=lambda item: int(item.replace('step','')))
def main(ip_address,port):
    """Run every stepN of get_bst_feature_api_ct in order.

    Step inputs come from testCaseJsonStrings.ini; returns (True, msg)
    when all steps pass, otherwise (False, first-failure-message).
    """
    jsonText = ConfigParser.ConfigParser()
    cwdir, f = os.path.split(__file__)
    jsonText.read(cwdir + '/testCaseJsonStrings.ini')
    json_dict = dict(jsonText.items('get_bst_feature_api_ct'))
    params=json_dict.get("paramslist","")
    tcObj = get_bst_feature_api_ct(ip_address,port,params,debug=True)
    stepResultMap = {}
    printStepHeader()
    for step in tcObj.getSteps():
        if step in json_dict:
            # Step has configured JSON input; its docstring is the description.
            resp=getattr(tcObj,step)(json_dict[step])
            desc=getattr(tcObj,step).__doc__
            stepResultMap[step] = resp
            printStepResult(step,desc,resp[0], resp[1])
        else:
            # Step takes no input and gets an empty description.
            resp=getattr(tcObj,step)()
            desc=""
            stepResultMap[step] = resp
            printStepResult(step,desc,resp[0], resp[1])
        # NOTE(review): indentation was lost in this dump; the break is placed
        # at loop level (stop on the first failing step) — confirm upstream.
        if resp[0] == 'FAIL': break
    printStepFooter()
    statusMsgTuple = [ s for s in stepResultMap.values() if s[0] == "FAIL" ]
    if statusMsgTuple:
        return False, statusMsgTuple[0][1]
    return True, "Test Case Passed"
if __name__ == '__main__':
    # BUG FIX: main() requires (ip_address, port) but was called with no
    # arguments, which always raised a TypeError. Take both from the
    # command line; `sys` is already imported at the top of this file.
    main(sys.argv[1], sys.argv[2])
| 35.367347
| 188
| 0.648586
|
'''
*
* (C) Copyright Broadcom Corporation 2015
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
'''
import os
import sys
import ConfigParser
import json
import pprint
from bstUtil import *
from BstRestService import *
import bstRest as rest
class get_bst_feature_api_ct(object):
    """Component test for the BST "get-bst-feature" REST API (Python 2)."""

    def __init__(self,ip,port,params="",debug=False):
        # REST client bound to the BroadView agent at ip:port.
        self.obj = BstRestService(ip,port)
        self.debug = debug
        # Comma-separated list of expected feature parameter names.
        self.params = params

    def step1(self,jsonData):
        """Get BST Feature Status"""
        # Post the request; "INVALID" signals connection/JSON trouble.
        try:
            resp = self.obj.postResponse(jsonData)
            if resp[0] == "INVALID":
                return "FAIL","Connection refused/Invalid JSON request... Please check the ip address provided in 'ini' file/BroadViewAgent is running or not/JSON data is valid or not ..."
        except Exception,e:
            return "FAIL","Unable to perform the rest call with given JSON data, Occured Exception ... "+str(e)
        # Debug dump; failure here means the response JSON is malformed.
        try:
            self.obj.debugJsonPrint(self.debug,jsonData,resp)
        except:
            return "FAIL","Invalid JSON Response data received"
        if returnStatus(resp[0], 200)[0] == "FAIL": return "FAIL","Obtained {0}".format(resp[0])
        if not resp[1]: return "FAIL","Got null response"
        # Strip the header fragment so the remainder parses as JSON.
        resp_ = resp[1].replace('Content-Type: text/json', '')
        data_dict = json.loads(resp_)
        if not "result" in data_dict: return "FAIL","No Result key in Response JSON Data"
        result = data_dict['result']
        # Compare configured parameter names against the reply's keys.
        plist = self.params.split(",")
        plist = [p.strip() for p in plist]
        return returnStatus(sorted(plist),sorted(result.keys()),"","get_bst_feature params lists contains invalid param keys")

    def getSteps(self):
        # All methods named stepN, sorted numerically by N.
        return sorted([ i for i in dir(self) if i.startswith('step') ], key=lambda item: int(item.replace('step','')))
def main(ip_address,port):
    """Drive the test case: run each stepN in order with inputs from
    testCaseJsonStrings.ini; return (True, msg) or (False, failure-msg)."""
    jsonText = ConfigParser.ConfigParser()
    cwdir, f = os.path.split(__file__)
    jsonText.read(cwdir + '/testCaseJsonStrings.ini')
    json_dict = dict(jsonText.items('get_bst_feature_api_ct'))
    params=json_dict.get("paramslist","")
    tcObj = get_bst_feature_api_ct(ip_address,port,params,debug=True)
    stepResultMap = {}
    printStepHeader()
    for step in tcObj.getSteps():
        if step in json_dict:
            # Configured input exists; docstring becomes the description.
            resp=getattr(tcObj,step)(json_dict[step])
            desc=getattr(tcObj,step).__doc__
            stepResultMap[step] = resp
            printStepResult(step,desc,resp[0], resp[1])
        else:
            resp=getattr(tcObj,step)()
            desc=""
            stepResultMap[step] = resp
            printStepResult(step,desc,resp[0], resp[1])
        # NOTE(review): break placed at loop level (indentation lost in dump).
        if resp[0] == 'FAIL': break
    printStepFooter()
    statusMsgTuple = [ s for s in stepResultMap.values() if s[0] == "FAIL" ]
    if statusMsgTuple:
        return False, statusMsgTuple[0][1]
    return True, "Test Case Passed"
if __name__ == '__main__':
    # BUG FIX: main() requires (ip_address, port); calling it with no
    # arguments always raised a TypeError. Read both from the CLI;
    # `sys` is already imported at the top of this file.
    main(sys.argv[1], sys.argv[2])
| false
| true
|
f71aa6cce65ae0f1ec42a02146d24feaa44f2307
| 98
|
py
|
Python
|
alg4.py
|
devilnotcry77/devil_not_cry
|
a9d342d053c788ec6db2d1c5967ed55104b40045
|
[
"Apache-2.0"
] | null | null | null |
alg4.py
|
devilnotcry77/devil_not_cry
|
a9d342d053c788ec6db2d1c5967ed55104b40045
|
[
"Apache-2.0"
] | null | null | null |
alg4.py
|
devilnotcry77/devil_not_cry
|
a9d342d053c788ec6db2d1c5967ed55104b40045
|
[
"Apache-2.0"
] | null | null | null |
# Draw a solid rectangle of asterisks: 100 rows of 10 stars each.
n = 100  # row count; the original `int(100)` wrapper was a no-op
for _ in range(n):
    # One write per row instead of ten `print("*", end="")` calls followed
    # by a bare print(); the emitted output is byte-identical.
    print("*" * 10)
| 16.333333
| 27
| 0.459184
|
# Print a 100-row by 10-column rectangle of asterisks.
n = 100  # `int(100)` was redundant — the literal is already an int
for _ in range(n):
    # Build each row in one call; output is identical to the original
    # ten single-character prints plus a newline.
    print("*" * 10)
| true
| true
|
f71aa81665c674b5cc3278ea94c533b98549fe90
| 935
|
py
|
Python
|
Swap Nodes in Pairs.py
|
H-isaac23/Data-Structures
|
2a860549ebc87155cdcf98ca951f1e345dd40499
|
[
"MIT"
] | null | null | null |
Swap Nodes in Pairs.py
|
H-isaac23/Data-Structures
|
2a860549ebc87155cdcf98ca951f1e345dd40499
|
[
"MIT"
] | null | null | null |
Swap Nodes in Pairs.py
|
H-isaac23/Data-Structures
|
2a860549ebc87155cdcf98ca951f1e345dd40499
|
[
"MIT"
] | null | null | null |
"""Given a linked list, swap every two adjacent nodes and return its head.
Example 1:
Input: head = [1,2,3,4]
Output: [2,1,4,3]
Example 2:
Input: head = []
Output: []
Example 3:
Input: head = [1]
Output: [1]
Constraints:
The number of nodes in the list is in the range [0, 100].
0 <= Node.val <= 100
Follow up: Can you solve the problem without modifying the values in the list's nodes? (i.e., Only nodes themselves may
be changed.)"""
# Definition for singly-linked list.
class ListNode:
    """Singly-linked list node: payload ``val`` plus ``next`` pointer."""
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next


class Solution:
    def swapPairs(self, head: ListNode) -> ListNode:
        """Swap every two adjacent nodes and return the new head.

        Iterative rewrite of the recursive original: a dummy node anchors
        the list and each loop iteration relinks one pair in O(1) space.
        Empty and single-node lists are returned unchanged.
        """
        anchor = ListNode(0, head)
        prev = anchor
        while prev.next is not None and prev.next.next is not None:
            first = prev.next
            second = first.next
            # Relink: prev -> second -> first -> (rest of list)
            first.next = second.next
            second.next = first
            prev.next = second
            prev = first
        return anchor.next
# Submission Details:
# Runtime: >85.13%
# Memory: >50.67%
| 21.744186
| 119
| 0.640642
|
class ListNode:
    """Singly-linked list node: payload ``val`` plus ``next`` pointer."""
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next
class Solution:
    def swapPairs(self, head: ListNode) -> ListNode:
        """Swap every two adjacent nodes recursively; return the new head.

        Base case: an empty or single-node list is returned unchanged.
        Otherwise the first pair is reversed and the recursion handles the
        rest. O(n) time, O(n/2) recursion depth.
        """
        if head is None or head.next is None:
            return head
        first = head.next
        second = head.next.next
        first.next = head
        head.next = self.swapPairs(second)
        return first
| true
| true
|
f71aa89acd39eaae1c4ded0a372a3dc7b494d67c
| 189
|
py
|
Python
|
blueapps/account/components/bk_token/forms.py
|
jin-cc/bastion-test
|
9feecbe927e5446213ab25b4da4a5eca23cf6bae
|
[
"Apache-2.0"
] | 42
|
2021-06-16T12:06:03.000Z
|
2022-03-29T13:18:00.000Z
|
blueapps/account/components/bk_token/forms.py
|
jin-cc/bastion-test
|
9feecbe927e5446213ab25b4da4a5eca23cf6bae
|
[
"Apache-2.0"
] | 3
|
2020-06-05T20:56:09.000Z
|
2021-06-10T21:29:05.000Z
|
blueapps/account/components/bk_token/forms.py
|
wangzishuo111/bk_prometheus
|
c6aa16d8a547a3d00fbca317f6846ad35b1297ea
|
[
"MIT"
] | 16
|
2021-07-13T01:17:57.000Z
|
2022-03-01T12:39:32.000Z
|
# -*- coding: utf-8 -*-
from django import forms
class AuthenticationForm(forms.Form):
    """Validation form for the BlueKing login token."""
    # bk_token format: KH7P4-VSFi_nOEoV3kj0ytcs0uZnGOegIBLV-eM3rw8
    bk_token = forms.CharField()
| 23.625
| 66
| 0.740741
|
from django import forms
class AuthenticationForm(forms.Form):
    """Validation form for the BlueKing login token (bk_token)."""
    # Example token format: KH7P4-VSFi_nOEoV3kj0ytcs0uZnGOegIBLV-eM3rw8
    bk_token = forms.CharField()
| true
| true
|
f71aa8c11ea59751ae59caa6184f21489f218f12
| 422
|
py
|
Python
|
CookieTTS/_2_ttm/GANTTS/run_every_epoch.py
|
AstraliteHeart/cookietts
|
c871f5f7b5790656d5b57bcd9e63946a2da52f0f
|
[
"BSD-3-Clause"
] | 25
|
2020-07-07T20:07:41.000Z
|
2021-12-17T11:27:36.000Z
|
CookieTTS/_2_ttm/GANTTS/run_every_epoch.py
|
AstraliteHeart/cookietts
|
c871f5f7b5790656d5b57bcd9e63946a2da52f0f
|
[
"BSD-3-Clause"
] | 26
|
2020-07-04T00:06:25.000Z
|
2022-02-10T03:28:35.000Z
|
CookieTTS/_2_ttm/GANTTS/run_every_epoch.py
|
AstraliteHeart/cookietts
|
c871f5f7b5790656d5b57bcd9e63946a2da52f0f
|
[
"BSD-3-Clause"
] | 11
|
2020-07-02T21:39:59.000Z
|
2022-01-17T22:09:46.000Z
|
# NOTE(review): this module appears to be exec'd by the trainer each epoch;
# `iteration` is expected to be injected into the namespace — confirm.
current_iteration = iteration
##########################################################################
### GAN-TTS : HIGH FIDELITY SPEECH SYNTHESIS WITH ADVERSARIAL NETWORKS ###
##########################################################################
# Learning Rate / Optimization
decay_start = 99999999  # iteration at which decay starts (effectively disabled)
# A_/B_/C_: presumably amplitude, time-constant and offset of the LR
# schedule — confirm against the consuming training loop.
A_ = 0.2e-5
B_ = 40000
C_ = 0e-5
min_learning_rate = 1e-6
grad_clip_thresh = 75
# [sic] "descriminator" spelling kept: the consumer reads this exact name.
descriminator_loss_scale = 0.1
| 28.133333
| 74
| 0.490521
|
current_iteration = iteration
| true
| true
|
f71aa8d7c382bafc56b06793ddb3976f1a195ca1
| 11,480
|
py
|
Python
|
StructVBERT/tasks/vqa.py
|
onlyrico/AliceMind
|
a6a070b1610e4c4bfe84ee6c4195b2bc4f725ded
|
[
"Apache-2.0"
] | 1
|
2021-08-05T05:41:50.000Z
|
2021-08-05T05:41:50.000Z
|
StructVBERT/tasks/vqa.py
|
onlyrico/AliceMind
|
a6a070b1610e4c4bfe84ee6c4195b2bc4f725ded
|
[
"Apache-2.0"
] | null | null | null |
StructVBERT/tasks/vqa.py
|
onlyrico/AliceMind
|
a6a070b1610e4c4bfe84ee6c4195b2bc4f725ded
|
[
"Apache-2.0"
] | 1
|
2021-07-10T09:50:47.000Z
|
2021-07-10T09:50:47.000Z
|
# coding=utf-8
# Copyleft 2019 project LXRT.
import os
import collections
import torch
import torch.nn as nn
import logging
from torch.utils.data.dataloader import DataLoader
from tqdm import tqdm
from param import args
from lxrt.qa_answer_table import load_lxmert_qa
from tasks.vqa_model import VQAModel
from tasks.vqa_data import VQADataset, VQATorchDataset, VQAEvaluator
# Bundle of everything one split needs: raw dataset, DataLoader, scorer.
DataTuple = collections.namedtuple("DataTuple", 'dataset loader evaluator')
# Module-wide logging: timestamped INFO records.
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
                    datefmt='%m/%d/%Y %H:%M:%S',
                    level=logging.INFO)
logger = logging.getLogger(__name__)
def get_data_tuple(splits: str, bs: int, shuffle=False, drop_last=False) -> DataTuple:
    """Build the (dataset, loader, evaluator) triple for the given VQA splits.

    `bs` is the batch size; `shuffle`/`drop_last` are forwarded to the
    DataLoader, which always pins memory and uses args.num_workers workers.
    """
    dataset = VQADataset(splits)
    torch_dataset = VQATorchDataset(dataset)
    scorer = VQAEvaluator(dataset)
    loader = DataLoader(
        torch_dataset,
        batch_size=bs,
        shuffle=shuffle,
        num_workers=args.num_workers,
        drop_last=drop_last,
        pin_memory=True,
    )
    return DataTuple(dataset=dataset, loader=loader, evaluator=scorer)
class WarmupOptimizer(object):
    """Wrap a base optimizer with a step-count warmup schedule.

    For the first three "epochs" (each `_data_size / _batch_size` steps)
    the learning rate is held at 1/4, 2/4 and 3/4 of the base rate;
    afterwards the full base rate is used.
    """

    def __init__(self, _lr_base, optimizer, _data_size, _batch_size):
        # Attribute names are part of the interface:
        # adjust_learning_rate() mutates `_lr_base` from outside.
        self.optimizer = optimizer
        self._lr_base = _lr_base
        self._data_size = _data_size
        self._batch_size = _batch_size
        self._step = 0
        self._rate = 0

    def step(self):
        """Advance the step counter, refresh every param group's LR, then
        delegate to the wrapped optimizer."""
        self._step += 1
        current = self.rate()
        for group in self.optimizer.param_groups:
            group['lr'] = current
        self._rate = current
        self.optimizer.step()

    def zero_grad(self):
        """Delegate gradient clearing to the wrapped optimizer."""
        self.optimizer.zero_grad()

    def rate(self, step=None):
        """Return the warmup LR for ``step`` (defaults to the counter)."""
        step = self._step if step is None else step
        per_epoch = self._data_size / self._batch_size
        if step <= int(per_epoch * 1):
            return self._lr_base * 1 / 4.
        if step <= int(per_epoch * 2):
            return self._lr_base * 2 / 4.
        if step <= int(per_epoch * 3):
            return self._lr_base * 3 / 4.
        return self._lr_base
def adjust_learning_rate(optimizer, decay_rate):
    """Scale the warmup optimizer's base learning rate by ``decay_rate``."""
    optimizer._lr_base = optimizer._lr_base * decay_rate
class VQA:
    """End-to-end VQA task wrapper: builds data tuples, model and
    optimizer, and provides train / predict / evaluate / checkpoint
    helpers. All configuration comes from the module-level ``args``."""

    def __init__(self):
        # Datasets
        self.train_tuple = get_data_tuple(
            args.train, bs=args.batch_size, shuffle=True, drop_last=True
        )
        if args.valid != "":
            self.valid_tuple = get_data_tuple(
                args.valid, bs=256,  # for large model
                shuffle=False, drop_last=False
            )
        else:
            self.valid_tuple = None
        # Model
        self.model = VQAModel(self.train_tuple.dataset.num_answers)
        # Epochs at which the 'adam' schedule decays the LR, and its factor.
        self._lr_decay_epoch_list = [8, 10]
        self._lr_decay_rate = 0.2
        # Load pre-trained weights
        if args.load_lxmert is not None:
            self.model.lxrt_encoder.load(args.load_lxmert)
        if args.load_lxmert_qa is not None:
            load_lxmert_qa(args.load_lxmert_qa, self.model,
                           label2ans=self.train_tuple.dataset.label2ans)
        if args.fix_language_bert:
            assert args.patial_load
            state_dict = torch.load(args.patial_load)
            # Normalise checkpoint keys to the 'bert.' namespace
            # (old checkpoints use gamma/beta for weight/bias).
            for k in state_dict.copy():
                if not k.startswith('bert.'):
                    state_dict['bert.' + k.replace('gamma', 'weight').replace('beta', 'bias')] = state_dict.pop(k)
            # fix bert parameters
            for name, param in self.model.lxrt_encoder.model.named_parameters():
                # if 'pooler' in name: # pooler not fixed
                #     continue
                if name in state_dict:
                    logger.info('fix param for: {}'.format(name))
                    param.requires_grad = False
        # GPU options
        self.model = self.model.cuda()
        # Loss and Optimizer
        self.bce_loss = nn.BCEWithLogitsLoss()
        if 'bert' in args.optim:
            # BertAdam handles warmup internally over t_total iterations.
            batch_per_epoch = len(self.train_tuple.loader)
            t_total = int(batch_per_epoch * args.epochs)
            logger.info("BertAdam Total Iters: %d" % t_total)
            from lxrt.optimization import BertAdam
            self.optim = BertAdam(list(self.model.parameters()),
                                  lr=args.lr,
                                  warmup=0.1,
                                  t_total=t_total)
        elif 'adam' in args.optim:
            # Wrap plain Adam with the local WarmupOptimizer schedule.
            batch_per_epoch = len(self.train_tuple.loader)
            optim = args.optimizer(filter(lambda p: p.requires_grad, self.model.parameters()), lr=0, betas=(0.9, 0.98), eps=1e-9)
            self.optim = WarmupOptimizer(args.lr, optim, batch_per_epoch * args.batch_size, args.batch_size)
        else:
            self.optim = args.optimizer(self.model.parameters(), args.lr)
        if args.amp_type is not None:
            # Mixed-precision via NVIDIA apex, if installed.
            try:
                from apex import amp
            except ImportError:
                raise ImportError("Please install apex from https://www.github.com/nvidia/apex to run this example.")
            self.model, self.optim = amp.initialize(self.model, self.optim, opt_level=args.amp_type)
        if args.multiGPU:
            self.model.lxrt_encoder.multi_gpu()
        # Output Directory
        self.output = args.output
        os.makedirs(self.output, exist_ok=True)

    def train(self, train_tuple, eval_tuple):
        """Train for args.epochs epochs; checkpoint BEST on validation
        improvement and LAST at the end, appending scores to log.log."""
        dset, loader, evaluator = train_tuple
        iter_wrapper = (lambda x: tqdm(x, total=len(loader))) if args.tqdm else (lambda x: x)
        best_valid = 0.
        for epoch in range(args.epochs):
            quesid2ans = {}
            # Step the LR decay schedule for the plain-adam optimizer.
            if 'adam' in args.optim and epoch in self._lr_decay_epoch_list:
                adjust_learning_rate(self.optim, self._lr_decay_rate)
            for i, (ques_id, feats, boxes, sent, target) in iter_wrapper(enumerate(loader)):
                self.model.train()
                self.optim.zero_grad()
                feats, boxes, target = feats.cuda(), boxes.cuda(), target.cuda()
                logit = self.model(feats, boxes, sent)
                assert logit.dim() == target.dim() == 2
                loss = self.bce_loss(logit, target)
                # Scale the mean BCE back up by the number of answers.
                loss = loss * logit.size(1)
                if args.multiGPU:
                    loss = loss.mean()  # mean() to average on multi-gpu.
                if args.amp_type is not None:
                    from apex import amp
                    with amp.scale_loss(loss, self.optim) as scaled_loss:
                        scaled_loss.backward()
                else:
                    loss.backward()
                nn.utils.clip_grad_norm_(self.model.parameters(), args.clip_norm)
                self.optim.step()
                # Record the argmax answer for the train-accuracy report.
                score, label = logit.max(1)
                for qid, l in zip(ques_id, label.cpu().numpy()):
                    ans = dset.label2ans[l]
                    quesid2ans[qid.item()] = ans
            log_str = "\nEpoch %d: Train %0.2f\n" % (epoch, evaluator.evaluate(quesid2ans) * 100.)
            if self.valid_tuple is not None:  # Do Validation
                valid_score = self.evaluate(eval_tuple)
                if valid_score > best_valid:
                    best_valid = valid_score
                    self.save("BEST")
                log_str += "Epoch %d: Valid %0.2f\n" % (epoch, valid_score * 100.) + \
                           "Epoch %d: Best %0.2f\n" % (epoch, best_valid * 100.)
            logger.info(log_str)
            with open(self.output + "/log.log", 'a') as f:
                f.write(log_str)
                f.flush()
        self.save("LAST")

    def predict(self, eval_tuple: DataTuple, dump=None):
        """
        Predict the answers to questions in a data split.

        :param eval_tuple: The data tuple to be evaluated.
        :param dump: The path of saved file to dump results.
        :return: A dict of question_id to answer.
        """
        self.model.eval()
        dset, loader, evaluator = eval_tuple
        quesid2ans = {}
        for i, datum_tuple in enumerate(loader):
            ques_id, feats, boxes, sent = datum_tuple[:4]  # Avoid seeing ground truth
            with torch.no_grad():
                feats, boxes = feats.cuda(), boxes.cuda()
                logit = self.model(feats, boxes, sent)
                if args.with_score:
                    # Report softmax confidence alongside the answer.
                    logit = nn.Softmax(dim=1)(logit)
                score, label = logit.max(1)
                if args.with_score:
                    for qid, l, s in zip(ques_id, label.cpu().numpy(), score.cpu().numpy()):
                        ans = dset.label2ans[l]
                        quesid2ans[qid.item()] = (ans, str(s))
                else:
                    for qid, l in zip(ques_id, label.cpu().numpy()):
                        ans = dset.label2ans[l]
                        quesid2ans[qid.item()] = ans
        if dump is not None:
            evaluator.dump_result(quesid2ans, dump)
        return quesid2ans

    def evaluate(self, eval_tuple: DataTuple, dump=None):
        """Evaluate all data in data_tuple."""
        quesid2ans = self.predict(eval_tuple, dump)
        return eval_tuple.evaluator.evaluate(quesid2ans)

    @staticmethod
    def oracle_score(data_tuple):
        """Upper-bound accuracy: score obtained by always answering with
        the highest-weighted ground-truth label."""
        dset, loader, evaluator = data_tuple
        quesid2ans = {}
        for i, (ques_id, feats, boxes, sent, target) in enumerate(loader):
            _, label = target.max(1)
            for qid, l in zip(ques_id, label.cpu().numpy()):
                ans = dset.label2ans[l]
                quesid2ans[qid.item()] = ans
        return evaluator.evaluate(quesid2ans)

    def save(self, name):
        """Serialize model weights to <output>/<name>.pth."""
        torch.save(self.model.state_dict(),
                   os.path.join(self.output, "%s.pth" % name))

    def load(self, path):
        """Load model weights from <path>.pth (path given without extension)."""
        logger.info("Load model from %s" % path)
        state_dict = torch.load("%s.pth" % path)
        self.model.load_state_dict(state_dict)
if __name__ == "__main__":
    # Build the task wrapper (datasets, model, optimizer).
    vqa = VQA()
    # Optionally load fine-tuned VQA model weights.
    if args.load is not None:
        vqa.load(args.load)
    # Test or Train
    if args.test is not None:
        args.fast = args.tiny = False  # Always loading all data in test
        if 'test' in args.test:
            vqa.predict(
                get_data_tuple(args.test, bs=950,
                               shuffle=False, drop_last=False),
                dump=os.path.join(args.output, 'test_predict.json')
            )
        elif 'val' in args.test:
            # Since part of validation data are used in pre-training/fine-tuning,
            # only validate on the minival set.
            result = vqa.evaluate(
                get_data_tuple('minival', bs=950,
                               shuffle=False, drop_last=False),
                dump=os.path.join(args.output, 'minival_predict.json')
            )
            logger.info(result)
        else:
            assert False, "No such test option for %s" % args.test
    else:
        # Training path: report split info and the validation oracle bound.
        logger.info('Splits in Train data: {}'.format(vqa.train_tuple.dataset.splits))
        if vqa.valid_tuple is not None:
            logger.info('Splits in Valid data: {}'.format(vqa.valid_tuple.dataset.splits))
            logger.info("Valid Oracle: %0.2f" % (vqa.oracle_score(vqa.valid_tuple) * 100))
        else:
            logger.info("DO NOT USE VALIDATION")
        vqa.train(vqa.train_tuple, vqa.valid_tuple)
| 38.394649
| 129
| 0.567334
|
import os
import collections
import torch
import torch.nn as nn
import logging
from torch.utils.data.dataloader import DataLoader
from tqdm import tqdm
from param import args
from lxrt.qa_answer_table import load_lxmert_qa
from tasks.vqa_model import VQAModel
from tasks.vqa_data import VQADataset, VQATorchDataset, VQAEvaluator
DataTuple = collections.namedtuple("DataTuple", 'dataset loader evaluator')
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt='%m/%d/%Y %H:%M:%S',
level=logging.INFO)
logger = logging.getLogger(__name__)
def get_data_tuple(splits: str, bs: int, shuffle=False, drop_last=False) -> DataTuple:
    """Build the (dataset, loader, evaluator) triple for the given VQA splits."""
    dataset = VQADataset(splits)
    loader = DataLoader(
        VQATorchDataset(dataset),
        batch_size=bs,
        shuffle=shuffle,
        num_workers=args.num_workers,
        drop_last=drop_last,
        pin_memory=True,
    )
    return DataTuple(dataset=dataset, loader=loader, evaluator=VQAEvaluator(dataset))
class WarmupOptimizer(object):
    """Optimizer wrapper that linearly warms the learning rate up to its
    base value: 1/4, 2/4, 3/4 of the base lr during the first three
    epochs' worth of steps, and the full base lr afterwards.
    """

    def __init__(self, _lr_base, optimizer, _data_size, _batch_size):
        self.optimizer = optimizer
        self._step = 0
        self._lr_base = _lr_base
        self._rate = 0
        self._data_size = _data_size
        self._batch_size = _batch_size

    def step(self):
        """Advance one step: refresh the lr on every param group, then step."""
        self._step += 1
        new_rate = self.rate()
        for group in self.optimizer.param_groups:
            group['lr'] = new_rate
        self._rate = new_rate
        self.optimizer.step()

    def zero_grad(self):
        """Clear gradients on the wrapped optimizer."""
        self.optimizer.zero_grad()

    def rate(self, step=None):
        """Return the warmup-scaled lr for ``step`` (current step if None)."""
        step = self._step if step is None else step
        steps_per_epoch = self._data_size / self._batch_size
        # Ramp through 25% / 50% / 75% of the base lr over the first
        # three "epochs" of steps; thresholds match int(ratio * k).
        for multiple, frac in ((1, 1 / 4.), (2, 2 / 4.), (3, 3 / 4.)):
            if step <= int(steps_per_epoch * multiple):
                return self._lr_base * frac
        return self._lr_base
def adjust_learning_rate(optimizer, decay_rate):
    """Decay the warmup optimizer's base learning rate in place."""
    optimizer._lr_base = optimizer._lr_base * decay_rate
class VQA:
    """VQA fine-tuning task: builds data tuples, the LXMERT-based model and
    optimizer, and provides train / predict / evaluate / save / load.
    """

    def __init__(self):
        # Training data always; validation data only when a split is given.
        self.train_tuple = get_data_tuple(
            args.train, bs=args.batch_size, shuffle=True, drop_last=True
        )
        if args.valid != "":
            self.valid_tuple = get_data_tuple(
                args.valid, bs=256,
                shuffle=False, drop_last=False
            )
        else:
            self.valid_tuple = None
        # Output head is sized to the number of answer classes in train.
        self.model = VQAModel(self.train_tuple.dataset.num_answers)
        # Step-decay schedule; only used by the plain-adam branch below.
        self._lr_decay_epoch_list = [8, 10]
        self._lr_decay_rate = 0.2
        # Optionally warm-start the encoder (and QA head) from pre-training.
        if args.load_lxmert is not None:
            self.model.lxrt_encoder.load(args.load_lxmert)
        if args.load_lxmert_qa is not None:
            load_lxmert_qa(args.load_lxmert_qa, self.model,
                           label2ans=self.train_tuple.dataset.label2ans)
        if args.fix_language_bert:
            # Freeze every encoder parameter that appears in the partially
            # loaded BERT checkpoint (keys are re-prefixed with 'bert.' and
            # gamma/beta renamed to weight/bias to match parameter names).
            assert args.patial_load
            state_dict = torch.load(args.patial_load)
            for k in state_dict.copy():
                if not k.startswith('bert.'):
                    state_dict['bert.' + k.replace('gamma', 'weight').replace('beta', 'bias')] = state_dict.pop(k)
            for name, param in self.model.lxrt_encoder.model.named_parameters():
                if name in state_dict:
                    logger.info('fix param for: {}'.format(name))
                    param.requires_grad = False
        self.model = self.model.cuda()
        # Multi-label answer targets -> binary cross-entropy with logits.
        self.bce_loss = nn.BCEWithLogitsLoss()
        # Optimizer selection: BertAdam with warmup, warmup-wrapped Adam,
        # or whatever optimizer class args provides.
        if 'bert' in args.optim:
            batch_per_epoch = len(self.train_tuple.loader)
            t_total = int(batch_per_epoch * args.epochs)
            logger.info("BertAdam Total Iters: %d" % t_total)
            from lxrt.optimization import BertAdam
            self.optim = BertAdam(list(self.model.parameters()),
                                  lr=args.lr,
                                  warmup=0.1,
                                  t_total=t_total)
        elif 'adam' in args.optim:
            batch_per_epoch = len(self.train_tuple.loader)
            # lr=0 here: WarmupOptimizer overwrites the lr on every step.
            optim = args.optimizer(filter(lambda p: p.requires_grad, self.model.parameters()), lr=0, betas=(0.9, 0.98), eps=1e-9)
            self.optim = WarmupOptimizer(args.lr, optim, batch_per_epoch * args.batch_size, args.batch_size)
        else:
            self.optim = args.optimizer(self.model.parameters(), args.lr)
        # Optional mixed-precision via NVIDIA apex.
        if args.amp_type is not None:
            try:
                from apex import amp
            except ImportError:
                raise ImportError("Please install apex from https://www.github.com/nvidia/apex to run this example.")
            self.model, self.optim = amp.initialize(self.model, self.optim, opt_level=args.amp_type)
        if args.multiGPU:
            self.model.lxrt_encoder.multi_gpu()
        self.output = args.output
        os.makedirs(self.output, exist_ok=True)

    def train(self, train_tuple, eval_tuple):
        """Train for args.epochs, validating (and checkpointing BEST) each epoch."""
        dset, loader, evaluator = train_tuple
        iter_wrapper = (lambda x: tqdm(x, total=len(loader))) if args.tqdm else (lambda x: x)
        best_valid = 0.
        for epoch in range(args.epochs):
            quesid2ans = {}
            if 'adam' in args.optim and epoch in self._lr_decay_epoch_list:
                adjust_learning_rate(self.optim, self._lr_decay_rate)
            for i, (ques_id, feats, boxes, sent, target) in iter_wrapper(enumerate(loader)):
                self.model.train()
                self.optim.zero_grad()
                feats, boxes, target = feats.cuda(), boxes.cuda(), target.cuda()
                logit = self.model(feats, boxes, sent)
                assert logit.dim() == target.dim() == 2
                loss = self.bce_loss(logit, target)
                # BCE averages over classes; rescale to a per-example sum.
                loss = loss * logit.size(1)
                if args.multiGPU:
                    loss = loss.mean()
                if args.amp_type is not None:
                    from apex import amp
                    with amp.scale_loss(loss, self.optim) as scaled_loss:
                        scaled_loss.backward()
                else:
                    loss.backward()
                nn.utils.clip_grad_norm_(self.model.parameters(), args.clip_norm)
                self.optim.step()
                # Track train accuracy with the argmax answer per question.
                score, label = logit.max(1)
                for qid, l in zip(ques_id, label.cpu().numpy()):
                    ans = dset.label2ans[l]
                    quesid2ans[qid.item()] = ans
            log_str = "\nEpoch %d: Train %0.2f\n" % (epoch, evaluator.evaluate(quesid2ans) * 100.)
            if self.valid_tuple is not None:
                valid_score = self.evaluate(eval_tuple)
                if valid_score > best_valid:
                    best_valid = valid_score
                    self.save("BEST")
                log_str += "Epoch %d: Valid %0.2f\n" % (epoch, valid_score * 100.) + \
                           "Epoch %d: Best %0.2f\n" % (epoch, best_valid * 100.)
            logger.info(log_str)
            with open(self.output + "/log.log", 'a') as f:
                f.write(log_str)
                f.flush()
        self.save("LAST")

    def predict(self, eval_tuple: DataTuple, dump=None):
        """Predict one answer per question; optionally dump results to *dump*.

        Returns {question_id: answer} (or {qid: (answer, score-string)} when
        args.with_score is set).
        """
        self.model.eval()
        dset, loader, evaluator = eval_tuple
        quesid2ans = {}
        for i, datum_tuple in enumerate(loader):
            # Avoid seeing answers during test: only the first 4 fields.
            ques_id, feats, boxes, sent = datum_tuple[:4]
            with torch.no_grad():
                feats, boxes = feats.cuda(), boxes.cuda()
                logit = self.model(feats, boxes, sent)
                if args.with_score:
                    logit = nn.Softmax(dim=1)(logit)
                score, label = logit.max(1)
                if args.with_score:
                    for qid, l, s in zip(ques_id, label.cpu().numpy(), score.cpu().numpy()):
                        ans = dset.label2ans[l]
                        quesid2ans[qid.item()] = (ans, str(s))
                else:
                    for qid, l in zip(ques_id, label.cpu().numpy()):
                        ans = dset.label2ans[l]
                        quesid2ans[qid.item()] = ans
        if dump is not None:
            evaluator.dump_result(quesid2ans, dump)
        return quesid2ans

    def evaluate(self, eval_tuple: DataTuple, dump=None):
        """Predict on *eval_tuple* and return the evaluator's accuracy."""
        quesid2ans = self.predict(eval_tuple, dump)
        return eval_tuple.evaluator.evaluate(quesid2ans)

    @staticmethod
    def oracle_score(data_tuple):
        """Upper-bound accuracy: score obtained by always answering with the
        ground-truth label that has the highest target weight."""
        dset, loader, evaluator = data_tuple
        quesid2ans = {}
        for i, (ques_id, feats, boxes, sent, target) in enumerate(loader):
            _, label = target.max(1)
            for qid, l in zip(ques_id, label.cpu().numpy()):
                ans = dset.label2ans[l]
                quesid2ans[qid.item()] = ans
        return evaluator.evaluate(quesid2ans)

    def save(self, name):
        """Save model weights to <output>/<name>.pth."""
        torch.save(self.model.state_dict(),
                   os.path.join(self.output, "%s.pth" % name))

    def load(self, path):
        """Load model weights from <path>.pth (extension appended here)."""
        logger.info("Load model from %s" % path)
        state_dict = torch.load("%s.pth" % path)
        self.model.load_state_dict(state_dict)
if __name__ == "__main__":
    # Build the task wrapper (datasets, model, optimizer).
    vqa = VQA()
    # Optionally resume from fine-tuned VQA weights.
    if args.load is not None:
        vqa.load(args.load)
    # Test or Train.
    if args.test is not None:
        # Always load all data when testing.
        args.fast = args.tiny = False
        if 'test' in args.test:
            # No labels on the test split: predict and dump only.
            vqa.predict(
                get_data_tuple(args.test, bs=950,
                               shuffle=False, drop_last=False),
                dump=os.path.join(args.output, 'test_predict.json')
            )
        elif 'val' in args.test:
            # Part of validation is used in pre-training/fine-tuning, so
            # only the held-out minival set is scored here.
            result = vqa.evaluate(
                get_data_tuple('minival', bs=950,
                               shuffle=False, drop_last=False),
                dump=os.path.join(args.output, 'minival_predict.json')
            )
            logger.info(result)
        else:
            assert False, "No such test option for %s" % args.test
    else:
        logger.info('Splits in Train data: {}'.format(vqa.train_tuple.dataset.splits))
        if vqa.valid_tuple is not None:
            logger.info('Splits in Valid data: {}'.format(vqa.valid_tuple.dataset.splits))
            logger.info("Valid Oracle: %0.2f" % (vqa.oracle_score(vqa.valid_tuple) * 100))
        else:
            logger.info("DO NOT USE VALIDATION")
        vqa.train(vqa.train_tuple, vqa.valid_tuple)
| true
| true
|
f71aa988a5098b28bbada6d39c5173f2c7f1034c
| 1,683
|
py
|
Python
|
python/ctci/1_arrays_strings/6_Compression.py
|
othonreyes/code_problems
|
6e65b26120b0b9d6e5ac7342a4d964696b7bd5bf
|
[
"MIT"
] | null | null | null |
python/ctci/1_arrays_strings/6_Compression.py
|
othonreyes/code_problems
|
6e65b26120b0b9d6e5ac7342a4d964696b7bd5bf
|
[
"MIT"
] | null | null | null |
python/ctci/1_arrays_strings/6_Compression.py
|
othonreyes/code_problems
|
6e65b26120b0b9d6e5ac7342a4d964696b7bd5bf
|
[
"MIT"
] | null | null | null |
# Create a function that implements a basic compression algorithm by counting the chars
# that are present in a string; if the result string is longer than the input,
# then return the original input.
#
# Examples:
# aaabcccccaaa: a3b1c5a3
# abcdef: abcdef
# aaaaaaaaaaba: a10b1a1
### Note: Don't use extra space
import unittest
from collections import Counter
def compress2(s1):
    """Run-length compress *s1* (e.g. 'aaabcccccaaa' -> 'a3b1c5a3').

    Walks the string once, flushing a "<char><count>" pair every time the
    current character differs from the previous one.  Returns whichever of
    the original and the compressed form is shorter (ties favour the
    original, matching ``min``'s stable behaviour).

    Fix: the original raised IndexError on the empty string because it read
    ``s1[-1]`` unconditionally after the loop.
    """
    if not s1:  # nothing to compress; previously crashed here
        return s1
    parts = []
    count = 0
    for i in range(len(s1)):
        # On a character change (never at i == 0), flush the finished run.
        if i != 0 and s1[i] != s1[i-1]:
            parts.append(s1[i-1] + str(count))
            count = 0
        count += 1
    parts.append(s1[-1] + str(count))  # flush the final run
    return min(s1, ''.join(parts), key=len)
def compress(s1):
    """Run-length encode *s1*; return *s1* itself when the encoding is longer."""
    pieces = []
    run_char, run_len = '', 0
    for ch in s1:
        if ch == run_char:
            run_len += 1
        else:
            if run_char != '':  # skip the sentinel before the first character
                pieces.append(run_char + str(run_len))
            run_char, run_len = ch, 1
    # Flush the trailing run (for '' this appends '0', which loses to '').
    pieces.append(run_char + str(run_len))
    encoded = ''.join(pieces)
    return s1 if len(encoded) > len(s1) else encoded
class Test(unittest.TestCase):
    """Checks compress() against the CtCI sample strings."""

    # (source, expected-encoding) pairs.
    valid = (
        ('aaabcccccaaa', 'a3b1c5a3'),
        ('abcdef', 'abcdef'),
        ('aaaaaaaaaaba', 'a10b1a1')
    )

    def test(self):
        for source, expected in self.valid:
            print(source, ' vs ', expected)
            self.assertEqual(compress(source), expected)
if __name__ == "__main__":
    # Run the Test case above via the standard unittest CLI runner.
    unittest.main()
| 25.892308
| 87
| 0.618538
|
ress2(s1):
newStr = []
count = 0
for i in range(len(s1)):
# Explanation
# the i != 0 is used to deal with the first character.
# we could have done but requirs extra code:
# char = s1[0] # requires to check if the s1 is not empty
# - or -
# char = '' # requires to check if char != ''
if i != 0 and s1[i] != s1[i-1]:
newStr.append(s1[i-1] + str(count))
count = 0
count += 1
newStr.append(s1[-1] + str(count)) # we do this to deal with the last characters
return min(s1, ''.join(newStr), key=len)
def compress(s1):
newStr = ''
char = ''
count = 0
for i in range(len(s1)):
if char != s1[i]:
if char != '': # we do this to deal with the initial case
newStr += char + str(count)
char = s1[i]
count = 1
else:
count += 1
newStr += char + str(count) # we do this to deal with the last characters
if len(newStr) > len(s1):
return s1
return newStr
class Test(unittest.TestCase):
valid = (
('aaabcccccaaa', 'a3b1c5a3'),
('abcdef', 'abcdef'),
('aaaaaaaaaaba', 'a10b1a1')
)
def test(self):
for [input, expected] in self.valid:
print(input,' vs ',expected)
result = compress(input)
self.assertEqual(result, expected)
if __name__ == "__main__":
unittest.main()
| true
| true
|
f71aaa4225770dc4b16e09cec972c3086fd80ff7
| 291
|
py
|
Python
|
subsets/subsets.py
|
YasinEhsan/interview-prep
|
ed9f95af5a37b05304e45b41511068b6f72533e7
|
[
"Apache-2.0"
] | 11
|
2019-05-02T22:27:01.000Z
|
2020-10-30T08:43:02.000Z
|
subsets/subsets.py
|
YasinEhsan/interview-prep
|
ed9f95af5a37b05304e45b41511068b6f72533e7
|
[
"Apache-2.0"
] | null | null | null |
subsets/subsets.py
|
YasinEhsan/interview-prep
|
ed9f95af5a37b05304e45b41511068b6f72533e7
|
[
"Apache-2.0"
] | 3
|
2019-11-01T01:35:01.000Z
|
2020-01-11T18:00:39.000Z
|
def find_subsets(nums):
    """Return every subset of *nums* (the power set), starting from [].

    Each element doubles the collection: every existing subset is copied
    and extended with the new element, preserving insertion order.
    """
    power_set = [[]]
    for num in nums:
        # Snapshot the current size, since we append while reading.
        for idx in range(len(power_set)):
            power_set.append(power_set[idx] + [num])
    return power_set
| 22.384615
| 32
| 0.639175
|
def find_subsets(nums):
subsets = []
subsets.append([])
for i in range(len(nums)):
storeLen = len(subsets)
for j in range(0,storeLen):
currSet = list(subsets[j])
currSet.append(nums[i])
subsets.append(currSet)
return subsets
| true
| true
|
f71aaa5221fcf2fa717ae33f34cf3b565947d0e8
| 6,099
|
py
|
Python
|
lib/models/spin.py
|
ziniuwan/maed
|
9e1f1c37eba81da86c8d9c62dc9be41a01abff5b
|
[
"MIT"
] | 145
|
2021-08-15T13:22:08.000Z
|
2022-03-29T13:37:19.000Z
|
lib/models/spin.py
|
vkirilenko/maed
|
9e1f1c37eba81da86c8d9c62dc9be41a01abff5b
|
[
"MIT"
] | 9
|
2021-09-17T14:58:15.000Z
|
2022-03-29T07:43:08.000Z
|
lib/models/spin.py
|
vkirilenko/maed
|
9e1f1c37eba81da86c8d9c62dc9be41a01abff5b
|
[
"MIT"
] | 17
|
2021-08-15T13:22:10.000Z
|
2022-01-17T02:34:14.000Z
|
"""
This script is brought from https://github.com/nkolot/SPIN
Adhere to their licence to use this script
"""
import math
import torch
import numpy as np
import os.path as osp
import torch.nn as nn
from lib.core.config import DATA_DIR
from lib.utils.geometry import rotation_matrix_to_angle_axis, rot6d_to_rotmat
from lib.models.smpl import SMPL, SMPL_MODEL_DIR, H36M_TO_J17, SMPL_MEAN_PARAMS
class Regressor(nn.Module):
    """SPIN-style iterative SMPL parameter regressor.

    From an image feature vector it regresses SMPL pose (24 joints in 6D
    rotation form), shape (10 betas) and weak-perspective camera (3 values),
    refining the estimates over a few feedback iterations starting from the
    SMPL mean parameters.
    """

    def __init__(self, smpl_mean_params=SMPL_MEAN_PARAMS, feat_dim=2048, hidden_dim=1024, **kwargs):
        super(Regressor, self).__init__()
        # SMPL body model used only for the forward pass (no learnable
        # parameter tensors are created here).
        self.smpl = SMPL(
            SMPL_MODEL_DIR,
            create_transl=False,
            create_global_orient=False,
            create_body_pose=False,
            create_betas=False,
        )
        npose = 24 * 6  # 24 joints x 6D continuous rotation representation
        nshape = 10
        # Two FC layers over [features; pose; shape; cam], then three heads.
        self.fc1 = nn.Linear(feat_dim + npose + nshape + 3, hidden_dim)
        self.drop1 = nn.Dropout()
        self.fc2 = nn.Linear(hidden_dim, hidden_dim)
        self.drop2 = nn.Dropout()
        self.decpose = nn.Linear(hidden_dim, npose)
        self.decshape = nn.Linear(hidden_dim, nshape)
        self.deccam = nn.Linear(hidden_dim, 3)
        # Small gain keeps the initial residual updates near zero.
        nn.init.xavier_uniform_(self.decpose.weight, gain=0.01)
        nn.init.xavier_uniform_(self.decshape.weight, gain=0.01)
        nn.init.xavier_uniform_(self.deccam.weight, gain=0.01)
        # Mean SMPL parameters serve as the starting point of the loop.
        mean_params = np.load(smpl_mean_params)
        init_pose = torch.from_numpy(mean_params['pose'][:]).unsqueeze(0)
        init_shape = torch.from_numpy(mean_params['shape'][:].astype('float32')).unsqueeze(0)
        init_cam = torch.from_numpy(mean_params['cam']).unsqueeze(0)
        self.register_buffer('init_pose', init_pose)
        self.register_buffer('init_shape', init_shape)
        self.register_buffer('init_cam', init_cam)

    def iterative_regress(self, x, init_pose=None, init_shape=None, init_cam=None, n_iter=3):
        """Refine (pose, shape, cam) for ``n_iter`` residual iterations.

        x: (NT, feat_dim) image features; missing inits fall back to the
        registered SMPL mean buffers expanded over the batch.
        """
        nt = x.shape[0]
        if init_pose is None:
            init_pose = self.init_pose.expand(nt, -1)
        if init_shape is None:
            init_shape = self.init_shape.expand(nt, -1)
        if init_cam is None:
            init_cam = self.init_cam.expand(nt, -1)
        pred_pose = init_pose
        pred_shape = init_shape
        pred_cam = init_cam
        for i in range(n_iter):
            # Feed back the current estimates together with the features.
            xc = torch.cat([x, pred_pose, pred_shape, pred_cam], 1)
            xc = self.fc1(xc)
            xc = self.drop1(xc)
            xc = self.fc2(xc)
            xc = self.drop2(xc)
            # Heads predict residuals added onto the current estimates.
            pred_pose = self.decpose(xc) + pred_pose
            pred_shape = self.decshape(xc) + pred_shape
            pred_cam = self.deccam(xc) + pred_cam
        return pred_pose, pred_shape, pred_cam

    def forward(self, x, seqlen, J_regressor=None,
                init_pose=None, init_shape=None, init_cam=None, n_iter=3, **kwargs):
        """Regress SMPL parameters and decode them into the output dict."""
        nt = x.shape[0]
        N = nt//seqlen  # batch size before flattening the time dimension
        # NOTE(review): the n_iter argument is shadowed by the literal 3 below.
        pred_pose, pred_shape, pred_cam = self.iterative_regress(x, init_pose, init_shape, init_cam, n_iter=3)
        output_regress = self.get_output(pred_pose, pred_shape, pred_cam, J_regressor)
        return output_regress

    def get_output(self, pred_pose, pred_shape, pred_cam, J_regressor):
        """Decode (pose, shape, cam) into vertices, joints and 2D keypoints."""
        output = {}
        nt = pred_pose.shape[0]
        # 6D rotations -> (nt, 24, 3, 3) rotation matrices.
        pred_rotmat = rot6d_to_rotmat(pred_pose).reshape(nt, -1, 3, 3)
        pred_output = self.smpl(
            betas=pred_shape,
            body_pose=pred_rotmat[:, 1:],
            global_orient=pred_rotmat[:, 0].unsqueeze(1),
            pose2rot=False
        )
        pred_vertices = pred_output.vertices[:nt]
        pred_joints = pred_output.joints[:nt]
        # Optional external joint regressor (e.g. H3.6M evaluation joints).
        if J_regressor is not None:
            J_regressor_batch = J_regressor[None, :].expand(pred_vertices.shape[0], -1, -1).to(pred_vertices.device)
            pred_joints = torch.matmul(J_regressor_batch, pred_vertices)
        pred_keypoints_2d = projection(pred_joints, pred_cam)
        # Back to axis-angle for the packed theta vector.
        pose = rotation_matrix_to_angle_axis(pred_rotmat.reshape(-1, 3, 3)).reshape(nt, -1)
        output['theta'] = torch.cat([pred_cam, pose, pred_shape], dim=1)
        output['verts'] = pred_vertices
        output['kp_2d'] = pred_keypoints_2d
        output['kp_3d'] = pred_joints
        output['rotmat'] = pred_rotmat
        return output
def projection(pred_joints, pred_camera):
    """Project 3D joints to 2D keypoints normalized to [-1, 1].

    *pred_camera* holds weak-perspective parameters (s, tx, ty); depth is
    recovered as 2*f / (img_res * s) with f=5000 and img_res=224.
    """
    n = pred_joints.shape[0]
    depth = 2 * 5000. / (224. * pred_camera[:, 0] + 1e-9)
    cam_translation = torch.stack(
        [pred_camera[:, 1], pred_camera[:, 2], depth], dim=-1)
    identity_rot = torch.eye(3).unsqueeze(0).expand(n, -1, -1).to(pred_joints.device)
    keypoints_2d = perspective_projection(
        pred_joints,
        rotation=identity_rot,
        translation=cam_translation,
        focal_length=5000.,
        camera_center=torch.zeros(n, 2),
    )
    # Map pixel coordinates into [-1, 1] (half the 224-pixel image size).
    return keypoints_2d / (224. / 2.)
def perspective_projection(points, rotation, translation,
                           focal_length, camera_center):
    """Apply a pinhole-camera perspective projection.

    Input:
        points (bs, N, 3): 3D points
        rotation (bs, 3, 3): Camera rotation
        translation (bs, 3): Camera translation
        focal_length (bs,) or scalar: Focal length
        camera_center (bs, 2): Camera center
    Returns (bs, N, 2) image-plane coordinates.
    """
    bs = points.shape[0]
    # Intrinsic matrix K = [[f, 0, cx], [0, f, cy], [0, 0, 1]].
    intrinsics = torch.zeros([bs, 3, 3], device=points.device)
    intrinsics[:, 0, 0] = focal_length
    intrinsics[:, 1, 1] = focal_length
    intrinsics[:, 2, 2] = 1.
    intrinsics[:, :-1, -1] = camera_center
    # World -> camera frame: rotate, then translate each point.
    cam_points = torch.einsum('bij,bkj->bki', rotation, points)
    cam_points = cam_points + translation.unsqueeze(1)
    # Perspective divide by depth (z).
    normalized = cam_points / cam_points[:, :, -1].unsqueeze(-1)
    # Apply intrinsics and drop the homogeneous coordinate.
    pixels = torch.einsum('bij,bkj->bki', intrinsics, normalized)
    return pixels[:, :, :-1]
| 38.601266
| 132
| 0.620102
|
import math
import torch
import numpy as np
import os.path as osp
import torch.nn as nn
from lib.core.config import DATA_DIR
from lib.utils.geometry import rotation_matrix_to_angle_axis, rot6d_to_rotmat
from lib.models.smpl import SMPL, SMPL_MODEL_DIR, H36M_TO_J17, SMPL_MEAN_PARAMS
class Regressor(nn.Module):
def __init__(self, smpl_mean_params=SMPL_MEAN_PARAMS, feat_dim=2048, hidden_dim=1024, **kwargs):
super(Regressor, self).__init__()
self.smpl = SMPL(
SMPL_MODEL_DIR,
create_transl=False,
create_global_orient=False,
create_body_pose=False,
create_betas=False,
)
npose = 24 * 6
nshape = 10
self.fc1 = nn.Linear(feat_dim + npose + nshape + 3, hidden_dim)
self.drop1 = nn.Dropout()
self.fc2 = nn.Linear(hidden_dim, hidden_dim)
self.drop2 = nn.Dropout()
self.decpose = nn.Linear(hidden_dim, npose)
self.decshape = nn.Linear(hidden_dim, nshape)
self.deccam = nn.Linear(hidden_dim, 3)
nn.init.xavier_uniform_(self.decpose.weight, gain=0.01)
nn.init.xavier_uniform_(self.decshape.weight, gain=0.01)
nn.init.xavier_uniform_(self.deccam.weight, gain=0.01)
mean_params = np.load(smpl_mean_params)
init_pose = torch.from_numpy(mean_params['pose'][:]).unsqueeze(0)
init_shape = torch.from_numpy(mean_params['shape'][:].astype('float32')).unsqueeze(0)
init_cam = torch.from_numpy(mean_params['cam']).unsqueeze(0)
self.register_buffer('init_pose', init_pose)
self.register_buffer('init_shape', init_shape)
self.register_buffer('init_cam', init_cam)
def iterative_regress(self, x, init_pose=None, init_shape=None, init_cam=None, n_iter=3):
nt = x.shape[0]
if init_pose is None:
init_pose = self.init_pose.expand(nt, -1)
if init_shape is None:
init_shape = self.init_shape.expand(nt, -1)
if init_cam is None:
init_cam = self.init_cam.expand(nt, -1)
pred_pose = init_pose
pred_shape = init_shape
pred_cam = init_cam
for i in range(n_iter):
xc = torch.cat([x, pred_pose, pred_shape, pred_cam], 1)
xc = self.fc1(xc)
xc = self.drop1(xc)
xc = self.fc2(xc)
xc = self.drop2(xc)
pred_pose = self.decpose(xc) + pred_pose
pred_shape = self.decshape(xc) + pred_shape
pred_cam = self.deccam(xc) + pred_cam
return pred_pose, pred_shape, pred_cam
def forward(self, x, seqlen, J_regressor=None,
init_pose=None, init_shape=None, init_cam=None, n_iter=3, **kwargs):
nt = x.shape[0]
N = nt//seqlen
pred_pose, pred_shape, pred_cam = self.iterative_regress(x, init_pose, init_shape, init_cam, n_iter=3)
output_regress = self.get_output(pred_pose, pred_shape, pred_cam, J_regressor)
return output_regress
def get_output(self, pred_pose, pred_shape, pred_cam, J_regressor):
output = {}
nt = pred_pose.shape[0]
pred_rotmat = rot6d_to_rotmat(pred_pose).reshape(nt, -1, 3, 3)
pred_output = self.smpl(
betas=pred_shape,
body_pose=pred_rotmat[:, 1:],
global_orient=pred_rotmat[:, 0].unsqueeze(1),
pose2rot=False
)
pred_vertices = pred_output.vertices[:nt]
pred_joints = pred_output.joints[:nt]
if J_regressor is not None:
J_regressor_batch = J_regressor[None, :].expand(pred_vertices.shape[0], -1, -1).to(pred_vertices.device)
pred_joints = torch.matmul(J_regressor_batch, pred_vertices)
pred_keypoints_2d = projection(pred_joints, pred_cam)
pose = rotation_matrix_to_angle_axis(pred_rotmat.reshape(-1, 3, 3)).reshape(nt, -1)
output['theta'] = torch.cat([pred_cam, pose, pred_shape], dim=1)
output['verts'] = pred_vertices
output['kp_2d'] = pred_keypoints_2d
output['kp_3d'] = pred_joints
output['rotmat'] = pred_rotmat
return output
def projection(pred_joints, pred_camera):
pred_cam_t = torch.stack([pred_camera[:, 1],
pred_camera[:, 2],
2 * 5000. / (224. * pred_camera[:, 0] + 1e-9)], dim=-1)
batch_size = pred_joints.shape[0]
camera_center = torch.zeros(batch_size, 2)
pred_keypoints_2d = perspective_projection(pred_joints,
rotation=torch.eye(3).unsqueeze(0).expand(batch_size, -1, -1).to(pred_joints.device),
translation=pred_cam_t,
focal_length=5000.,
camera_center=camera_center)
pred_keypoints_2d = pred_keypoints_2d / (224. / 2.)
return pred_keypoints_2d
def perspective_projection(points, rotation, translation,
focal_length, camera_center):
batch_size = points.shape[0]
K = torch.zeros([batch_size, 3, 3], device=points.device)
K[:,0,0] = focal_length
K[:,1,1] = focal_length
K[:,2,2] = 1.
K[:,:-1, -1] = camera_center
points = torch.einsum('bij,bkj->bki', rotation, points)
points = points + translation.unsqueeze(1)
projected_points = points / points[:,:,-1].unsqueeze(-1)
projected_points = torch.einsum('bij,bkj->bki', K, projected_points)
return projected_points[:, :, :-1]
| true
| true
|
f71aabf71da050ef5d5829467e28176e4164c3ea
| 8,924
|
py
|
Python
|
sk_typing/decomposition.py
|
thomasjpfan/sk_typing
|
e6aacfedbce44d7748cf7c49cd2b949952f2e427
|
[
"MIT"
] | 1
|
2021-02-19T20:57:36.000Z
|
2021-02-19T20:57:36.000Z
|
sk_typing/decomposition.py
|
thomasjpfan/sk_typing
|
e6aacfedbce44d7748cf7c49cd2b949952f2e427
|
[
"MIT"
] | null | null | null |
sk_typing/decomposition.py
|
thomasjpfan/sk_typing
|
e6aacfedbce44d7748cf7c49cd2b949952f2e427
|
[
"MIT"
] | null | null | null |
from typing import Optional
from typing import Union
from collections.abc import Callable
import numpy as np
from .typing import RandomStateType
from .typing import Literal
class DictionaryLearning:
    """Annotation stub for ``sklearn.decomposition.DictionaryLearning``."""

    # Attributes populated by fit().
    components_: np.ndarray
    error_: np.ndarray
    n_iter_: int

    def __init__(
        self,
        n_components: Optional[int] = None,
        alpha: float = 1,
        max_iter: int = 1000,
        tol: float = 1e-08,
        fit_algorithm: Literal["lars", "cd"] = "lars",
        transform_algorithm: Literal[
            "lasso_lars", "lasso_cd", "lars", "omp", "threshold"
        ] = "omp",
        transform_n_nonzero_coefs: Optional[int] = None,
        transform_alpha: Optional[float] = None,
        n_jobs: Optional[int] = None,
        code_init: Optional[np.ndarray] = None,
        dict_init: Optional[np.ndarray] = None,
        verbose: bool = False,
        split_sign: bool = False,
        random_state: RandomStateType = None,
        positive_code: bool = False,
        positive_dict: bool = False,
        transform_max_iter: int = 1000,
    ):
        ...
class FactorAnalysis:
    """Annotation stub for ``sklearn.decomposition.FactorAnalysis``."""

    # Attributes populated by fit().
    components_: np.ndarray
    loglike_: list
    noise_variance_: np.ndarray
    n_iter_: int
    mean_: np.ndarray

    def __init__(
        self,
        n_components: Optional[int] = None,
        tol: float = 0.01,
        copy: bool = True,
        max_iter: int = 1000,
        noise_variance_init: Optional[np.ndarray] = None,
        svd_method: Literal["lapack", "randomized"] = "randomized",
        iterated_power: int = 3,
        random_state: RandomStateType = 0,
    ):
        ...
class FastICA:
    """Annotation stub for ``sklearn.decomposition.FastICA``."""

    # Attributes populated by fit().
    components_: np.ndarray
    mixing_: np.ndarray
    mean_: np.ndarray
    n_iter_: int
    whitening_: np.ndarray

    def __init__(
        self,
        n_components: Optional[int] = None,
        algorithm: Literal["parallel", "deflation"] = "parallel",
        whiten: bool = True,
        fun: Union[Literal["logcosh", "exp", "cube"], Callable] = "logcosh",
        fun_args: Optional[dict] = None,
        max_iter: int = 200,
        tol: float = 0.0001,
        w_init: Optional[np.ndarray] = None,
        random_state: RandomStateType = None,
    ):
        ...
class IncrementalPCA:
    """Annotation stub for ``sklearn.decomposition.IncrementalPCA``."""

    # Attributes populated by fit() / partial_fit().
    components_: np.ndarray
    explained_variance_: np.ndarray
    explained_variance_ratio_: np.ndarray
    singular_values_: np.ndarray
    mean_: np.ndarray
    var_: np.ndarray
    noise_variance_: float
    n_components_: int
    n_samples_seen_: int

    def __init__(
        self,
        n_components: Optional[int] = None,
        whiten: bool = False,
        copy: bool = True,
        batch_size: Optional[int] = None,
    ):
        ...
class KernelPCA:
    """Annotation stub for ``sklearn.decomposition.KernelPCA``.

    Fix: ``n_components`` and ``max_iter`` were annotated ``Optional[None]``
    (i.e. just ``None``), which rejects every real value; per the sklearn
    API they are ``int`` or ``None``.
    """

    # Attributes populated by fit().
    lambdas_: np.ndarray
    alphas_: np.ndarray
    dual_coef_: np.ndarray
    X_transformed_fit_: np.ndarray
    X_fit_: np.ndarray

    def __init__(
        self,
        n_components: Optional[int] = None,
        kernel: Literal[
            "linear", "poly", "rbf", "sigmoid", "cosine", "precomputed"
        ] = "linear",
        gamma: Optional[float] = None,
        degree: int = 3,
        coef0: float = 1,
        kernel_params: Optional[dict] = None,
        alpha: float = 1.0,
        fit_inverse_transform: bool = False,
        eigen_solver: Literal["auto", "dense", "arpack"] = "auto",
        tol: float = 0,
        max_iter: Optional[int] = None,
        remove_zero_eig: bool = False,
        random_state: RandomStateType = None,
        copy_X: bool = True,
        n_jobs: Optional[int] = None,
    ):
        ...
class LatentDirichletAllocation:
    """Annotation stub for ``sklearn.decomposition.LatentDirichletAllocation``."""

    # Attributes populated by fit().
    components_: np.ndarray
    n_batch_iter_: int
    n_iter_: int
    bound_: float
    doc_topic_prior_: float
    topic_word_prior_: float

    def __init__(
        self,
        n_components: int = 10,
        doc_topic_prior: Optional[float] = None,
        topic_word_prior: Optional[float] = None,
        learning_method: Literal["batch", "online"] = "batch",
        learning_decay: float = 0.7,
        learning_offset: float = 10.0,
        max_iter: int = 10,
        batch_size: int = 128,
        evaluate_every: int = -1,
        total_samples: int = 1_000_000,
        perp_tol: float = 0.1,
        mean_change_tol: float = 0.001,
        max_doc_update_iter: int = 100,
        n_jobs: Optional[int] = None,
        verbose: int = 0,
        random_state: RandomStateType = None,
    ):
        ...
class MiniBatchDictionaryLearning:
    """Annotation stub for ``sklearn.decomposition.MiniBatchDictionaryLearning``.

    Fix: ``n_components`` was annotated ``Optional[None]`` (i.e. just
    ``None``), which rejects every real value; per the sklearn API it is
    ``int`` or ``None``.
    """

    # Attributes populated by fit() / partial_fit().
    components_: np.ndarray
    inner_stats_: tuple
    n_iter_: int
    iter_offset_: int
    random_state_: np.random.RandomState

    def __init__(
        self,
        n_components: Optional[int] = None,
        alpha: float = 1,
        n_iter: int = 1000,
        fit_algorithm: Literal["lars", "cd"] = "lars",
        n_jobs: Optional[int] = None,
        batch_size: int = 3,
        shuffle: bool = True,
        dict_init: Optional[np.ndarray] = None,
        transform_algorithm: Literal[
            "lasso_lars", "lasso_cd", "lars", "omp", "threshold"
        ] = "omp",
        transform_n_nonzero_coefs: Optional[int] = None,
        transform_alpha: Optional[float] = None,
        verbose: bool = False,
        split_sign: bool = False,
        random_state: RandomStateType = None,
        positive_code: bool = False,
        positive_dict: bool = False,
        transform_max_iter: int = 1000,
    ):
        ...
class MiniBatchSparsePCA:
    """Annotation stub for ``sklearn.decomposition.MiniBatchSparsePCA``."""

    # Attributes populated by fit().
    components_: np.ndarray
    n_iter_: int
    mean_: np.ndarray

    def __init__(
        self,
        n_components: Optional[int] = None,
        alpha: int = 1,
        ridge_alpha: float = 0.01,
        n_iter: int = 100,
        callback: Optional[Callable] = None,
        batch_size: int = 3,
        verbose: Union[int, bool] = False,
        shuffle: bool = True,
        n_jobs: Optional[int] = None,
        method: Literal["lars", "cd"] = "lars",
        random_state: RandomStateType = None,
        normalize_components: str = "deprecated",
    ):
        ...
class NMF:
    """Annotation stub for ``sklearn.decomposition.NMF``."""

    # Attributes populated by fit().
    components_: np.ndarray
    n_components_: int
    reconstruction_err_: float
    n_iter_: int

    def __init__(
        self,
        n_components: Optional[int] = None,
        init: Optional[
            Literal["random", "nndsvd", "nndsvda", "nndsvdar", "custom", "warn"]
        ] = None,
        solver: Literal["cd", "mu"] = "cd",
        beta_loss: Union[
            float, Literal["frobenius", "kullback-leibler", "itakura-saito"]
        ] = "frobenius",
        tol: float = 0.0001,
        max_iter: int = 200,
        random_state: RandomStateType = None,
        alpha: float = 0.0,
        l1_ratio: float = 0.0,
        verbose: int = 0,
        shuffle: bool = False,
    ):
        ...
class PCA:
    """Annotation stub for ``sklearn.decomposition.PCA``."""

    # Attributes populated by fit().
    components_: np.ndarray
    explained_variance_: np.ndarray
    explained_variance_ratio_: np.ndarray
    singular_values_: np.ndarray
    mean_: np.ndarray
    n_components_: np.ndarray
    n_features_: int
    n_samples_: int
    noise_variance_: float

    def __init__(
        self,
        n_components: Union[int, float, None, Literal["mle"]] = None,
        copy: bool = True,
        whiten: bool = False,
        svd_solver: Literal["auto", "full", "arpack", "randomized"] = "auto",
        tol: float = 0.0,
        iterated_power: Union[int, Literal["auto"]] = "auto",
        random_state: RandomStateType = None,
    ):
        ...
class SparseCoder:
    """Annotation stub for ``sklearn.decomposition.SparseCoder``."""

    # Attribute exposing the fixed dictionary.
    components_: np.ndarray

    def __init__(
        self,
        dictionary: np.ndarray,
        transform_algorithm: Literal[
            "lasso_lars", "lasso_cd", "lars", "omp", "threshold"
        ] = "omp",
        transform_n_nonzero_coefs: Optional[int] = None,
        transform_alpha: Optional[float] = None,
        split_sign: bool = False,
        n_jobs: Optional[int] = None,
        positive_code: bool = False,
        transform_max_iter: int = 1000,
    ):
        ...
class SparsePCA:
    """Annotation stub for ``sklearn.decomposition.SparsePCA``."""

    # Attributes populated by fit().
    components_: np.ndarray
    error_: np.ndarray
    n_iter_: int
    mean_: np.ndarray

    def __init__(
        self,
        n_components: Optional[int] = None,
        alpha: float = 1,
        ridge_alpha: float = 0.01,
        max_iter: int = 1000,
        tol: float = 1e-08,
        method: Literal["lars", "cd"] = "lars",
        n_jobs: Optional[int] = None,
        U_init: Optional[np.ndarray] = None,
        V_init: Optional[np.ndarray] = None,
        verbose: Union[int, bool] = False,
        random_state: RandomStateType = None,
        normalize_components: str = "deprecated",
    ):
        ...
class TruncatedSVD:
    """Annotation stub for ``sklearn.decomposition.TruncatedSVD``."""

    # Attributes populated by fit().
    components_: np.ndarray
    explained_variance_: np.ndarray
    explained_variance_ratio_: np.ndarray
    singular_values_: np.ndarray

    def __init__(
        self,
        n_components: int = 2,
        algorithm: Literal["arpack", "randomized"] = "randomized",
        n_iter: int = 5,
        random_state: RandomStateType = None,
        tol: float = 0.0,
    ):
        ...
| 27.12462
| 80
| 0.584043
|
from typing import Optional
from typing import Union
from collections.abc import Callable
import numpy as np
from .typing import RandomStateType
from .typing import Literal
class DictionaryLearning:
components_: np.ndarray
error_: np.ndarray
n_iter_: int
def __init__(
self,
n_components: Optional[int] = None,
alpha: float = 1,
max_iter: int = 1000,
tol: float = 1e-08,
fit_algorithm: Literal["lars", "cd"] = "lars",
transform_algorithm: Literal[
"lasso_lars", "lasso_cd", "lars", "omp", "threshold"
] = "omp",
transform_n_nonzero_coefs: Optional[int] = None,
transform_alpha: Optional[float] = None,
n_jobs: Optional[int] = None,
code_init: Optional[np.ndarray] = None,
dict_init: Optional[np.ndarray] = None,
verbose: bool = False,
split_sign: bool = False,
random_state: RandomStateType = None,
positive_code: bool = False,
positive_dict: bool = False,
transform_max_iter: int = 1000,
):
...
class FactorAnalysis:
components_: np.ndarray
loglike_: list
noise_variance_: np.ndarray
n_iter_: int
mean_: np.ndarray
def __init__(
self,
n_components: Optional[int] = None,
tol: float = 0.01,
copy: bool = True,
max_iter: int = 1000,
noise_variance_init: Optional[np.ndarray] = None,
svd_method: Literal["lapack", "randomized"] = "randomized",
iterated_power: int = 3,
random_state: RandomStateType = 0,
):
...
class FastICA:
components_: np.ndarray
mixing_: np.ndarray
mean_: np.ndarray
n_iter_: int
whitening_: np.ndarray
def __init__(
self,
n_components: Optional[int] = None,
algorithm: Literal["parallel", "deflation"] = "parallel",
whiten: bool = True,
fun: Union[Literal["logcosh", "exp", "cube"], Callable] = "logcosh",
fun_args: Optional[dict] = None,
max_iter: int = 200,
tol: float = 0.0001,
w_init: Optional[np.ndarray] = None,
random_state: RandomStateType = None,
):
...
class IncrementalPCA:
components_: np.ndarray
explained_variance_: np.ndarray
explained_variance_ratio_: np.ndarray
singular_values_: np.ndarray
mean_: np.ndarray
var_: np.ndarray
noise_variance_: float
n_components_: int
n_samples_seen_: int
def __init__(
self,
n_components: Optional[int] = None,
whiten: bool = False,
copy: bool = True,
batch_size: Optional[int] = None,
):
...
class KernelPCA:
lambdas_: np.ndarray
alphas_: np.ndarray
dual_coef_: np.ndarray
X_transformed_fit_: np.ndarray
X_fit_: np.ndarray
def __init__(
self,
n_components: Optional[None] = None,
kernel: Literal[
"linear", "poly", "rbf", "sigmoid", "cosine", "precomputed"
] = "linear",
gamma: Optional[float] = None,
degree: int = 3,
coef0: float = 1,
kernel_params: Optional[dict] = None,
alpha: float = 1.0,
fit_inverse_transform: bool = False,
eigen_solver: Literal["auto", "dense", "arpack"] = "auto",
tol: float = 0,
max_iter: Optional[None] = None,
remove_zero_eig: bool = False,
random_state: RandomStateType = None,
copy_X: bool = True,
n_jobs: Optional[int] = None,
):
...
class LatentDirichletAllocation:
    """Typed stub declaring the fitted attributes and constructor signature
    of a ``LatentDirichletAllocation`` estimator (matches the
    ``sklearn.decomposition`` API); no behaviour is defined."""
    components_: np.ndarray
    n_batch_iter_: int
    n_iter_: int
    bound_: float
    doc_topic_prior_: float
    topic_word_prior_: float
    def __init__(
        self,
        n_components: int = 10,
        doc_topic_prior: Optional[float] = None,
        topic_word_prior: Optional[float] = None,
        learning_method: Literal["batch", "online"] = "batch",
        learning_decay: float = 0.7,
        learning_offset: float = 10.0,
        max_iter: int = 10,
        batch_size: int = 128,
        evaluate_every: int = -1,
        total_samples: int = 1_000_000,
        perp_tol: float = 0.1,
        mean_change_tol: float = 0.001,
        max_doc_update_iter: int = 100,
        n_jobs: Optional[int] = None,
        verbose: int = 0,
        random_state: RandomStateType = None,
    ):
        ...
class MiniBatchDictionaryLearning:
    """Typed stub declaring the fitted attributes and constructor signature
    of a ``MiniBatchDictionaryLearning`` estimator (matches the
    ``sklearn.decomposition`` API); no behaviour is defined."""
    components_: np.ndarray
    inner_stats_: tuple
    n_iter_: int
    iter_offset_: int
    random_state_: np.random.RandomState
    def __init__(
        self,
        # Annotation fixed: ``Optional[None]`` is just ``None``; per the
        # scikit-learn API this accepts an int or None.
        n_components: Optional[int] = None,
        alpha: float = 1,
        n_iter: int = 1000,
        fit_algorithm: Literal["lars", "cd"] = "lars",
        n_jobs: Optional[int] = None,
        batch_size: int = 3,
        shuffle: bool = True,
        dict_init: Optional[np.ndarray] = None,
        transform_algorithm: Literal[
            "lasso_lars", "lasso_cd", "lars", "omp", "threshold"
        ] = "omp",
        transform_n_nonzero_coefs: Optional[int] = None,
        transform_alpha: Optional[float] = None,
        verbose: bool = False,
        split_sign: bool = False,
        random_state: RandomStateType = None,
        positive_code: bool = False,
        positive_dict: bool = False,
        transform_max_iter: int = 1000,
    ):
        ...
class MiniBatchSparsePCA:
    """Typed stub declaring the fitted attributes and constructor signature
    of a ``MiniBatchSparsePCA`` estimator (matches the
    ``sklearn.decomposition`` API); no behaviour is defined."""
    components_: np.ndarray
    n_iter_: int
    mean_: np.ndarray
    def __init__(
        self,
        n_components: Optional[int] = None,
        alpha: int = 1,
        ridge_alpha: float = 0.01,
        n_iter: int = 100,
        callback: Optional[Callable] = None,
        batch_size: int = 3,
        verbose: Union[int, bool] = False,
        shuffle: bool = True,
        n_jobs: Optional[int] = None,
        method: Literal["lars", "cd"] = "lars",
        random_state: RandomStateType = None,
        normalize_components: str = "deprecated",
    ):
        ...
class NMF:
    """Typed stub declaring the fitted attributes and constructor signature
    of an ``NMF`` estimator (matches the ``sklearn.decomposition`` API);
    no behaviour is defined."""
    components_: np.ndarray
    n_components_: int
    reconstruction_err_: float
    n_iter_: int
    def __init__(
        self,
        n_components: Optional[int] = None,
        init: Optional[
            Literal["random", "nndsvd", "nndsvda", "nndsvdar", "custom", "warn"]
        ] = None,
        solver: Literal["cd", "mu"] = "cd",
        beta_loss: Union[
            float, Literal["frobenius", "kullback-leibler", "itakura-saito"]
        ] = "frobenius",
        tol: float = 0.0001,
        max_iter: int = 200,
        random_state: RandomStateType = None,
        alpha: float = 0.0,
        l1_ratio: float = 0.0,
        verbose: int = 0,
        shuffle: bool = False,
    ):
        ...
class PCA:
    """Typed stub declaring the fitted attributes and constructor signature
    of a ``PCA`` estimator (matches the ``sklearn.decomposition`` API);
    no behaviour is defined."""
    components_: np.ndarray
    explained_variance_: np.ndarray
    explained_variance_ratio_: np.ndarray
    singular_values_: np.ndarray
    mean_: np.ndarray
    # Annotation fixed: per the scikit-learn API the estimated number of
    # components is an int, not an ndarray.
    n_components_: int
    n_features_: int
    n_samples_: int
    noise_variance_: float
    def __init__(
        self,
        n_components: Union[int, float, None, Literal["mle"]] = None,
        copy: bool = True,
        whiten: bool = False,
        svd_solver: Literal["auto", "full", "arpack", "randomized"] = "auto",
        tol: float = 0.0,
        iterated_power: Union[int, Literal["auto"]] = "auto",
        random_state: RandomStateType = None,
    ):
        ...
class SparseCoder:
    """Typed stub declaring the fitted attributes and constructor signature
    of a ``SparseCoder`` transformer (matches the ``sklearn.decomposition``
    API); no behaviour is defined."""
    components_: np.ndarray
    def __init__(
        self,
        dictionary: np.ndarray,
        transform_algorithm: Literal[
            "lasso_lars", "lasso_cd", "lars", "omp", "threshold"
        ] = "omp",
        transform_n_nonzero_coefs: Optional[int] = None,
        transform_alpha: Optional[float] = None,
        split_sign: bool = False,
        n_jobs: Optional[int] = None,
        positive_code: bool = False,
        transform_max_iter: int = 1000,
    ):
        ...
class SparsePCA:
    """Typed stub declaring the fitted attributes and constructor signature
    of a ``SparsePCA`` estimator (matches the ``sklearn.decomposition`` API);
    no behaviour is defined."""
    components_: np.ndarray
    error_: np.ndarray
    n_iter_: int
    mean_: np.ndarray
    def __init__(
        self,
        n_components: Optional[int] = None,
        alpha: float = 1,
        ridge_alpha: float = 0.01,
        max_iter: int = 1000,
        tol: float = 1e-08,
        method: Literal["lars", "cd"] = "lars",
        n_jobs: Optional[int] = None,
        U_init: Optional[np.ndarray] = None,
        V_init: Optional[np.ndarray] = None,
        verbose: Union[int, bool] = False,
        random_state: RandomStateType = None,
        normalize_components: str = "deprecated",
    ):
        ...
class TruncatedSVD:
    """Typed stub declaring the fitted attributes and constructor signature
    of a ``TruncatedSVD`` estimator (matches the ``sklearn.decomposition``
    API); no behaviour is defined."""
    components_: np.ndarray
    explained_variance_: np.ndarray
    explained_variance_ratio_: np.ndarray
    singular_values_: np.ndarray
    def __init__(
        self,
        n_components: int = 2,
        algorithm: Literal["arpack", "randomized"] = "randomized",
        n_iter: int = 5,
        random_state: RandomStateType = None,
        tol: float = 0.0,
    ):
        ...
| true
| true
|
f71aac40a529a6f8ae2786769f649c443c11c279
| 8,926
|
py
|
Python
|
Model Monitoring.py
|
MSJemutai/DSCC202-402-Forecasting-Flight-Delay-Final-Project
|
e6fc287ebfac59fd2edbc7d19241b61787ce14fb
|
[
"MIT"
] | null | null | null |
Model Monitoring.py
|
MSJemutai/DSCC202-402-Forecasting-Flight-Delay-Final-Project
|
e6fc287ebfac59fd2edbc7d19241b61787ce14fb
|
[
"MIT"
] | null | null | null |
Model Monitoring.py
|
MSJemutai/DSCC202-402-Forecasting-Flight-Delay-Final-Project
|
e6fc287ebfac59fd2edbc7d19241b61787ce14fb
|
[
"MIT"
] | null | null | null |
# Databricks notebook source
# MAGIC %md
# MAGIC ## Model Monitoring
# COMMAND ----------
# MAGIC %run ./includes/utilities
# COMMAND ----------
# MAGIC %run ./includes/configuration
# COMMAND ----------
# Grab the station information (system wide): name, id and coordinates.
stationDF = get_bike_stations()[['name', 'station_id', 'lat', 'lon']]
# Grab the stations of interest. The original query contained a duplicated
# "from" keyword ("from from citibike..."), which is invalid SQL and fails
# at parse time; the duplicate is removed here.
stationsOfInterestDF = spark.sql("""select distinct(station_id) from citibike.forecast_regression_timeweather;""").toPandas()
# Use a set for O(1) membership tests instead of a linear list scan.
_station_ids_of_interest = set(stationsOfInterestDF.values.flatten())
stationDF = stationDF[stationDF['station_id'].apply(lambda x: int(x) in _station_ids_of_interest)]
# COMMAND ----------
from datetime import datetime as dt
from datetime import timedelta

# Build the notebook widgets that parameterize this run.
dbutils.widgets.removeAll()
dbutils.widgets.dropdown("00.Airport_Code", "JFK", ["JFK","SEA","BOS","ATL","LAX","SFO","DEN","DFW","ORD","CVG","CLT","DCA","IAH"])
dbutils.widgets.text('01.training_start_date', "2018-01-01")
dbutils.widgets.text('02.training_end_date', "2019-03-15")
# Default the inference date to the day after the training window ends.
dbutils.widgets.text('03.inference_date', (dt.strptime(str(dbutils.widgets.get('02.training_end_date')), "%Y-%m-%d") + timedelta(days=1)).strftime("%Y-%m-%d"))
dbutils.widgets.text('04.promote_model', "No")

training_start_date = str(dbutils.widgets.get('01.training_start_date'))
training_end_date = str(dbutils.widgets.get('02.training_end_date'))
inference_date = str(dbutils.widgets.get('03.inference_date'))
airport_code = str(dbutils.widgets.get('00.Airport_Code'))
# Bug fix: the widget is registered as '04.promote_model' above, but the
# original read '05.promote_model', which does not exist and raises
# InputWidgetNotDefined at runtime.
promote_model = dbutils.widgets.get('04.promote_model') == 'Yes'

print(airport_code, training_start_date, training_end_date, inference_date, promote_model)
# COMMAND ----------
# MAGIC %md
# MAGIC ## Forecast flight delay at selected airport
# COMMAND ----------
import mlflow
from pprint import pprint
from mlflow.tracking import MlflowClient
import plotly.express as px
from datetime import timedelta, datetime
# Shared MLflow model-registry client used by the cells below.
client = MlflowClient()
# COMMAND ----------
# assemble dataset for forecasting
fdf = spark.sql('''
SELECT
a.hour as ds,
EXTRACT(year from a.hour) as year,
EXTRACT(dayofweek from a.hour) as dayofweek,
EXTRACT(hour from a.hour) as hour,
CASE WHEN d.date IS NULL THEN 0 ELSE 1 END as is_holiday,
COALESCE(c.tot_precip_mm,0) as precip_mm,
c.avg_temp_f as temp_f
FROM ( -- all rental hours by currently active stations
SELECT
y.station_id,
x.hour
FROM citibike.periods x
INNER JOIN citibike.stations_most_active y
ON x.hour BETWEEN '{0}' AND '{1}'
) a
LEFT OUTER JOIN citibike.rentals b
ON a.station_id=b.station_id AND a.hour=b.hour
LEFT OUTER JOIN citibike.weather c
ON a.hour=c.time
LEFT OUTER JOIN citibike.holidays d
ON TO_DATE(a.hour)=d.date
WHERE a.station_id = '{2}'
'''.format(end_date, (datetime.strptime(end_date, '%Y-%m-%d') + timedelta(hours=int(hours_to_forecast))).strftime("%Y-%m-%d %H:%M:%S"), station_id)
)
# COMMAND ----------
# Forecast using the production and staging models.
# NOTE(review): `prod_model` and `stage_model` are only assigned in a later
# cell of this notebook, and `pd` (pandas) is never imported here — this
# cell only works if cells are executed out of order; confirm the intended
# execution sequence.
df1=fdf.toPandas().fillna(method='ffill').fillna(method='bfill')
df1['model']='Production'
# The feature matrix excludes the timestamp and the label column added above.
df1['yhat']=prod_model.predict(df1.drop(["ds","model"], axis=1).values)
df2=fdf.toPandas().fillna(method='ffill').fillna(method='bfill')
df2['model']='Staging'
df2['yhat']=stage_model.predict(df2.drop(["ds","model"], axis=1).values)
# COMMAND ----------
# Plot both stages' forecasts on a single time-series chart.
df = pd.concat([df1,df2]).reset_index()
labels={
    "ds": "Forecast Time",
    "yhat": "Forecasted Delay",
    "model": "Model Stage"
}
fig = px.line(df, x="ds", y="yhat", color='model', title=f"{airport_code} delay forecast by model stage", labels=labels)
fig.show()
# COMMAND ----------
# MAGIC %md
# MAGIC ## Monitoring the model performance
# COMMAND ----------
train_df = spark.sql('''
SELECT
a.hour as ds,
EXTRACT(year from a.hour) as year,
EXTRACT(dayofweek from a.hour) as dayofweek,
EXTRACT(hour from a.hour) as hour,
CASE WHEN d.date IS NULL THEN 0 ELSE 1 END as is_holiday,
COALESCE(c.tot_precip_mm,0) as precip_mm,
c.avg_temp_f as temp_f
FROM ( -- all rental hours by currently active stations
SELECT
y.station_id,
x.hour
FROM citibike.periods x
INNER JOIN citibike.stations_most_active y
ON x.hour BETWEEN '{0}' AND '{1}'
) a
LEFT OUTER JOIN citibike.rentals b
ON a.station_id=b.station_id AND a.hour=b.hour
LEFT OUTER JOIN citibike.weather c
ON a.hour=c.time
LEFT OUTER JOIN citibike.holidays d
ON TO_DATE(a.hour)=d.date
WHERE a.station_id = '{2}'
'''.format((datetime.strptime(end_date, '%Y-%m-%d') - timedelta(hours=int(hours_to_forecast))).strftime("%Y-%m-%d %H:%M:%S"), end_date, station_id)
)
# COMMAND ----------
# Resolve the registry model name for the selected airport/station.
airport = dbutils.widgets.get('00.Airport_Code')
airport_id = stationDF[stationDF['name']==airport]['station_id'].values[0]
model_name = "{}-reg-rf-model".format(airport_id)
prod_version = None
stage_version = None
# Find which registered version is in each stage.
for mv in client.search_model_versions(f"name='{model_name}'"):
    if dict(mv)['current_stage'] == 'Staging':
        stage_version=dict(mv)['version']
    elif dict(mv)['current_stage'] == 'Production':
        prod_version=dict(mv)['version']
if prod_version is not None:
    # Load the production model and the training data associated with it.
    # NOTE(review): `station_id` is undefined in this notebook — possibly
    # `airport_id` was intended; confirm.
    prod_model = mlflow.sklearn.load_model(f"models:/{model_name}/Production")
    pdf = spark.sql(f"""SELECT * from citibike.forecast_regression_timeweather WHERE station_id = '{station_id}' and model_version = '{prod_version}';""").toPandas()
if stage_version is not None:
    # Load the staging model and the training data associated with it.
    stage_model = mlflow.sklearn.load_model(f"models:/{model_name}/Staging")
    sdf = spark.sql(f"""SELECT * from citibike.forecast_regression_timeweather WHERE station_id = '{station_id}' and model_version = '{stage_version}';""").toPandas()
# COMMAND ----------
# Residuals per stage.
# NOTE(review): `pdf`/`sdf` are referenced unconditionally here, but are
# only assigned when the corresponding version exists above — this raises
# NameError when either stage is missing; consider guarding.
pdf['stage']="prod"
pdf['residual']=pdf['y']-pdf['yhat']
sdf['stage']="staging"
sdf['residual']=sdf['y']-sdf['yhat']
df=pd.concat([pdf,sdf])
# COMMAND ----------
# Residual-vs-prediction scatter with per-stage OLS trendlines.
fig = px.scatter(
    df, x='yhat', y='residual',
    marginal_y='violin',
    color='stage', trendline='ols',
    title=f"{airport} delay forecast model performance comparison"
)
fig.show()
# COMMAND ----------
# MAGIC %md
# MAGIC ## Use Tensorflow Validation Library
# MAGIC - check schema between the training and serving periods of time
# MAGIC - check for data drift and skew between training and serving
# COMMAND ----------
# NOTE(review): `train_test_split` is imported but never used in this cell.
from sklearn.model_selection import train_test_split
import tensorflow_data_validation as tfdv
from tensorflow_data_validation.utils.display_util import get_statistics_html
import warnings
warnings.filterwarnings("ignore", message=r"Passing", category=FutureWarning)
# Summary statistics for the training and serving windows, plus an
# inferred schema from the training data.
stats_train=tfdv.generate_statistics_from_dataframe(dataframe=train_df.toPandas())
stats_serve=tfdv.generate_statistics_from_dataframe(dataframe=fdf.toPandas())
schema = tfdv.infer_schema(statistics=stats_train)
tfdv.display_schema(schema=schema)
# COMMAND ----------
# Compare evaluation (serving) data with training data side by side.
displayHTML(get_statistics_html(lhs_statistics=stats_serve, rhs_statistics=stats_train,
                                lhs_name='SERVE_DATASET', rhs_name='TRAIN_DATASET'))
# COMMAND ----------
# Schema-level anomalies in the serving data relative to the training schema.
anomalies = tfdv.validate_statistics(statistics=stats_serve, schema=schema)
tfdv.display_anomalies(anomalies)
# COMMAND ----------
# Add skew and drift comparators. A threshold of 0 means any non-zero
# Jensen-Shannon divergence between train and serve is flagged.
temp_f = tfdv.get_feature(schema, 'temp_f')
temp_f.skew_comparator.jensen_shannon_divergence.threshold = 0
temp_f.drift_comparator.jensen_shannon_divergence.threshold = 0
precip_mm = tfdv.get_feature(schema, 'precip_mm')
precip_mm.skew_comparator.jensen_shannon_divergence.threshold = 0
precip_mm.drift_comparator.jensen_shannon_divergence.threshold = 0
# NOTE(review): this first `_anomalies` result is never used — it is
# recomputed and overwritten below after more comparators are added.
_anomalies = tfdv.validate_statistics(stats_train, schema, serving_statistics=stats_serve)
hour = tfdv.get_feature(schema, 'hour')
hour.skew_comparator.jensen_shannon_divergence.threshold = 0
hour.drift_comparator.jensen_shannon_divergence.threshold = 0
dayofweek = tfdv.get_feature(schema, 'dayofweek')
dayofweek.skew_comparator.jensen_shannon_divergence.threshold = 0
dayofweek.drift_comparator.jensen_shannon_divergence.threshold = 0
_anomalies = tfdv.validate_statistics(stats_train, schema, serving_statistics=stats_serve)
tfdv.display_anomalies(_anomalies)
# COMMAND ----------
# MAGIC %md
# MAGIC ## Promote model if selected
# COMMAND ----------
# Promote the staging model to production when the operator requested it
# and both a staging and a production version actually exist.
if promote_model and stage_version is not None and prod_version is not None:
    # Retire the current production version first.
    client.transition_model_version_stage(
        name=model_name, version=prod_version, stage="Archived"
    )
    # Then move the staging candidate into production.
    client.transition_model_version_stage(
        name=model_name, version=stage_version, stage="Production"
    )

# COMMAND ----------

import json

# Signal a successful run to any orchestrating caller.
dbutils.notebook.exit(json.dumps({"exit_code": "Success"}))
| 31.652482
| 164
| 0.720816
|
ons()[['name','station_id','lat','lon']]
stationsOfInterestDF = spark.sql("""select distinct(station_id) from from citibike.forecast_regression_timeweather;""").toPandas()
stationDF = stationDF[stationDF['station_id'].apply(lambda x: int(x) in list(stationsOfInterestDF.values.flatten()))]
from datetime import datetime as dt
from datetime import timedelta
dbutils.widgets.removeAll()
dbutils.widgets.dropdown("00.Airport_Code", "JFK", ["JFK","SEA","BOS","ATL","LAX","SFO","DEN","DFW","ORD","CVG","CLT","DCA","IAH"])
dbutils.widgets.text('01.training_start_date', "2018-01-01")
dbutils.widgets.text('02.training_end_date', "2019-03-15")
dbutils.widgets.text('03.inference_date', (dt.strptime(str(dbutils.widgets.get('02.training_end_date')), "%Y-%m-%d") + timedelta(days=1)).strftime("%Y-%m-%d"))
dbutils.widgets.text('04.promote_model', "No")
training_start_date = str(dbutils.widgets.get('01.training_start_date'))
training_end_date = str(dbutils.widgets.get('02.training_end_date'))
inference_date = str(dbutils.widgets.get('03.inference_date'))
airport_code = str(dbutils.widgets.get('00.Airport_Code'))
if dbutils.widgets.get("05.promote_model")=='Yes':
promote_model = True
else:
promote_model = False
print(airport_code,training_start_date,training_end_date,inference_date,promote_model)
port plotly.express as px
from datetime import timedelta, datetime
client = MlflowClient()
fdf = spark.sql('''
SELECT
a.hour as ds,
EXTRACT(year from a.hour) as year,
EXTRACT(dayofweek from a.hour) as dayofweek,
EXTRACT(hour from a.hour) as hour,
CASE WHEN d.date IS NULL THEN 0 ELSE 1 END as is_holiday,
COALESCE(c.tot_precip_mm,0) as precip_mm,
c.avg_temp_f as temp_f
FROM ( -- all rental hours by currently active stations
SELECT
y.station_id,
x.hour
FROM citibike.periods x
INNER JOIN citibike.stations_most_active y
ON x.hour BETWEEN '{0}' AND '{1}'
) a
LEFT OUTER JOIN citibike.rentals b
ON a.station_id=b.station_id AND a.hour=b.hour
LEFT OUTER JOIN citibike.weather c
ON a.hour=c.time
LEFT OUTER JOIN citibike.holidays d
ON TO_DATE(a.hour)=d.date
WHERE a.station_id = '{2}'
'''.format(end_date, (datetime.strptime(end_date, '%Y-%m-%d') + timedelta(hours=int(hours_to_forecast))).strftime("%Y-%m-%d %H:%M:%S"), station_id)
)
df1=fdf.toPandas().fillna(method='ffill').fillna(method='bfill')
df1['model']='Production'
df1['yhat']=prod_model.predict(df1.drop(["ds","model"], axis=1).values)
df2=fdf.toPandas().fillna(method='ffill').fillna(method='bfill')
df2['model']='Staging'
df2['yhat']=stage_model.predict(df2.drop(["ds","model"], axis=1).values)
df = pd.concat([df1,df2]).reset_index()
labels={
"ds": "Forecast Time",
"yhat": "Forecasted Delay",
"model": "Model Stage"
}
fig = px.line(df, x="ds", y="yhat", color='model', title=f"{airport_code} delay forecast by model stage", labels=labels)
fig.show()
year from a.hour) as year,
EXTRACT(dayofweek from a.hour) as dayofweek,
EXTRACT(hour from a.hour) as hour,
CASE WHEN d.date IS NULL THEN 0 ELSE 1 END as is_holiday,
COALESCE(c.tot_precip_mm,0) as precip_mm,
c.avg_temp_f as temp_f
FROM ( -- all rental hours by currently active stations
SELECT
y.station_id,
x.hour
FROM citibike.periods x
INNER JOIN citibike.stations_most_active y
ON x.hour BETWEEN '{0}' AND '{1}'
) a
LEFT OUTER JOIN citibike.rentals b
ON a.station_id=b.station_id AND a.hour=b.hour
LEFT OUTER JOIN citibike.weather c
ON a.hour=c.time
LEFT OUTER JOIN citibike.holidays d
ON TO_DATE(a.hour)=d.date
WHERE a.station_id = '{2}'
'''.format((datetime.strptime(end_date, '%Y-%m-%d') - timedelta(hours=int(hours_to_forecast))).strftime("%Y-%m-%d %H:%M:%S"), end_date, station_id)
)
airport = dbutils.widgets.get('00.Airport_Code')
airport_id = stationDF[stationDF['name']==airport]['station_id'].values[0]
model_name = "{}-reg-rf-model".format(airport_id)
prod_version = None
stage_version = None
for mv in client.search_model_versions(f"name='{model_name}'"):
if dict(mv)['current_stage'] == 'Staging':
stage_version=dict(mv)['version']
elif dict(mv)['current_stage'] == 'Production':
prod_version=dict(mv)['version']
if prod_version is not None:
prod_model = mlflow.sklearn.load_model(f"models:/{model_name}/Production")
pdf = spark.sql(f"""SELECT * from citibike.forecast_regression_timeweather WHERE station_id = '{station_id}' and model_version = '{prod_version}';""").toPandas()
if stage_version is not None:
stage_model = mlflow.sklearn.load_model(f"models:/{model_name}/Staging")
sdf = spark.sql(f"""SELECT * from citibike.forecast_regression_timeweather WHERE station_id = '{station_id}' and model_version = '{stage_version}';""").toPandas()
pdf['stage']="prod"
pdf['residual']=pdf['y']-pdf['yhat']
sdf['stage']="staging"
sdf['residual']=sdf['y']-sdf['yhat']
df=pd.concat([pdf,sdf])
fig = px.scatter(
df, x='yhat', y='residual',
marginal_y='violin',
color='stage', trendline='ols',
title=f"{airport} delay forecast model performance comparison"
)
fig.show()
rflow_data_validation as tfdv
from tensorflow_data_validation.utils.display_util import get_statistics_html
import warnings
warnings.filterwarnings("ignore", message=r"Passing", category=FutureWarning)
stats_train=tfdv.generate_statistics_from_dataframe(dataframe=train_df.toPandas())
stats_serve=tfdv.generate_statistics_from_dataframe(dataframe=fdf.toPandas())
schema = tfdv.infer_schema(statistics=stats_train)
tfdv.display_schema(schema=schema)
displayHTML(get_statistics_html(lhs_statistics=stats_serve, rhs_statistics=stats_train,
lhs_name='SERVE_DATASET', rhs_name='TRAIN_DATASET'))
anomalies = tfdv.validate_statistics(statistics=stats_serve, schema=schema)
tfdv.display_anomalies(anomalies)
temp_f = tfdv.get_feature(schema, 'temp_f')
temp_f.skew_comparator.jensen_shannon_divergence.threshold = 0
temp_f.drift_comparator.jensen_shannon_divergence.threshold = 0
precip_mm = tfdv.get_feature(schema, 'precip_mm')
precip_mm.skew_comparator.jensen_shannon_divergence.threshold = 0
precip_mm.drift_comparator.jensen_shannon_divergence.threshold = 0
_anomalies = tfdv.validate_statistics(stats_train, schema, serving_statistics=stats_serve)
hour = tfdv.get_feature(schema, 'hour')
hour.skew_comparator.jensen_shannon_divergence.threshold = 0
hour.drift_comparator.jensen_shannon_divergence.threshold = 0
dayofweek = tfdv.get_feature(schema, 'dayofweek')
dayofweek.skew_comparator.jensen_shannon_divergence.threshold = 0
dayofweek.drift_comparator.jensen_shannon_divergence.threshold = 0
_anomalies = tfdv.validate_statistics(stats_train, schema, serving_statistics=stats_serve)
tfdv.display_anomalies(_anomalies)
prod_version is not None:
client.transition_model_version_stage(
name=model_name,
version=prod_version,
stage="Archived"
)
client.transition_model_version_stage(
name=model_name,
version=stage_version,
stage="Production"
)
import json
dbutils.notebook.exit(json.dumps({"exit_code": "Success"}))
| true
| true
|
f71aac54f88d8ccd203f824b5e35a7cfb34c929b
| 15,156
|
py
|
Python
|
colour/models/rgb/transfer_functions/canon_log.py
|
soma2000-lang/colour
|
bb7ee23ac65e09613af78bd18dd98dffb1a2904a
|
[
"BSD-3-Clause"
] | 1
|
2022-02-12T06:28:15.000Z
|
2022-02-12T06:28:15.000Z
|
colour/models/rgb/transfer_functions/canon_log.py
|
soma2000-lang/colour
|
bb7ee23ac65e09613af78bd18dd98dffb1a2904a
|
[
"BSD-3-Clause"
] | null | null | null |
colour/models/rgb/transfer_functions/canon_log.py
|
soma2000-lang/colour
|
bb7ee23ac65e09613af78bd18dd98dffb1a2904a
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Canon Log Encodings
===================
Defines the *Canon Log* encodings:
- :func:`colour.models.log_encoding_CanonLog`
- :func:`colour.models.log_decoding_CanonLog`
- :func:`colour.models.log_encoding_CanonLog2`
- :func:`colour.models.log_decoding_CanonLog2`
- :func:`colour.models.log_encoding_CanonLog3`
- :func:`colour.models.log_decoding_CanonLog3`
Notes
-----
- :cite:`Canona` is available as a *Drivers & Downloads* *Software* for
Windows 10 (x64) *Operating System*, a copy of the archive is hosted at
this url: https://drive.google.com/open?id=0B_IQZQdc4Vy8ZGYyY29pMEVwZU0
References
----------
- :cite:`Canona` : Canon. (2016). EOS C300 Mark II - EOS C300 Mark II Input
Transform Version 2.0 (for Cinema Gamut / BT.2020). Retrieved August 23,
2016, from
https://www.usa.canon.com/internet/portal/us/home/support/details/cameras/cinema-eos/eos-c300-mark-ii
- :cite:`Thorpe2012a` : Thorpe, L. (2012). CANON-LOG TRANSFER CHARACTERISTIC.
Retrieved September 25, 2014, from
http://downloads.canon.com/CDLC/Canon-Log_Transfer_Characteristic_6-20-2012.pdf
"""
from __future__ import annotations
import numpy as np
from colour.hints import (
Boolean,
FloatingOrArrayLike,
FloatingOrNDArray,
Integer,
)
from colour.models.rgb.transfer_functions import full_to_legal, legal_to_full
from colour.utilities import (
as_float,
domain_range_scale,
from_range_1,
to_domain_1,
)
__author__ = "Colour Developers"
__copyright__ = "Copyright (C) 2013-2022 - Colour Developers"
__license__ = "New BSD License - https://opensource.org/licenses/BSD-3-Clause"
__maintainer__ = "Colour Developers"
__email__ = "colour-developers@colour-science.org"
__status__ = "Production"
__all__ = [
"log_encoding_CanonLog",
"log_decoding_CanonLog",
"log_encoding_CanonLog2",
"log_decoding_CanonLog2",
"log_encoding_CanonLog3",
"log_decoding_CanonLog3",
]
def log_encoding_CanonLog(
    x: FloatingOrArrayLike,
    bit_depth: Integer = 10,
    out_normalised_code_value: Boolean = True,
    in_reflection: Boolean = True,
) -> FloatingOrNDArray:
    """
    Define the *Canon Log* log encoding curve / opto-electronic transfer
    function.

    Parameters
    ----------
    x
        Linear data :math:`x`.
    bit_depth
        Bit depth used for conversion.
    out_normalised_code_value
        Whether the *Canon Log* non-linear data is encoded as normalised
        code values.
    in_reflection
        Whether the light level :math:`x` to a camera is reflection.

    Returns
    -------
    :class:`numpy.floating` or :class:`numpy.ndarray`
        *Canon Log* non-linear data.

    Notes
    -----
    Both ``x`` and ``clog`` use a [0, 1] domain / range under the reference
    and **'1'** scales.

    References
    ----------
    :cite:`Thorpe2012a`

    Examples
    --------
    >>> log_encoding_CanonLog(0.18) * 100  # doctest: +ELLIPSIS
    34.3389651...

    The values of *Table 2 Canon-Log Code Values* table in
    :cite:`Thorpe2012a` are obtained as follows:

    >>> x = np.array([0, 2, 18, 90, 720]) / 100
    >>> np.around(log_encoding_CanonLog(x) * (2 ** 10 - 1)).astype(np.int)
    array([ 128,  169,  351,  614, 1016])
    >>> np.around(log_encoding_CanonLog(x, 10, False) * 100, 1)
    array([   7.3,   12. ,   32.8,   62.7,  108.7])
    """

    x = to_domain_1(x)

    # 90% reflectance convention: reflection to scene-linear exposure.
    if in_reflection:
        x = x / 0.9

    with domain_range_scale("ignore"):
        # Breakpoint between the negative and positive log segments,
        # expressed in linear light.
        breakpoint_x = log_decoding_CanonLog(0.0730597, bit_depth, False)
        segment_negative = -(
            0.529136 * (np.log10(-x * 10.1596 + 1)) - 0.0730597
        )
        segment_positive = 0.529136 * np.log10(10.1596 * x + 1) + 0.0730597
        clog = np.where(x < breakpoint_x, segment_negative, segment_positive)

    if out_normalised_code_value:
        clog = full_to_legal(clog, bit_depth)

    return as_float(from_range_1(clog))
def log_decoding_CanonLog(
    clog: FloatingOrArrayLike,
    bit_depth: Integer = 10,
    in_normalised_code_value: Boolean = True,
    out_reflection: Boolean = True,
) -> FloatingOrNDArray:
    """
    Define the *Canon Log* log decoding curve / electro-optical transfer
    function.

    Parameters
    ----------
    clog
        *Canon Log* non-linear data.
    bit_depth
        Bit depth used for conversion.
    in_normalised_code_value
        Whether the *Canon Log* non-linear data is encoded with normalised
        code values.
    out_reflection
        Whether the light level :math:`x` to a camera is reflection.

    Returns
    -------
    :class:`numpy.floating` or :class:`numpy.ndarray`
        Linear data :math:`x`.

    Notes
    -----
    Both ``clog`` and ``x`` use a [0, 1] domain / range under the reference
    and **'1'** scales.

    References
    ----------
    :cite:`Thorpe2012a`

    Examples
    --------
    >>> log_decoding_CanonLog(34.338965172606912 / 100)  # doctest: +ELLIPSIS
    0.17999999...
    """

    clog = to_domain_1(clog)

    if in_normalised_code_value:
        clog = legal_to_full(clog, bit_depth)

    # Invert each log segment around the 0.0730597 breakpoint.
    below = -(10 ** ((0.0730597 - clog) / 0.529136) - 1) / 10.1596
    above = (10 ** ((clog - 0.0730597) / 0.529136) - 1) / 10.1596
    x = np.where(clog < 0.0730597, below, above)

    # Scene-linear exposure back to reflection (90% convention).
    if out_reflection:
        x = x * 0.9

    return as_float(from_range_1(x))
def log_encoding_CanonLog2(
    x: FloatingOrArrayLike,
    bit_depth: Integer = 10,
    out_normalised_code_value: Boolean = True,
    in_reflection: Boolean = True,
) -> FloatingOrNDArray:
    """
    Define the *Canon Log 2* log encoding curve / opto-electronic transfer
    function.

    Parameters
    ----------
    x
        Linear data :math:`x`.
    bit_depth
        Bit depth used for conversion.
    out_normalised_code_value
        Whether the *Canon Log 2* non-linear data is encoded as normalised
        code values.
    in_reflection
        Whether the light level :math:`x` to a camera is reflection.

    Returns
    -------
    :class:`numpy.floating` or :class:`numpy.ndarray`
        *Canon Log 2* non-linear data.

    Notes
    -----
    Both ``x`` and ``clog2`` use a [0, 1] domain / range under the
    reference and **'1'** scales.

    References
    ----------
    :cite:`Canona`

    Examples
    --------
    >>> log_encoding_CanonLog2(0.18) * 100  # doctest: +ELLIPSIS
    39.8254694...
    """

    x = to_domain_1(x)

    # 90% reflectance convention: reflection to scene-linear exposure.
    if in_reflection:
        x = x / 0.9

    with domain_range_scale("ignore"):
        # Breakpoint between the two log segments, in linear light.
        breakpoint_x = log_decoding_CanonLog2(0.035388128, bit_depth, False)
        segment_negative = -(
            0.281863093 * (np.log10(-x * 87.09937546 + 1)) - 0.035388128
        )
        segment_positive = (
            0.281863093 * np.log10(x * 87.09937546 + 1) + 0.035388128
        )
        clog2 = np.where(x < breakpoint_x, segment_negative, segment_positive)

    if out_normalised_code_value:
        clog2 = full_to_legal(clog2, bit_depth)

    return as_float(from_range_1(clog2))
def log_decoding_CanonLog2(
    clog2: FloatingOrArrayLike,
    bit_depth: Integer = 10,
    in_normalised_code_value: Boolean = True,
    out_reflection: Boolean = True,
) -> FloatingOrNDArray:
    """
    Define the *Canon Log 2* log decoding curve / electro-optical transfer
    function.

    Parameters
    ----------
    clog2
        *Canon Log 2* non-linear data.
    bit_depth
        Bit depth used for conversion.
    in_normalised_code_value
        Whether the *Canon Log 2* non-linear data is encoded with
        normalised code values.
    out_reflection
        Whether the light level :math:`x` to a camera is reflection.

    Returns
    -------
    :class:`numpy.floating` or :class:`numpy.ndarray`
        Linear data :math:`x`.

    Notes
    -----
    Both ``clog2`` and ``x`` use a [0, 1] domain / range under the
    reference and **'1'** scales.

    References
    ----------
    :cite:`Canona`

    Examples
    --------
    >>> log_decoding_CanonLog2(39.825469498316735 / 100)  # doctest: +ELLIPSIS
    0.1799999...
    """

    clog2 = to_domain_1(clog2)

    if in_normalised_code_value:
        clog2 = legal_to_full(clog2, bit_depth)

    # Invert each log segment around the 0.035388128 breakpoint.
    below = -(10 ** ((0.035388128 - clog2) / 0.281863093) - 1) / 87.09937546
    above = (10 ** ((clog2 - 0.035388128) / 0.281863093) - 1) / 87.09937546
    x = np.where(clog2 < 0.035388128, below, above)

    # Scene-linear exposure back to reflection (90% convention).
    if out_reflection:
        x = x * 0.9

    return as_float(from_range_1(x))
def log_encoding_CanonLog3(
    x: FloatingOrArrayLike,
    bit_depth: Integer = 10,
    out_normalised_code_value: Boolean = True,
    in_reflection: Boolean = True,
) -> FloatingOrNDArray:
    """
    Define the *Canon Log 3* log encoding curve / opto-electronic transfer
    function.

    Parameters
    ----------
    x
        Linear data :math:`x`.
    bit_depth
        Bit depth used for conversion.
    out_normalised_code_value
        Whether the *Canon Log 3* non-linear data is encoded as normalised
        code values.
    in_reflection
        Whether the light level :math:`x` to a camera is reflection.

    Returns
    -------
    :class:`numpy.floating` or :class:`numpy.ndarray`
        *Canon Log 3* non-linear data.

    Notes
    -----
    -   The curve is a central linear segment grafted onto two log
        segments; introspection of the grafting points by Shaw, N. (2018)
        shows the graft occurs at *clog3* values whose linear-segment
        pre-image is *+/-0.014*, suggesting the *Canon Log 3* IDT was
        derived from this encoding curve.
    -   Both ``x`` and ``clog3`` use a [0, 1] domain / range under the
        reference and **'1'** scales.

    References
    ----------
    :cite:`Canona`

    Examples
    --------
    >>> log_encoding_CanonLog3(0.18) * 100  # doctest: +ELLIPSIS
    34.3389369...
    """

    x = to_domain_1(x)

    # 90% reflectance convention: reflection to scene-linear exposure.
    if in_reflection:
        x = x / 0.9

    with domain_range_scale("ignore"):
        # Grafting points of the central linear segment, expressed in
        # linear light.
        graft_low = log_decoding_CanonLog3(0.04076162, bit_depth, False, False)
        graft_high = log_decoding_CanonLog3(
            0.105357102, bit_depth, False, False
        )

        clog3 = np.select(
            (x < graft_low, x <= graft_high, x > graft_high),
            (
                -0.42889912 * np.log10(-x * 14.98325 + 1) + 0.07623209,
                2.3069815 * x + 0.073059361,
                0.42889912 * np.log10(x * 14.98325 + 1) + 0.069886632,
            ),
        )

    if out_normalised_code_value:
        clog3 = full_to_legal(clog3, bit_depth)

    return as_float(from_range_1(clog3))
def log_decoding_CanonLog3(
    clog3: FloatingOrArrayLike,
    bit_depth: Integer = 10,
    in_normalised_code_value: Boolean = True,
    out_reflection: Boolean = True,
) -> FloatingOrNDArray:
    """
    Define the *Canon Log 3* log decoding curve / electro-optical transfer
    function, converting *Canon Log 3* non-linear data to linear light.

    Parameters
    ----------
    clog3
        *Canon Log 3* non-linear data.
    bit_depth
        Bit depth used for conversion.
    in_normalised_code_value
        Whether the *Canon Log 3* non-linear data is encoded with normalised
        code values.
    out_reflection
        Whether the light level :math:`x` to a camera is reflection.

    Returns
    -------
    :class:`numpy.floating` or :class:`numpy.ndarray`
        Linear data :math:`x`.

    Notes
    -----
    -   Both domain ``clog3`` and range ``x`` are in the [0, 1] interval
        under *Scale - Reference* and *Scale - 1*.

    References
    ----------
    :cite:`Canona`

    Examples
    --------
    >>> log_decoding_CanonLog3(34.338936938868677 / 100)  # doctest: +ELLIPSIS
    0.1800000...
    """
    clog3 = to_domain_1(clog3)

    if in_normalised_code_value:
        clog3 = legal_to_full(clog3, bit_depth)

    # Three-segment inverse: negative log segment, linear mid segment
    # between the grafting points, positive log segment.
    conditions = (
        clog3 < 0.04076162,
        clog3 <= 0.105357102,
        clog3 > 0.105357102,
    )
    choices = (
        -(10 ** ((0.07623209 - clog3) / 0.42889912) - 1) / 14.98325,
        (clog3 - 0.073059361) / 2.3069815,
        (10 ** ((clog3 - 0.069886632) / 0.42889912) - 1) / 14.98325,
    )
    x = np.select(conditions, choices)

    if out_reflection:
        x = x * 0.9

    return as_float(from_range_1(x))
| 29.202312
| 105
| 0.490433
|
from __future__ import annotations
import numpy as np
from colour.hints import (
Boolean,
FloatingOrArrayLike,
FloatingOrNDArray,
Integer,
)
from colour.models.rgb.transfer_functions import full_to_legal, legal_to_full
from colour.utilities import (
as_float,
domain_range_scale,
from_range_1,
to_domain_1,
)
__author__ = "Colour Developers"
__copyright__ = "Copyright (C) 2013-2022 - Colour Developers"
__license__ = "New BSD License - https://opensource.org/licenses/BSD-3-Clause"
__maintainer__ = "Colour Developers"
__email__ = "colour-developers@colour-science.org"
__status__ = "Production"
__all__ = [
"log_encoding_CanonLog",
"log_decoding_CanonLog",
"log_encoding_CanonLog2",
"log_decoding_CanonLog2",
"log_encoding_CanonLog3",
"log_decoding_CanonLog3",
]
def log_encoding_CanonLog(
    x: FloatingOrArrayLike,
    bit_depth: Integer = 10,
    out_normalised_code_value: Boolean = True,
    in_reflection: Boolean = True,
) -> FloatingOrNDArray:
    """
    Define the *Canon Log* log encoding curve / opto-electronic transfer
    function.

    Parameters
    ----------
    x
        Linear data :math:`x`.
    bit_depth
        Bit depth used for conversion.
    out_normalised_code_value
        Whether the *Canon Log* non-linear data is encoded as normalised
        code values.
    in_reflection
        Whether the light level :math:`x` to a camera is reflection.

    Returns
    -------
    :class:`numpy.floating` or :class:`numpy.ndarray`
        *Canon Log* non-linear data.
    """
    x = to_domain_1(x)
    if in_reflection:
        x = x / 0.9
    with domain_range_scale("ignore"):
        # Two log segments grafted at the point whose decoded linear value
        # corresponds to code value 0.0730597: a mirrored segment for
        # negative light levels and the standard segment above it.
        clog = np.where(
            x < log_decoding_CanonLog(0.0730597, bit_depth, False),
            -(0.529136 * (np.log10(-x * 10.1596 + 1)) - 0.0730597),
            0.529136 * np.log10(10.1596 * x + 1) + 0.0730597,
        )
    clog_cv = (
        full_to_legal(clog, bit_depth) if out_normalised_code_value else clog
    )
    return as_float(from_range_1(clog_cv))
def log_decoding_CanonLog(
    clog: FloatingOrArrayLike,
    bit_depth: Integer = 10,
    in_normalised_code_value: Boolean = True,
    out_reflection: Boolean = True,
) -> FloatingOrNDArray:
    """
    Define the *Canon Log* log decoding curve / electro-optical transfer
    function.

    Parameters
    ----------
    clog
        *Canon Log* non-linear data.
    bit_depth
        Bit depth used for conversion.
    in_normalised_code_value
        Whether the *Canon Log* non-linear data is encoded with normalised
        code values.
    out_reflection
        Whether the light level :math:`x` to a camera is reflection.

    Returns
    -------
    :class:`numpy.floating` or :class:`numpy.ndarray`
        Linear data :math:`x`.
    """
    clog = to_domain_1(clog)
    clog = legal_to_full(clog, bit_depth) if in_normalised_code_value else clog
    # Exact inverse of the two encoding segments, split at 0.0730597.
    x = np.where(
        clog < 0.0730597,
        -(10 ** ((0.0730597 - clog) / 0.529136) - 1) / 10.1596,
        (10 ** ((clog - 0.0730597) / 0.529136) - 1) / 10.1596,
    )
    if out_reflection:
        x = x * 0.9
    return as_float(from_range_1(x))
def log_encoding_CanonLog2(
    x: FloatingOrArrayLike,
    bit_depth: Integer = 10,
    out_normalised_code_value: Boolean = True,
    in_reflection: Boolean = True,
) -> FloatingOrNDArray:
    """
    Define the *Canon Log 2* log encoding curve / opto-electronic transfer
    function.

    Parameters
    ----------
    x
        Linear data :math:`x`.
    bit_depth
        Bit depth used for conversion.
    out_normalised_code_value
        Whether the *Canon Log 2* non-linear data is encoded as normalised
        code values.
    in_reflection
        Whether the light level :math:`x` to a camera is reflection.

    Returns
    -------
    :class:`numpy.floating` or :class:`numpy.ndarray`
        *Canon Log 2* non-linear data.
    """
    x = to_domain_1(x)
    if in_reflection:
        x = x / 0.9
    with domain_range_scale("ignore"):
        # Mirrored log segment below the grafting point (code value
        # 0.035388128), standard log segment above it.
        clog2 = np.where(
            x < log_decoding_CanonLog2(0.035388128, bit_depth, False),
            -(0.281863093 * (np.log10(-x * 87.09937546 + 1)) - 0.035388128),
            0.281863093 * np.log10(x * 87.09937546 + 1) + 0.035388128,
        )
    clog2_cv = (
        full_to_legal(clog2, bit_depth) if out_normalised_code_value else clog2
    )
    return as_float(from_range_1(clog2_cv))
def log_decoding_CanonLog2(
    clog2: FloatingOrArrayLike,
    bit_depth: Integer = 10,
    in_normalised_code_value: Boolean = True,
    out_reflection: Boolean = True,
) -> FloatingOrNDArray:
    """
    Define the *Canon Log 2* log decoding curve / electro-optical transfer
    function.

    Parameters
    ----------
    clog2
        *Canon Log 2* non-linear data.
    bit_depth
        Bit depth used for conversion.
    in_normalised_code_value
        Whether the *Canon Log 2* non-linear data is encoded with normalised
        code values.
    out_reflection
        Whether the light level :math:`x` to a camera is reflection.

    Returns
    -------
    :class:`numpy.floating` or :class:`numpy.ndarray`
        Linear data :math:`x`.
    """
    clog2 = to_domain_1(clog2)
    clog2 = (
        legal_to_full(clog2, bit_depth) if in_normalised_code_value else clog2
    )
    # Exact inverse of the two encoding segments, split at 0.035388128.
    x = np.where(
        clog2 < 0.035388128,
        -(10 ** ((0.035388128 - clog2) / 0.281863093) - 1) / 87.09937546,
        (10 ** ((clog2 - 0.035388128) / 0.281863093) - 1) / 87.09937546,
    )
    if out_reflection:
        x = x * 0.9
    return as_float(from_range_1(x))
def log_encoding_CanonLog3(
    x: FloatingOrArrayLike,
    bit_depth: Integer = 10,
    out_normalised_code_value: Boolean = True,
    in_reflection: Boolean = True,
) -> FloatingOrNDArray:
    """
    Define the *Canon Log 3* log encoding curve / opto-electronic transfer
    function.

    Parameters
    ----------
    x
        Linear data :math:`x`.
    bit_depth
        Bit depth used for conversion.
    out_normalised_code_value
        Whether the *Canon Log 3* non-linear data is encoded as normalised
        code values.
    in_reflection
        Whether the light level :math:`x` to a camera is reflection.

    Returns
    -------
    :class:`numpy.floating` or :class:`numpy.ndarray`
        *Canon Log 3* non-linear data.
    """
    x = to_domain_1(x)
    if in_reflection:
        x = x / 0.9
    with domain_range_scale("ignore"):
        # Three segments: negative log, linear mid segment grafted at the
        # linear values decoding to code values 0.04076162 and 0.105357102,
        # and positive log above. Branch order in np.select must stay as is:
        # the first matching condition wins.
        clog3 = np.select(
            (
                x
                < log_decoding_CanonLog3(0.04076162, bit_depth, False, False),
                x
                <= log_decoding_CanonLog3(
                    0.105357102, bit_depth, False, False
                ),
                x
                > log_decoding_CanonLog3(0.105357102, bit_depth, False, False),
            ),
            (
                -0.42889912 * np.log10(-x * 14.98325 + 1) + 0.07623209,
                2.3069815 * x + 0.073059361,
                0.42889912 * np.log10(x * 14.98325 + 1) + 0.069886632,
            ),
        )
    clog3_cv = (
        full_to_legal(clog3, bit_depth) if out_normalised_code_value else clog3
    )
    return as_float(from_range_1(clog3_cv))
def log_decoding_CanonLog3(
    clog3: FloatingOrArrayLike,
    bit_depth: Integer = 10,
    in_normalised_code_value: Boolean = True,
    out_reflection: Boolean = True,
) -> FloatingOrNDArray:
    """
    Define the *Canon Log 3* log decoding curve / electro-optical transfer
    function.

    Parameters
    ----------
    clog3
        *Canon Log 3* non-linear data.
    bit_depth
        Bit depth used for conversion.
    in_normalised_code_value
        Whether the *Canon Log 3* non-linear data is encoded with normalised
        code values.
    out_reflection
        Whether the light level :math:`x` to a camera is reflection.

    Returns
    -------
    :class:`numpy.floating` or :class:`numpy.ndarray`
        Linear data :math:`x`.
    """
    clog3 = to_domain_1(clog3)
    clog3 = (
        legal_to_full(clog3, bit_depth) if in_normalised_code_value else clog3
    )
    # Inverse of the three encoding segments; split points 0.04076162 and
    # 0.105357102 mirror the encoding grafting points.
    x = np.select(
        (clog3 < 0.04076162, clog3 <= 0.105357102, clog3 > 0.105357102),
        (
            -(10 ** ((0.07623209 - clog3) / 0.42889912) - 1) / 14.98325,
            (clog3 - 0.073059361) / 2.3069815,
            (10 ** ((clog3 - 0.069886632) / 0.42889912) - 1) / 14.98325,
        ),
    )
    if out_reflection:
        x = x * 0.9
    return as_float(from_range_1(x))
| true
| true
|
f71aad03581521af34e46f4263fc80abdb4a99c3
| 6,135
|
py
|
Python
|
asposewordscloud/models/requests/insert_list_online_request.py
|
aspose-words-cloud/aspose-words-cloud-python
|
65c7b55fa4aac69b60d41e7f54aed231df285479
|
[
"MIT"
] | 14
|
2018-07-15T17:01:52.000Z
|
2018-11-29T06:15:33.000Z
|
asposewordscloud/models/requests/insert_list_online_request.py
|
aspose-words-cloud/aspose-words-cloud-python
|
65c7b55fa4aac69b60d41e7f54aed231df285479
|
[
"MIT"
] | 1
|
2018-09-28T12:59:34.000Z
|
2019-10-08T08:42:59.000Z
|
asposewordscloud/models/requests/insert_list_online_request.py
|
aspose-words-cloud/aspose-words-cloud-python
|
65c7b55fa4aac69b60d41e7f54aed231df285479
|
[
"MIT"
] | 2
|
2020-12-21T07:59:17.000Z
|
2022-02-16T21:41:25.000Z
|
# coding: utf-8
# -----------------------------------------------------------------------------------
# <copyright company="Aspose" file="insert_list_online_request.py">
# Copyright (c) 2021 Aspose.Words for Cloud
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
import json
from six.moves.urllib.parse import quote
from asposewordscloud import *
from asposewordscloud.models import *
from asposewordscloud.models.requests import *
from asposewordscloud.models.responses import *
class InsertListOnlineRequest(BaseRequestObject):
    """
    Request model for insert_list_online operation.
    Initializes a new instance.
    :param document The document.
    :param list_insert List object.
    :param load_encoding Encoding that will be used to load an HTML (or TXT) document if the encoding is not specified in HTML.
    :param password Password for opening an encrypted document.
    :param dest_file_name Result path of the document after the operation. If this parameter is omitted then result of the operation will be saved as the source document.
    :param revision_author Initials of the author to use for revisions.If you set this parameter and then make some changes to the document programmatically, save the document and later open the document in MS Word you will see these changes as revisions.
    :param revision_date_time The date and time to use for revisions.
    """

    def __init__(self, document, list_insert, load_encoding=None, password=None, dest_file_name=None, revision_author=None, revision_date_time=None):
        # Parameters are only stored here; required-parameter validation is
        # deferred to create_http_request so construction never raises.
        self.document = document
        self.list_insert = list_insert
        self.load_encoding = load_encoding
        self.password = password
        self.dest_file_name = dest_file_name
        self.revision_author = revision_author
        self.revision_date_time = revision_date_time

    def create_http_request(self, api_client):
        """
        Build the HTTP request description (method, path, query/header/form
        params) for this operation, validating required parameters.

        :raises ValueError: if `document` or `list_insert` is None.
        """
        # verify the required parameter 'document' is set
        if self.document is None:
            raise ValueError("Missing the required parameter `document` when calling `insert_list_online`")  # noqa: E501
        # verify the required parameter 'list_insert' is set
        if self.list_insert is None:
            raise ValueError("Missing the required parameter `list_insert` when calling `insert_list_online`")  # noqa: E501

        path = '/v4.0/words/online/post/lists'
        path_params = {}

        # path parameters
        collection_formats = {}
        if path_params:
            path_params = api_client.sanitize_for_serialization(path_params)
            path_params = api_client.parameters_to_tuples(path_params, collection_formats)
            for k, v in path_params:
                # specified safe chars, encode everything
                path = path.replace(
                    '{%s}' % k,
                    quote(str(v), safe=api_client.configuration.safe_chars_for_path_param)
                )

        # remove optional path parameters
        path = path.replace('//', '/')

        query_params = []
        if self.load_encoding is not None:
                query_params.append(('loadEncoding', self.load_encoding))  # noqa: E501
        if self.password is not None:
                query_params.append(('password', self.password))  # noqa: E501
        if self.dest_file_name is not None:
                query_params.append(('destFileName', self.dest_file_name))  # noqa: E501
        if self.revision_author is not None:
                query_params.append(('revisionAuthor', self.revision_author))  # noqa: E501
        if self.revision_date_time is not None:
                query_params.append(('revisionDateTime', self.revision_date_time))  # noqa: E501

        header_params = {}
        # HTTP header `Content-Type`
        header_params['Content-Type'] = api_client.select_header_content_type(  # noqa: E501
            ['multipart/form-data'])  # noqa: E501

        form_params = []
        if self.document is not None:
            form_params.append(['document', self.document, 'file'])  # noqa: E501
        if self.list_insert is not None:
            form_params.append(['listInsert', self.list_insert.to_json(), 'string'])  # noqa: E501

        body_params = None
        return {
            "method": "PUT",
            "path": path,
            "query_params": query_params,
            "header_params": header_params,
            "form_params": form_params,
            "body": body_params,
            "collection_formats": collection_formats,
            "response_type": 'InsertListOnlineResponse'  # noqa: E501
        }

    def get_response_type(self):
        """Return the response model class name used for deserialization."""
        return 'InsertListOnlineResponse'  # noqa: E501

    def deserialize_response(self, api_client, response):
        """
        Split the multipart response into its JSON model part (part 0) and
        the resulting document file (part 1) and wrap them in the response
        object.
        """
        multipart = self.getparts(response)
        return InsertListOnlineResponse(
            self.deserialize(json.loads(multipart[0].text), ListResponse, api_client),
            self.deserialize_file(multipart[1].content, multipart[1].headers, api_client))
| 49.08
| 255
| 0.669927
|
import json
from six.moves.urllib.parse import quote
from asposewordscloud import *
from asposewordscloud.models import *
from asposewordscloud.models.requests import *
from asposewordscloud.models.responses import *
class InsertListOnlineRequest(BaseRequestObject):
    """
    Request model for the insert_list_online operation: uploads a document
    together with a serialized list definition and returns the updated
    document plus the created list model.
    """

    def __init__(self, document, list_insert, load_encoding=None, password=None, dest_file_name=None, revision_author=None, revision_date_time=None):
        # Parameters are only stored; required-parameter validation happens
        # in create_http_request.
        self.document = document
        self.list_insert = list_insert
        self.load_encoding = load_encoding
        self.password = password
        self.dest_file_name = dest_file_name
        self.revision_author = revision_author
        self.revision_date_time = revision_date_time

    def create_http_request(self, api_client):
        """
        Build the HTTP request description (method, path, query/header/form
        params) for this operation.

        :raises ValueError: if `document` or `list_insert` is None.
        """
        if self.document is None:
            raise ValueError("Missing the required parameter `document` when calling `insert_list_online`")
        if self.list_insert is None:
            raise ValueError("Missing the required parameter `list_insert` when calling `insert_list_online`")
        path = '/v4.0/words/online/post/lists'
        path_params = {}
        collection_formats = {}
        if path_params:
            path_params = api_client.sanitize_for_serialization(path_params)
            path_params = api_client.parameters_to_tuples(path_params, collection_formats)
            for k, v in path_params:
                # URL-encode each path parameter with the configured safe chars.
                path = path.replace(
                    '{%s}' % k,
                    quote(str(v), safe=api_client.configuration.safe_chars_for_path_param)
                )
        # collapse empty optional path segments
        path = path.replace('//', '/')
        query_params = []
        if self.load_encoding is not None:
                query_params.append(('loadEncoding', self.load_encoding))
        if self.password is not None:
                query_params.append(('password', self.password))
        if self.dest_file_name is not None:
                query_params.append(('destFileName', self.dest_file_name))
        if self.revision_author is not None:
                query_params.append(('revisionAuthor', self.revision_author))
        if self.revision_date_time is not None:
                query_params.append(('revisionDateTime', self.revision_date_time))
        header_params = {}
        # The document and list payload are sent as multipart form data.
        header_params['Content-Type'] = api_client.select_header_content_type(
            ['multipart/form-data'])
        form_params = []
        if self.document is not None:
            form_params.append(['document', self.document, 'file'])
        if self.list_insert is not None:
            form_params.append(['listInsert', self.list_insert.to_json(), 'string'])
        body_params = None
        return {
            "method": "PUT",
            "path": path,
            "query_params": query_params,
            "header_params": header_params,
            "form_params": form_params,
            "body": body_params,
            "collection_formats": collection_formats,
            "response_type": 'InsertListOnlineResponse'
        }

    def get_response_type(self):
        """Return the response model class name used for deserialization."""
        return 'InsertListOnlineResponse'

    def deserialize_response(self, api_client, response):
        """
        Split the multipart response: part 0 is the JSON list model, part 1
        is the resulting document file.
        """
        multipart = self.getparts(response)
        return InsertListOnlineResponse(
            self.deserialize(json.loads(multipart[0].text), ListResponse, api_client),
            self.deserialize_file(multipart[1].content, multipart[1].headers, api_client))
| true
| true
|
f71aad2d5eeb4c38a35396239e2ecb41a34883a8
| 1,177
|
py
|
Python
|
test/test_execute_python.py
|
RuneLjungmann/excelbind
|
29522ec43ce691dfd591b0452d63b7e1b36ad875
|
[
"MIT"
] | 8
|
2020-09-25T08:57:31.000Z
|
2022-02-02T18:52:09.000Z
|
test/test_execute_python.py
|
RuneLjungmann/excelbind
|
29522ec43ce691dfd591b0452d63b7e1b36ad875
|
[
"MIT"
] | 2
|
2021-09-05T11:19:36.000Z
|
2021-09-08T00:13:48.000Z
|
test/test_execute_python.py
|
RuneLjungmann/excelbind
|
29522ec43ce691dfd591b0452d63b7e1b36ad875
|
[
"MIT"
] | 1
|
2020-09-25T08:56:25.000Z
|
2020-09-25T08:56:25.000Z
|
from test.utilities.env_vars import set_env_vars
from test.utilities.excel import Excel
def test_simple_script_for_addition(xll_addin_path):
    """End-to-end check that ``excelbind.execute_python`` adds two numeric
    cells inside a live Excel instance driven through the test harness."""
    with set_env_vars('basic_functions'):
        with Excel() as excel:
            excel.register_xll(xll_addin_path)
            # Fluent builder: fill A1/A2, put the excelbind formula in B1,
            # then force a recalculation before reading the result.
            (
                excel.new_workbook()
                .range('A1').set(3.0)
                .range('A2').set(4.0)
                .range('B1').set_formula('=excelbind.execute_python("return arg0 + arg1", A1, A2)')
                .calculate()
            )
            assert excel.range('B1').value == 7.0
            print("done testing")
def test_combination_str_n_float(xll_addin_path):
    """End-to-end check that ``excelbind.execute_python`` can mix a string
    argument with a float converted via ``str()`` inside Excel."""
    with set_env_vars('basic_functions'):
        with Excel() as excel:
            excel.register_xll(xll_addin_path)
            # Fluent builder: string in A1, float in A2, concatenating
            # formula in B1, then recalculate before asserting.
            (
                excel.new_workbook()
                .range('A1').set("Hello times ")
                .range('A2').set(3.0)
                .range('B1').set_formula('=excelbind.execute_python("return arg0 + str(arg1)", A1, A2)')
                .calculate()
            )
            assert excel.range('B1').value == 'Hello times 3.0'
            print("done testing")
| 31.810811
| 104
| 0.548853
|
from test.utilities.env_vars import set_env_vars
from test.utilities.excel import Excel
def test_simple_script_for_addition(xll_addin_path):
    """End-to-end check that ``excelbind.execute_python`` adds two numeric
    cells inside a live Excel instance driven through the test harness."""
    with set_env_vars('basic_functions'):
        with Excel() as excel:
            excel.register_xll(xll_addin_path)
            # Fluent builder: fill A1/A2, put the excelbind formula in B1,
            # then force a recalculation before reading the result.
            (
                excel.new_workbook()
                .range('A1').set(3.0)
                .range('A2').set(4.0)
                .range('B1').set_formula('=excelbind.execute_python("return arg0 + arg1", A1, A2)')
                .calculate()
            )
            assert excel.range('B1').value == 7.0
            print("done testing")
def test_combination_str_n_float(xll_addin_path):
    """End-to-end check that ``excelbind.execute_python`` can mix a string
    argument with a float converted via ``str()`` inside Excel."""
    with set_env_vars('basic_functions'):
        with Excel() as excel:
            excel.register_xll(xll_addin_path)
            # Fluent builder: string in A1, float in A2, concatenating
            # formula in B1, then recalculate before asserting.
            (
                excel.new_workbook()
                .range('A1').set("Hello times ")
                .range('A2').set(3.0)
                .range('B1').set_formula('=excelbind.execute_python("return arg0 + str(arg1)", A1, A2)')
                .calculate()
            )
            assert excel.range('B1').value == 'Hello times 3.0'
            print("done testing")
| true
| true
|
f71aad9b00e3ad94ed69d13f4f8b2c42d39eda6d
| 2,324
|
py
|
Python
|
tempest/tests/lib/services/compute/test_tenant_networks_client.py
|
mail2nsrajesh/tempest
|
1a3b3dc50b418d3a15839830d7d1ff88c8c76cff
|
[
"Apache-2.0"
] | 2
|
2015-08-13T00:07:49.000Z
|
2020-08-07T06:38:44.000Z
|
tempest/tests/lib/services/compute/test_tenant_networks_client.py
|
mail2nsrajesh/tempest
|
1a3b3dc50b418d3a15839830d7d1ff88c8c76cff
|
[
"Apache-2.0"
] | 1
|
2019-08-08T10:36:44.000Z
|
2019-08-09T05:58:23.000Z
|
tempest/tests/lib/services/compute/test_tenant_networks_client.py
|
mail2nsrajesh/tempest
|
1a3b3dc50b418d3a15839830d7d1ff88c8c76cff
|
[
"Apache-2.0"
] | 5
|
2016-06-24T20:03:52.000Z
|
2020-02-05T10:14:54.000Z
|
# Copyright 2015 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib.services.compute import tenant_networks_client
from tempest.tests.lib import fake_auth_provider
from tempest.tests.lib.services import base
class TestTenantNetworksClient(base.BaseServiceTest):
    """Unit tests for the compute TenantNetworksClient, exercising the
    list and show endpoints against canned fake responses."""

    # Canned network payload returned by the mocked REST layer.
    FAKE_NETWORK = {
        "cidr": "None",
        "id": "c2329eb4-cc8e-4439-ac4c-932369309e36",
        "label": u'\u30d7'
    }

    FAKE_NETWORKS = [FAKE_NETWORK]

    NETWORK_ID = FAKE_NETWORK['id']

    def setUp(self):
        """Create the client under test with a fake auth provider."""
        super(TestTenantNetworksClient, self).setUp()
        fake_auth = fake_auth_provider.FakeAuthProvider()
        self.client = tenant_networks_client.TenantNetworksClient(
            fake_auth, 'compute', 'regionOne')

    def _test_list_tenant_networks(self, bytes_body=False):
        """Shared body for the list tests; bytes_body toggles whether the
        fake response is delivered as bytes or str."""
        self.check_service_client_function(
            self.client.list_tenant_networks,
            'tempest.lib.common.rest_client.RestClient.get',
            {"networks": self.FAKE_NETWORKS},
            bytes_body)

    def test_list_tenant_networks_with_str_body(self):
        self._test_list_tenant_networks()

    def test_list_tenant_networks_with_bytes_body(self):
        self._test_list_tenant_networks(bytes_body=True)

    def _test_show_tenant_network(self, bytes_body=False):
        """Shared body for the show tests; bytes_body toggles the fake
        response encoding."""
        self.check_service_client_function(
            self.client.show_tenant_network,
            'tempest.lib.common.rest_client.RestClient.get',
            {"network": self.FAKE_NETWORK},
            bytes_body,
            network_id=self.NETWORK_ID)

    def test_show_tenant_network_with_str_body(self):
        self._test_show_tenant_network()

    def test_show_tenant_network_with_bytes_body(self):
        self._test_show_tenant_network(bytes_body=True)
| 36.3125
| 78
| 0.711274
|
from tempest.lib.services.compute import tenant_networks_client
from tempest.tests.lib import fake_auth_provider
from tempest.tests.lib.services import base
class TestTenantNetworksClient(base.BaseServiceTest):
    """Unit tests for the compute TenantNetworksClient, exercising the
    list and show endpoints against canned fake responses."""

    # Canned network payload returned by the mocked REST layer.
    FAKE_NETWORK = {
        "cidr": "None",
        "id": "c2329eb4-cc8e-4439-ac4c-932369309e36",
        "label": u'\u30d7'
    }

    FAKE_NETWORKS = [FAKE_NETWORK]

    NETWORK_ID = FAKE_NETWORK['id']

    def setUp(self):
        """Create the client under test with a fake auth provider."""
        super(TestTenantNetworksClient, self).setUp()
        fake_auth = fake_auth_provider.FakeAuthProvider()
        self.client = tenant_networks_client.TenantNetworksClient(
            fake_auth, 'compute', 'regionOne')

    def _test_list_tenant_networks(self, bytes_body=False):
        """Shared body for the list tests; bytes_body toggles whether the
        fake response is delivered as bytes or str."""
        self.check_service_client_function(
            self.client.list_tenant_networks,
            'tempest.lib.common.rest_client.RestClient.get',
            {"networks": self.FAKE_NETWORKS},
            bytes_body)

    def test_list_tenant_networks_with_str_body(self):
        self._test_list_tenant_networks()

    def test_list_tenant_networks_with_bytes_body(self):
        self._test_list_tenant_networks(bytes_body=True)

    def _test_show_tenant_network(self, bytes_body=False):
        """Shared body for the show tests; bytes_body toggles the fake
        response encoding."""
        self.check_service_client_function(
            self.client.show_tenant_network,
            'tempest.lib.common.rest_client.RestClient.get',
            {"network": self.FAKE_NETWORK},
            bytes_body,
            network_id=self.NETWORK_ID)

    def test_show_tenant_network_with_str_body(self):
        self._test_show_tenant_network()

    def test_show_tenant_network_with_bytes_body(self):
        self._test_show_tenant_network(bytes_body=True)
| true
| true
|
f71aadd3961afa04dc66e19d75c3c36540a1b948
| 1,264
|
py
|
Python
|
bilalcoin/flatpages_main/migrations/0001_initial.py
|
jphaser/bilalcoin
|
31d8b466912e009c31615b0b1df1afe68ab4bdb8
|
[
"MIT"
] | null | null | null |
bilalcoin/flatpages_main/migrations/0001_initial.py
|
jphaser/bilalcoin
|
31d8b466912e009c31615b0b1df1afe68ab4bdb8
|
[
"MIT"
] | 1
|
2022-03-31T03:16:16.000Z
|
2022-03-31T03:16:16.000Z
|
bilalcoin/flatpages_main/migrations/0001_initial.py
|
jphaser/bilalcoin
|
31d8b466912e009c31615b0b1df1afe68ab4bdb8
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.3 on 2021-05-21 04:17
from django.db import migrations, models
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
    """Initial migration: creates the ``FAQ`` model (question/answer pairs
    with an ``active`` flag and model_utils-managed timestamps)."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='FAQ',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # created/modified are auto-populated by model_utils fields.
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
                ('question', models.CharField(blank=True, max_length=500, null=True, unique=True, verbose_name='FAQ Question')),
                ('answer', models.TextField(blank=True, null=True, unique=True, verbose_name='FAQ Answer')),
                ('active', models.BooleanField(default=False, verbose_name='FAQ Active?')),
            ],
            options={
                'verbose_name': 'FAQ',
                'verbose_name_plural': 'FAQs',
                'ordering': ['created'],
            },
        ),
    ]
| 38.30303
| 147
| 0.613924
|
from django.db import migrations, models
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
    """Initial migration: creates the ``FAQ`` model (question/answer pairs
    with an ``active`` flag and model_utils-managed timestamps)."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='FAQ',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # created/modified are auto-populated by model_utils fields.
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
                ('question', models.CharField(blank=True, max_length=500, null=True, unique=True, verbose_name='FAQ Question')),
                ('answer', models.TextField(blank=True, null=True, unique=True, verbose_name='FAQ Answer')),
                ('active', models.BooleanField(default=False, verbose_name='FAQ Active?')),
            ],
            options={
                'verbose_name': 'FAQ',
                'verbose_name_plural': 'FAQs',
                'ordering': ['created'],
            },
        ),
    ]
| true
| true
|
f71aaddfd333847ace11e0163cb2a3644b0168e0
| 49,007
|
py
|
Python
|
salt/crypt.py
|
preoctopus/salt
|
aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d
|
[
"Apache-2.0"
] | null | null | null |
salt/crypt.py
|
preoctopus/salt
|
aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d
|
[
"Apache-2.0"
] | null | null | null |
salt/crypt.py
|
preoctopus/salt
|
aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
The crypt module manages all of the cryptography functions for minions and
masters, encrypting and decrypting payloads, preparing messages, and
authenticating peers
'''
# Import python libs
from __future__ import absolute_import, print_function
import os
import sys
import copy
import time
import hmac
import base64
import hashlib
import logging
import stat
import traceback
import binascii
import weakref
# Import third party libs
import salt.ext.six as six
from salt.ext.six.moves import zip # pylint: disable=import-error,redefined-builtin
try:
from Crypto.Cipher import AES, PKCS1_OAEP
from Crypto.Hash import SHA
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
# let this be imported, if possible
import Crypto.Random # pylint: disable=W0611
except ImportError:
# No need for crypt in local mode
pass
# Import salt libs
import salt.defaults.exitcodes
import salt.utils
import salt.payload
import salt.transport.client
import salt.utils.rsax931
import salt.utils.verify
import salt.version
from salt.exceptions import (
AuthenticationError, SaltClientError, SaltReqTimeoutError, SaltSystemExit
)
import tornado.gen
log = logging.getLogger(__name__)
def dropfile(cachedir, user=None):
    '''
    Set an AES dropfile to request the master update the publish session key.

    :param str cachedir: Directory in which the ``.dfn`` dropfile is created
    :param str user: Optional system user who should own the dropfile
    '''
    dfn = os.path.join(cachedir, '.dfn')
    # set a mask (to avoid a race condition on file creation) and store original.
    mask = os.umask(191)
    try:
        log.info('Rotating AES key')
        if os.path.isfile(dfn):
            log.info('AES key rotation already requested')
            return
        # NOTE: the previous `isfile and not access` chmod branch was
        # unreachable dead code (the early return above already fired
        # whenever the file existed) and has been removed.
        # Write b'' (not '') so the write is valid on Python 3, where a
        # file opened in binary mode rejects text strings.
        with salt.utils.fopen(dfn, 'wb+') as fp_:
            fp_.write(b'')
        os.chmod(dfn, stat.S_IRUSR)
        if user:
            try:
                import pwd
                uid = pwd.getpwnam(user).pw_uid
                os.chown(dfn, uid, -1)
            except (KeyError, ImportError, OSError, IOError):
                # Best effort: ownership change failure is non-fatal.
                pass
    finally:
        os.umask(mask)  # restore original umask
def gen_keys(keydir, keyname, keysize, user=None):
    '''
    Generate a RSA public keypair for use with salt

    :param str keydir: The directory to write the keypair to
    :param str keyname: The type of salt server for whom this key should be written. (i.e. 'master' or 'minion')
    :param int keysize: The number of bits in the key
    :param str user: The user on the system who should own this keypair

    :rtype: str
    :return: Path on the filesystem to the RSA private key
    '''
    base = os.path.join(keydir, keyname)
    priv = '{0}.pem'.format(base)
    pub = '{0}.pub'.format(base)

    salt.utils.reinit_crypto()
    gen = RSA.generate(bits=keysize, e=65537)
    if os.path.isfile(priv):
        # Between first checking and the generation another process has made
        # a key! Use the winner's key
        return priv
    # Restrict permissions while the private key is written; try/finally
    # guarantees the process umask is restored even if the write raises
    # (the old code leaked the restrictive umask on error).
    cumask = os.umask(191)
    try:
        with salt.utils.fopen(priv, 'wb+') as f:
            f.write(gen.exportKey('PEM'))
    finally:
        os.umask(cumask)
    with salt.utils.fopen(pub, 'wb+') as f:
        f.write(gen.publickey().exportKey('PEM'))
    os.chmod(priv, 256)
    if user:
        try:
            import pwd
            uid = pwd.getpwnam(user).pw_uid
            os.chown(priv, uid, -1)
            os.chown(pub, uid, -1)
        except (KeyError, ImportError, OSError):
            # The specified user was not found, allow the backup systems to
            # report the error
            pass
    return priv
def sign_message(privkey_path, message):
    '''
    Sign *message* with the RSA private key stored at *privkey_path* using
    PKCS#1 v1.5 and return the raw signature bytes.
    '''
    log.debug('salt.crypt.sign_message: Loading private key')
    with salt.utils.fopen(privkey_path) as key_file:
        private_key = RSA.importKey(key_file.read())
    log.debug('salt.crypt.sign_message: Signing message.')
    digest = SHA.new(message)
    return PKCS1_v1_5.new(private_key).sign(digest)
def verify_signature(pubkey_path, message, signature):
    '''
    Verify *signature* over *message* against the RSA public key stored at
    *pubkey_path* using PKCS#1 v1.5. Returns True for a valid signature.
    '''
    log.debug('salt.crypt.verify_signature: Loading public key')
    with salt.utils.fopen(pubkey_path) as key_file:
        public_key = RSA.importKey(key_file.read())
    log.debug('salt.crypt.verify_signature: Verifying signature')
    digest = SHA.new(message)
    return PKCS1_v1_5.new(public_key).verify(digest, signature)
def gen_signature(priv_path, pub_path, sign_path):
    '''
    Creates a signature for the given public-key with
    the given private key and writes it to sign_path.

    :return: ``True`` if the signature file was written, ``False`` if
        ``sign_path`` already exists (it is never overwritten).
    '''
    # Check for an existing signature file up front: the old code signed
    # first and re-checked afterwards, which both wasted the signing work
    # and left the second `isfile` check as unreachable dead code.
    if os.path.isfile(sign_path):
        return False

    with salt.utils.fopen(pub_path) as fp_:
        mpub_64 = fp_.read()

    log.trace('Calculating signature for {0} with {1}'
              .format(os.path.basename(pub_path),
                      os.path.basename(priv_path)))
    mpub_sig = sign_message(priv_path, mpub_64)
    mpub_sig_64 = binascii.b2a_base64(mpub_sig)

    with salt.utils.fopen(sign_path, 'wb+') as sig_f:
        sig_f.write(mpub_sig_64)
    log.trace('Wrote signature to {0}'.format(sign_path))
    return True
def private_encrypt(key, message):
    '''
    Generate an M2Crypto-compatible signature

    :param Crypto.PublicKey.RSA._RSAobj key: The RSA key object
    :param str message: The message to sign
    :rtype: str
    :return: The signature, or an empty string if the signature operation failed
    '''
    pem_key = key.exportKey('PEM')
    return salt.utils.rsax931.RSAX931Signer(pem_key).sign(message)
def public_decrypt(pub, message):
    '''
    Verify an M2Crypto-compatible signature

    :param Crypto.PublicKey.RSA._RSAobj key: The RSA public key object
    :param str message: The signed message to verify
    :rtype: str
    :return: The message (or digest) recovered from the signature, or an
        empty string if the verification failed
    '''
    pem_key = pub.exportKey('PEM')
    return salt.utils.rsax931.RSAX931Verifier(pem_key).verify(message)
class MasterKeys(dict):
    '''
    The Master Keys class is used to manage the RSA public key pair used for
    authentication by the master.

    It also generates a signing key-pair if enabled with master_sign_key_name.
    '''
    def __init__(self, opts):
        super(MasterKeys, self).__init__()
        self.opts = opts
        self.pub_path = os.path.join(self.opts['pki_dir'], 'master.pub')
        self.rsa_path = os.path.join(self.opts['pki_dir'], 'master.pem')

        self.key = self.__get_keys()

        self.pub_signature = None

        # set names for the signing key-pairs
        if opts['master_sign_pubkey']:

            # if only the signature is available, use that
            if opts['master_use_pubkey_signature']:
                self.sig_path = os.path.join(self.opts['pki_dir'],
                                             opts['master_pubkey_signature'])
                if os.path.isfile(self.sig_path):
                    # read via a context manager so the handle is closed
                    # promptly (the old code leaked it)
                    with salt.utils.fopen(self.sig_path) as fp_:
                        self.pub_signature = fp_.read()
                    log.info('Read {0}\'s signature from {1}'
                             ''.format(os.path.basename(self.pub_path),
                                       self.opts['master_pubkey_signature']))
                else:
                    log.error('Signing the master.pub key with a signature is enabled '
                              'but no signature file found at the defined location '
                              '{0}'.format(self.sig_path))
                    log.error('The signature-file may be either named differently '
                              'or has to be created with \'salt-key --gen-signature\'')
                    sys.exit(1)

            # create a new signing key-pair to sign the masters
            # auth-replies when a minion tries to connect
            else:
                self.pub_sign_path = os.path.join(self.opts['pki_dir'],
                                                  opts['master_sign_key_name'] + '.pub')
                self.rsa_sign_path = os.path.join(self.opts['pki_dir'],
                                                  opts['master_sign_key_name'] + '.pem')
                self.sign_key = self.__get_keys(name=opts['master_sign_key_name'])

    # We need __setstate__ and __getstate__ to avoid pickling errors since
    # some of the member variables correspond to Cython objects which are
    # not picklable.
    # These methods are only used when pickling so will not be used on
    # non-Windows platforms.
    def __setstate__(self, state):
        self.__init__(state['opts'])

    def __getstate__(self):
        return {'opts': self.opts}

    def __get_keys(self, name='master'):
        '''
        Returns a key object for a key in the pki-dir, generating the
        key-pair first if it does not exist yet.
        '''
        path = os.path.join(self.opts['pki_dir'],
                            name + '.pem')
        if os.path.exists(path):
            with salt.utils.fopen(path) as f:
                key = RSA.importKey(f.read())
            log.debug('Loaded {0} key: {1}'.format(name, path))
        else:
            log.info('Generating {0} keys: {1}'.format(name, self.opts['pki_dir']))
            gen_keys(self.opts['pki_dir'],
                     name,
                     self.opts['keysize'],
                     self.opts.get('user'))
            # BUG FIX: read back the key generated for *name*; the old code
            # unconditionally read self.rsa_path (master.pem), so requesting
            # the signing key returned the master key instead.
            with salt.utils.fopen(path) as f:
                key = RSA.importKey(f.read())
        return key

    def get_pub_str(self, name='master'):
        '''
        Return the string representation of a public key
        in the pki-directory
        '''
        path = os.path.join(self.opts['pki_dir'],
                            name + '.pub')
        if not os.path.isfile(path):
            # BUG FIX: forward the requested name; the old code called
            # self.__get_keys() without it and always exported the *master*
            # public key, even when a differently-named key was requested.
            key = self.__get_keys(name=name)
            with salt.utils.fopen(path, 'wb+') as f:
                f.write(key.publickey().exportKey('PEM'))
        # read via a context manager so the handle is closed promptly
        with salt.utils.fopen(path) as f:
            return f.read()

    def get_mkey_paths(self):
        return self.pub_path, self.rsa_path

    def get_sign_paths(self):
        return self.pub_sign_path, self.rsa_sign_path

    def pubkey_signature(self):
        '''
        returns the base64 encoded signature from the signature file
        or None if the master has its own signing keys
        '''
        return self.pub_signature
class AsyncAuth(object):
    '''
    Set up an Async object to maintain authentication with the salt master
    '''
    # This class is only a singleton per minion/master pair
    # mapping of io_loop -> {key -> auth}
    instance_map = weakref.WeakKeyDictionary()

    # mapping of key -> creds
    creds_map = {}

    def __new__(cls, opts, io_loop=None):
        '''
        Only create one instance of SAuth per __key()
        '''
        # do we have any mapping for this io_loop
        io_loop = io_loop or tornado.ioloop.IOLoop.current()
        if io_loop not in AsyncAuth.instance_map:
            AsyncAuth.instance_map[io_loop] = weakref.WeakValueDictionary()
        loop_instance_map = AsyncAuth.instance_map[io_loop]

        key = cls.__key(opts)
        if key not in loop_instance_map:
            log.debug('Initializing new SAuth for {0}'.format(key))
            # we need to make a local variable for this, as we are going to store
            # it in a WeakValueDictionary-- which will remove the item if no one
            # references it-- this forces a reference while we return to the caller
            new_auth = object.__new__(cls)
            new_auth.__singleton_init__(opts, io_loop=io_loop)
            loop_instance_map[key] = new_auth
        else:
            log.debug('Re-using SAuth for {0}'.format(key))
        return loop_instance_map[key]

    @classmethod
    def __key(cls, opts, io_loop=None):
        # Identity of a minion/master pairing; used as the singleton and
        # credential-cache key.
        return (opts['pki_dir'],     # where the keys are stored
                opts['id'],          # minion ID
                opts['master_uri'],  # master ID
                )

    # has to remain empty for singletons, since __init__ will *always* be called
    def __init__(self, opts, io_loop=None):
        pass

    # an init for the singleton instance to call
    def __singleton_init__(self, opts, io_loop=None):
        '''
        Init an Auth instance

        :param dict opts: Options for this server
        :return: Auth instance
        :rtype: Auth
        '''
        self.opts = opts
        # random token the master must echo back encrypted, proving it holds
        # our public key
        self.token = Crypticle.generate_key_string()
        self.serial = salt.payload.Serial(self.opts)
        self.pub_path = os.path.join(self.opts['pki_dir'], 'minion.pub')
        self.rsa_path = os.path.join(self.opts['pki_dir'], 'minion.pem')
        # choose the cached-master-pubkey filename based on our role
        if 'syndic_master' in self.opts:
            self.mpub = 'syndic_master.pub'
        elif 'alert_master' in self.opts:
            self.mpub = 'monitor_master.pub'
        else:
            self.mpub = 'minion_master.pub'
        if not os.path.isfile(self.pub_path):
            self.get_keys()

        self.io_loop = io_loop or tornado.ioloop.IOLoop.current()

        salt.utils.reinit_crypto()
        key = self.__key(self.opts)
        # TODO: if we already have creds for this key, lets just re-use
        if key in AsyncAuth.creds_map:
            # reuse cached credentials: present an already-resolved future
            creds = AsyncAuth.creds_map[key]
            self._creds = creds
            self._crypticle = Crypticle(self.opts, creds['aes'])
            self._authenticate_future = tornado.concurrent.Future()
            self._authenticate_future.set_result(True)
        else:
            self.authenticate()

    def __deepcopy__(self, memo):
        # Custom deepcopy that skips the io_loop (it contains a thread lock).
        cls = self.__class__
        result = cls.__new__(cls, copy.deepcopy(self.opts, memo), io_loop=None)
        memo[id(self)] = result
        for key in self.__dict__:
            if key in ('io_loop',):
                # The io_loop has a thread Lock which will fail to be deep
                # copied. Skip it because it will just be recreated on the
                # new copy.
                continue
            setattr(result, key, copy.deepcopy(self.__dict__[key], memo))
        return result

    @property
    def creds(self):
        # credentials dict obtained from the last successful sign-in
        return self._creds

    @property
    def crypticle(self):
        # Crypticle built from the current shared AES key
        return self._crypticle

    @property
    def authenticated(self):
        # True only once a sign-in future exists, completed, and succeeded
        return hasattr(self, '_authenticate_future') and \
               self._authenticate_future.done() and \
               self._authenticate_future.exception() is None

    def invalidate(self):
        # Drop the completed sign-in future and any cached credentials so the
        # next authenticate() performs a fresh sign-in.
        if self.authenticated:
            del self._authenticate_future
            key = self.__key(self.opts)
            if key in AsyncAuth.creds_map:
                del AsyncAuth.creds_map[key]

    def authenticate(self, callback=None):
        '''
        Ask for this client to reconnect to the origin

        This function will de-dupe all calls here and return a *single* future
        for the sign-in-- whis way callers can all assume there aren't others
        '''
        # if an auth is in flight-- and not done-- just pass that back as the future to wait on
        if hasattr(self, '_authenticate_future') and not self._authenticate_future.done():
            future = self._authenticate_future
        else:
            future = tornado.concurrent.Future()
            self._authenticate_future = future
            self.io_loop.add_callback(self._authenticate)

        if callback is not None:
            def handle_future(future):
                response = future.result()
                self.io_loop.add_callback(callback, response)
            future.add_done_callback(handle_future)

        return future

    @tornado.gen.coroutine
    def _authenticate(self):
        '''
        Authenticate with the master, this method breaks the functional
        paradigm, it will update the master information from a fresh sign
        in, signing in can occur as often as needed to keep up with the
        revolving master AES key.

        :rtype: Crypticle
        :returns: A crypticle used for encryption operations
        '''
        acceptance_wait_time = self.opts['acceptance_wait_time']
        acceptance_wait_time_max = self.opts['acceptance_wait_time_max']
        if not acceptance_wait_time_max:
            acceptance_wait_time_max = acceptance_wait_time
        creds = None
        # retry loop: sign_in() returns 'retry' while our key is unaccepted
        while True:
            try:
                creds = yield self.sign_in()
            except SaltClientError:
                break
            if creds == 'retry':
                if self.opts.get('caller'):
                    print('Minion failed to authenticate with the master, '
                          'has the minion key been accepted?')
                    sys.exit(2)
                if acceptance_wait_time:
                    log.info('Waiting {0} seconds before retry.'.format(acceptance_wait_time))
                    yield tornado.gen.sleep(acceptance_wait_time)
                if acceptance_wait_time < acceptance_wait_time_max:
                    # exponential backoff (doubles) up to the configured max
                    acceptance_wait_time += acceptance_wait_time
                    log.debug('Authentication wait time is {0}'.format(acceptance_wait_time))
                continue
            break
        if not isinstance(creds, dict) or 'aes' not in creds:
            # sign-in failed: drop any stale cached creds and fail the future
            try:
                del AsyncAuth.creds_map[self.__key(self.opts)]
            except KeyError:
                pass
            self._authenticate_future.set_exception(
                SaltClientError('Attempt to authenticate with the salt master failed')
            )
        else:
            AsyncAuth.creds_map[self.__key(self.opts)] = creds
            self._creds = creds
            self._crypticle = Crypticle(self.opts, creds['aes'])
            self._authenticate_future.set_result(True)  # mark the sign-in as complete

    @tornado.gen.coroutine
    def sign_in(self, timeout=60, safe=True, tries=1):
        '''
        Send a sign in request to the master, sets the key information and
        returns a dict containing the master publish interface to bind to
        and the decrypted aes key for transport decryption.

        :param int timeout: Number of seconds to wait before timing out the sign-in request
        :param bool safe: If True, do not raise an exception on timeout. Retry instead.
        :param int tries: The number of times to try to authenticate before giving up.

        :raises SaltReqTimeoutError: If the sign-in request has timed out and :param safe: is not set

        :return: Return a string on failure indicating the reason for failure. On success, return a dictionary
                 with the publication port and the shared AES key.
        '''
        auth = {}

        # opts-level overrides take precedence over the call arguments
        auth_timeout = self.opts.get('auth_timeout', None)
        if auth_timeout is not None:
            timeout = auth_timeout
        auth_safemode = self.opts.get('auth_safemode', None)
        if auth_safemode is not None:
            safe = auth_safemode
        auth_tries = self.opts.get('auth_tries', None)
        if auth_tries is not None:
            tries = auth_tries

        m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)

        auth['master_uri'] = self.opts['master_uri']

        # the sign-in itself travels over a clear (unencrypted) channel
        channel = salt.transport.client.AsyncReqChannel.factory(self.opts,
                                                                crypt='clear',
                                                                io_loop=self.io_loop)

        try:
            payload = yield channel.send(
                self.minion_sign_in_payload(),
                tries=tries,
                timeout=timeout
            )
        except SaltReqTimeoutError as e:
            if safe:
                log.warning('SaltReqTimeoutError: {0}'.format(e))
                raise tornado.gen.Return('retry')
            raise SaltClientError('Attempt to authenticate with the salt master failed with timeout error')

        if 'load' in payload:
            if 'ret' in payload['load']:
                if not payload['load']['ret']:
                    if self.opts['rejected_retry']:
                        log.error(
                            'The Salt Master has rejected this minion\'s public '
                            'key.\nTo repair this issue, delete the public key '
                            'for this minion on the Salt Master.\nThe Salt '
                            'Minion will attempt to to re-authenicate.'
                        )
                        raise tornado.gen.Return('retry')
                    else:
                        log.critical(
                            'The Salt Master has rejected this minion\'s public '
                            'key!\nTo repair this issue, delete the public key '
                            'for this minion on the Salt Master and restart this '
                            'minion.\nOr restart the Salt Master in open mode to '
                            'clean out the keys. The Salt Minion will now exit.'
                        )
                        sys.exit(salt.defaults.exitcodes.EX_OK)
                # has the master returned that its maxed out with minions?
                elif payload['load']['ret'] == 'full':
                    raise tornado.gen.Return('full')
                else:
                    # key is cached but not yet accepted; wait and retry
                    log.error(
                        'The Salt Master has cached the public key for this '
                        'node, this salt minion will wait for {0} seconds '
                        'before attempting to re-authenticate'.format(
                            self.opts['acceptance_wait_time']
                        )
                    )
                    raise tornado.gen.Return('retry')
        auth['aes'] = self.verify_master(payload)
        if not auth['aes']:
            log.critical(
                'The Salt Master server\'s public key did not authenticate!\n'
                'The master may need to be updated if it is a version of Salt '
                'lower than {0}, or\n'
                'If you are confident that you are connecting to a valid Salt '
                'Master, then remove the master public key and restart the '
                'Salt Minion.\nThe master public key can be found '
                'at:\n{1}'.format(salt.version.__version__, m_pub_fn)
            )
            raise SaltSystemExit('Invalid master key')
        # optional fingerprint pinning of the master's public key
        # NOTE(review): unlike SAuth.sign_in, these pem_finger calls do not
        # pass sum_type=self.opts['hash_type'] -- confirm intended.
        if self.opts.get('syndic_master', False):  # Is syndic
            syndic_finger = self.opts.get('syndic_finger', self.opts.get('master_finger', False))
            if syndic_finger:
                if salt.utils.pem_finger(m_pub_fn) != syndic_finger:
                    self._finger_fail(syndic_finger, m_pub_fn)
        else:
            if self.opts.get('master_finger', False):
                if salt.utils.pem_finger(m_pub_fn) != self.opts['master_finger']:
                    self._finger_fail(self.opts['master_finger'], m_pub_fn)
        auth['publish_port'] = payload['publish_port']
        raise tornado.gen.Return(auth)

    def get_keys(self):
        '''
        Return keypair object for the minion.

        :rtype: Crypto.PublicKey.RSA._RSAobj
        :return: The RSA keypair
        '''
        # Make sure all key parent directories are accessible
        user = self.opts.get('user', 'root')
        salt.utils.verify.check_path_traversal(self.opts['pki_dir'], user)

        if os.path.exists(self.rsa_path):
            with salt.utils.fopen(self.rsa_path) as f:
                key = RSA.importKey(f.read())
            log.debug('Loaded minion key: {0}'.format(self.rsa_path))
        else:
            log.info('Generating keys: {0}'.format(self.opts['pki_dir']))
            gen_keys(self.opts['pki_dir'],
                     'minion',
                     self.opts['keysize'],
                     self.opts.get('user'))
            with salt.utils.fopen(self.rsa_path) as f:
                key = RSA.importKey(f.read())
        return key

    def gen_token(self, clear_tok):
        '''
        Encrypt a string with the minion private key to verify identity
        with the master.

        :param str clear_tok: A plaintext token to encrypt
        :return: Encrypted token
        :rtype: str
        '''
        return private_encrypt(self.get_keys(), clear_tok)

    def minion_sign_in_payload(self):
        '''
        Generates the payload used to authenticate with the master
        server. This payload consists of the passed in id_ and the ssh
        public key to encrypt the AES key sent back from the master.

        :return: Payload dictionary
        :rtype: dict
        '''
        payload = {}
        payload['cmd'] = '_auth'
        payload['id'] = self.opts['id']
        try:
            # encrypt our random token with the master's public key (if we
            # have it cached); the master proves itself by decrypting it
            pubkey_path = os.path.join(self.opts['pki_dir'], self.mpub)
            with salt.utils.fopen(pubkey_path) as f:
                pub = RSA.importKey(f.read())
            cipher = PKCS1_OAEP.new(pub)
            payload['token'] = cipher.encrypt(self.token)
        except Exception:
            # no cached master pubkey yet -- first connection; token omitted
            pass
        with salt.utils.fopen(self.pub_path) as f:
            payload['pub'] = f.read()
        return payload

    def decrypt_aes(self, payload, master_pub=True):
        '''
        This function is used to decrypt the AES seed phrase returned from
        the master server. The seed phrase is decrypted with the SSH RSA
        host key.

        Pass in the encrypted AES key.
        Returns the decrypted AES seed key, a string

        :param dict payload: The incoming payload. This is a dictionary which may have the following keys:
                             'aes': The shared AES key
                             'enc': The format of the message. ('clear', 'pub', etc)
                             'sig': The message signature
                             'publish_port': The TCP port which published the message
                             'token': The encrypted token used to verify the message.
                             'pub_key': The public key of the sender.

        :rtype: str
        :return: The decrypted token that was provided, with padding.

        :rtype: str
        :return: The decrypted AES seed key
        '''
        if self.opts.get('auth_trb', False):
            log.warning(
                'Auth Called: {0}'.format(
                    ''.join(traceback.format_stack())
                )
            )
        else:
            log.debug('Decrypting the current master AES key')
        key = self.get_keys()
        cipher = PKCS1_OAEP.new(key)
        key_str = cipher.decrypt(payload['aes'])
        if 'sig' in payload:
            m_path = os.path.join(self.opts['pki_dir'], self.mpub)
            if os.path.exists(m_path):
                # verify the master's RSA signature over the AES key digest
                try:
                    with salt.utils.fopen(m_path) as f:
                        mkey = RSA.importKey(f.read())
                except Exception:
                    return '', ''
                digest = hashlib.sha256(key_str).hexdigest()
                m_digest = public_decrypt(mkey.publickey(), payload['sig'])
                if m_digest != digest:
                    return '', ''
        else:
            return '', ''
        if '_|-' in key_str:
            # combined "aes_|-token" form
            return key_str.split('_|-')
        else:
            if 'token' in payload:
                token = cipher.decrypt(payload['token'])
                return key_str, token
            elif not master_pub:
                return key_str, ''
        return '', ''

    def verify_pubkey_sig(self, message, sig):
        '''
        Wraps the verify_signature method so we have
        additional checks.

        :rtype: bool
        :return: Success or failure of public key verification
        '''
        if self.opts['master_sign_key_name']:
            path = os.path.join(self.opts['pki_dir'],
                                self.opts['master_sign_key_name'] + '.pub')

            if os.path.isfile(path):
                res = verify_signature(path,
                                       message,
                                       binascii.a2b_base64(sig))
            else:
                log.error('Verification public key {0} does not exist. You '
                          'need to copy it from the master to the minions '
                          'pki directory'.format(os.path.basename(path)))
                return False
            if res:
                log.debug('Successfully verified signature of master '
                          'public key with verification public key '
                          '{0}'.format(self.opts['master_sign_key_name'] + '.pub'))
                return True
            else:
                log.debug('Failed to verify signature of public key')
                return False
        else:
            log.error('Failed to verify the signature of the message because '
                      'the verification key-pairs name is not defined. Please '
                      'make sure that master_sign_key_name is defined.')
            return False

    def verify_signing_master(self, payload):
        # Verify the signature over the master's pubkey; on success cache the
        # pubkey to disk (self.mpub) and return True.
        try:
            if self.verify_pubkey_sig(payload['pub_key'],
                                      payload['pub_sig']):
                log.info('Received signed and verified master pubkey '
                         'from master {0}'.format(self.opts['master']))
                m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
                uid = salt.utils.get_uid(self.opts.get('user', None))
                with salt.utils.fpopen(m_pub_fn, 'wb+', uid=uid) as wfh:
                    wfh.write(payload['pub_key'])
                return True
            else:
                log.error('Received signed public-key from master {0} '
                          'but signature verification failed!'.format(self.opts['master']))
                return False
        except Exception as sign_exc:
            log.error('There was an error while verifying the masters public-key signature')
            raise Exception(sign_exc)

    def check_auth_deps(self, payload):
        '''
        Checks if both master and minion either sign (master) and
        verify (minion). If one side does not, it should fail.

        :param dict payload: The incoming payload. This is a dictionary which may have the following keys:
                             'aes': The shared AES key
                             'enc': The format of the message. ('clear', 'pub', 'aes')
                             'publish_port': The TCP port which published the message
                             'token': The encrypted token used to verify the message.
                             'pub_key': The RSA public key of the sender.
        '''
        # master and minion sign and verify
        if 'pub_sig' in payload and self.opts['verify_master_pubkey_sign']:
            return True
        # master and minion do NOT sign and do NOT verify
        elif 'pub_sig' not in payload and not self.opts['verify_master_pubkey_sign']:
            return True

        # master signs, but minion does NOT verify
        elif 'pub_sig' in payload and not self.opts['verify_master_pubkey_sign']:
            log.error('The masters sent its public-key signature, but signature '
                      'verification is not enabled on the minion. Either enable '
                      'signature verification on the minion or disable signing '
                      'the public key on the master!')
            return False
        # master does NOT sign but minion wants to verify
        elif 'pub_sig' not in payload and self.opts['verify_master_pubkey_sign']:
            log.error('The master did not send its public-key signature, but '
                      'signature verification is enabled on the minion. Either '
                      'disable signature verification on the minion or enable '
                      'signing the public on the master!')
            return False

    def extract_aes(self, payload, master_pub=True):
        '''
        Return the AES key received from the master after the minion has been
        successfully authenticated.

        :param dict payload: The incoming payload. This is a dictionary which may have the following keys:
                             'aes': The shared AES key
                             'enc': The format of the message. ('clear', 'pub', etc)
                             'publish_port': The TCP port which published the message
                             'token': The encrypted token used to verify the message.
                             'pub_key': The RSA public key of the sender.

        :rtype: str
        :return: The shared AES key received from the master.
        '''
        if master_pub:
            try:
                aes, token = self.decrypt_aes(payload, master_pub)
                if token != self.token:
                    # the echoed token must match the one we sent
                    log.error(
                        'The master failed to decrypt the random minion token'
                    )
                    return ''
            except Exception:
                log.error(
                    'The master failed to decrypt the random minion token'
                )
                return ''
            return aes
        else:
            # token check skipped when the pubkey itself was just verified
            aes, token = self.decrypt_aes(payload, master_pub)
            return aes

    def verify_master(self, payload):
        '''
        Verify that the master is the same one that was previously accepted.

        :param dict payload: The incoming payload. This is a dictionary which may have the following keys:
                             'aes': The shared AES key
                             'enc': The format of the message. ('clear', 'pub', etc)
                             'publish_port': The TCP port which published the message
                             'token': The encrypted token used to verify the message.
                             'pub_key': The RSA public key of the sender.

        :rtype: str
        :return: An empty string on verification failure. On success, the decrypted AES message in the payload.
        '''
        m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
        if os.path.isfile(m_pub_fn) and not self.opts['open_mode']:
            # we have a cached master pubkey; compare ignoring line endings
            local_master_pub = salt.utils.fopen(m_pub_fn).read()

            if payload['pub_key'].replace('\n', '').replace('\r', '') != \
                    local_master_pub.replace('\n', '').replace('\r', ''):
                if not self.check_auth_deps(payload):
                    return ''

                if self.opts['verify_master_pubkey_sign']:
                    if self.verify_signing_master(payload):
                        return self.extract_aes(payload, master_pub=False)
                    else:
                        return ''
                else:
                    # This is not the last master we connected to
                    log.error('The master key has changed, the salt master could '
                              'have been subverted, verify salt master\'s public '
                              'key')
                    return ''
            else:
                if not self.check_auth_deps(payload):
                    return ''
                # verify the signature of the pubkey even if it has
                # not changed compared with the one we already have
                if self.opts['always_verify_signature']:
                    if self.verify_signing_master(payload):
                        return self.extract_aes(payload)
                    else:
                        log.error('The masters public could not be verified. Is the '
                                  'verification pubkey {0} up to date?'
                                  ''.format(self.opts['master_sign_key_name'] + '.pub'))
                        return ''

                else:
                    return self.extract_aes(payload)
        else:
            if not self.check_auth_deps(payload):
                return ''

            # verify the masters pubkey signature if the minion
            # has not received any masters pubkey before
            if self.opts['verify_master_pubkey_sign']:
                if self.verify_signing_master(payload):
                    return self.extract_aes(payload, master_pub=False)
                else:
                    return ''
            # the minion has not received any masters pubkey yet, write
            # the newly received pubkey to minion_master.pub
            else:
                salt.utils.fopen(m_pub_fn, 'wb+').write(payload['pub_key'])
                return self.extract_aes(payload, master_pub=False)
# TODO: remove, we should just return a sync wrapper of AsyncAuth
class SAuth(AsyncAuth):
    '''
    Set up an object to maintain authentication with the salt master
    '''
    # This class is only a singleton per minion/master pair
    instances = weakref.WeakValueDictionary()

    def __new__(cls, opts, io_loop=None):
        '''
        Only create one instance of SAuth per __key()
        '''
        key = cls.__key(opts)
        if key not in SAuth.instances:
            log.debug('Initializing new SAuth for {0}'.format(key))
            new_auth = object.__new__(cls)
            new_auth.__singleton_init__(opts)
            SAuth.instances[key] = new_auth
        else:
            log.debug('Re-using SAuth for {0}'.format(key))
        return SAuth.instances[key]

    @classmethod
    def __key(cls, opts, io_loop=None):
        # Identity of a minion/master pairing; singleton cache key.
        return (opts['pki_dir'],     # where the keys are stored
                opts['id'],          # minion ID
                opts['master_uri'],  # master ID
                )

    # has to remain empty for singletons, since __init__ will *always* be called
    def __init__(self, opts, io_loop=None):
        super(SAuth, self).__init__(opts, io_loop=io_loop)

    # an init for the singleton instance to call
    def __singleton_init__(self, opts, io_loop=None):
        '''
        Init an Auth instance

        :param dict opts: Options for this server
        :return: Auth instance
        :rtype: Auth
        '''
        self.opts = opts
        # random token the master must echo back encrypted
        self.token = Crypticle.generate_key_string()
        self.serial = salt.payload.Serial(self.opts)
        self.pub_path = os.path.join(self.opts['pki_dir'], 'minion.pub')
        self.rsa_path = os.path.join(self.opts['pki_dir'], 'minion.pem')
        # cached-master-pubkey filename depends on our role
        if 'syndic_master' in self.opts:
            self.mpub = 'syndic_master.pub'
        elif 'alert_master' in self.opts:
            self.mpub = 'monitor_master.pub'
        else:
            self.mpub = 'minion_master.pub'
        if not os.path.isfile(self.pub_path):
            self.get_keys()

    @property
    def creds(self):
        # lazily sign in on first access
        if not hasattr(self, '_creds'):
            self.authenticate()
        return self._creds

    @property
    def crypticle(self):
        # lazily sign in on first access
        if not hasattr(self, '_crypticle'):
            self.authenticate()
        return self._crypticle

    def authenticate(self, _=None):  # TODO: remove unused var
        '''
        Authenticate with the master, this method breaks the functional
        paradigm, it will update the master information from a fresh sign
        in, signing in can occur as often as needed to keep up with the
        revolving master AES key.

        :rtype: Crypticle
        :returns: A crypticle used for encryption operations
        '''
        acceptance_wait_time = self.opts['acceptance_wait_time']
        acceptance_wait_time_max = self.opts['acceptance_wait_time_max']
        if not acceptance_wait_time_max:
            acceptance_wait_time_max = acceptance_wait_time
        # blocking retry loop: sign_in() returns 'retry' until accepted
        while True:
            creds = self.sign_in()
            if creds == 'retry':
                if self.opts.get('caller'):
                    print('Minion failed to authenticate with the master, '
                          'has the minion key been accepted?')
                    sys.exit(2)
                if acceptance_wait_time:
                    log.info('Waiting {0} seconds before retry.'.format(acceptance_wait_time))
                    time.sleep(acceptance_wait_time)
                if acceptance_wait_time < acceptance_wait_time_max:
                    # exponential backoff (doubles) up to the configured max
                    acceptance_wait_time += acceptance_wait_time
                    log.debug('Authentication wait time is {0}'.format(acceptance_wait_time))
                continue
            break
        self._creds = creds
        self._crypticle = Crypticle(self.opts, creds['aes'])

    def sign_in(self, timeout=60, safe=True, tries=1):
        '''
        Send a sign in request to the master, sets the key information and
        returns a dict containing the master publish interface to bind to
        and the decrypted aes key for transport decryption.

        :param int timeout: Number of seconds to wait before timing out the sign-in request
        :param bool safe: If True, do not raise an exception on timeout. Retry instead.
        :param int tries: The number of times to try to authenticate before giving up.

        :raises SaltReqTimeoutError: If the sign-in request has timed out and :param safe: is not set

        :return: Return a string on failure indicating the reason for failure. On success, return a dictionary
                 with the publication port and the shared AES key.
        '''
        auth = {}

        # opts-level overrides take precedence over the call arguments
        auth_timeout = self.opts.get('auth_timeout', None)
        if auth_timeout is not None:
            timeout = auth_timeout
        auth_safemode = self.opts.get('auth_safemode', None)
        if auth_safemode is not None:
            safe = auth_safemode
        auth_tries = self.opts.get('auth_tries', None)
        if auth_tries is not None:
            tries = auth_tries

        m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)

        auth['master_uri'] = self.opts['master_uri']

        # synchronous clear-channel request (contrast AsyncReqChannel above)
        channel = salt.transport.client.ReqChannel.factory(self.opts, crypt='clear')

        try:
            payload = channel.send(
                self.minion_sign_in_payload(),
                tries=tries,
                timeout=timeout
            )
        except SaltReqTimeoutError as e:
            if safe:
                log.warning('SaltReqTimeoutError: {0}'.format(e))
                return 'retry'
            raise SaltClientError('Attempt to authenticate with the salt master failed')

        if 'load' in payload:
            if 'ret' in payload['load']:
                if not payload['load']['ret']:
                    if self.opts['rejected_retry']:
                        log.error(
                            'The Salt Master has rejected this minion\'s public '
                            'key.\nTo repair this issue, delete the public key '
                            'for this minion on the Salt Master.\nThe Salt '
                            'Minion will attempt to to re-authenicate.'
                        )
                        return 'retry'
                    else:
                        log.critical(
                            'The Salt Master has rejected this minion\'s public '
                            'key!\nTo repair this issue, delete the public key '
                            'for this minion on the Salt Master and restart this '
                            'minion.\nOr restart the Salt Master in open mode to '
                            'clean out the keys. The Salt Minion will now exit.'
                        )
                        sys.exit(salt.defaults.exitcodes.EX_OK)
                # has the master returned that its maxed out with minions?
                elif payload['load']['ret'] == 'full':
                    return 'full'
                else:
                    # key is cached but not yet accepted; wait and retry
                    log.error(
                        'The Salt Master has cached the public key for this '
                        'node. If this is the first time connecting to this master '
                        'then this key may need to be accepted using \'salt-key -a {0}\' on '
                        'the salt master. This salt minion will wait for {1} seconds '
                        'before attempting to re-authenticate.'.format(
                            self.opts['id'],
                            self.opts['acceptance_wait_time']
                        )
                    )
                    return 'retry'
        auth['aes'] = self.verify_master(payload)
        if not auth['aes']:
            log.critical(
                'The Salt Master server\'s public key did not authenticate!\n'
                'The master may need to be updated if it is a version of Salt '
                'lower than {0}, or\n'
                'If you are confident that you are connecting to a valid Salt '
                'Master, then remove the master public key and restart the '
                'Salt Minion.\nThe master public key can be found '
                'at:\n{1}'.format(salt.version.__version__, m_pub_fn)
            )
            sys.exit(42)
        # optional fingerprint pinning of the master's public key
        if self.opts.get('syndic_master', False):  # Is syndic
            syndic_finger = self.opts.get('syndic_finger', self.opts.get('master_finger', False))
            if syndic_finger:
                if salt.utils.pem_finger(m_pub_fn, sum_type=self.opts['hash_type']) != syndic_finger:
                    self._finger_fail(syndic_finger, m_pub_fn)
        else:
            if self.opts.get('master_finger', False):
                if salt.utils.pem_finger(m_pub_fn, sum_type=self.opts['hash_type']) != self.opts['master_finger']:
                    self._finger_fail(self.opts['master_finger'], m_pub_fn)
        auth['publish_port'] = payload['publish_port']
        return auth

    def _finger_fail(self, finger, master_key):
        # Fatal: configured fingerprint does not match the master's key.
        log.critical(
            'The specified fingerprint in the master configuration '
            'file:\n{0}\nDoes not match the authenticating master\'s '
            'key:\n{1}\nVerify that the configured fingerprint '
            'matches the fingerprint of the correct master and that '
            'this minion is not subject to a man-in-the-middle attack.'
            .format(
                finger,
                salt.utils.pem_finger(master_key, sum_type=self.opts['hash_type'])
            )
        )
        sys.exit(42)
class Crypticle(object):
    '''
    Authenticated encryption class

    Encryption algorithm: AES-CBC
    Signing algorithm: HMAC-SHA256
    '''

    PICKLE_PAD = 'pickle::'      # prefix marking serialized payloads
    AES_BLOCK_SIZE = 16          # AES block size in bytes (for CBC padding)
    SIG_SIZE = hashlib.sha256().digest_size  # HMAC-SHA256 tag length (32)

    def __init__(self, opts, key_string, key_size=192):
        self.key_string = key_string
        self.keys = self.extract_keys(self.key_string, key_size)
        self.key_size = key_size
        self.serial = salt.payload.Serial(opts)

    @classmethod
    def generate_key_string(cls, key_size=192):
        '''
        Generate a new base64-encoded key string containing both the AES
        key (key_size bits) and the HMAC key (SIG_SIZE bytes).

        :param int key_size: AES key size in bits
        :rtype: str
        '''
        key = os.urandom(key_size // 8 + cls.SIG_SIZE)
        b64key = base64.b64encode(key)
        # base64.b64encode returns bytes on Python 3; normalize to str so
        # callers get the same type on both major versions
        if not isinstance(b64key, str):
            b64key = b64key.decode('utf-8')
        return b64key.replace('\n', '')

    @classmethod
    def extract_keys(cls, key_string, key_size):
        '''
        Split a base64 key string into its (aes_key, hmac_key) parts.

        :param str key_string: Output of generate_key_string
        :param int key_size: AES key size in bits
        :return: (aes_key, hmac_key) tuple
        '''
        # base64.b64decode works on both Python 2 and 3; the previous
        # str.decode('base64') codec only exists on Python 2 and was
        # inconsistent with generate_key_string's use of base64.b64encode
        key = base64.b64decode(key_string)
        assert len(key) == key_size // 8 + cls.SIG_SIZE, 'invalid key'
        return key[:-cls.SIG_SIZE], key[-cls.SIG_SIZE:]

    def encrypt(self, data):
        '''
        encrypt data with AES-CBC and sign it with HMAC-SHA256
        '''
        aes_key, hmac_key = self.keys
        # PKCS#7-style padding: pad byte value == number of pad bytes
        pad = self.AES_BLOCK_SIZE - len(data) % self.AES_BLOCK_SIZE
        data = data + pad * chr(pad)
        iv_bytes = os.urandom(self.AES_BLOCK_SIZE)
        cypher = AES.new(aes_key, AES.MODE_CBC, iv_bytes)
        data = iv_bytes + cypher.encrypt(data)
        # MAC covers IV + ciphertext (encrypt-then-MAC)
        sig = hmac.new(hmac_key, data, hashlib.sha256).digest()
        return data + sig

    def decrypt(self, data):
        '''
        verify HMAC-SHA256 signature and decrypt data with AES-CBC

        :raises AuthenticationError: if the HMAC does not verify
        '''
        aes_key, hmac_key = self.keys
        sig = data[-self.SIG_SIZE:]
        data = data[:-self.SIG_SIZE]
        mac_bytes = hmac.new(hmac_key, data, hashlib.sha256).digest()
        if len(mac_bytes) != len(sig):
            log.debug('Failed to authenticate message')
            raise AuthenticationError('message authentication failed')
        # constant-time comparison to avoid leaking the MAC via timing
        result = 0
        for zipped_x, zipped_y in zip(mac_bytes, sig):
            result |= ord(zipped_x) ^ ord(zipped_y)
        if result != 0:
            log.debug('Failed to authenticate message')
            raise AuthenticationError('message authentication failed')
        iv_bytes = data[:self.AES_BLOCK_SIZE]
        data = data[self.AES_BLOCK_SIZE:]
        cypher = AES.new(aes_key, AES.MODE_CBC, iv_bytes)
        data = cypher.decrypt(data)
        # strip the PKCS#7-style padding added by encrypt()
        return data[:-ord(data[-1])]

    def dumps(self, obj):
        '''
        Serialize and encrypt a python object
        '''
        return self.encrypt(self.PICKLE_PAD + self.serial.dumps(obj))

    def loads(self, data):
        '''
        Decrypt and un-serialize a python object
        '''
        data = self.decrypt(data)
        # simple integrity check to verify that we got meaningful data
        if not data.startswith(self.PICKLE_PAD):
            return {}
        return self.serial.loads(data[len(self.PICKLE_PAD):])
| 40.568709
| 114
| 0.576061
|
from __future__ import absolute_import, print_function
import os
import sys
import copy
import time
import hmac
import base64
import hashlib
import logging
import stat
import traceback
import binascii
import weakref
import salt.ext.six as six
from salt.ext.six.moves import zip
try:
from Crypto.Cipher import AES, PKCS1_OAEP
from Crypto.Hash import SHA
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
import Crypto.Random
except ImportError:
pass
import salt.defaults.exitcodes
import salt.utils
import salt.payload
import salt.transport.client
import salt.utils.rsax931
import salt.utils.verify
import salt.version
from salt.exceptions import (
AuthenticationError, SaltClientError, SaltReqTimeoutError, SaltSystemExit
)
import tornado.gen
log = logging.getLogger(__name__)
def dropfile(cachedir, user=None):
    '''
    Set an AES dropfile to request the master update the publish session key.

    :param str cachedir: Directory in which to create the '.dfn' dropfile
    :param str user: Optional system user who should own the dropfile
    '''
    dfn = os.path.join(cachedir, '.dfn')
    # 191 == 0o277: dropfile is created with owner-only permissions
    mask = os.umask(191)
    try:
        log.info('Rotating AES key')
        if os.path.isfile(dfn):
            # a rotation request is already pending; nothing to do
            log.info('AES key rotation already requested')
            return

        # NOTE(review): this branch is unreachable -- the early return above
        # already fired whenever dfn exists. Presumably it was meant to make
        # an existing read-only dropfile writable before rewriting it;
        # confirm intent before removing.
        if os.path.isfile(dfn) and not os.access(dfn, os.W_OK):
            os.chmod(dfn, stat.S_IRUSR | stat.S_IWUSR)
        with salt.utils.fopen(dfn, 'wb+') as fp_:
            fp_.write('')
        # leave the dropfile read-only for the owner
        os.chmod(dfn, stat.S_IRUSR)
        if user:
            try:
                import pwd
                uid = pwd.getpwnam(user).pw_uid
                os.chown(dfn, uid, -1)
            except (KeyError, ImportError, OSError, IOError):
                # best-effort chown: user may not exist or pwd be unavailable
                pass
    finally:
        # always restore the caller's umask
        os.umask(mask)
def gen_keys(keydir, keyname, keysize, user=None):
    '''
    Generate a RSA public keypair for use with salt.

    :param str keydir: The directory to write the keypair to
    :param str keyname: The type of salt server for whom this key should be
                        written (e.g. 'master' or 'minion')
    :param int keysize: The number of bits in the key
    :param str user: The user on the system who should own this keypair
    :return: Path on the filesystem to the RSA private key
    :rtype: str
    '''
    base = os.path.join(keydir, keyname)
    priv = '{0}.pem'.format(base)
    pub = '{0}.pub'.format(base)

    # Return early if the private key already exists -- previously the
    # (expensive) RSA key generation ran unconditionally before this check,
    # wasting a full keypair generation whenever the key was already present.
    if os.path.isfile(priv):
        return priv

    salt.utils.reinit_crypto()
    gen = RSA.generate(bits=keysize, e=65537)
    # 191 == 0o277: create the key files with owner-only permissions
    cumask = os.umask(191)
    with salt.utils.fopen(priv, 'wb+') as f:
        f.write(gen.exportKey('PEM'))
    os.umask(cumask)
    with salt.utils.fopen(pub, 'wb+') as f:
        f.write(gen.publickey().exportKey('PEM'))
    # 256 == 0o400: private key is read-only for the owner
    os.chmod(priv, 256)
    if user:
        try:
            import pwd
            uid = pwd.getpwnam(user).pw_uid
            os.chown(priv, uid, -1)
            os.chown(pub, uid, -1)
        except (KeyError, ImportError, OSError):
            # The specified user was not found, allow the backup systems to
            # report the error
            pass
    return priv
def sign_message(privkey_path, message):
    '''
    Sign *message* with the RSA private key stored at *privkey_path* using
    PKCS#1 v1.5 over a SHA-1 digest and return the raw signature bytes.
    '''
    log.debug('salt.crypt.sign_message: Loading private key')
    with salt.utils.fopen(privkey_path) as keyfile:
        private_key = RSA.importKey(keyfile.read())
    log.debug('salt.crypt.sign_message: Signing message.')
    return PKCS1_v1_5.new(private_key).sign(SHA.new(message))
def verify_signature(pubkey_path, message, signature):
    '''
    Check a PKCS#1 v1.5 *signature* of *message* against the RSA public
    key at *pubkey_path*; returns a truthy value on success.
    '''
    log.debug('salt.crypt.verify_signature: Loading public key')
    with salt.utils.fopen(pubkey_path) as keyfile:
        public_key = RSA.importKey(keyfile.read())
    log.debug('salt.crypt.verify_signature: Verifying signature')
    checker = PKCS1_v1_5.new(public_key)
    return checker.verify(SHA.new(message), signature)
def gen_signature(priv_path, pub_path, sign_path):
    '''
    Sign the public key at *pub_path* with the private key at *priv_path*
    and write the base64-encoded signature to *sign_path*.

    Returns False when a signature file already exists (it is never
    overwritten), True once a new signature has been written.
    '''
    with salt.utils.fopen(pub_path) as fp_:
        mpub_64 = fp_.read()
    mpub_sig = sign_message(priv_path, mpub_64)
    mpub_sig_64 = binascii.b2a_base64(mpub_sig)
    if os.path.isfile(sign_path):
        # Bug fix: this log message previously lived in an unreachable
        # branch after a silent early return; now the refusal is logged.
        log.trace('Signature file {0} already exists, please '
                  'remove it first and try again'.format(sign_path))
        return False
    log.trace('Calculating signature for {0} with {1}'
              .format(os.path.basename(pub_path),
                      os.path.basename(priv_path)))
    with salt.utils.fopen(sign_path, 'wb+') as sig_f:
        sig_f.write(mpub_sig_64)
    log.trace('Wrote signature to {0}'.format(sign_path))
    return True
def private_encrypt(key, message):
    '''
    Sign *message* with the RSA private *key* object via the rsax931
    raw-signature helper; returns the signed blob.
    '''
    pem = key.exportKey('PEM')
    return salt.utils.rsax931.RSAX931Signer(pem).sign(message)
def public_decrypt(pub, message):
    '''
    Recover and verify a blob produced by private_encrypt() using the RSA
    public key object *pub*; returns the original message.
    '''
    pem = pub.exportKey('PEM')
    return salt.utils.rsax931.RSAX931Verifier(pem).verify(message)
class MasterKeys(dict):
    '''
    Manage the RSA key pair used by the master for authentication, and
    optionally the separate signing key pair (or pre-computed signature)
    used to sign the master public key.
    '''
    def __init__(self, opts):
        super(MasterKeys, self).__init__()
        self.opts = opts
        self.pub_path = os.path.join(self.opts['pki_dir'], 'master.pub')
        self.rsa_path = os.path.join(self.opts['pki_dir'], 'master.pem')
        self.key = self.__get_keys()
        self.pub_signature = None
        # set names for the signing key-pairs
        if opts['master_sign_pubkey']:
            # if only the signature is available, use that
            if opts['master_use_pubkey_signature']:
                self.sig_path = os.path.join(self.opts['pki_dir'],
                                             opts['master_pubkey_signature'])
                if os.path.isfile(self.sig_path):
                    self.pub_signature = salt.utils.fopen(self.sig_path).read()
                    log.info('Read {0}\'s signature from {1}'
                             ''.format(os.path.basename(self.pub_path),
                                       self.opts['master_pubkey_signature']))
                else:
                    log.error('Signing the master.pub key with a signature is enabled '
                              'but no signature file found at the defined location '
                              '{0}'.format(self.sig_path))
                    log.error('The signature-file may be either named differently '
                              'or has to be created with \'salt-key --gen-signature\'')
                    sys.exit(1)
            # otherwise load (or create) a dedicated signing key pair
            else:
                self.pub_sign_path = os.path.join(self.opts['pki_dir'],
                                                  opts['master_sign_key_name'] + '.pub')
                self.rsa_sign_path = os.path.join(self.opts['pki_dir'],
                                                  opts['master_sign_key_name'] + '.pem')
                self.sign_key = self.__get_keys(name=opts['master_sign_key_name'])

    # Pickling the master keys only carries the opts; the key objects are
    # reloaded from disk on unpickle.
    def __setstate__(self, state):
        self.__init__(state['opts'])

    def __getstate__(self):
        return {'opts': self.opts}

    def __get_keys(self, name='master'):
        '''
        Load (or generate) the key pair named *name* from the pki dir and
        return the RSA private key object.
        '''
        path = os.path.join(self.opts['pki_dir'],
                            name + '.pem')
        if os.path.exists(path):
            with salt.utils.fopen(path) as f:
                key = RSA.importKey(f.read())
            log.debug('Loaded {0} key: {1}'.format(name, path))
        else:
            log.info('Generating {0} keys: {1}'.format(name, self.opts['pki_dir']))
            gen_keys(self.opts['pki_dir'],
                     name,
                     self.opts['keysize'],
                     self.opts.get('user'))
            # Bug fix: read back the key that was just generated for *name*.
            # The original read self.rsa_path (always the master key), which
            # returned the wrong key when generating the signing pair.
            with salt.utils.fopen(path) as f:
                key = RSA.importKey(f.read())
        return key

    def get_pub_str(self, name='master'):
        '''
        Return the PEM text of the public key, creating it on disk first if
        it is missing.
        '''
        path = os.path.join(self.opts['pki_dir'],
                            name + '.pub')
        if not os.path.isfile(path):
            # NOTE(review): this always loads the default 'master' key even
            # when *name* differs — preserved as-is; confirm against callers.
            key = self.__get_keys()
            with salt.utils.fopen(path, 'wb+') as f:
                f.write(key.publickey().exportKey('PEM'))
        return salt.utils.fopen(path).read()

    def get_mkey_paths(self):
        return self.pub_path, self.rsa_path

    def get_sign_paths(self):
        return self.pub_sign_path, self.rsa_sign_path

    def pubkey_signature(self):
        '''
        Return the cached pre-computed signature of the master pubkey
        (None unless master_use_pubkey_signature is configured).
        '''
        return self.pub_signature
class AsyncAuth(object):
    '''
    Set up an object to maintain authentication with the salt master.
    Instances are singletons per (io_loop, pki_dir, id, master_uri).
    '''
    # Map of io_loop -> weak {key -> AsyncAuth} so each loop re-uses one
    # instance per minion/master combination.
    instance_map = weakref.WeakKeyDictionary()
    # Cache of successful credentials keyed by __key(), shared across loops.
    creds_map = {}
    def __new__(cls, opts, io_loop=None):
        '''
        Only create one instance of AsyncAuth per __key() and io_loop.
        '''
        io_loop = io_loop or tornado.ioloop.IOLoop.current()
        if io_loop not in AsyncAuth.instance_map:
            AsyncAuth.instance_map[io_loop] = weakref.WeakValueDictionary()
        loop_instance_map = AsyncAuth.instance_map[io_loop]
        key = cls.__key(opts)
        if key not in loop_instance_map:
            log.debug('Initializing new SAuth for {0}'.format(key))
            # Init on the singleton, not in __init__ (which runs every call).
            new_auth = object.__new__(cls)
            new_auth.__singleton_init__(opts, io_loop=io_loop)
            loop_instance_map[key] = new_auth
        else:
            log.debug('Re-using SAuth for {0}'.format(key))
        return loop_instance_map[key]
    @classmethod
    def __key(cls, opts, io_loop=None):
        # Identity of an auth instance: which pki dir, minion id and master.
        return (opts['pki_dir'],     # where the keys are stored
                opts['id'],          # minion ID
                opts['master_uri'],  # master ID
                )
    # has to remain empty for singletons, since __init__ will *always* be
    # called (real setup happens once in __singleton_init__)
    def __init__(self, opts, io_loop=None):
        pass
    # an init for the singleton instance to call
    def __singleton_init__(self, opts, io_loop=None):
        self.opts = opts
        # Random token the master must echo back to prove it decrypted it.
        self.token = Crypticle.generate_key_string()
        self.serial = salt.payload.Serial(self.opts)
        self.pub_path = os.path.join(self.opts['pki_dir'], 'minion.pub')
        self.rsa_path = os.path.join(self.opts['pki_dir'], 'minion.pem')
        # Name of the locally cached copy of the master's public key.
        if 'syndic_master' in self.opts:
            self.mpub = 'syndic_master.pub'
        elif 'alert_master' in self.opts:
            self.mpub = 'monitor_master.pub'
        else:
            self.mpub = 'minion_master.pub'
        if not os.path.isfile(self.pub_path):
            self.get_keys()
        self.io_loop = io_loop or tornado.ioloop.IOLoop.current()
        salt.utils.reinit_crypto()
        key = self.__key(self.opts)
        # TODO: if we have creds for this key, lets just use them since the
        # master won't necessarily be up before the minion
        if key in AsyncAuth.creds_map:
            creds = AsyncAuth.creds_map[key]
            self._creds = creds
            self._crypticle = Crypticle(self.opts, creds['aes'])
            self._authenticate_future = tornado.concurrent.Future()
            self._authenticate_future.set_result(True)
        else:
            self.authenticate()
    def __deepcopy__(self, memo):
        # Deep-copy everything except the io_loop (not copyable; the copy
        # will pick up the current loop on use).
        cls = self.__class__
        result = cls.__new__(cls, copy.deepcopy(self.opts, memo), io_loop=None)
        memo[id(self)] = result
        for key in self.__dict__:
            if key in ('io_loop',):
                continue
            setattr(result, key, copy.deepcopy(self.__dict__[key], memo))
        return result
    @property
    def creds(self):
        # Credentials dict returned by a successful sign_in().
        return self._creds
    @property
    def crypticle(self):
        # Crypticle built from the session AES key; used to (de)crypt payloads.
        return self._crypticle
    @property
    def authenticated(self):
        # True only after the auth future resolved without an exception.
        return hasattr(self, '_authenticate_future') and \
               self._authenticate_future.done() and \
               self._authenticate_future.exception() is None
    def invalidate(self):
        '''
        Drop the resolved auth future and cached creds so the next
        authenticate() call does a fresh sign-in.
        '''
        if self.authenticated:
            del self._authenticate_future
            key = self.__key(self.opts)
            if key in AsyncAuth.creds_map:
                del AsyncAuth.creds_map[key]
    def authenticate(self, callback=None):
        '''
        Ask for this client to reconnect to the origin

        :param callback: callback run with the auth result when it resolves
        :rtype: Future
        :return: A future which will complete when authentication is complete
        '''
        # Re-use an in-flight auth attempt instead of starting another.
        if hasattr(self, '_authenticate_future') and not self._authenticate_future.done():
            future = self._authenticate_future
        else:
            future = tornado.concurrent.Future()
            self._authenticate_future = future
            self.io_loop.add_callback(self._authenticate)
        if callback is not None:
            def handle_future(future):
                response = future.result()
                self.io_loop.add_callback(callback, response)
            future.add_done_callback(handle_future)
        return future
    @tornado.gen.coroutine
    def _authenticate(self):
        '''
        Authenticate with the master: sign in (retrying with exponential
        backoff while the key is pending) and resolve _authenticate_future.
        '''
        acceptance_wait_time = self.opts['acceptance_wait_time']
        acceptance_wait_time_max = self.opts['acceptance_wait_time_max']
        if not acceptance_wait_time_max:
            acceptance_wait_time_max = acceptance_wait_time
        creds = None
        while True:
            try:
                creds = yield self.sign_in()
            except SaltClientError:
                break
            if creds == 'retry':
                if self.opts.get('caller'):
                    print('Minion failed to authenticate with the master, '
                          'has the minion key been accepted?')
                    sys.exit(2)
                if acceptance_wait_time:
                    log.info('Waiting {0} seconds before retry.'.format(acceptance_wait_time))
                    yield tornado.gen.sleep(acceptance_wait_time)
                # Double the wait up to the configured maximum.
                if acceptance_wait_time < acceptance_wait_time_max:
                    acceptance_wait_time += acceptance_wait_time
                    log.debug('Authentication wait time is {0}'.format(acceptance_wait_time))
                continue
            break
        if not isinstance(creds, dict) or 'aes' not in creds:
            try:
                del AsyncAuth.creds_map[self.__key(self.opts)]
            except KeyError:
                pass
            self._authenticate_future.set_exception(
                SaltClientError('Attempt to authenticate with the salt master failed')
            )
        else:
            # Cache creds for other instances/loops with the same identity.
            AsyncAuth.creds_map[self.__key(self.opts)] = creds
            self._creds = creds
            self._crypticle = Crypticle(self.opts, creds['aes'])
            self._authenticate_future.set_result(True)
    @tornado.gen.coroutine
    def sign_in(self, timeout=60, safe=True, tries=1):
        '''
        Send a sign in request to the master, sends the public key to the
        master as well as the token generated locally. Returns the auth
        creds dict on success, 'retry'/'full' sentinels, or raises
        SaltClientError / SaltSystemExit on hard failures.
        '''
        auth = {}
        # Per-call defaults can be overridden from opts.
        auth_timeout = self.opts.get('auth_timeout', None)
        if auth_timeout is not None:
            timeout = auth_timeout
        auth_safemode = self.opts.get('auth_safemode', None)
        if auth_safemode is not None:
            safe = auth_safemode
        auth_tries = self.opts.get('auth_tries', None)
        if auth_tries is not None:
            tries = auth_tries
        m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
        auth['master_uri'] = self.opts['master_uri']
        # Auth traffic goes over a clear (unencrypted) channel.
        channel = salt.transport.client.AsyncReqChannel.factory(self.opts,
                                                                crypt='clear',
                                                                io_loop=self.io_loop)
        try:
            payload = yield channel.send(
                self.minion_sign_in_payload(),
                tries=tries,
                timeout=timeout
            )
        except SaltReqTimeoutError as e:
            if safe:
                log.warning('SaltReqTimeoutError: {0}'.format(e))
                raise tornado.gen.Return('retry')
            raise SaltClientError('Attempt to authenticate with the salt master failed with timeout error')
        if 'load' in payload:
            if 'ret' in payload['load']:
                if not payload['load']['ret']:
                    # Key was rejected by the master.
                    if self.opts['rejected_retry']:
                        log.error(
                            'The Salt Master has rejected this minion\'s public '
                            'key.\nTo repair this issue, delete the public key '
                            'for this minion on the Salt Master.\nThe Salt '
                            'Minion will attempt to to re-authenicate.'
                        )
                        raise tornado.gen.Return('retry')
                    else:
                        log.critical(
                            'The Salt Master has rejected this minion\'s public '
                            'key!\nTo repair this issue, delete the public key '
                            'for this minion on the Salt Master and restart this '
                            'minion.\nOr restart the Salt Master in open mode to '
                            'clean out the keys. The Salt Minion will now exit.'
                        )
                        sys.exit(salt.defaults.exitcodes.EX_OK)
                # has the master returned that its maxed out with minions?
                elif payload['load']['ret'] == 'full':
                    raise tornado.gen.Return('full')
                else:
                    # Key is cached but not yet accepted.
                    log.error(
                        'The Salt Master has cached the public key for this '
                        'node, this salt minion will wait for {0} seconds '
                        'before attempting to re-authenticate'.format(
                            self.opts['acceptance_wait_time']
                        )
                    )
                    raise tornado.gen.Return('retry')
        auth['aes'] = self.verify_master(payload)
        if not auth['aes']:
            log.critical(
                'The Salt Master server\'s public key did not authenticate!\n'
                'The master may need to be updated if it is a version of Salt '
                'lower than {0}, or\n'
                'If you are confident that you are connecting to a valid Salt '
                'Master, then remove the master public key and restart the '
                'Salt Minion.\nThe master public key can be found '
                'at:\n{1}'.format(salt.version.__version__, m_pub_fn)
            )
            raise SaltSystemExit('Invalid master key')
        # Optionally verify the master key fingerprint from config.
        if self.opts.get('syndic_master', False):  # Is syndic
            syndic_finger = self.opts.get('syndic_finger', self.opts.get('master_finger', False))
            if syndic_finger:
                if salt.utils.pem_finger(m_pub_fn) != syndic_finger:
                    self._finger_fail(syndic_finger, m_pub_fn)
        else:
            if self.opts.get('master_finger', False):
                if salt.utils.pem_finger(m_pub_fn) != self.opts['master_finger']:
                    self._finger_fail(self.opts['master_finger'], m_pub_fn)
        auth['publish_port'] = payload['publish_port']
        raise tornado.gen.Return(auth)
    def get_keys(self):
        '''
        Return the minion's RSA private key object, generating the key pair
        on first use.
        '''
        # Make sure all key parent directories are accessible
        user = self.opts.get('user', 'root')
        salt.utils.verify.check_path_traversal(self.opts['pki_dir'], user)
        if os.path.exists(self.rsa_path):
            with salt.utils.fopen(self.rsa_path) as f:
                key = RSA.importKey(f.read())
            log.debug('Loaded minion key: {0}'.format(self.rsa_path))
        else:
            log.info('Generating keys: {0}'.format(self.opts['pki_dir']))
            gen_keys(self.opts['pki_dir'],
                     'minion',
                     self.opts['keysize'],
                     self.opts.get('user'))
            with salt.utils.fopen(self.rsa_path) as f:
                key = RSA.importKey(f.read())
        return key
    def gen_token(self, clear_tok):
        '''
        Sign a clear-text token with the minion's private key.
        '''
        return private_encrypt(self.get_keys(), clear_tok)
    def minion_sign_in_payload(self):
        '''
        Build the '_auth' payload: minion id, public key, and (when the
        master pubkey is already cached) the token encrypted to the master.
        '''
        payload = {}
        payload['cmd'] = '_auth'
        payload['id'] = self.opts['id']
        try:
            pubkey_path = os.path.join(self.opts['pki_dir'], self.mpub)
            with salt.utils.fopen(pubkey_path) as f:
                pub = RSA.importKey(f.read())
            cipher = PKCS1_OAEP.new(pub)
            payload['token'] = cipher.encrypt(self.token)
        except Exception:
            # No cached master pubkey yet: sign in without the token.
            pass
        with salt.utils.fopen(self.pub_path) as f:
            payload['pub'] = f.read()
        return payload
    def decrypt_aes(self, payload, master_pub=True):
        '''
        Decrypt the AES session key (and echoed token) from the master's
        auth reply with the minion's private key; verifies the optional
        'sig' against the cached master pubkey. Returns (aes_key, token),
        or ('', '') on any verification failure.
        '''
        if self.opts.get('auth_trb', False):
            log.warning(
                'Auth Called: {0}'.format(
                    ''.join(traceback.format_stack())
                )
            )
        else:
            log.debug('Decrypting the current master AES key')
        key = self.get_keys()
        cipher = PKCS1_OAEP.new(key)
        key_str = cipher.decrypt(payload['aes'])
        if 'sig' in payload:
            m_path = os.path.join(self.opts['pki_dir'], self.mpub)
            if os.path.exists(m_path):
                try:
                    with salt.utils.fopen(m_path) as f:
                        mkey = RSA.importKey(f.read())
                except Exception:
                    return '', ''
                # Master signed the sha256 of the AES key; verify it.
                digest = hashlib.sha256(key_str).hexdigest()
                m_digest = public_decrypt(mkey.publickey(), payload['sig'])
                if m_digest != digest:
                    return '', ''
        else:
            return '', ''
        # Older masters bundle key and token as 'aes_|-token'.
        if '_|-' in key_str:
            return key_str.split('_|-')
        else:
            if 'token' in payload:
                token = cipher.decrypt(payload['token'])
                return key_str, token
            elif not master_pub:
                return key_str, ''
        return '', ''
    def verify_pubkey_sig(self, message, sig):
        '''
        Verify the base64 signature *sig* of *message* (the master pubkey)
        with the configured verification key; returns True/False.
        '''
        if self.opts['master_sign_key_name']:
            path = os.path.join(self.opts['pki_dir'],
                                self.opts['master_sign_key_name'] + '.pub')
            if os.path.isfile(path):
                res = verify_signature(path,
                                       message,
                                       binascii.a2b_base64(sig))
            else:
                log.error('Verification public key {0} does not exist. You '
                          'need to copy it from the master to the minions '
                          'pki directory'.format(os.path.basename(path)))
                return False
            if res:
                log.debug('Successfully verified signature of master '
                          'public key with verification public key '
                          '{0}'.format(self.opts['master_sign_key_name'] + '.pub'))
                return True
            else:
                log.debug('Failed to verify signature of public key')
                return False
        else:
            log.error('Failed to verify the signature of the message because '
                      'the verification key-pairs name is not defined. Please '
                      'make sure that master_sign_key_name is defined.')
            return False
    def verify_signing_master(self, payload):
        '''
        Verify the signed master pubkey in *payload* and, on success, cache
        it to disk; returns True/False.
        '''
        try:
            if self.verify_pubkey_sig(payload['pub_key'],
                                      payload['pub_sig']):
                log.info('Received signed and verified master pubkey '
                         'from master {0}'.format(self.opts['master']))
                m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
                uid = salt.utils.get_uid(self.opts.get('user', None))
                with salt.utils.fpopen(m_pub_fn, 'wb+', uid=uid) as wfh:
                    wfh.write(payload['pub_key'])
                return True
            else:
                log.error('Received signed public-key from master {0} '
                          'but signature verification failed!'.format(self.opts['master']))
                return False
        except Exception as sign_exc:
            log.error('There was an error while verifying the masters public-key signature')
            raise Exception(sign_exc)
    def check_auth_deps(self, payload):
        '''
        Check that the master's signing behavior and the minion's
        verification setting agree; returns True when consistent.
        '''
        # master and minion sign and verify
        if 'pub_sig' in payload and self.opts['verify_master_pubkey_sign']:
            return True
        # master and minion do NOT sign and do NOT verify
        elif 'pub_sig' not in payload and not self.opts['verify_master_pubkey_sign']:
            return True
        # master signs, but minion does NOT verify
        elif 'pub_sig' in payload and not self.opts['verify_master_pubkey_sign']:
            log.error('The masters sent its public-key signature, but signature '
                      'verification is not enabled on the minion. Either enable '
                      'signature verification on the minion or disable signing '
                      'the public key on the master!')
            return False
        # master does NOT sign but minion wants to verify
        elif 'pub_sig' not in payload and self.opts['verify_master_pubkey_sign']:
            log.error('The master did not send its public-key signature, but '
                      'signature verification is enabled on the minion. Either '
                      'disable signature verification on the minion or enable '
                      'signing the public on the master!')
            return False
    def extract_aes(self, payload, master_pub=True):
        '''
        Decrypt the AES session key from *payload*; when *master_pub* is
        True also require the echoed token to match ours. Returns the AES
        key string or '' on failure.
        '''
        if master_pub:
            try:
                aes, token = self.decrypt_aes(payload, master_pub)
                if token != self.token:
                    log.error(
                        'The master failed to decrypt the random minion token'
                    )
                    return ''
            except Exception:
                log.error(
                    'The master failed to decrypt the random minion token'
                )
                return ''
            return aes
        else:
            aes, token = self.decrypt_aes(payload, master_pub)
            return aes
    def verify_master(self, payload):
        '''
        Verify that the master is the same one that was previously accepted.
        Returns the AES session key on success, '' on failure.
        '''
        m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
        if os.path.isfile(m_pub_fn) and not self.opts['open_mode']:
            local_master_pub = salt.utils.fopen(m_pub_fn).read()
            # Compare ignoring line-ending differences.
            if payload['pub_key'].replace('\n', '').replace('\r', '') != \
                    local_master_pub.replace('\n', '').replace('\r', ''):
                if not self.check_auth_deps(payload):
                    return ''
                if self.opts['verify_master_pubkey_sign']:
                    if self.verify_signing_master(payload):
                        return self.extract_aes(payload, master_pub=False)
                    else:
                        return ''
                else:
                    # This is not the last master we connected to
                    log.error('The master key has changed, the salt master could '
                              'have been subverted, verify salt master\'s public '
                              'key')
                    return ''
            else:
                if not self.check_auth_deps(payload):
                    return ''
                # the minion has not received any masters pubkey yet, write
                # the newly received pubkey to minion_master.pub
                if self.opts['always_verify_signature']:
                    if self.verify_signing_master(payload):
                        return self.extract_aes(payload)
                    else:
                        log.error('The masters public could not be verified. Is the '
                                  'verification pubkey {0} up to date?'
                                  ''.format(self.opts['master_sign_key_name'] + '.pub'))
                        return ''
                else:
                    return self.extract_aes(payload)
        else:
            if not self.check_auth_deps(payload):
                return ''
            if self.opts['verify_master_pubkey_sign']:
                if self.verify_signing_master(payload):
                    return self.extract_aes(payload, master_pub=False)
                else:
                    return ''
            else:
                # First contact (or open mode): cache the master pubkey.
                salt.utils.fopen(m_pub_fn, 'wb+').write(payload['pub_key'])
                return self.extract_aes(payload, master_pub=False)
class SAuth(AsyncAuth):
    '''
    Synchronous version of AsyncAuth: blocks until authentication with the
    master succeeds. Also a singleton per (pki_dir, id, master_uri).
    '''
    # Weak map of __key() -> instance (no per-io_loop split here).
    instances = weakref.WeakValueDictionary()
    def __new__(cls, opts, io_loop=None):
        '''
        Only create one instance of SAuth per __key().
        '''
        key = cls.__key(opts)
        if key not in SAuth.instances:
            log.debug('Initializing new SAuth for {0}'.format(key))
            new_auth = object.__new__(cls)
            new_auth.__singleton_init__(opts)
            SAuth.instances[key] = new_auth
        else:
            log.debug('Re-using SAuth for {0}'.format(key))
        return SAuth.instances[key]
    @classmethod
    def __key(cls, opts, io_loop=None):
        # Identity of an auth instance: which pki dir, minion id and master.
        return (opts['pki_dir'],     # where the keys are stored
                opts['id'],          # minion ID
                opts['master_uri'],  # master ID
                )
    # has to remain empty for singletons, since __init__ will *always* be called
    def __init__(self, opts, io_loop=None):
        super(SAuth, self).__init__(opts, io_loop=io_loop)
    # an init for the singleton instance to call
    def __singleton_init__(self, opts, io_loop=None):
        self.opts = opts
        # Random token the master must echo back to prove it decrypted it.
        self.token = Crypticle.generate_key_string()
        self.serial = salt.payload.Serial(self.opts)
        self.pub_path = os.path.join(self.opts['pki_dir'], 'minion.pub')
        self.rsa_path = os.path.join(self.opts['pki_dir'], 'minion.pem')
        # Name of the locally cached copy of the master's public key.
        if 'syndic_master' in self.opts:
            self.mpub = 'syndic_master.pub'
        elif 'alert_master' in self.opts:
            self.mpub = 'monitor_master.pub'
        else:
            self.mpub = 'minion_master.pub'
        if not os.path.isfile(self.pub_path):
            self.get_keys()
    @property
    def creds(self):
        # Lazily authenticate on first access.
        if not hasattr(self, '_creds'):
            self.authenticate()
        return self._creds
    @property
    def crypticle(self):
        # Lazily authenticate on first access.
        if not hasattr(self, '_crypticle'):
            self.authenticate()
        return self._crypticle
    def authenticate(self, _=None):
        '''
        Authenticate with the master, this method breaks the functional
        paradigm, it will update the master information from a fresh sign
        in, signing in can occur as often as needed to keep up with the
        revolving master AES key. Blocks (with exponential backoff) until
        the key is accepted.
        '''
        acceptance_wait_time = self.opts['acceptance_wait_time']
        acceptance_wait_time_max = self.opts['acceptance_wait_time_max']
        if not acceptance_wait_time_max:
            acceptance_wait_time_max = acceptance_wait_time
        while True:
            creds = self.sign_in()
            if creds == 'retry':
                if self.opts.get('caller'):
                    print('Minion failed to authenticate with the master, '
                          'has the minion key been accepted?')
                    sys.exit(2)
                if acceptance_wait_time:
                    log.info('Waiting {0} seconds before retry.'.format(acceptance_wait_time))
                    time.sleep(acceptance_wait_time)
                # Double the wait up to the configured maximum.
                if acceptance_wait_time < acceptance_wait_time_max:
                    acceptance_wait_time += acceptance_wait_time
                    log.debug('Authentication wait time is {0}'.format(acceptance_wait_time))
                continue
            break
        self._creds = creds
        self._crypticle = Crypticle(self.opts, creds['aes'])
    def sign_in(self, timeout=60, safe=True, tries=1):
        '''
        Send a sign in request to the master, sends the public key to the
        master as well as the token generated locally. Returns the auth
        creds dict, or the 'retry'/'full' sentinels.
        '''
        auth = {}
        # Per-call defaults can be overridden from opts.
        auth_timeout = self.opts.get('auth_timeout', None)
        if auth_timeout is not None:
            timeout = auth_timeout
        auth_safemode = self.opts.get('auth_safemode', None)
        if auth_safemode is not None:
            safe = auth_safemode
        auth_tries = self.opts.get('auth_tries', None)
        if auth_tries is not None:
            tries = auth_tries
        m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
        auth['master_uri'] = self.opts['master_uri']
        # Auth traffic goes over a clear (unencrypted) channel.
        channel = salt.transport.client.ReqChannel.factory(self.opts, crypt='clear')
        try:
            payload = channel.send(
                self.minion_sign_in_payload(),
                tries=tries,
                timeout=timeout
            )
        except SaltReqTimeoutError as e:
            if safe:
                log.warning('SaltReqTimeoutError: {0}'.format(e))
                return 'retry'
            raise SaltClientError('Attempt to authenticate with the salt master failed')
        if 'load' in payload:
            if 'ret' in payload['load']:
                if not payload['load']['ret']:
                    # Key was rejected by the master.
                    if self.opts['rejected_retry']:
                        log.error(
                            'The Salt Master has rejected this minion\'s public '
                            'key.\nTo repair this issue, delete the public key '
                            'for this minion on the Salt Master.\nThe Salt '
                            'Minion will attempt to to re-authenicate.'
                        )
                        return 'retry'
                    else:
                        log.critical(
                            'The Salt Master has rejected this minion\'s public '
                            'key!\nTo repair this issue, delete the public key '
                            'for this minion on the Salt Master and restart this '
                            'minion.\nOr restart the Salt Master in open mode to '
                            'clean out the keys. The Salt Minion will now exit.'
                        )
                        sys.exit(salt.defaults.exitcodes.EX_OK)
                # has the master returned that its maxed out with minions?
                elif payload['load']['ret'] == 'full':
                    return 'full'
                else:
                    # Key is cached but not yet accepted.
                    log.error(
                        'The Salt Master has cached the public key for this '
                        'node. If this is the first time connecting to this master '
                        'then this key may need to be accepted using \'salt-key -a {0}\' on '
                        'the salt master. This salt minion will wait for {1} seconds '
                        'before attempting to re-authenticate.'.format(
                            self.opts['id'],
                            self.opts['acceptance_wait_time']
                        )
                    )
                    return 'retry'
        auth['aes'] = self.verify_master(payload)
        if not auth['aes']:
            log.critical(
                'The Salt Master server\'s public key did not authenticate!\n'
                'The master may need to be updated if it is a version of Salt '
                'lower than {0}, or\n'
                'If you are confident that you are connecting to a valid Salt '
                'Master, then remove the master public key and restart the '
                'Salt Minion.\nThe master public key can be found '
                'at:\n{1}'.format(salt.version.__version__, m_pub_fn)
            )
            sys.exit(42)
        # Optionally verify the master key fingerprint from config.
        if self.opts.get('syndic_master', False):  # Is syndic
            syndic_finger = self.opts.get('syndic_finger', self.opts.get('master_finger', False))
            if syndic_finger:
                if salt.utils.pem_finger(m_pub_fn, sum_type=self.opts['hash_type']) != syndic_finger:
                    self._finger_fail(syndic_finger, m_pub_fn)
        else:
            if self.opts.get('master_finger', False):
                if salt.utils.pem_finger(m_pub_fn, sum_type=self.opts['hash_type']) != self.opts['master_finger']:
                    self._finger_fail(self.opts['master_finger'], m_pub_fn)
        auth['publish_port'] = payload['publish_port']
        return auth
    def _finger_fail(self, finger, master_key):
        '''
        Log a fingerprint mismatch (possible MITM) and exit the minion.
        '''
        log.critical(
            'The specified fingerprint in the master configuration '
            'file:\n{0}\nDoes not match the authenticating master\'s '
            'key:\n{1}\nVerify that the configured fingerprint '
            'matches the fingerprint of the correct master and that '
            'this minion is not subject to a man-in-the-middle attack.'
            .format(
                finger,
                salt.utils.pem_finger(master_key, sum_type=self.opts['hash_type'])
            )
        )
        sys.exit(42)
class Crypticle(object):
    '''
    Authenticated encryption container: AES-CBC encryption with an
    HMAC-SHA256 signature appended. A "key string" is the base64 of the
    concatenated AES key (key_size // 8 bytes) and HMAC key (SIG_SIZE
    bytes).
    '''
    PICKLE_PAD = 'pickle::'
    AES_BLOCK_SIZE = 16
    SIG_SIZE = hashlib.sha256().digest_size

    def __init__(self, opts, key_string, key_size=192):
        self.key_string = key_string
        self.keys = self.extract_keys(self.key_string, key_size)
        self.key_size = key_size
        self.serial = salt.payload.Serial(opts)

    @classmethod
    def generate_key_string(cls, key_size=192):
        '''
        Return a fresh random base64 key string (AES + HMAC key material).
        '''
        key = os.urandom(key_size // 8 + cls.SIG_SIZE)
        b64key = base64.b64encode(key)
        if six.PY3:
            b64key = b64key.decode('utf-8')
        return b64key.replace('\n', '')

    @classmethod
    def extract_keys(cls, key_string, key_size):
        '''
        Split a base64 key string into (aes_key, hmac_key).
        '''
        # Bug fix: str.decode('base64') only exists on Python 2;
        # base64.b64decode handles both str and bytes on both 2 and 3,
        # matching the PY3 handling already present in generate_key_string.
        key = base64.b64decode(key_string)
        assert len(key) == key_size / 8 + cls.SIG_SIZE, 'invalid key'
        return key[:-cls.SIG_SIZE], key[-cls.SIG_SIZE:]

    def encrypt(self, data):
        '''
        Pad, AES-CBC encrypt with a random IV, and append an HMAC-SHA256
        signature over IV + ciphertext.
        '''
        aes_key, hmac_key = self.keys
        # PKCS#7-style padding: pad byte value equals the pad length.
        pad = self.AES_BLOCK_SIZE - len(data) % self.AES_BLOCK_SIZE
        data = data + pad * chr(pad)
        iv_bytes = os.urandom(self.AES_BLOCK_SIZE)
        cypher = AES.new(aes_key, AES.MODE_CBC, iv_bytes)
        data = iv_bytes + cypher.encrypt(data)
        sig = hmac.new(hmac_key, data, hashlib.sha256).digest()
        return data + sig

    def decrypt(self, data):
        '''
        Verify the trailing HMAC-SHA256 signature and decrypt; raises
        AuthenticationError when the signature does not match.
        '''
        aes_key, hmac_key = self.keys
        sig = data[-self.SIG_SIZE:]
        data = data[:-self.SIG_SIZE]
        mac_bytes = hmac.new(hmac_key, data, hashlib.sha256).digest()
        if len(mac_bytes) != len(sig):
            log.debug('Failed to authenticate message')
            raise AuthenticationError('message authentication failed')
        # Constant-time comparison of the two signatures.
        # NOTE(review): the ord() calls assume Python 2 byte strings; on
        # Python 3 iterating bytes yields ints and this would raise —
        # confirm before running this path on PY3.
        result = 0
        for zipped_x, zipped_y in zip(mac_bytes, sig):
            result |= ord(zipped_x) ^ ord(zipped_y)
        if result != 0:
            log.debug('Failed to authenticate message')
            raise AuthenticationError('message authentication failed')
        iv_bytes = data[:self.AES_BLOCK_SIZE]
        data = data[self.AES_BLOCK_SIZE:]
        cypher = AES.new(aes_key, AES.MODE_CBC, iv_bytes)
        data = cypher.decrypt(data)
        # Strip the padding (length is stored in the final byte).
        return data[:-ord(data[-1])]

    def dumps(self, obj):
        '''
        Serialize and encrypt a python object.
        '''
        return self.encrypt(self.PICKLE_PAD + self.serial.dumps(obj))

    def loads(self, data):
        '''
        Decrypt and deserialize; returns {} if the payload lacks the
        expected PICKLE_PAD prefix.
        '''
        data = self.decrypt(data)
        if not data.startswith(self.PICKLE_PAD):
            return {}
        return self.serial.loads(data[len(self.PICKLE_PAD):])
| true
| true
|
f71aae58eb581c2971eaadde48b721f1d5ace501
| 396
|
py
|
Python
|
booktrade/booktrade/wsgi.py
|
rocity/dj-booktrade
|
7ec0876635931e540ce4c0e1c74653b6626fd3fd
|
[
"Apache-2.0"
] | null | null | null |
booktrade/booktrade/wsgi.py
|
rocity/dj-booktrade
|
7ec0876635931e540ce4c0e1c74653b6626fd3fd
|
[
"Apache-2.0"
] | null | null | null |
booktrade/booktrade/wsgi.py
|
rocity/dj-booktrade
|
7ec0876635931e540ce4c0e1c74653b6626fd3fd
|
[
"Apache-2.0"
] | null | null | null |
"""
WSGI config for booktrade project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at this project's settings module (unless the environment
# already set one), then build the module-level WSGI callable that servers
# such as gunicorn/uWSGI look up as ``application``.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "booktrade.settings")
application = get_wsgi_application()
| 23.294118
| 78
| 0.787879
|
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "booktrade.settings")
application = get_wsgi_application()
| true
| true
|
f71aae7b0231777a5578550493465da27589a5fd
| 12,552
|
py
|
Python
|
utils.py
|
chaitanyamalaviya/NeuralFactorGraph
|
6cd664b7edc43d56c6f1165baa7e7625eb0f7cd8
|
[
"MIT"
] | 48
|
2018-05-15T12:46:36.000Z
|
2021-03-11T09:34:10.000Z
|
utils.py
|
chaitanyamalaviya/NeuralFactorGraph
|
6cd664b7edc43d56c6f1165baa7e7625eb0f7cd8
|
[
"MIT"
] | 1
|
2018-10-28T21:11:47.000Z
|
2018-10-31T20:31:09.000Z
|
utils.py
|
chaitanyamalaviya/NeuralFactorGraph
|
6cd664b7edc43d56c6f1165baa7e7625eb0f7cd8
|
[
"MIT"
] | 6
|
2018-07-03T01:28:41.000Z
|
2020-01-23T13:25:49.000Z
|
from __future__ import division, print_function
from conllu.parser import parse, parse_tree
from tags import Tags, Tag, Label
import os
import re
import math
import numpy as np
import itertools
import pdb
import pickle
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import torch
from torch.autograd import Variable
import torch.nn.functional as F
# Print full arrays without truncation. Bug fix: numpy >= 1.14 rejects
# threshold=np.nan ("threshold must be non-NAN"); np.inf is the supported
# way to disable summarization.
np.set_printoptions(threshold=np.inf)

# Sentinel key that marks a dict which has been frozen into a tuple.
FROZEN_TAG = "__frozen__"


def freeze_dict(obj):
    """
    Recursively convert a dict into a hashable tuple of (key, value) pairs,
    tagged with (FROZEN_TAG, True) so it can be recognized and reversed by
    unfreeze_dict(). Non-dict objects are returned unchanged.
    """
    if isinstance(obj, dict):
        dict_items = list(obj.items())
        dict_items.append((FROZEN_TAG, True))
        return tuple([(k, freeze_dict(v)) for k, v in dict_items])
    return obj


def unfreeze_dict(obj):
    """
    Inverse of freeze_dict(): rebuild a dict (recursively) from a tagged
    tuple of pairs. Objects without the tag are returned unchanged.
    """
    if isinstance(obj, tuple):
        if (FROZEN_TAG, True) in obj:
            out = dict((k, unfreeze_dict(v)) for k, v in obj)
            del out[FROZEN_TAG]
            return out
    return obj
def get_lang_code_dicts():
    """
    Build language-name <-> language-code mappings from data/lang_codes.txt.

    Returns:
        (lang_to_code, code_to_lang) dictionaries.
    """
    lang_to_code = {}
    code_to_lang = {}
    # Strip commas and quote characters from the whitespace-split fields.
    bad_chars = ",''"
    rgx = re.compile('[%s]' % bad_chars)
    with open("data/lang_codes.txt") as f:
        data = f.read()
    lines = data.split("\n")
    split_line = [line.split() for line in lines]
    # Last two lines of the file are not data rows — skip them.
    for line in split_line[:-2]:
        lang = rgx.sub('', line[0])
        code = rgx.sub('', line[2])
        lang_to_code[lang] = code
    # Bug fix: dict.iteritems() is Python 2 only; .items() works on 2 and 3.
    code_to_lang = {v: k for k, v in lang_to_code.items()}
    return lang_to_code, code_to_lang
def read_conll(treebank_path, langs, code_to_lang, train_or_dev, tgt_size=None, test=False):
    """
    Reads conll formatted file

    langs: list of languages
    train: read training data
    returns: dict with data for each language
    as list of tuples of sentences and morph-tags
    """
    annot_sents = {}
    unique = []
    for lang in langs:
        train = train_or_dev if not test else "test"
        # Locate the *train.conllu (or *dev.conllu when test=True) file in
        # the UD_<Language> treebank directory.
        if not test:
            for file in os.listdir(treebank_path + "UD_" + code_to_lang[lang]):
                if file.endswith("train.conllu"):
                    filepath = os.path.join(treebank_path + "UD_" + code_to_lang[lang], file)
                    break
        else:
            for file in os.listdir(treebank_path + "UD_" + code_to_lang[lang]):
                if file.endswith("dev.conllu"):
                    filepath = os.path.join(treebank_path+ "UD_" + code_to_lang[lang], file)
                    break
        with open(filepath) as f:
            data = f.readlines()[:-1]
            # Drop comment lines and split on blank lines into sentences.
            data = [line for line in data if line[0]!='#']
            split_data = " ".join(data).split("\n \n")
            ud = [parse(sent)[0] for sent in split_data]
        all_text = []
        all_tags = []
        # Optionally cap the size of the last (target) language's data.
        if langs[-1]==lang and tgt_size:
            tgt_size = min(tgt_size, len(ud))
            ud = ud[:tgt_size]
        for sent in ud:
            sent_text = []
            sent_tags = []
            for word in sent:
                word_tags = {}
                # Merge morphological features and POS into one tag dict.
                if word['feats']:
                    word_tags = dict(word['feats'])
                if word['upostag']:
                    if word_tags:
                        word_tags.update({'POS':word['upostag']})
                    else:
                        word_tags = {'POS':word['upostag']}
                if word_tags:
                    word_tags = freeze_dict(word_tags)
                # Track the set of distinct frozen tag dicts seen anywhere.
                if word_tags not in unique:
                    unique.append(word_tags)
                sent_text.append(word['form'])
                sent_tags.append(freeze_dict(word_tags))
            all_text.append(sent_text)
            all_tags.append(sent_tags)
        annot_sents[lang] = [(w, m) for w, m in zip(all_text, all_tags)]
    return annot_sents, unique
def addNullLabels(annot_sents, langs, unique_tags):
    """
    Pad every token's tag dict with a "NULL" label for each tag in
    *unique_tags* that the token is not annotated with, so all tokens carry
    a value for every known tag. Mutates and returns *annot_sents*.
    """
    for lang in langs:
        for idx, (words, morphs) in enumerate(annot_sents[lang]):
            padded = []
            for frozen in morphs:
                tag_dict = unfreeze_dict(frozen)
                for tag in unique_tags:
                    if tag.name not in tag_dict:
                        tag_dict[tag.name] = "NULL"
                padded.append(freeze_dict(tag_dict))
            annot_sents[lang][idx] = (words, padded)
    return annot_sents
def sortbylength(data, lang_ids, maxlen=500):
    """
    Order (sentence, tags) pairs by decreasing sentence length, dropping any
    sentence of maxlen or more tokens; lang_ids is permuted to match.

    :param data: List of tuples of source sentences and morph tags
    :param lang_ids: List of lang IDs for each sentence
    :param maxlen: Maximum sentence length permitted
    :return: Sorted data and sorted langIDs
    """
    # Stable sort of the original indices by descending sentence length.
    order = sorted(range(len(data)), key=lambda i: -len(data[i][0]))
    keep = [i for i in order if len(data[i][0]) < maxlen]
    sorted_data = [(data[i][0], data[i][1]) for i in keep]
    sorted_lang_ids = [lang_ids[i] for i in keep]
    return sorted_data, sorted_lang_ids
def get_train_order(training_data, batch_size, startIdx=0):
    """
    Compute inclusive (start, end) index pairs of batches over
    length-sorted data. A new batch begins whenever the sentence length
    changes or the running batch exceeds batch_size sentences.

    :param training_data: List of tuples of source sentences and morph tags
    :param batch_size: soft cap on sentences per batch
    :param startIdx: offset added to all reported indices
    :return: list of (start, end) index pairs
    """
    lengths = [len(pair[0]) for pair in training_data]
    starts, ends = [], []
    last_len = -1
    count = 0
    for idx, cur_len in enumerate(lengths, start=startIdx):
        if cur_len != last_len or count > batch_size:
            starts.append(idx)
            if last_len != -1:
                # Close the previous batch just before this one.
                ends.append(idx - 1)
            count = 1
        count += 1
        last_len = cur_len
    ends.append(startIdx + len(lengths) - 1)
    return [(s, e) for s, e in zip(starts, ends)]
def find_unique_tags(train_data_tags, null_label=False):
    """
    Build a Tags collection of every tag/label pair observed in the
    (frozen) tag dicts of *train_data_tags*. When null_label is set, a
    "NULL" label is additionally added to every tag.
    """
    unique_tags = Tags()
    for frozen in train_data_tags:
        for tag_name, label in unfreeze_dict(frozen).items():
            if not unique_tags.tagExists(tag_name):
                unique_tags.addTag(tag_name)
            cur_tag = unique_tags.getTagbyName(tag_name)
            if not cur_tag.labelExists(label):
                cur_tag.addLabel(label)
    # Add null labels to unseen tags in each tag set
    if null_label:
        for tag in unique_tags:
            tag.addLabel("NULL")
    return unique_tags
def plot_heatmap(uniqueTags, weights, kind):
    """
    Save one heatmap image per weight matrix under figures/.

    uniqueTags -- Tags collection providing tag names and label lists
    weights    -- iterable of weight tensors (a ParameterList); for
                  kind="pair" one matrix per unordered tag pair, for
                  kind="trans" one square matrix per tag
    kind       -- "pair" (inter-tag) or "trans" (transition within a tag)
    """
    font = {'family' : 'normal',
            'size'   : 14,
            'weight' : 'bold'}
    matplotlib.rc('font', **font)
    # Pair k in the flattened weight list corresponds to tag pair pairs[k].
    pairs = list(itertools.combinations(range(uniqueTags.size()), 2))
    # weights is a ParameterList
    for k, weight in enumerate(weights):
        if kind=="pair":
            i, j = pairs[k]
            tag1 = uniqueTags.getTagbyIdx(i)
            tag2 = uniqueTags.getTagbyIdx(j)
            tag1_labels = [label.name for label in tag1.labels]
            tag2_labels = [label.name for label in tag2.labels]
            plt.figure(figsize=(20, 18), dpi=80)
            plt.xticks(range(0, len(tag2_labels)), tag2_labels)
            plt.yticks(range(0, len(tag1_labels)), tag1_labels)
            plt.tick_params(labelsize=25)
            plt.xlabel(tag2.name, fontsize=40)
            plt.ylabel(tag1.name, fontsize=50)
            plt.imshow(weight.data.cpu().numpy(), cmap='Reds', interpolation='nearest')
            plt.savefig("figures/" + tag1.name + "_" + tag2.name + ".png", bbox_inches='tight')
            plt.close()
        elif kind=="trans":
            tag = uniqueTags.getTagbyIdx(k)
            tag_labels = [label.name for label in tag.labels]
            plt.figure(figsize=(20, 18), dpi=80)
            plt.xticks(range(0, len(tag_labels)), tag_labels, rotation=45)
            plt.yticks(range(0, len(tag_labels)), tag_labels)
            plt.tick_params(labelsize=40)
            plt.xlabel(tag.name, fontsize=50)
            plt.ylabel(tag.name, fontsize=50)
            plt.imshow(weight.data.cpu().numpy(), cmap='Greys', interpolation='nearest')
            plt.savefig("figures/" + tag.name + "_" + tag.name + ".png", bbox_inches='tight')
            plt.close()
def get_var(x, gpu=False, volatile=False):
    """Wrap tensor *x* in an autograd Variable, optionally moving it to CUDA.

    NOTE(review): the ``volatile`` keyword was removed from
    ``torch.autograd.Variable`` in PyTorch >= 0.4, so this code presumably
    targets an older PyTorch — confirm before upgrading the dependency.
    """
    x = Variable(x, volatile=volatile)
    if gpu:
        x = x.cuda()
    return x
def prepare_sequence(seq, to_ix, gpu=False):
    """Map a sequence of symbols to a ``torch.LongTensor`` of indices.

    Symbols missing from the vocabulary fall back to the "UNK" entry.

    :arg seq: iterable of symbols (e.g. word or character strings)
    :arg to_ix: either a dict mapping symbol -> index, or a list whose
        positions define the indices
    :arg gpu: move the resulting tensor to CUDA when True
    :raises TypeError: if *to_ix* is neither a dict nor a list (previously
        this fell through to a confusing ``NameError`` on ``idxs``)
    """
    if isinstance(to_ix, dict):
        idxs = [to_ix[w] if w in to_ix else to_ix["UNK"] for w in seq]
    elif isinstance(to_ix, list):
        idxs = [to_ix.index(w) if w in to_ix else to_ix.index("UNK") for w in seq]
    else:
        raise TypeError('to_ix must be a dict or a list, got %s'
                        % type(to_ix).__name__)
    tensor = torch.LongTensor(idxs)
    return get_var(tensor, gpu)
def to_scalar(var):
    """Return the first element of *var* as a plain Python number."""
    flattened = var.view(-1)
    return flattened.data.tolist()[0]
def argmax(vec):
    """Return the column index of the row-wise maximum as a Python int."""
    # torch.max over dim 1 returns (values, indices); keep the indices and
    # extract the first entry as a plain int (inlines the to_scalar helper).
    best_idx = torch.max(vec, 1)[1]
    return best_idx.view(-1).data.tolist()[0]
def logSumExp(a, b):
    """Elementwise log(exp(a) + exp(b)), stabilized by factoring out the max."""
    peak = np.maximum(a, b)
    shifted_sum = np.exp(a - peak) + np.exp(b - peak)
    return peak + np.log(shifted_sum)
def logSumExpTensor(vec):
    """Row-wise log-sum-exp of a batch tensor, numerically stabilized.

    The input is flattened to (batch, -1) first, so higher-rank tensors are
    reduced over all trailing dimensions.
    """
    rows = vec.size()[0]
    flat = vec.view(rows, -1)
    peak = torch.max(flat, 1)[0]
    centered = flat - peak.view(-1, 1).expand(-1, flat.size()[1])
    return peak + torch.log(torch.sum(torch.exp(centered), 1))
def logSumExpTensors(a, b):
    """Elementwise log(exp(a) + exp(b)) for tensors, max-shifted for stability."""
    peak = torch.max(a, b)
    total = torch.exp(a - peak) + torch.exp(b - peak)
    return peak + torch.log(total)
def logDot(a, b, redAxis=None):
    """Log-space matrix product: log(exp(a) @ exp(b)), max-shifted for stability.

    When ``redAxis == 1``, *b* is transposed before the product and the result
    is transposed back on return.
    """
    if redAxis==1:
        b = b.transpose()
    shift_a = np.amax(a)
    shift_b = np.amax(b)
    out = np.dot(np.exp(a - shift_a), np.exp(b - shift_b))
    np.log(out, out=out)
    out += shift_a + shift_b
    return out.transpose() if redAxis==1 else out
def logMax(a, b, redAxis=None):
    """Max-product analogue of ``logDot``: log of max_k exp(a[i,k])*exp(b[k,j]).

    Used for Viterbi-style decoding in log space; max-shifted for stability.
    """
    if redAxis==1:
        b = b.transpose()
    shift_a = np.amax(a)
    shift_b = np.amax(b)
    out = np.max(np.exp(a[:, :, None]-shift_a) * np.exp(b[None, :, :]-shift_b), axis=1)
    np.log(out, out=out)
    out += shift_a + shift_b
    return out.transpose() if redAxis==1 else out
def logNormalize(a):
    """Normalize each row of log-scores so it log-sums to zero (log-softmax)."""
    row_lse = np.logaddexp.reduce(a, 1)
    return (a.transpose() - row_lse).transpose()
def logNormalizeTensor(a):
    """Log-normalize a 2-D or 3-D tensor per batch row via log-sum-exp.

    For 3-D input the normalizer is computed over both trailing dimensions
    (``logSumExpTensor`` flattens everything after the batch axis).
    """
    norm = logSumExpTensor(a)
    rank = len(a.size())
    if rank==2:
        norm = norm.view(-1, 1).expand(-1, a.size()[1])
    elif rank==3:
        norm = norm.view(a.size()[0], 1, 1).expand(-1, a.size()[1], a.size()[2])
    return (a - norm)
def computeF1(hyps, golds, prefix, labels_to_ix=None, baseline=False, write_results=False):
    """Compute per-tag and aggregate F1 between predicted and gold tag dicts.

    hyps: List of dicts for predicted morphological tags
    golds: List of dicts for gold morphological tags
    prefix: path prefix for the results file when *write_results* is True
    labels_to_ix: unused; kept for interface compatibility
    baseline: if True, hyps/golds are frozen tag tuples and are unfrozen first
    write_results: if True, append pickled per-tag scores plus a text summary
        to ``<prefix>_results_f1.txt``

    :returns: (macro-averaged F1 weighted by gold support, micro-averaged F1)
    """
    f1_precision_scores = {}
    f1_precision_total = {}
    f1_recall_scores = {}
    f1_recall_total = {}
    f1_average = 0.0

    if baseline:
        hyps = [unfreeze_dict(h) for h in hyps]
        golds = [unfreeze_dict(t) for t in golds]

    # calculate precision: count predicted (tag, label) pairs confirmed by gold
    for i, word_tags in enumerate(hyps, start=0):
        for k, v in word_tags.items():
            if v=="NULL":
                continue
            if k not in f1_precision_scores:
                f1_precision_scores[k] = 0
                f1_precision_total[k] = 0
            if k in golds[i]:
                if v==golds[i][k]:
                    f1_precision_scores[k] += 1
            f1_precision_total[k] += 1

    f1_micro_precision = sum(f1_precision_scores.values())/sum(f1_precision_total.values())

    for k in f1_precision_scores.keys():
        f1_precision_scores[k] = f1_precision_scores[k]/f1_precision_total[k]

    # calculate recall: count gold (tag, label) pairs recovered by hyps
    for i, word_tags in enumerate(golds, start=0):
        for k, v in word_tags.items():
            if v=="NULL":
                continue
            if k not in f1_recall_scores:
                f1_recall_scores[k] = 0
                f1_recall_total[k] = 0
            if k in hyps[i]:
                if v==hyps[i][k]:
                    f1_recall_scores[k] += 1
            f1_recall_total[k] += 1

    f1_micro_recall = sum(f1_recall_scores.values())/sum(f1_recall_total.values())

    f1_scores = {}
    for k in f1_recall_scores.keys():
        f1_recall_scores[k] = f1_recall_scores[k]/f1_recall_total[k]
        # Per-tag harmonic mean; macro average is weighted by gold support.
        if f1_recall_scores[k]==0 or k not in f1_precision_scores:
            f1_scores[k] = 0
        else:
            f1_scores[k] = 2 * (f1_precision_scores[k] * f1_recall_scores[k]) / (f1_precision_scores[k] + f1_recall_scores[k])
        f1_average += f1_recall_total[k] * f1_scores[k]

    f1_average /= sum(f1_recall_total.values())
    f1_micro_score = 2 * (f1_micro_precision * f1_micro_recall) / (f1_micro_precision + f1_micro_recall)

    if write_results:
        print("Writing F1 scores...")
        with open(prefix + '_results_f1.txt', 'ab') as file:
            file.write(pickle.dumps(f1_scores))
            # Bug fix: the file is opened in binary mode for pickle, so the
            # text summary must be encoded — writing str here raised
            # TypeError on Python 3.
            file.write(("\nMacro-averaged F1 Score: " + str(f1_average)).encode())
            file.write(("\nMicro-averaged F1 Score: " + str(f1_micro_score)).encode())

    return f1_average, f1_micro_score
def getCorrectCount(golds, hyps):
    """Count words whose predictions agree with gold on every shared tag.

    A word counts as correct when no tag present in BOTH its gold and
    predicted dicts has conflicting values (tags missing on either side are
    ignored).
    """
    total = 0
    for idx, gold_tags in enumerate(golds):
        mismatch = False
        for key, val in gold_tags.items():
            if key in hyps[idx] and hyps[idx][key] != val:
                mismatch = True
                break
        if not mismatch:
            total += 1
    return total
| 27.769912
| 120
| 0.634242
|
from __future__ import division, print_function
from conllu.parser import parse, parse_tree
from tags import Tags, Tag, Label
import os
import re
import math
import numpy as np
import itertools
import pdb
import pickle
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import torch
from torch.autograd import Variable
import torch.nn.functional as F
np.set_printoptions(threshold=np.nan)
FROZEN_TAG = "__frozen__"
def freeze_dict(obj):
    """Recursively convert a dict into a hashable tuple of (key, value) pairs.

    A sentinel ``(FROZEN_TAG, True)`` item is appended so ``unfreeze_dict``
    can tell frozen dicts apart from ordinary tuples. Non-dict values are
    returned unchanged.
    """
    if not isinstance(obj, dict):
        return obj
    items = list(obj.items())
    items.append((FROZEN_TAG, True))
    return tuple((key, freeze_dict(value)) for key, value in items)
def unfreeze_dict(obj):
    """Inverse of ``freeze_dict``: rebuild a dict from a frozen tuple.

    Only tuples carrying the ``(FROZEN_TAG, True)`` sentinel are converted;
    anything else is returned unchanged.
    """
    if isinstance(obj, tuple) and (FROZEN_TAG, True) in obj:
        restored = dict((key, unfreeze_dict(value)) for key, value in obj)
        restored.pop(FROZEN_TAG)
        return restored
    return obj
def get_lang_code_dicts():
    """Read ``data/lang_codes.txt`` and build language-name <-> code mappings.

    Strips commas and quote characters from the name and code columns.

    :returns: ``(lang_to_code, code_to_lang)`` dicts
    """
    lang_to_code = {}
    code_to_lang = {}
    bad_chars = ",''"
    rgx = re.compile('[%s]' % bad_chars)

    with open("data/lang_codes.txt") as f:
        data = f.read()
        lines = data.split("\n")
        split_line = [line.split() for line in lines]
        # The last two lines of the file are skipped (trailing metadata/blank).
        for line in split_line[:-2]:
            lang = rgx.sub('', line[0])
            code = rgx.sub('', line[2])
            lang_to_code[lang] = code
        # Bug fix: dict.iteritems() is Python-2-only and crashes on Python 3;
        # items() behaves the same on both.
        code_to_lang = {v: k for k, v in lang_to_code.items()}
    return lang_to_code, code_to_lang
def read_conll(treebank_path, langs, code_to_lang, train_or_dev, tgt_size=None, test=False):
    """Read UD CoNLL-U treebanks for the given languages.

    :arg treebank_path: root directory containing ``UD_<Language>`` folders
    :arg langs: language codes to load; the last one is treated as the target
        language and may be truncated to *tgt_size* sentences
    :arg code_to_lang: dict mapping language code -> full language name
    :arg train_or_dev: split name used when *test* is False — NOTE(review):
        assigned to the unused local ``train`` below, so it currently has no
        effect; file selection is by filename suffix only
    :arg tgt_size: optional cap on the number of target-language sentences
    :arg test: when True read the ``dev`` file, otherwise the ``train`` file
    :returns: (annot_sents, unique) where annot_sents maps lang -> list of
        (words, frozen-tag-dicts) pairs and unique lists all distinct frozen
        tag dicts encountered
    """
    annot_sents = {}
    unique = []
    for lang in langs:
        train = train_or_dev if not test else "test"

        # Locate the .conllu file for this language by suffix.
        if not test:
            for file in os.listdir(treebank_path + "UD_" + code_to_lang[lang]):
                if file.endswith("train.conllu"):
                    filepath = os.path.join(treebank_path + "UD_" + code_to_lang[lang], file)
                    break
        else:
            for file in os.listdir(treebank_path + "UD_" + code_to_lang[lang]):
                if file.endswith("dev.conllu"):
                    filepath = os.path.join(treebank_path+ "UD_" + code_to_lang[lang], file)
                    break

        with open(filepath) as f:
            # Drop the trailing line and comment lines, then split into
            # sentence chunks for the conllu parser.
            data = f.readlines()[:-1]
            data = [line for line in data if line[0]!='#']
            split_data = " ".join(data).split("\n \n")
            ud = [parse(sent)[0] for sent in split_data]

        all_text = []
        all_tags = []

        # Truncate only the target (last) language to tgt_size sentences.
        if langs[-1]==lang and tgt_size:
            tgt_size = min(tgt_size, len(ud))
            ud = ud[:tgt_size]

        for sent in ud:
            sent_text = []
            sent_tags = []
            for word in sent:
                # Merge morphological features with the POS tag into one dict.
                word_tags = {}
                if word['feats']:
                    word_tags = dict(word['feats'])
                if word['upostag']:
                    if word_tags:
                        word_tags.update({'POS':word['upostag']})
                    else:
                        word_tags = {'POS':word['upostag']}
                if word_tags:
                    word_tags = freeze_dict(word_tags)
                if word_tags not in unique:
                    unique.append(word_tags)

                sent_text.append(word['form'])
                sent_tags.append(freeze_dict(word_tags))

            all_text.append(sent_text)
            all_tags.append(sent_tags)

        annot_sents[lang] = [(w, m) for w, m in zip(all_text, all_tags)]

    return annot_sents, unique
def addNullLabels(annot_sents, langs, unique_tags):
    """Pad every word's tag dict with "NULL" for tags it does not carry.

    Mutates *annot_sents* in place (each (words, tags) pair is replaced with
    a padded copy) and also returns it.

    :arg annot_sents: dict lang -> list of (words, frozen-tag-dict list) pairs
    :arg langs: iterable of language keys present in *annot_sents*
    :arg unique_tags: iterable of tag objects exposing a ``name`` attribute
    """
    for lang in langs:
        i = 0
        for w, m in annot_sents[lang]:
            new_tags = []
            for tags in m:
                tag_dict = unfreeze_dict(tags)
                for tag in unique_tags:
                    if tag.name not in tag_dict:
                        tag_dict[tag.name] = "NULL"
                new_tags.append(freeze_dict(tag_dict))
            annot_sents[lang][i] = (w, new_tags)
            i += 1
    return annot_sents
def sortbylength(data, lang_ids, maxlen=500):
    """Sort (source, target) pairs by descending source length.

    Pairs whose source has ``maxlen`` or more tokens are dropped, and
    *lang_ids* is reordered (and filtered) to stay aligned with the data.

    :returns: (sorted_data, sorted_lang_ids)
    """
    # Stable sort of example indices by descending source length, then keep
    # only those under the length cap.
    order = sorted(range(len(data)), key=lambda idx: -len(data[idx][0]))
    kept = [idx for idx in order if len(data[idx][0]) < maxlen]
    sorted_data = [(data[idx][0], data[idx][1]) for idx in kept]
    sorted_lang_ids = [lang_ids[idx] for idx in kept]
    return sorted_data, sorted_lang_ids
def get_train_order(training_data, batch_size, startIdx=0):
    """Compute (start, end) index ranges batching consecutive same-length sources.

    Examples are assumed pre-sorted by source length; a new batch begins
    whenever the length changes or the running count exceeds *batch_size*.
    Indices are offset by *startIdx*. (The counter reset-to-1-then-increment
    and the strict ``>`` comparison reproduce the original batching behavior
    exactly.)

    :returns: list of (start, end) inclusive index pairs
    """
    starts, ends = [], []
    last_len = -1
    in_batch = 0
    for pos, example in enumerate(training_data, start=startIdx):
        cur_len = len(example[0])
        if cur_len != last_len or in_batch > batch_size:
            starts.append(pos)
            if last_len != -1:
                ends.append(pos - 1)
            in_batch = 1
        in_batch += 1
        last_len = cur_len
    ends.append(startIdx + len(training_data) - 1)
    return list(zip(starts, ends))
def find_unique_tags(train_data_tags, null_label=False):
unique_tags = Tags()
for tags in train_data_tags:
for tag, label in unfreeze_dict(tags).items():
if not unique_tags.tagExists(tag):
unique_tags.addTag(tag)
curTag = unique_tags.getTagbyName(tag)
if not curTag.labelExists(label):
curTag.addLabel(label)
if null_label:
for tag in unique_tags:
tag.addLabel("NULL")
return unique_tags
def plot_heatmap(uniqueTags, weights, kind):
font = {'family' : 'normal',
'size' : 14,
'weight' : 'bold'}
matplotlib.rc('font', **font)
pairs = list(itertools.combinations(range(uniqueTags.size()), 2))
for k, weight in enumerate(weights):
if kind=="pair":
i, j = pairs[k]
tag1 = uniqueTags.getTagbyIdx(i)
tag2 = uniqueTags.getTagbyIdx(j)
tag1_labels = [label.name for label in tag1.labels]
tag2_labels = [label.name for label in tag2.labels]
plt.figure(figsize=(20, 18), dpi=80)
plt.xticks(range(0, len(tag2_labels)), tag2_labels)
plt.yticks(range(0, len(tag1_labels)), tag1_labels)
plt.tick_params(labelsize=25)
plt.xlabel(tag2.name, fontsize=40)
plt.ylabel(tag1.name, fontsize=50)
plt.imshow(weight.data.cpu().numpy(), cmap='Reds', interpolation='nearest')
plt.savefig("figures/" + tag1.name + "_" + tag2.name + ".png", bbox_inches='tight')
plt.close()
elif kind=="trans":
tag = uniqueTags.getTagbyIdx(k)
tag_labels = [label.name for label in tag.labels]
plt.figure(figsize=(20, 18), dpi=80)
plt.xticks(range(0, len(tag_labels)), tag_labels, rotation=45)
plt.yticks(range(0, len(tag_labels)), tag_labels)
plt.tick_params(labelsize=40)
plt.xlabel(tag.name, fontsize=50)
plt.ylabel(tag.name, fontsize=50)
plt.imshow(weight.data.cpu().numpy(), cmap='Greys', interpolation='nearest')
plt.savefig("figures/" + tag.name + "_" + tag.name + ".png", bbox_inches='tight')
plt.close()
def get_var(x, gpu=False, volatile=False):
x = Variable(x, volatile=volatile)
if gpu:
x = x.cuda()
return x
def prepare_sequence(seq, to_ix, gpu=False):
if isinstance(to_ix, dict):
idxs = [to_ix[w] if w in to_ix else to_ix["UNK"] for w in seq]
elif isinstance(to_ix, list):
idxs = [to_ix.index(w) if w in to_ix else to_ix.index("UNK") for w in seq]
tensor = torch.LongTensor(idxs)
return get_var(tensor, gpu)
def to_scalar(var):
return var.view(-1).data.tolist()[0]
def argmax(vec):
_, idx = torch.max(vec, 1)
return to_scalar(idx)
def logSumExp(a, b):
maxi = np.maximum(a, b)
aexp = a - maxi
bexp = b - maxi
sumOfExp = np.exp(aexp) + np.exp(bexp)
return maxi + np.log(sumOfExp)
def logSumExpTensor(vec):
batch_size = vec.size()[0]
vec = vec.view(batch_size, -1)
max_score = torch.max(vec, 1)[0]
max_score_broadcast = max_score.view(-1, 1).expand(-1, vec.size()[1])
return max_score + \
torch.log(torch.sum(torch.exp(vec - max_score_broadcast), 1))
def logSumExpTensors(a, b):
maxi = torch.max(a, b)
aexp = a - maxi
bexp = b - maxi
sumOfExp = torch.exp(aexp) + torch.exp(bexp)
return maxi + torch.log(sumOfExp)
def logDot(a, b, redAxis=None):
if redAxis==1:
b = b.transpose()
max_a = np.amax(a)
max_b = np.amax(b)
C = np.dot(np.exp(a - max_a), np.exp(b - max_b))
np.log(C, out=C)
C += max_a + max_b
return C.transpose() if redAxis==1 else C
def logMax(a, b, redAxis=None):
if redAxis==1:
b = b.transpose()
max_a = np.amax(a)
max_b = np.amax(b)
C = np.max(np.exp(a[:, :, None]-max_a) * np.exp(b[None, :, :]-max_b), axis=1)
np.log(C, out=C)
C += max_a + max_b
return C.transpose() if redAxis==1 else C
def logNormalize(a):
denom = np.logaddexp.reduce(a, 1)
return (a.transpose()- denom).transpose()
def logNormalizeTensor(a):
denom = logSumExpTensor(a)
if len(a.size())==2:
denom = denom.view(-1, 1).expand(-1, a.size()[1])
elif len(a.size())==3:
denom = denom.view(a.size()[0], 1, 1).expand(-1, a.size()[1], a.size()[2])
return (a-denom)
def computeF1(hyps, golds, prefix, labels_to_ix=None, baseline=False, write_results=False):
f1_precision_scores = {}
f1_precision_total = {}
f1_recall_scores = {}
f1_recall_total = {}
f1_average = 0.0
if baseline:
hyps = [unfreeze_dict(h) for h in hyps]
golds = [unfreeze_dict(t) for t in golds]
for i, word_tags in enumerate(hyps, start=0):
for k, v in word_tags.items():
if v=="NULL":
continue
if k not in f1_precision_scores:
f1_precision_scores[k] = 0
f1_precision_total[k] = 0
if k in golds[i]:
if v==golds[i][k]:
f1_precision_scores[k] += 1
f1_precision_total[k] += 1
f1_micro_precision = sum(f1_precision_scores.values())/sum(f1_precision_total.values())
for k in f1_precision_scores.keys():
f1_precision_scores[k] = f1_precision_scores[k]/f1_precision_total[k]
for i, word_tags in enumerate(golds, start=0):
for k, v in word_tags.items():
if v=="NULL":
continue
if k not in f1_recall_scores:
f1_recall_scores[k] = 0
f1_recall_total[k] = 0
if k in hyps[i]:
if v==hyps[i][k]:
f1_recall_scores[k] += 1
f1_recall_total[k] += 1
f1_micro_recall = sum(f1_recall_scores.values())/sum(f1_recall_total.values())
f1_scores = {}
for k in f1_recall_scores.keys():
f1_recall_scores[k] = f1_recall_scores[k]/f1_recall_total[k]
if f1_recall_scores[k]==0 or k not in f1_precision_scores:
f1_scores[k] = 0
else:
f1_scores[k] = 2 * (f1_precision_scores[k] * f1_recall_scores[k]) / (f1_precision_scores[k] + f1_recall_scores[k])
f1_average += f1_recall_total[k] * f1_scores[k]
f1_average /= sum(f1_recall_total.values())
f1_micro_score = 2 * (f1_micro_precision * f1_micro_recall) / (f1_micro_precision + f1_micro_recall)
if write_results:
print("Writing F1 scores...")
with open(prefix + '_results_f1.txt', 'ab') as file:
file.write(pickle.dumps(f1_scores))
file.write("\nMacro-averaged F1 Score: " + str(f1_average))
file.write("\nMicro-averaged F1 Score: " + str(f1_micro_score))
return f1_average, f1_micro_score
def getCorrectCount(golds, hyps):
correct = 0
for i, word_tags in enumerate(golds, start=0):
allCorrect = True
for k, v in word_tags.items():
if k in hyps[i]:
if v!=hyps[i][k]:
allCorrect = False
break
if allCorrect==True:
correct += 1
return correct
| true
| true
|
f71aaf4aad518c6d9db764a08f3d7f8432489eb7
| 32,580
|
py
|
Python
|
prody/utilities/catchall.py
|
bwingert/ProDy
|
7377a20b4a4841ec59dccaa93fa58e2ee0fe89bc
|
[
"MIT"
] | null | null | null |
prody/utilities/catchall.py
|
bwingert/ProDy
|
7377a20b4a4841ec59dccaa93fa58e2ee0fe89bc
|
[
"MIT"
] | null | null | null |
prody/utilities/catchall.py
|
bwingert/ProDy
|
7377a20b4a4841ec59dccaa93fa58e2ee0fe89bc
|
[
"MIT"
] | null | null | null |
"""This module defines miscellaneous utility functions that is public to users."""
import numpy as np
from numpy import unique, linalg, diag, sqrt, dot
from Bio.Phylo.BaseTree import Tree, Clade
from prody import PY3K
from .misctools import addEnds, interpY, index, isListLike
from .checkers import checkCoords
from .logger import LOGGER
__all__ = ['calcTree', 'clusterMatrix', 'showLines', 'showMatrix',
'reorderMatrix', 'findSubgroups', 'getCoords',
'getLinkage', 'getTreeFromLinkage', 'clusterSubfamilies']
class LinkageError(Exception):
    """Raised when a linkage matrix or tree structure is malformed."""
    pass
def clusterSubfamilies(similarities, n_clusters=0, linkage='all', method='tsne', cutoff=0.0, **kwargs):
    """Perform clustering based on members of the *ensemble* projected into lower a reduced
    dimension.

    The similarity matrix is embedded into 2-D (t-SNE or spectral embedding),
    then agglomerative clustering is run over a grid of cluster counts and
    linkage types; the labels with the best silhouette score are returned.
    NOTE(review): t-SNE is stochastic, so results can differ between calls.

    :arg similarities: a matrix of similarities for each structure in the ensemble, such as
                       RMSD-matrix, dynamics-based spectral overlap, sequence similarity
    :type similarities: :class:`~numpy.ndarray`

    :arg n_clusters: the number of clusters to generate. If **0**, will scan a range of
                     number of clusters and return the best one based on highest
                     silhouette score. Default is **0**.
    :type n_clusters: int

    :arg linkage: if **all**, will test all linkage types (ward, average, complete,
                  single). Otherwise will use only the one(s) given as input. Default is
                  **all**.
    :type linkage: str, list, tuple, :class:`~numpy.ndarray`

    :arg method: if set to **spectral**, will generate a Kirchoff matrix based on the
                 cutoff value given and use that as input as clustering instead of
                 the values themselves. Default is **tsne**.
    :type method: str

    :arg cutoff: only used if *method* is set to **spectral**. This value is used for
                 generating the Kirchoff matrix to use for generating clusters when
                 doing spectral clustering. Default is **0.0**.
    :type cutoff: float
    """
    # Import necessary packages
    try:
        from sklearn.manifold import SpectralEmbedding
        from sklearn.cluster import AgglomerativeClustering
        from sklearn.metrics import silhouette_score
        from sklearn.manifold import TSNE
    except ImportError:
        raise ImportError('need sklearn module')

    '''
    try:
        import Bio
    except ImportError:
        raise ImportError('Phylo module could not be imported. '
            'Reinstall ProDy or install Biopython '
            'to solve the problem.')
    '''

    # Check inputs to make sure are of valid types/values
    if not isinstance(similarities, np.ndarray):
        raise TypeError('similarities should be a numpy ndarray')

    dim = similarities.shape
    if dim[0] != dim[1]:
        raise ValueError('similarities must be a square matrix')

    if n_clusters != 0:
        if not isinstance(n_clusters, int):
            raise TypeError('clusters must be an instance of int')
        if n_clusters < 1:
            raise ValueError('clusters must be a positive integer')
        elif n_clusters > similarities.shape[0]:
            raise ValueError('clusters can\'t be longer than similarities matrix')
        nclusts = range(n_clusters,n_clusters+1)
    else:
        # No count given: scan 2..9 clusters and keep the best silhouette.
        nclusts = range(2,10,1)

    if linkage != 'all':
        # Check if given input for linkage is list-like
        if isListLike(linkage):
            for val in linkage:
                if val.lower() not in ['ward', 'average', 'complete', 'single']:
                    raise ValueError('linkage must be one or more of: \'ward\', \'average\', \'complete\', or \'single\'')
            if len(linkage) > 4:
                raise ValueError('linkage must be one or more of: \'ward\', \'average\', \'complete\', or \'single\'')
            linkages = [ x.lower() for x in linkage ]
        # If not, check if it is a valid string and method name
        else:
            if not isinstance(linkage, str):
                raise TypeError('linkage must be an instance of str or list-like of strs')
            if linkage not in ['ward', 'average', 'complete', 'single']:
                raise ValueError('linkage must one or more of: \'ward\', \'average\', \'complete\', or \'single\'')
            linkages = [linkage]
    else:
        linkages = ['ward', 'average', 'complete', 'single']

    if method != 'tsne':
        if not isinstance(method, str):
            raise TypeError('method must be an instance of str')
        if method != 'spectral':
            raise ValueError('method must be either \'tsne\' or \'spectral\'')

    if not isinstance(cutoff, float):
        raise TypeError('cutoff must be an instance of float')

    # best_nclust/best_link are tracked for bookkeeping but only the labels
    # are returned.
    best_score = -1
    best_nclust = 0
    best_link = ''
    best_labels = []

    # Scan over range of clusters
    for x in nclusts:
        # NOTE(review): the embedding does not depend on x, so recomputing it
        # each iteration is redundant work (and re-randomizes t-SNE).
        if method == 'tsne':
            embedding = TSNE(n_components=2)
            transform = embedding.fit_transform(similarities)
        else:
            kirchhoff = np.where(similarities > cutoff, 0, -1)
            embedding = SpectralEmbedding(n_components=2)
            transform = embedding.fit_transform(kirchhoff)

        for link in linkages:
            clustering = AgglomerativeClustering(linkage=link, n_clusters=x)
            clustering.fit(transform)

            silhouette_avg = silhouette_score(transform, clustering.labels_)
            if silhouette_avg > best_score:
                best_score = silhouette_avg
                best_nclust = x
                best_link = link
                best_labels = clustering.labels_

    return best_labels
def getCoords(data):
    """Return a coordinate array from *data*.

    *data* may already be a coordinate array, or any object exposing a
    ``_getCoords``/``getCoords`` method (preferring the private accessor).

    :raises TypeError: if *data* is neither a valid coordinate array nor an
        object with a coordinate accessor
    """
    try:
        if hasattr(data, '_getCoords'):
            data = data._getCoords()
        else:
            data = data.getCoords()
    except AttributeError:
        # Not a ProDy-like object — accept it only if it passes coordinate
        # validation as a plain array.
        try:
            checkCoords(data)
        except TypeError:
            raise TypeError('data must be a Numpy array or an object '
                            'with `getCoords` method')
    return data
def getLinkage(names, tree):
    """ Obtain the :func:`~scipy.cluster.hierarchy.linkage` matrix encoding
    ``tree``.

    :arg names: a list of names, the order determines the values in the
                linkage matrix
    :type names: list, :class:`~numpy.ndarray`

    :arg tree: tree to be converted
    :type tree: :class:`~Bio.Phylo.BaseTree.Tree`
    """

    tree_terminals = tree.get_terminals()

    if len(tree_terminals) != len(names):
        raise ValueError('inconsistent number of terminals in tree and names')

    # Reorder terminal clades to match the order of `names`.
    terminals = [None] * len(names)
    for clade in tree_terminals:
        i = index(names, clade.name)
        terminals[i] = clade

    n = len(terminals)
    # Reversed so that merges closer to the leaves come first, matching
    # scipy's bottom-up linkage row order.
    nonterminals = [c for c in reversed(tree.get_nonterminals())]
    if len(nonterminals) != n-1:
        # NOTE(review): message says "terminal" but this checks the
        # non-terminal (internal) clade count.
        raise LinkageError('wrong number of terminal clades')

    Z = np.zeros((n-1, 4))

    root = tree.root

    def _indexOfClade(clade):
        # scipy convention: leaves are 0..n-1, internal nodes are n..2n-2.
        if clade.is_terminal():
            i = index(terminals, clade)
        else:
            i = index(nonterminals, clade) + n
        return i

    def _height_of(clade):
        # Height of a clade above the leaves (max root-to-leaf path below it).
        if clade.is_terminal():
            height = 0
        else:
            height = max(_height_of(c) + c.branch_length for c in clade.clades)

        return height

    def _dfs(clade):
        # Fill one linkage row per internal clade; assumes a binary tree
        # (only clades[0] and clades[1] are visited).
        if clade.is_terminal():
            return

        i = _indexOfClade(clade)
        clade_a = clade.clades[0]
        clade_b = clade.clades[1]

        a = _indexOfClade(clade_a)
        b = _indexOfClade(clade_b)

        l = min(a, b)
        r = max(a, b)

        Z[i-n, 0] = l
        Z[i-n, 1] = r
        Z[i-n, 2] = _height_of(clade) * 2.
        Z[i-n, 3] = clade.count_terminals()

        _dfs(clade_a)
        _dfs(clade_b)

    _dfs(root)

    return Z
def getTreeFromLinkage(names, linkage):
    """ Obtain the tree encoded by ``linkage``.

    :arg names: a list of names, the order should correspond to the values in
                linkage
    :type names: list, :class:`~numpy.ndarray`

    :arg linkage: linkage matrix
    :type linkage: :class:`~numpy.ndarray`
    """
    try:
        import Bio
    except ImportError:
        raise ImportError('Phylo module could not be imported. '
            'Reinstall ProDy or install Biopython '
            'to solve the problem.')

    from Bio.Phylo.BaseTree import Tree, Clade

    if not isinstance(linkage, np.ndarray):
        raise TypeError('linkage must be a numpy.ndarray instance')

    if linkage.ndim != 2:
        raise LinkageError('linkage must be a 2-dimensional matrix')

    if linkage.shape[1] != 4:
        raise LinkageError('linkage must have exactly 4 columns')

    n_terms = len(names)
    if linkage.shape[0] != n_terms-1:
        raise LinkageError('linkage must have exactly len(names)-1 rows')

    # One leaf clade per name; heights track each clade's merge height so
    # branch lengths can be derived as height differences.
    clades = []
    heights = []
    for name in names:
        clade = Clade(None, name)
        clades.append(clade)
        heights.append(0.)

    # Each linkage row merges clades link[0] and link[1] at height link[2]
    # (scipy convention: new clades are appended after the leaves).
    for link in linkage:
        l = int(link[0])
        r = int(link[1])
        height = link[2]

        left = clades[l]
        right = clades[r]

        lh = heights[l]
        rh = heights[r]

        left.branch_length = height - lh
        right.branch_length = height - rh

        clade = Clade(None, None)
        clade.clades.append(left)
        clade.clades.append(right)

        clades.append(clade)
        heights.append(height)

    # The last merge is the root.
    return Tree(clade)
def calcTree(names, distance_matrix, method='upgma', linkage=False):
    """ Given a distance matrix, it creates an returns a tree structure.

    :arg names: a list of names
    :type names: list, :class:`~numpy.ndarray`

    :arg distance_matrix: a square matrix with length of ensemble. If numbers does not match *names*
                          it will raise an error
    :type distance_matrix: :class:`~numpy.ndarray`

    :arg method: method used for constructing the tree. Acceptable options are ``"upgma"``, ``"nj"``,
                 or methods supported by :func:`~scipy.cluster.hierarchy.linkage` such as ``"single"``,
                 ``"average"``, ``"ward"``, etc. Default is ``"upgma"``
    :type method: str

    :arg linkage: whether the linkage matrix is returned. Note that NJ trees do not support linkage
    :type linkage: bool
    """
    try:
        import Bio
    except ImportError:
        raise ImportError('Phylo module could not be imported. '
            'Reinstall ProDy or install Biopython '
            'to solve the problem.')

    from .TreeConstruction import DistanceMatrix, DistanceTreeConstructor

    if len(names) != distance_matrix.shape[0] or len(names) != distance_matrix.shape[1]:
        raise ValueError("Mismatch between the sizes of matrix and names.")

    method = method.lower().strip()

    if method in ['ward', 'single', 'average', 'weighted', 'centroid', 'median']:
        # SciPy path: condense the matrix, run hierarchical clustering, then
        # decode the linkage into a Bio.Phylo tree.
        from scipy.cluster.hierarchy import linkage as hlinkage
        from scipy.spatial.distance import squareform

        Z = hlinkage(squareform(distance_matrix), method=method)
        tree = getTreeFromLinkage(names, Z)
    else:
        # Biopython path (nj/upgma): needs a lower-triangular matrix.
        matrix = []
        k = 1
        Z = None
        for row in distance_matrix:
            matrix.append(list(row[:k]))
            k = k + 1

        if isinstance(names, np.ndarray):
            names = names.tolist()
        dm = DistanceMatrix(names, matrix)
        constructor = DistanceTreeConstructor()

        method = method.strip().lower()
        if method == 'nj':
            tree = constructor.nj(dm)
        elif method == 'upgma':
            tree = constructor.upgma(dm)
            # Linkage extraction is only supported for UPGMA trees.
            if linkage:
                Z = getLinkage(names, tree)
        else:
            raise ValueError('Method can be only either "nj", "upgma" or '
                             'hierarchical clustering such as "single", "average", etc.')

        # Strip the auto-generated internal node names Biopython assigns.
        for node in tree.get_nonterminals():
            node.name = None

    if linkage:
        return tree, Z
    else:
        return tree
def writeTree(filename, tree, format_str='newick'):
    """ Write a tree to file using Biopython.

    :arg filename: name for output file
    :type filename: str

    :arg tree: the tree to be written
    :type tree: :class:`~Bio.Phylo.BaseTree.Tree`

    :arg format_str: a string specifying the format for the tree
    :type format_str: str
    """
    try:
        from Bio import Phylo
    except ImportError:
        raise ImportError('Phylo module could not be imported. '
            'Reinstall ProDy or install Biopython '
            'to solve the problem.')

    if not isinstance(filename, str):
        raise TypeError('filename should be a string')

    if not isinstance(tree, Phylo.BaseTree.Tree):
        raise TypeError('tree should be a Biopython.Phylo Tree object')

    if not isinstance(format_str, str):
        raise TypeError('format_str should be a string')

    Phylo.write(tree, filename, format_str)
def clusterMatrix(distance_matrix=None, similarity_matrix=None, labels=None, return_linkage=None, **kwargs):
    """
    Cluster a distance matrix using scipy.cluster.hierarchy and
    return the sorted matrix, indices used for sorting, sorted labels (if **labels** are passed),
    and linkage matrix (if **return_linkage** is **True**). Set ``similarity=True`` for clustering a similarity matrix

    :arg distance_matrix: an N-by-N matrix containing some measure of distance
        such as 1. - seqid_matrix, rmsds, or distances in PCA space
    :type distance_matrix: :class:`~numpy.ndarray`

    :arg similarity_matrix: an N-by-N matrix containing some measure of similarity
        such as sequence identity, mode-mode overlap, or spectral overlap;
        converted internally via ``1. - similarity_matrix``
    :type similarity_matrix: :class:`~numpy.ndarray`

    :arg labels: labels for each matrix row that can be returned sorted
    :type labels: list

    :arg no_plot: if **True**, don't plot the dendrogram.
        default is **True**
    :type no_plot: bool

    :arg reversed: if set to **True**, then the sorting indices will be reversed.
    :type reversed: bool

    Other arguments for :func:`~scipy.hierarchy.linkage` and :func:`~scipy.hierarchy.dendrogram`
    can also be provided and will be taken as **kwargs**.
    """

    import scipy.cluster.hierarchy as sch
    from scipy import spatial
    if similarity_matrix is None and distance_matrix is None:
        raise ValueError('Please provide a distance matrix or a similarity matrix')

    # Bug fix: the key was misspelled as 'orientiation', so a user-supplied
    # 'orientation' kwarg leaked into sch.linkage (TypeError) and the default
    # was always used. The old misspelling is still accepted for backward
    # compatibility.
    orientation = kwargs.pop('orientation', kwargs.pop('orientiation', 'right'))
    reverse_order = kwargs.pop('reversed', False)  # renamed: don't shadow builtin
    no_plot = kwargs.pop('no_plot', True)

    if distance_matrix is None:
        matrix = similarity_matrix
        distance_matrix = 1. - similarity_matrix
    else:
        matrix = distance_matrix

    # Condense to the 1-D form scipy's linkage expects.
    formatted_distance_matrix = spatial.distance.squareform(distance_matrix)
    linkage_matrix = sch.linkage(formatted_distance_matrix, **kwargs)
    # Dendrogram leaf order gives the clustering-based row/column permutation.
    sorting_dendrogram = sch.dendrogram(linkage_matrix, orientation=orientation, labels=labels, no_plot=no_plot)

    indices = sorting_dendrogram['leaves']
    sorted_labels = sorting_dendrogram['ivl']

    if reverse_order:
        indices = indices[::-1]
        sorted_labels = sorted_labels[::-1]

    sorted_matrix = matrix[indices, :]
    sorted_matrix = sorted_matrix[:, indices]

    return_vals = [sorted_matrix, indices]

    if labels is not None:
        return_vals.append(sorted_labels)
    if return_linkage:
        return_vals.append(linkage_matrix)
    return tuple(return_vals)  # convert to tuple to avoid [pylint] E0632:Possible unbalanced tuple unpacking
def showLines(*args, **kwargs):
    """
    Show 1-D data using :func:`~matplotlib.axes.Axes.plot`.

    :arg x: (optional) x coordinates. *x* can be an 1-D array or a 2-D matrix of
        column vectors.
    :type x: :class:`~numpy.ndarray`

    :arg y: data array. *y* can be an 1-D array or a 2-D matrix of
        column vectors.
    :type y: :class:`~numpy.ndarray`

    :arg dy: an array of variances of *y* which will be plotted as a
        band along *y*. It should have the same shape with *y*.
    :type dy: :class:`~numpy.ndarray`

    :arg lower: an array of lower bounds which will be plotted as a
        band along *y*. It should have the same shape with *y* and should be
        paired with *upper*.
    :type lower: :class:`~numpy.ndarray`

    :arg upper: an array of upper bounds which will be plotted as a
        band along *y*. It should have the same shape with *y* and should be
        paired with *lower*.
    :type upper: :class:`~numpy.ndarray`

    :arg alpha: the transparency of the band(s) for plotting *dy*.
    :type alpha: float

    :arg beta: the transparency of the band(s) for plotting *miny* and *maxy*.
    :type beta: float

    :arg ticklabels: user-defined tick labels for x-axis.
    :type ticklabels: list

    :returns: ``(lines, polys)`` — the plotted Line2D objects and any
        fill_between polygons
    """

    # note for developers: this function serves as a low-level
    # plotting function which provides basic utilities for other
    # plotting functions. Therefore showFigure is not handled
    # in this function as it should be already handled in the caller.

    ticklabels = kwargs.pop('ticklabels', None)
    dy = kwargs.pop('dy', None)
    miny = kwargs.pop('lower', None)
    maxy = kwargs.pop('upper', None)
    alpha = kwargs.pop('alpha', 0.5)
    beta = kwargs.pop('beta', 0.25)
    gap = kwargs.pop('gap', False)
    labels = kwargs.pop('label', None)

    from matplotlib import cm, ticker
    from matplotlib.pyplot import figure, gca, xlim

    ax = gca()
    lines = ax.plot(*args, **kwargs)

    polys = []

    for i, line in enumerate(lines):
        color = line.get_color()
        x, y = line.get_data()

        if gap:
            x_new, y_new = addEnds(x, y)
            line.set_data(x_new, y_new)
        else:
            x_new, y_new = x, y

        if labels is not None:
            if np.isscalar(labels):
                line.set_label(labels)
            else:
                try:
                    line.set_label(labels[i])
                except IndexError:
                    # Bug fix: len(line) on a Line2D raised TypeError inside
                    # this error path; report the number of lines instead.
                    raise ValueError('The number of labels ({0}) and that of y ({1}) do not match.'
                                     .format(len(labels), len(lines)))

        # the following function needs to be here so that line exists
        def sub_array(a, i, tag='a'):
            ndim = 0
            if a is not None:
                if np.isscalar(a[0]):
                    ndim = 1   # a plain list (array)
                else:
                    ndim = 2   # a nested list (array)
            else:
                return None

            if ndim == 1:
                _a = a
            else:
                try:
                    _a = a[i]
                except IndexError:
                    # Bug fix: previously referenced len(miny)/len(line)
                    # regardless of which array failed.
                    raise ValueError('The number of {2} ({0}) and that of y ({1}) do not match.'
                                     .format(len(a), len(lines), tag))

            if len(_a) != len(y):
                # Bug fix: previously referenced the undefined name _miny.
                raise ValueError('The shapes of {2} ({0}) and y ({1}) do not match.'
                                 .format(len(_a), len(y), tag))
            return _a

        if miny is not None and maxy is not None:
            # Pass the tag so error messages name the offending argument.
            _miny = sub_array(miny, i, 'lower')
            _maxy = sub_array(maxy, i, 'upper')

            if gap:
                _, _miny = addEnds(x, _miny)
                _, _maxy = addEnds(x, _maxy)

            poly = ax.fill_between(x_new, _miny, _maxy,
                                   alpha=beta, facecolor=color, edgecolor=None,
                                   linewidth=1, antialiased=True)
            polys.append(poly)

        if dy is not None:
            _dy = sub_array(dy, i, 'dy')

            if gap:
                _, _dy = addEnds(x, _dy)

            poly = ax.fill_between(x_new, y_new-_dy, y_new+_dy,
                                   alpha=alpha, facecolor=color, edgecolor=None,
                                   linewidth=1, antialiased=True)
            polys.append(poly)

    ax.margins(x=0)
    if ticklabels is not None:
        if callable(ticklabels):
            ax.get_xaxis().set_major_formatter(ticker.FuncFormatter(ticklabels))
        else:
            # NOTE(review): ticker.IndexFormatter was removed in matplotlib
            # 3.5; switch to FixedFormatter/FuncFormatter when upgrading.
            ax.get_xaxis().set_major_formatter(ticker.IndexFormatter(ticklabels))

    ax.xaxis.set_major_locator(ticker.AutoLocator())
    ax.xaxis.set_minor_locator(ticker.AutoMinorLocator())

    return lines, polys
def showMatrix(matrix, x_array=None, y_array=None, **kwargs):
    """Show a matrix using :meth:`~matplotlib.axes.Axes.imshow`. Curves on x- and y-axis can be added.

    :arg matrix: matrix to be displayed
    :type matrix: :class:`~numpy.ndarray`

    :arg x_array: data to be plotted above the matrix
    :type x_array: :class:`~numpy.ndarray`

    :arg y_array: data to be plotted on the left side of the matrix
    :type y_array: :class:`~numpy.ndarray`

    :arg percentile: a percentile threshold to remove outliers, i.e. only showing data within *p*-th
                     to *100-p*-th percentile
    :type percentile: float

    :arg interactive: turn on or off the interactive options
    :type interactive: bool

    :arg xtickrotation: how much to rotate the xticklabels in degrees
                        default is 0
    :type xtickrotation: float

    *x_array* and *y_array* may also be :class:`~Bio.Phylo.BaseTree.Tree`
    instances, in which case a dendrogram is drawn alongside the matrix.

    Returns ``(im, lines, cb)``: the image artist, the list of side-panel
    artists, and the colorbar (**None** when ``colorbar=False``).
    """

    from matplotlib import ticker
    from matplotlib.gridspec import GridSpec
    from matplotlib.collections import LineCollection
    from matplotlib.pyplot import gca, sca, sci, colorbar, subplot

    from .drawtools import drawTree

    def _index_formatter(labels):
        # ticker.IndexFormatter was removed in matplotlib 3.5; when absent,
        # emulate it with a FuncFormatter that indexes into *labels*.
        if hasattr(ticker, 'IndexFormatter'):
            return ticker.IndexFormatter(labels)

        def _fmt(x, pos):
            i = int(round(x))
            return labels[i] if 0 <= i < len(labels) else ''

        return ticker.FuncFormatter(_fmt)

    # optionally clip the color range to the p..(100-p) percentile band
    p = kwargs.pop('percentile', None)
    vmin = vmax = None
    if p is not None:
        vmin = np.percentile(matrix, p)
        vmax = np.percentile(matrix, 100-p)

    vmin = kwargs.pop('vmin', vmin)
    vmax = kwargs.pop('vmax', vmax)
    vcenter = kwargs.pop('vcenter', None)
    norm = kwargs.pop('norm', None)

    if vcenter is not None and norm is None:
        if PY3K:
            try:
                from matplotlib.colors import DivergingNorm
            except ImportError:
                # DivergingNorm was renamed to TwoSlopeNorm in matplotlib 3.2
                from matplotlib.colors import TwoSlopeNorm as DivergingNorm

            norm = DivergingNorm(vmin=vmin, vcenter=0., vmax=vmax)
        else:
            LOGGER.warn('vcenter cannot be used in Python 2 so norm remains None')

    lw = kwargs.pop('linewidth', 1)

    W = H = kwargs.pop('ratio', 6)

    ticklabels = kwargs.pop('ticklabels', None)
    xticklabels = kwargs.pop('xticklabels', ticklabels)
    yticklabels = kwargs.pop('yticklabels', ticklabels)
    xtickrotation = kwargs.pop('xtickrotation', 0.)

    show_colorbar = kwargs.pop('colorbar', True)
    cb_extend = kwargs.pop('cb_extend', 'neither')
    allticks = kwargs.pop('allticks', False) # this argument is temporary and will be replaced by better implementation
    interactive = kwargs.pop('interactive', True)

    cmap = kwargs.pop('cmap', 'jet')
    origin = kwargs.pop('origin', 'lower')

    try:
        from Bio import Phylo
    except ImportError:
        raise ImportError('Phylo module could not be imported. '
                          'Reinstall ProDy or install Biopython '
                          'to solve the problem.')

    tree_mode_y = isinstance(y_array, Phylo.BaseTree.Tree)
    tree_mode_x = isinstance(x_array, Phylo.BaseTree.Tree)

    # decide the subplot grid: side arrays get thin panels above/left of the
    # main matrix panel
    if x_array is not None and y_array is not None:
        nrow = 2; ncol = 2
        i = 1; j = 1
        width_ratios = [1, W]
        height_ratios = [1, H]
        aspect = 'auto'
    elif x_array is not None and y_array is None:
        nrow = 2; ncol = 1
        i = 1; j = 0
        width_ratios = [W]
        height_ratios = [1, H]
        aspect = 'auto'
    elif x_array is None and y_array is not None:
        nrow = 1; ncol = 2
        i = 0; j = 1
        width_ratios = [1, W]
        height_ratios = [H]
        aspect = 'auto'
    else:
        nrow = 1; ncol = 1
        i = 0; j = 0
        width_ratios = [W]
        height_ratios = [H]
        # the aspect kwarg is only honored when there are no side panels
        aspect = kwargs.pop('aspect', None)

    main_index = (i, j)
    upper_index = (i-1, j)
    left_index = (i, j-1)

    complex_layout = nrow > 1 or ncol > 1

    ax1 = ax2 = ax3 = None

    if complex_layout:
        gs = GridSpec(nrow, ncol, width_ratios=width_ratios,
                      height_ratios=height_ratios, hspace=0., wspace=0.)

    ## draw matrix
    if complex_layout:
        ax3 = subplot(gs[main_index])
    else:
        ax3 = gca()

    im = ax3.imshow(matrix, aspect=aspect, vmin=vmin, vmax=vmax,
                    norm=norm, cmap=cmap, origin=origin, **kwargs)

    if xticklabels is not None:
        ax3.xaxis.set_major_formatter(_index_formatter(xticklabels))
    if yticklabels is not None and ncol == 1:
        ax3.yaxis.set_major_formatter(_index_formatter(yticklabels))

    if allticks:
        ax3.xaxis.set_major_locator(ticker.IndexLocator(offset=0.5, base=1.))
        ax3.yaxis.set_major_locator(ticker.IndexLocator(offset=0.5, base=1.))
    else:
        locator = ticker.AutoLocator()
        locator.set_params(integer=True)
        minor_locator = ticker.AutoMinorLocator()
        ax3.xaxis.set_major_locator(locator)
        ax3.xaxis.set_minor_locator(minor_locator)

        locator = ticker.AutoLocator()
        locator.set_params(integer=True)
        minor_locator = ticker.AutoMinorLocator()
        ax3.yaxis.set_major_locator(locator)
        ax3.yaxis.set_minor_locator(minor_locator)

    if ncol > 1:
        ax3.yaxis.set_major_formatter(ticker.NullFormatter())

    ## draw x_ and y_array
    lines = []

    if nrow > 1:
        ax1 = subplot(gs[upper_index])

        if tree_mode_x:
            Y, X = drawTree(x_array, label_func=None, orientation='vertical',
                            inverted=True)
            miny = min(Y.values())
            maxy = max(Y.values())

            minx = min(X.values())
            maxx = max(X.values())

            ax1.set_xlim(minx-.5, maxx+.5)
            ax1.set_ylim(miny, 1.05*maxy)
        else:
            ax1.set_xticklabels([])

            y = x_array
            xp, yp = interpY(y)
            points = np.array([xp, yp]).T.reshape(-1, 1, 2)
            segments = np.concatenate([points[:-1], points[1:]], axis=1)
            lcy = LineCollection(segments, array=yp, linewidths=lw, cmap=cmap)
            lines.append(lcy)
            ax1.add_collection(lcy)

            ax1.set_xlim(xp.min()-.5, xp.max()+.5)
            ax1.set_ylim(yp.min(), yp.max())

        # BUGFIX: keep the top panel aligned with the matrix; this previously
        # called ax2.invert_xaxis(), and ax2 is None whenever ncol == 1
        if ax3.xaxis_inverted():
            ax1.invert_xaxis()

        ax1.axis('off')

    if ncol > 1:
        ax2 = subplot(gs[left_index])

        if tree_mode_y:
            X, Y = drawTree(y_array, label_func=None, inverted=True)
            miny = min(Y.values())
            maxy = max(Y.values())

            minx = min(X.values())
            maxx = max(X.values())

            ax2.set_ylim(miny-.5, maxy+.5)
            ax2.set_xlim(minx, 1.05*maxx)
        else:
            ax2.set_xticklabels([])

            y = y_array
            xp, yp = interpY(y)
            points = np.array([yp, xp]).T.reshape(-1, 1, 2)
            segments = np.concatenate([points[:-1], points[1:]], axis=1)
            lcx = LineCollection(segments, array=yp, linewidths=lw, cmap=cmap)
            lines.append(lcx)
            ax2.add_collection(lcx)

            ax2.set_xlim(yp.min(), yp.max())
            ax2.set_ylim(xp.min()-.5, xp.max()+.5)

        ax2.invert_xaxis()

        if ax3.yaxis_inverted():
            ax2.invert_yaxis()

        ax2.axis('off')

    ## draw colorbar
    sca(ax3)
    cb = None
    if show_colorbar:
        if nrow > 1:
            axes = [ax1, ax2, ax3]
            while None in axes:
                axes.remove(None)
            s = H / (H + 1.)
            cb = colorbar(mappable=im, ax=axes, anchor=(0, 0), shrink=s, extend=cb_extend)
        else:
            cb = colorbar(mappable=im, extend=cb_extend)

    sca(ax3)
    sci(im)

    if interactive:
        from prody.utilities import ImageCursor
        from matplotlib.pyplot import connect
        cursor = ImageCursor(ax3, im)
        connect('button_press_event', cursor.onClick)

    ax3.tick_params(axis='x', rotation=xtickrotation)

    return im, lines, cb
def reorderMatrix(names, matrix, tree, axis=None):
    """
    Reorder a matrix based on a tree and return the reordered matrix
    and indices for reordering other things.

    :arg names: a list of names associated with the rows of the matrix
        These names must match the ones used to generate the tree
    :type names: list

    :arg matrix: any square matrix
    :type matrix: :class:`~numpy.ndarray`

    :arg tree: any tree from :func:`calcTree`
    :type tree: :class:`~Bio.Phylo.BaseTree.Tree`

    :arg axis: along which axis the matrix should be reordered.
        Default is **None** which reorder along all the axes
    :type axis: int
    """

    try:
        from Bio import Phylo
    except ImportError:
        raise ImportError('Phylo module could not be imported. '
                          'Reinstall ProDy or install Biopython '
                          'to solve the problem.')

    # validate the matrix first: it must be a square 2-D numpy array
    try:
        if matrix.ndim != 2:
            raise ValueError('matrix should be a 2D matrix.')
    except AttributeError:
        raise TypeError('matrix should be a numpy array.')

    if np.shape(matrix)[0] != np.shape(matrix)[1]:
        raise ValueError('matrix should be a square matrix')

    names = np.asarray(names)

    if np.isscalar(names):
        raise TypeError('names should be list-like')

    if not len(names):
        raise TypeError('names is empty')

    if not isinstance(tree, Phylo.BaseTree.Tree):
        raise TypeError('tree should be a BioPython Tree')

    if len(names) != len(matrix):
        raise ValueError('names should have entries for each matrix row/column')

    terminals = tree.get_terminals()
    if len(names) != len(terminals):
        raise ValueError('names should have entries for each tree terminal')

    if len(terminals) != len(matrix):
        raise ValueError('matrix should have a row for each tree terminal')

    # map each tree leaf (in dendrogram order) to its row index in *names*
    leaf_positions = []
    for leaf in terminals:
        matches = np.flatnonzero(names == leaf.name)
        if len(matches) == 0:
            raise ValueError('inconsistent names and tree: %s not in names' % leaf.name)
        if len(matches) > 1:
            raise ValueError('inconsistent names and tree: duplicate name %s in names' % leaf.name)
        leaf_positions.append(matches[0])

    # build one index array per axis; untouched axes keep their natural order
    if axis is None:
        selectors = [leaf_positions] * matrix.ndim
    else:
        selectors = [np.arange(size) for size in matrix.shape]
        for ax in ([axis] if np.isscalar(axis) else axis):
            selectors[ax] = leaf_positions
    reordered = matrix[np.ix_(*selectors)]

    return reordered, leaf_positions
def findSubgroups(tree, c, method='naive', **kwargs):
    """
    Divide a tree into subgroups using a criterion and a cutoff.
    Returns a list of lists with labels divided into subgroups.
    """

    method = method.lower().strip()
    terminals = tree.get_terminals()
    names = [leaf.name for leaf in terminals]
    Z = None

    if method != 'naive':
        try:
            Z = getLinkage(names, tree)
        except LinkageError:
            print('Failed to build linkage; fall back to naive criterion')
            method = 'naive'

    if method == 'naive':
        # walk neighbouring leaves; a leaf-to-leaf distance above *c*
        # starts a new subgroup
        subgroups = [[names[0]]]
        for prev_leaf, next_leaf in zip(terminals[:-1], terminals[1:]):
            if tree.distance(prev_leaf, next_leaf) > c:
                subgroups.append([])
            subgroups[-1].append(next_leaf.name)
    else:
        # delegate to SciPy's flat-cluster extraction on the linkage matrix
        from scipy.cluster.hierarchy import fcluster

        assignments = fcluster(Z, c, criterion=method, **kwargs)
        subgroups = [[] for _ in range(len(np.unique(assignments)))]
        for leaf_name, group in zip(names, assignments):
            subgroups[group - 1].append(leaf_name)

    return subgroups
| 33.449692
| 122
| 0.594045
|
import numpy as np
from numpy import unique, linalg, diag, sqrt, dot
from Bio.Phylo.BaseTree import Tree, Clade
from prody import PY3K
from .misctools import addEnds, interpY, index, isListLike
from .checkers import checkCoords
from .logger import LOGGER
__all__ = ['calcTree', 'clusterMatrix', 'showLines', 'showMatrix',
'reorderMatrix', 'findSubgroups', 'getCoords',
'getLinkage', 'getTreeFromLinkage', 'clusterSubfamilies']
class LinkageError(Exception):
    """Raised when a Bio.Phylo tree cannot be mapped onto a SciPy linkage
    matrix (see :func:`getLinkage`)."""
    pass
def clusterSubfamilies(similarities, n_clusters=0, linkage='all', method='tsne', cutoff=0.0, **kwargs):
    """Cluster a square similarity matrix into subfamilies.

    The matrix is embedded in 2-D (t-SNE, or spectral embedding of a
    thresholded matrix) and agglomerative clustering is run over a range of
    cluster counts and linkage criteria; the labeling with the best
    silhouette score is returned.

    :arg similarities: square similarity matrix
    :type similarities: :class:`~numpy.ndarray`

    :arg n_clusters: fixed number of clusters; 0 scans 2..9
    :type n_clusters: int

    :arg linkage: 'all', one of 'ward'/'average'/'complete'/'single', or a
        list of them
    :arg method: 'tsne' or 'spectral'
    :arg cutoff: threshold used to binarize the matrix for 'spectral'
    :type cutoff: float
    """
    try:
        from sklearn.manifold import SpectralEmbedding
        from sklearn.cluster import AgglomerativeClustering
        from sklearn.metrics import silhouette_score
        from sklearn.manifold import TSNE
    except ImportError:
        raise ImportError('need sklearn module')
    # NOTE(review): dead code below kept as found (disabled Biopython check)
    '''
    try:
        import Bio
    except ImportError:
        raise ImportError('Phylo module could not be imported. '
            'Reinstall ProDy or install Biopython '
            'to solve the problem.')
    '''
    if not isinstance(similarities, np.ndarray):
        raise TypeError('similarities should be a numpy ndarray')
    dim = similarities.shape
    if dim[0] != dim[1]:
        raise ValueError('similarities must be a square matrix')
    # a fixed n_clusters collapses the scan to a single value
    if n_clusters != 0:
        if not isinstance(n_clusters, int):
            raise TypeError('clusters must be an instance of int')
        if n_clusters < 1:
            raise ValueError('clusters must be a positive integer')
        elif n_clusters > similarities.shape[0]:
            raise ValueError('clusters can\'t be longer than similarities matrix')
        nclusts = range(n_clusters,n_clusters+1)
    else:
        nclusts = range(2,10,1)
    if linkage != 'all':
        # Check if given input for linkage is list-like
        if isListLike(linkage):
            for val in linkage:
                if val.lower() not in ['ward', 'average', 'complete', 'single']:
                    raise ValueError('linkage must be one or more of: \'ward\', \'average\', \'complete\', or \'single\'')
            if len(linkage) > 4:
                raise ValueError('linkage must be one or more of: \'ward\', \'average\', \'complete\', or \'single\'')
            linkages = [ x.lower() for x in linkage ]
        # If not, check if it is a valid string and method name
        else:
            if not isinstance(linkage, str):
                raise TypeError('linkage must be an instance of str or list-like of strs')
            if linkage not in ['ward', 'average', 'complete', 'single']:
                raise ValueError('linkage must one or more of: \'ward\', \'average\', \'complete\', or \'single\'')
            linkages = [linkage]
    else:
        linkages = ['ward', 'average', 'complete', 'single']
    if method != 'tsne':
        if not isinstance(method, str):
            raise TypeError('method must be an instance of str')
        if method != 'spectral':
            raise ValueError('method must be either \'tsne\' or \'spectral\'')
    if not isinstance(cutoff, float):
        raise TypeError('cutoff must be an instance of float')
    # keep the best silhouette score seen over the whole scan
    best_score = -1
    best_nclust = 0
    best_link = ''
    best_labels = []
    # Scan over range of clusters
    for x in nclusts:
        if method == 'tsne':
            embedding = TSNE(n_components=2)
            transform = embedding.fit_transform(similarities)
        else:
            # binarize: entries above *cutoff* become 0, others -1
            # NOTE(review): named after a Kirchhoff/connectivity matrix —
            # confirm the intended sign convention with the original authors
            kirchhoff = np.where(similarities > cutoff, 0, -1)
            embedding = SpectralEmbedding(n_components=2)
            transform = embedding.fit_transform(kirchhoff)
        for link in linkages:
            clustering = AgglomerativeClustering(linkage=link, n_clusters=x)
            clustering.fit(transform)
            silhouette_avg = silhouette_score(transform, clustering.labels_)
            if silhouette_avg > best_score:
                best_score = silhouette_avg
                best_nclust = x
                best_link = link
                best_labels = clustering.labels_
    return best_labels
def getCoords(data):
    """Return coordinates extracted from *data*.

    Objects exposing a ``_getCoords`` or ``getCoords`` method are queried;
    otherwise *data* is validated with :func:`checkCoords` and returned
    unchanged.

    :raises TypeError: when *data* is neither a coordinate array nor an
        object with a ``getCoords`` method
    """

    try:
        # prefer the private accessor when the object provides one
        if hasattr(data, '_getCoords'):
            data = data._getCoords()
        else:
            data = data.getCoords()
    except AttributeError:
        # no accessor at all: *data* must itself be coordinate-array-like
        try:
            checkCoords(data)
        except TypeError:
            raise TypeError('data must be a Numpy array or an object '
                            'with `getCoords` method')

    return data
def getLinkage(names, tree):
    """Convert a binary :class:`~Bio.Phylo.BaseTree.Tree` into a SciPy-style
    linkage matrix.

    *names* fixes the leaf order.  Each row of the returned ``(n-1, 4)``
    array holds the two merged cluster indices, the merge height, and the
    number of terminals under the new cluster, following
    :func:`scipy.cluster.hierarchy.linkage` conventions.

    :raises LinkageError: when the tree does not have exactly n-1 internal
        clades (i.e. is not strictly binary)
    """
    tree_terminals = tree.get_terminals()

    if len(tree_terminals) != len(names):
        raise ValueError('inconsistent number of terminals in tree and names')

    # reorder terminal clades so terminals[i] corresponds to names[i]
    terminals = [None] * len(names)
    for clade in tree_terminals:
        i = index(names, clade.name)
        terminals[i] = clade

    n = len(terminals)
    # internal clades, deepest-first; they are numbered n..2n-2 as in SciPy
    nonterminals = [c for c in reversed(tree.get_nonterminals())]
    if len(nonterminals) != n-1:
        # NOTE(review): the check is on internal clades although the message
        # says "terminal"
        raise LinkageError('wrong number of terminal clades')

    Z = np.zeros((n-1, 4))

    root = tree.root

    def _indexOfClade(clade):
        # leaves map to 0..n-1, internal clades to n..2n-2
        if clade.is_terminal():
            i = index(terminals, clade)
        else:
            i = index(nonterminals, clade) + n
        return i

    def _height_of(clade):
        # distance from *clade* down to its deepest terminal
        if clade.is_terminal():
            height = 0
        else:
            height = max(_height_of(c) + c.branch_length for c in clade.clades)

        return height

    def _dfs(clade):
        # fill the linkage row for each internal clade; assumes binary clades
        if clade.is_terminal():
            return

        i = _indexOfClade(clade)
        clade_a = clade.clades[0]
        clade_b = clade.clades[1]

        a = _indexOfClade(clade_a)
        b = _indexOfClade(clade_b)

        l = min(a, b)
        r = max(a, b)

        Z[i-n, 0] = l
        Z[i-n, 1] = r
        # height doubled — presumably so leaf-to-leaf distance matches the
        # merge height; TODO confirm against getTreeFromLinkage
        Z[i-n, 2] = _height_of(clade) * 2.
        Z[i-n, 3] = clade.count_terminals()

        _dfs(clade_a)
        _dfs(clade_b)

    _dfs(root)

    return Z
def getTreeFromLinkage(names, linkage):
    """Build a :class:`~Bio.Phylo.BaseTree.Tree` from a SciPy linkage matrix.

    :arg names: leaf names, ordered as in the linkage matrix
    :type names: list

    :arg linkage: ``(len(names)-1, 4)`` linkage matrix from
        :func:`scipy.cluster.hierarchy.linkage`
    :type linkage: :class:`~numpy.ndarray`

    :raises LinkageError: when the linkage matrix has the wrong shape
    """
    try:
        import Bio
    except ImportError:
        raise ImportError('Phylo module could not be imported. '
            'Reinstall ProDy or install Biopython '
            'to solve the problem.')

    from Bio.Phylo.BaseTree import Tree, Clade

    if not isinstance(linkage, np.ndarray):
        raise TypeError('linkage must be a numpy.ndarray instance')

    if linkage.ndim != 2:
        raise LinkageError('linkage must be a 2-dimensional matrix')

    if linkage.shape[1] != 4:
        raise LinkageError('linkage must have exactly 4 columns')

    n_terms = len(names)
    if linkage.shape[0] != n_terms-1:
        raise LinkageError('linkage must have exactly len(names)-1 rows')

    # clades[i] is the subtree for cluster i: the first n_terms entries are
    # leaves; merged clusters are appended as linkage rows are replayed
    clades = []
    heights = []
    for name in names:
        clade = Clade(None, name)
        clades.append(clade)
        heights.append(0.)

    for link in linkage:
        l = int(link[0])
        r = int(link[1])
        height = link[2]

        left = clades[l]
        right = clades[r]

        lh = heights[l]
        rh = heights[r]

        # branch lengths place every leaf of the new clade at *height*
        left.branch_length = height - lh
        right.branch_length = height - rh

        clade = Clade(None, None)
        clade.clades.append(left)
        clade.clades.append(right)

        clades.append(clade)
        heights.append(height)

    # the last merged clade is the root
    return Tree(clade)
def calcTree(names, distance_matrix, method='upgma', linkage=False):
    """Construct a clustering/phylogenetic tree from a distance matrix.

    :arg names: labels for the rows/columns of *distance_matrix*
    :arg distance_matrix: square matrix of pairwise distances
    :type distance_matrix: :class:`~numpy.ndarray`
    :arg method: ``'nj'``, ``'upgma'``, or a SciPy hierarchical-clustering
        method (``'ward'``, ``'single'``, ``'average'``, ``'weighted'``,
        ``'centroid'``, ``'median'``)
    :arg linkage: when **True**, also return the linkage matrix
    """
    try:
        import Bio
    except ImportError:
        raise ImportError('Phylo module could not be imported. '
            'Reinstall ProDy or install Biopython '
            'to solve the problem.')

    from .TreeConstruction import DistanceMatrix, DistanceTreeConstructor

    if len(names) != distance_matrix.shape[0] or len(names) != distance_matrix.shape[1]:
        raise ValueError("Mismatch between the sizes of matrix and names.")

    method = method.lower().strip()

    if method in ['ward', 'single', 'average', 'weighted', 'centroid', 'median']:
        # SciPy hierarchical clustering, converted back into a Bio tree
        from scipy.cluster.hierarchy import linkage as hlinkage
        from scipy.spatial.distance import squareform

        Z = hlinkage(squareform(distance_matrix), method=method)
        tree = getTreeFromLinkage(names, Z)
    else:
        # NJ/UPGMA via the bundled TreeConstruction module, which expects a
        # lower-triangular list-of-lists distance matrix
        matrix = []
        k = 1
        Z = None
        for row in distance_matrix:
            matrix.append(list(row[:k]))
            k = k + 1

        if isinstance(names, np.ndarray):
            names = names.tolist()
        dm = DistanceMatrix(names, matrix)
        constructor = DistanceTreeConstructor()

        method = method.strip().lower()
        if method == 'nj':
            tree = constructor.nj(dm)
        elif method == 'upgma':
            tree = constructor.upgma(dm)
            # NOTE(review): the linkage matrix is only built for 'upgma';
            # with method='nj' and linkage=True, Z stays None — confirm
            # whether this is intentional
            if linkage:
                Z = getLinkage(names, tree)
        else:
            raise ValueError('Method can be only either "nj", "upgma" or '
                             'hierarchical clustering such as "single", "average", etc.')

        # strip the auto-generated internal-node names produced by Biopython
        for node in tree.get_nonterminals():
            node.name = None

    if linkage:
        return tree, Z
    else:
        return tree
def writeTree(filename, tree, format_str='newick'):
    """Write *tree* to *filename* via :func:`Bio.Phylo.write`.

    :arg filename: destination file name
    :type filename: str

    :arg tree: tree to be written
    :type tree: :class:`~Bio.Phylo.BaseTree.Tree`

    :arg format_str: output format name, e.g. ``'newick'``
    :type format_str: str
    """
    try:
        from Bio import Phylo
    except ImportError:
        raise ImportError('Phylo module could not be imported. '
                          'Reinstall ProDy or install Biopython '
                          'to solve the problem.')

    # validate arguments in a fixed order so error types stay predictable
    if not isinstance(filename, str):
        raise TypeError('filename should be a string')

    if not isinstance(tree, Phylo.BaseTree.Tree):
        raise TypeError('tree should be a Biopython.Phylo Tree object')

    if not isinstance(format_str, str):
        raise TypeError('format_str should be a string')

    Phylo.write(tree, filename, format_str)
def clusterMatrix(distance_matrix=None, similarity_matrix=None, labels=None, return_linkage=None, **kwargs):
    """Cluster a distance (or similarity) matrix hierarchically and reorder it
    so that similar rows/columns end up adjacent.

    :arg distance_matrix: square matrix of pairwise distances
    :type distance_matrix: :class:`~numpy.ndarray`

    :arg similarity_matrix: square matrix of pairwise similarities; the
        distances are derived as ``1 - similarity_matrix``.  Exactly one of
        the two matrices must be provided
    :type similarity_matrix: :class:`~numpy.ndarray`

    :arg labels: labels for the rows/columns, reordered along with the matrix
    :type labels: list

    :arg return_linkage: whether to also return the linkage matrix
    :type return_linkage: bool

    *orientation* and *reversed* control the dendrogram used for sorting;
    remaining keyword arguments go to :func:`scipy.cluster.hierarchy.linkage`.

    Returns ``(sorted_matrix, indices[, sorted_labels][, linkage_matrix])``.
    """
    import scipy.cluster.hierarchy as sch
    from scipy import spatial
    if similarity_matrix is None and distance_matrix is None:
        raise ValueError('Please provide a distance matrix or a similarity matrix')
    # BUGFIX: the option used to be popped only under the misspelled key
    # 'orientiation', so a correctly spelled orientation= kwarg leaked into
    # sch.linkage(**kwargs) below and raised TypeError.  Accept both, keeping
    # the historical misspelling for backward compatibility.
    orientation = kwargs.pop('orientation', kwargs.pop('orientiation', 'right'))
    reverse_order = kwargs.pop('reversed', False)  # renamed: don't shadow builtin reversed()
    no_plot = kwargs.pop('no_plot', True)

    if distance_matrix is None:
        matrix = similarity_matrix
        distance_matrix = 1. - similarity_matrix
    else:
        matrix = distance_matrix

    # condensed form required by sch.linkage
    formatted_distance_matrix = spatial.distance.squareform(distance_matrix)
    linkage_matrix = sch.linkage(formatted_distance_matrix, **kwargs)
    # the dendrogram is only used to obtain the leaf ordering
    sorting_dendrogram = sch.dendrogram(linkage_matrix, orientation=orientation, labels=labels, no_plot=no_plot)

    indices = sorting_dendrogram['leaves']
    sorted_labels = sorting_dendrogram['ivl']

    if reverse_order:
        indices = indices[::-1]
        sorted_labels = sorted_labels[::-1]

    # apply the leaf order to both axes
    sorted_matrix = matrix[indices, :]
    sorted_matrix = sorted_matrix[:, indices]

    return_vals = [sorted_matrix, indices]

    if labels is not None:
        return_vals.append(sorted_labels)
    if return_linkage:
        return_vals.append(linkage_matrix)
    return tuple(return_vals) # convert to tuple to avoid [pylint] E0632:Possible unbalanced tuple unpacking
def showLines(*args, **kwargs):
    """Show 1-D data using :meth:`~matplotlib.axes.Axes.plot`.

    Positional arguments and remaining keyword arguments are forwarded to
    ``plot``.  Recognized keyword arguments:

    :arg ticklabels: x-tick labels, or a callable tick formatter
    :arg dy: symmetric error band half-width(s), shaded around each curve
    :arg lower, upper: explicit lower/upper band(s) to shade between
    :arg alpha: opacity of the *dy* band (default 0.5)
    :arg beta: opacity of the lower/upper band (default 0.25)
    :arg gap: insert NaN gaps at chain/segment boundaries via :func:`addEnds`
    :arg label: a label (or list of labels) for the plotted curves

    Returns ``(lines, polys)``: the line artists and the shaded bands.
    """
    # note for developers: this function serves as a low-level
    # plotting function which provides basic utilities for other
    # plotting functions. Therefore showFigure is not handled
    # in this function as it should be already handled in the caller.

    ticklabels = kwargs.pop('ticklabels', None)
    dy = kwargs.pop('dy', None)
    miny = kwargs.pop('lower', None)
    maxy = kwargs.pop('upper', None)
    alpha = kwargs.pop('alpha', 0.5)
    beta = kwargs.pop('beta', 0.25)
    gap = kwargs.pop('gap', False)
    labels = kwargs.pop('label', None)

    from matplotlib import ticker
    from matplotlib.pyplot import gca

    ax = gca()
    lines = ax.plot(*args, **kwargs)

    polys = []

    for i, line in enumerate(lines):
        color = line.get_color()
        x, y = line.get_data()

        if gap:
            x_new, y_new = addEnds(x, y)
            line.set_data(x_new, y_new)
        else:
            x_new, y_new = x, y

        if labels is not None:
            if np.isscalar(labels):
                line.set_label(labels)
            else:
                try:
                    line.set_label(labels[i])
                except IndexError:
                    raise ValueError('The number of labels ({0}) and that of y ({1}) do not match.'
                                     .format(len(labels), len(line)))

        # the following function needs to be here so that line exists
        def sub_array(a, i, tag='a'):
            # Return the slice of *a* for curve *i*: *a* is either one array
            # shared by all curves (ndim == 1) or one array per curve
            # (ndim == 2).
            ndim = 0
            if a is not None:
                if np.isscalar(a[0]):
                    ndim = 1  # a plain list (array)
                else:
                    ndim = 2  # a nested list (array)
            else:
                return None

            if ndim == 1:
                _a = a
            else:
                try:
                    _a = a[i]
                except IndexError:
                    # BUGFIX: the message previously reported len(miny) and
                    # len(line) regardless of which array actually failed
                    raise ValueError('The number of {2} ({0}) and that of y ({1}) do not match.'
                                     .format(len(a), len(lines), tag))

            if len(_a) != len(y):
                # BUGFIX: this previously formatted with the possibly
                # undefined name _miny, raising NameError instead of the
                # intended ValueError
                raise ValueError('The shapes of {2} ({0}) and y ({1}) do not match.'
                                 .format(len(_a), len(y), tag))
            return _a

        if miny is not None and maxy is not None:
            _miny = sub_array(miny, i)
            _maxy = sub_array(maxy, i)

            if gap:
                _, _miny = addEnds(x, _miny)
                _, _maxy = addEnds(x, _maxy)

            poly = ax.fill_between(x_new, _miny, _maxy,
                                   alpha=beta, facecolor=color, edgecolor=None,
                                   linewidth=1, antialiased=True)
            polys.append(poly)

        if dy is not None:
            _dy = sub_array(dy, i)

            if gap:
                _, _dy = addEnds(x, _dy)

            poly = ax.fill_between(x_new, y_new-_dy, y_new+_dy,
                                   alpha=alpha, facecolor=color, edgecolor=None,
                                   linewidth=1, antialiased=True)
            polys.append(poly)

    ax.margins(x=0)
    if ticklabels is not None:
        if callable(ticklabels):
            ax.get_xaxis().set_major_formatter(ticker.FuncFormatter(ticklabels))
        elif hasattr(ticker, 'IndexFormatter'):
            ax.get_xaxis().set_major_formatter(ticker.IndexFormatter(ticklabels))
        else:
            # ticker.IndexFormatter was removed in matplotlib 3.5; emulate it
            ax.get_xaxis().set_major_formatter(ticker.FuncFormatter(
                lambda x, pos: ticklabels[int(round(x))]
                if 0 <= int(round(x)) < len(ticklabels) else ''))

    ax.xaxis.set_major_locator(ticker.AutoLocator())
    ax.xaxis.set_minor_locator(ticker.AutoMinorLocator())

    return lines, polys
def showMatrix(matrix, x_array=None, y_array=None, **kwargs):
    """Show a matrix using :meth:`~matplotlib.axes.Axes.imshow`. Curves on x- and y-axis can be added.

    :arg matrix: matrix to be displayed
    :type matrix: :class:`~numpy.ndarray`

    :arg x_array: data to be plotted above the matrix
    :type x_array: :class:`~numpy.ndarray`

    :arg y_array: data to be plotted on the left side of the matrix
    :type y_array: :class:`~numpy.ndarray`

    :arg percentile: a percentile threshold to remove outliers, i.e. only
        showing data within *p*-th to *100-p*-th percentile
    :type percentile: float

    :arg interactive: turn on or off the interactive options
    :type interactive: bool

    :arg xtickrotation: how much to rotate the xticklabels in degrees
    :type xtickrotation: float

    Returns ``(im, lines, cb)``: the image, the side-panel artists, and the
    colorbar (**None** when ``colorbar=False``).
    """
    from matplotlib import ticker
    from matplotlib.gridspec import GridSpec
    from matplotlib.collections import LineCollection
    from matplotlib.pyplot import gca, sca, sci, colorbar, subplot
    from .drawtools import drawTree
    # optionally clip the color range to the p..(100-p) percentile band
    p = kwargs.pop('percentile', None)
    vmin = vmax = None
    if p is not None:
        vmin = np.percentile(matrix, p)
        vmax = np.percentile(matrix, 100-p)
    vmin = kwargs.pop('vmin', vmin)
    vmax = kwargs.pop('vmax', vmax)
    vcenter = kwargs.pop('vcenter', None)
    norm = kwargs.pop('norm', None)
    if vcenter is not None and norm is None:
        if PY3K:
            try:
                from matplotlib.colors import DivergingNorm
            except ImportError:
                # DivergingNorm became TwoSlopeNorm in matplotlib 3.2
                from matplotlib.colors import TwoSlopeNorm as DivergingNorm
            norm = DivergingNorm(vmin=vmin, vcenter=0., vmax=vmax)
        else:
            LOGGER.warn('vcenter cannot be used in Python 2 so norm remains None')
    lw = kwargs.pop('linewidth', 1)
    W = H = kwargs.pop('ratio', 6)
    ticklabels = kwargs.pop('ticklabels', None)
    xticklabels = kwargs.pop('xticklabels', ticklabels)
    yticklabels = kwargs.pop('yticklabels', ticklabels)
    xtickrotation = kwargs.pop('xtickrotation', 0.)
    show_colorbar = kwargs.pop('colorbar', True)
    cb_extend = kwargs.pop('cb_extend', 'neither')
    allticks = kwargs.pop('allticks', False) # this argument is temporary and will be replaced by better implementation
    interactive = kwargs.pop('interactive', True)
    cmap = kwargs.pop('cmap', 'jet')
    origin = kwargs.pop('origin', 'lower')
    try:
        from Bio import Phylo
    except ImportError:
        raise ImportError('Phylo module could not be imported. '
                          'Reinstall ProDy or install Biopython '
                          'to solve the problem.')
    # a Tree on either axis is rendered as a dendrogram instead of a curve
    tree_mode_y = isinstance(y_array, Phylo.BaseTree.Tree)
    tree_mode_x = isinstance(x_array, Phylo.BaseTree.Tree)
    # decide the subplot grid: side arrays get thin panels above/left
    if x_array is not None and y_array is not None:
        nrow = 2; ncol = 2
        i = 1; j = 1
        width_ratios = [1, W]
        height_ratios = [1, H]
        aspect = 'auto'
    elif x_array is not None and y_array is None:
        nrow = 2; ncol = 1
        i = 1; j = 0
        width_ratios = [W]
        height_ratios = [1, H]
        aspect = 'auto'
    elif x_array is None and y_array is not None:
        nrow = 1; ncol = 2
        i = 0; j = 1
        width_ratios = [1, W]
        height_ratios = [H]
        aspect = 'auto'
    else:
        nrow = 1; ncol = 1
        i = 0; j = 0
        width_ratios = [W]
        height_ratios = [H]
        # aspect kwarg only honored when there are no side panels
        aspect = kwargs.pop('aspect', None)
    main_index = (i, j)
    upper_index = (i-1, j)
    left_index = (i, j-1)
    complex_layout = nrow > 1 or ncol > 1
    ax1 = ax2 = ax3 = None
    if complex_layout:
        gs = GridSpec(nrow, ncol, width_ratios=width_ratios,
                      height_ratios=height_ratios, hspace=0., wspace=0.)
    ## draw matrix
    if complex_layout:
        ax3 = subplot(gs[main_index])
    else:
        ax3 = gca()
    im = ax3.imshow(matrix, aspect=aspect, vmin=vmin, vmax=vmax,
                    norm=norm, cmap=cmap, origin=origin, **kwargs)
    #ax3.set_xlim([-0.5, matrix.shape[0]+0.5])
    #ax3.set_ylim([-0.5, matrix.shape[1]+0.5])
    # NOTE(review): ticker.IndexFormatter was removed in matplotlib 3.5 —
    # these calls raise AttributeError on modern matplotlib; needs a
    # FuncFormatter fallback
    if xticklabels is not None:
        ax3.xaxis.set_major_formatter(ticker.IndexFormatter(xticklabels))
    if yticklabels is not None and ncol == 1:
        ax3.yaxis.set_major_formatter(ticker.IndexFormatter(yticklabels))
    if allticks:
        ax3.xaxis.set_major_locator(ticker.IndexLocator(offset=0.5, base=1.))
        ax3.yaxis.set_major_locator(ticker.IndexLocator(offset=0.5, base=1.))
    else:
        locator = ticker.AutoLocator()
        locator.set_params(integer=True)
        minor_locator = ticker.AutoMinorLocator()
        ax3.xaxis.set_major_locator(locator)
        ax3.xaxis.set_minor_locator(minor_locator)
        locator = ticker.AutoLocator()
        locator.set_params(integer=True)
        minor_locator = ticker.AutoMinorLocator()
        ax3.yaxis.set_major_locator(locator)
        ax3.yaxis.set_minor_locator(minor_locator)
    if ncol > 1:
        ax3.yaxis.set_major_formatter(ticker.NullFormatter())
    ## draw x_ and y_array
    lines = []
    if nrow > 1:
        ax1 = subplot(gs[upper_index])
        if tree_mode_x:
            Y, X = drawTree(x_array, label_func=None, orientation='vertical',
                            inverted=True)
            miny = min(Y.values())
            maxy = max(Y.values())
            minx = min(X.values())
            maxx = max(X.values())
            ax1.set_xlim(minx-.5, maxx+.5)
            ax1.set_ylim(miny, 1.05*maxy)
        else:
            ax1.set_xticklabels([])
            y = x_array
            xp, yp = interpY(y)
            points = np.array([xp, yp]).T.reshape(-1, 1, 2)
            segments = np.concatenate([points[:-1], points[1:]], axis=1)
            lcy = LineCollection(segments, array=yp, linewidths=lw, cmap=cmap)
            lines.append(lcy)
            ax1.add_collection(lcy)
            ax1.set_xlim(xp.min()-.5, xp.max()+.5)
            ax1.set_ylim(yp.min(), yp.max())
        # NOTE(review): ax2 is None whenever ncol == 1, so this raises
        # AttributeError in that case — looks like it should be ax1
        if ax3.xaxis_inverted():
            ax2.invert_xaxis()
        ax1.axis('off')
    if ncol > 1:
        ax2 = subplot(gs[left_index])
        if tree_mode_y:
            X, Y = drawTree(y_array, label_func=None, inverted=True)
            miny = min(Y.values())
            maxy = max(Y.values())
            minx = min(X.values())
            maxx = max(X.values())
            ax2.set_ylim(miny-.5, maxy+.5)
            ax2.set_xlim(minx, 1.05*maxx)
        else:
            ax2.set_xticklabels([])
            y = y_array
            xp, yp = interpY(y)
            points = np.array([yp, xp]).T.reshape(-1, 1, 2)
            segments = np.concatenate([points[:-1], points[1:]], axis=1)
            lcx = LineCollection(segments, array=yp, linewidths=lw, cmap=cmap)
            lines.append(lcx)
            ax2.add_collection(lcx)
            ax2.set_xlim(yp.min(), yp.max())
            ax2.set_ylim(xp.min()-.5, xp.max()+.5)
        ax2.invert_xaxis()
        if ax3.yaxis_inverted():
            ax2.invert_yaxis()
        ax2.axis('off')
    ## draw colorbar
    sca(ax3)
    cb = None
    if show_colorbar:
        if nrow > 1:
            # drop the unused panel axes before sharing the colorbar
            axes = [ax1, ax2, ax3]
            while None in axes:
                axes.remove(None)
            s = H / (H + 1.)
            cb = colorbar(mappable=im, ax=axes, anchor=(0, 0), shrink=s, extend=cb_extend)
        else:
            cb = colorbar(mappable=im, extend=cb_extend)
    sca(ax3)
    sci(im)
    if interactive:
        from prody.utilities import ImageCursor
        from matplotlib.pyplot import connect
        cursor = ImageCursor(ax3, im)
        connect('button_press_event', cursor.onClick)
    ax3.tick_params(axis='x', rotation=xtickrotation)
    return im, lines, cb
def reorderMatrix(names, matrix, tree, axis=None):
    """
    Reorder a matrix based on a tree and return the reordered matrix
    and indices for reordering other things.

    :arg names: a list of names associated with the rows of the matrix
        These names must match the ones used to generate the tree
    :type names: list

    :arg matrix: any square matrix
    :type matrix: :class:`~numpy.ndarray`

    :arg tree: any tree from :func:`calcTree`
    :type tree: :class:`~Bio.Phylo.BaseTree.Tree`

    :arg axis: along which axis the matrix should be reordered.
        Default is **None** which reorder along all the axes
    :type axis: int
    """
    try:
        from Bio import Phylo
    except ImportError:
        raise ImportError('Phylo module could not be imported. '
                          'Reinstall ProDy or install Biopython '
                          'to solve the problem.')
    try:
        if matrix.ndim != 2:
            raise ValueError('matrix should be a 2D matrix.')
    except AttributeError:
        raise TypeError('matrix should be a numpy array.')
    if np.shape(matrix)[0] != np.shape(matrix)[1]:
        raise ValueError('matrix should be a square matrix')
    names = np.asarray(names)
    if np.isscalar(names):
        raise TypeError('names should be list-like')
    if not len(names):
        raise TypeError('names is empty')
    if not isinstance(tree, Phylo.BaseTree.Tree):
        raise TypeError('tree should be a BioPython Tree')
    if len(names) != len(matrix):
        raise ValueError('names should have entries for each matrix row/column')
    terminals = tree.get_terminals()
    if len(names) != len(terminals):
        raise ValueError('names should have entries for each tree terminal')
    if len(terminals) != len(matrix):
        raise ValueError('matrix should have a row for each tree terminal')
    # map each leaf (in dendrogram order) to its row index in *names*
    indices = []
    for terminal in terminals:
        name = terminal.name
        locs = np.where(names == name)[0]
        if not len(locs):
            raise ValueError('inconsistent names and tree: %s not in names'%name)
        if len(locs) > 1:
            raise ValueError('inconsistent names and tree: duplicate name %s in names'%name)
        indices.append(locs[0])
    # rmatrix = matrix[:, indices]
    # rmatrix = rmatrix[indices, :]
    # build one index array per axis; axes not selected keep natural order
    if axis is not None:
        I = [np.arange(s) for s in matrix.shape]
        axes = [axis] if np.isscalar(axis) else axis
        for ax in axes:
            I[ax] = indices
    else:
        I = [indices] * matrix.ndim
    rmatrix = matrix[np.ix_(*I)]
    return rmatrix, indices
def findSubgroups(tree, c, method='naive', **kwargs):
    """Divide *tree* into subgroups using criterion *method* and cutoff *c*.

    With ``method='naive'``, neighbouring leaves are grouped together until a
    leaf-to-leaf distance exceeds *c*; other methods are passed to
    :func:`scipy.cluster.hierarchy.fcluster` on the linkage matrix built by
    :func:`getLinkage`.  Returns a list of lists of leaf names.
    """
    method = method.lower().strip()
    terminals = tree.get_terminals()
    names = [clade.name for clade in terminals]
    Z = None
    if method != 'naive':
        try:
            Z = getLinkage(names, tree)
        except LinkageError:
            print('Failed to build linkage; fall back to naive criterion')
            method = 'naive'
    if method == 'naive':
        # a gap larger than *c* between consecutive leaves starts a new group
        subgroups = [[names[0]]]
        for i in range(len(terminals)-1):
            curr_clade = terminals[i]
            next_clade = terminals[i + 1]
            d = tree.distance(curr_clade, next_clade)
            if d > c:
                subgroups.append([])
            subgroups[-1].append(next_clade.name)
    else:
        from scipy.cluster.hierarchy import fcluster
        # fcluster labels are 1..k, hence the t-1 bucket index below
        T = fcluster(Z, c, criterion=method, **kwargs)
        labels = np.unique(T)
        subgroups = [[] for _ in range(len(labels))]
        for i, t in enumerate(T):
            subgroups[t-1].append(names[i])
    return subgroups
| true
| true
|
f71aaf817439b642121452efbd2e412d462ce2b5
| 38,772
|
py
|
Python
|
rpython/rtyper/test/test_rdict.py
|
jptomo/pypy-lang-scheme
|
55edb2cec69d78f86793282a4566fcbc1ef9fcac
|
[
"MIT"
] | 34
|
2015-07-09T04:53:27.000Z
|
2021-07-19T05:22:27.000Z
|
rpython/rtyper/test/test_rdict.py
|
jptomo/pypy-lang-scheme
|
55edb2cec69d78f86793282a4566fcbc1ef9fcac
|
[
"MIT"
] | 6
|
2015-05-30T17:20:45.000Z
|
2017-06-12T14:29:23.000Z
|
rpython/rtyper/test/test_rdict.py
|
jptomo/pypy-lang-scheme
|
55edb2cec69d78f86793282a4566fcbc1ef9fcac
|
[
"MIT"
] | 11
|
2015-09-07T14:26:08.000Z
|
2020-04-10T07:20:41.000Z
|
from rpython.translator.translator import TranslationContext
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rtyper import rint
from rpython.rtyper.lltypesystem import rdict, rstr
from rpython.rtyper.test.tool import BaseRtypingTest
from rpython.rlib.objectmodel import r_dict
from rpython.rlib.rarithmetic import r_int, r_uint, r_longlong, r_ulonglong
import py
py.log.setconsumer("rtyper", py.log.STDOUT)
def not_really_random():
    """A random-ish generator, which also generates nice patterns from time to time.
    Could be useful to detect problems associated with specific usage patterns."""
    import random
    x = random.random()
    # BUGFIX/compat: parenthesized so the module also parses under Python 3
    # (a single-argument print is identical in Python 2 and 3)
    print('random seed: %r' % (x,))
    for i in range(12000):
        # logistic-map-style update: r drifts from 3.4 towards 4.0, moving
        # the sequence from mildly to strongly chaotic patterns
        r = 3.4 + i/20000.0
        x = r*x - x*x
        assert 0 <= x < 4
        yield x
class BaseTestRDict(BaseRtypingTest):
    def test_dict_creation(self):
        """A str-keyed dict survives rtyping: store one value, read it back."""
        def createdict(i):
            d = self.newdict()
            d['hello'] = i
            return d['hello']
        res = self.interpret(createdict, [42])
        assert res == 42
    def test_dict_getitem_setitem(self):
        """setitem/getitem with two distinct keys in the translated dict."""
        def func(i):
            d = self.newdict()
            d['hello'] = i
            d['world'] = i + 1
            return d['hello'] * d['world']
        res = self.interpret(func, [6])
        assert res == 42
    def test_dict_getitem_keyerror(self):
        """Looking up a missing key raises KeyError in translated code."""
        def func(i):
            d = self.newdict()
            d['hello'] = i
            try:
                return d['world']
            except KeyError:
                return 0
        res = self.interpret(func, [6])
        assert res == 0
    def test_dict_del_simple(self):
        """del removes an entry and len() reflects the removal."""
        def func(i):
            d = self.newdict()
            d['hello'] = i
            d['world'] = i + 1
            del d['hello']
            return len(d)
        res = self.interpret(func, [6])
        assert res == 1
    def test_dict_clear(self):
        """clear() empties the dict, which stays usable afterwards."""
        def func(i):
            d = self.newdict()
            d['abc'] = i
            d['def'] = i+1
            d.clear()
            d['ghi'] = i+2
            return ('abc' not in d and 'def' not in d
                    and d['ghi'] == i+2 and len(d) == 1)
        res = self.interpret(func, [7])
        assert res == True
    def test_empty_strings(self):
        """The empty string works as a dict key: insert, delete, re-insert."""
        def func(i):
            d = self.newdict()
            d[''] = i
            del d['']
            try:
                d['']
                return 0
            except KeyError:
                pass
            return 1
        res = self.interpret(func, [6])
        assert res == 1
        # re-inserting after deletion must reuse the slot correctly
        def func(i):
            d = self.newdict()
            d[''] = i
            del d['']
            d[''] = i + 1
            return len(d)
        res = self.interpret(func, [6])
        assert res == 1
    def test_dict_bool(self):
        """Empty dicts are false, non-empty dicts are true after rtyping."""
        def func(i):
            if i:
                d = self.newdict()
            else:
                d = self.newdict()
                d[i] = i+1
            if d:
                return i
            else:
                return i+1
        assert self.interpret(func, [42]) == 43
        assert self.interpret(func, [0]) == 0
    def test_contains(self):
        """The 'in' operator on a translated dict."""
        def func(x, y):
            d = self.newdict()
            d[x] = x+1
            return y in d
        assert self.interpret(func, [42, 0]) == False
        assert self.interpret(func, [42, 42]) == True
    def test_contains_2(self):
        """'in' against a prebuilt dict captured by the translated functions."""
        d = self.newdict()
        d['5'] = None
        d['7'] = None
        def func(x):
            return chr(x) in d
        assert self.interpret(func, [ord('5')]) == True
        assert self.interpret(func, [ord('6')]) == False
        # multi-character keys against the same prebuilt dict
        def func(n):
            return str(n) in d
        assert self.interpret(func, [512]) == False
    def test_dict_iteration(self):
        """Iterating a dict yields each key exactly once."""
        def func(i, j):
            d = self.newdict()
            d['hello'] = i
            d['world'] = j
            k = 1
            for key in d:
                k = k * d[key]
            return k
        res = self.interpret(func, [6, 7])
        assert res == 42
    def test_dict_itermethods(self):
        """iterkeys/itervalues/iteritems all agree with direct indexing."""
        def func():
            d = self.newdict()
            d['hello'] = 6
            d['world'] = 7
            k1 = k2 = k3 = 1
            for key in d.iterkeys():
                k1 = k1 * d[key]
            for value in d.itervalues():
                k2 = k2 * value
            for key, value in d.iteritems():
                assert d[key] == value
                k3 = k3 * value
            return k1 + k2 + k3
        res = self.interpret(func, [])
        assert res == 42 + 42 + 42
    def test_dict_get(self):
        # dict.get() with a default: a missing key yields the default,
        # a present key yields the stored value.
        def func():
            dic = self.newdict()
            x1 = dic.get('hi', 42)  # missing key -> default 42
            dic['blah'] = 1 # XXX this triggers type determination
            x2 = dic.get('blah', 2)  # present key -> stored value 1
            return x1 * 10 + x2
        res = self.interpret(func, ())
        assert res == 421
def test_dict_get_empty(self):
def func():
# this time without writing to the dict
dic = self.newdict()
x1 = dic.get('hi', 42)
x2 = dic.get('blah', 2)
return x1 * 10 + x2
res = self.interpret(func, ())
assert res == 422
def test_dict_setdefault(self):
def f():
d = self.newdict()
d.setdefault('a', 2)
return d['a']
res = self.interpret(f, ())
assert res == 2
def f():
d = self.newdict()
d.setdefault('a', 2)
x = d.setdefault('a', -3)
return x
res = self.interpret(f, ())
assert res == 2
    def test_dict_copy(self):
        def func():
            # XXX this does not work if we use chars, only!
            dic = self.newdict()
            dic['ab'] = 1
            dic['b'] = 2
            d2 = dic.copy()
            ok = 1
            # the copy must contain exactly the original key/value pairs
            for key in d2:
                if dic[key] != d2[key]:
                    ok = 0
            ok &= len(dic) == len(d2)
            # mutating the copy must not affect the original dict
            d2['c'] = 3
            ok &= len(dic) == len(d2) - 1
            return ok
        res = self.interpret(func, ())
        assert res == 1
def test_dict_update(self):
def func():
dic = self.newdict()
dic['ab'] = 1000
dic['b'] = 200
d2 = self.newdict()
d2['b'] = 30
d2['cb'] = 4
dic.update(d2)
ok = len(dic) == 3
sum = ok
for key in dic:
sum += dic[key]
return sum
res = self.interpret(func, ())
assert res == 1035
def test_dict_keys(self):
def func():
dic = self.newdict()
dic[' 4'] = 1000
dic[' 8'] = 200
keys = dic.keys()
return ord(keys[0][1]) + ord(keys[1][1]) - 2*ord('0') + len(keys)
res = self.interpret(func, ())#, view=True)
assert res == 14
def test_list_dict(self):
def func():
dic = self.newdict()
dic[' 4'] = 1000
dic[' 8'] = 200
keys = list(dic)
return ord(keys[0][1]) + ord(keys[1][1]) - 2*ord('0') + len(keys)
res = self.interpret(func, ())#, view=True)
assert res == 14
def test_dict_inst_keys(self):
class Empty:
pass
class A(Empty):
pass
def func():
dic0 = self.newdict()
dic0[Empty()] = 2
dic = self.newdict()
dic[A()] = 1
dic[A()] = 2
keys = dic.keys()
return (isinstance(keys[1], A))*2+(isinstance(keys[0],A))
res = self.interpret(func, [])
assert res == 3
def test_dict_inst_iterkeys(self):
class Empty:
pass
class A(Empty):
pass
def func():
dic0 = self.newdict()
dic0[Empty()] = 2
dic = self.newdict()
dic[A()] = 1
dic[A()] = 2
a = 0
for k in dic.iterkeys():
a += isinstance(k, A)
return a
res = self.interpret(func, [])
assert res == 2
def test_dict_values(self):
def func():
dic = self.newdict()
dic[' 4'] = 1000
dic[' 8'] = 200
values = dic.values()
return values[0] + values[1] + len(values)
res = self.interpret(func, ())
assert res == 1202
def test_dict_inst_values(self):
class A:
pass
def func():
dic = self.newdict()
dic[1] = A()
dic[2] = A()
vals = dic.values()
return (isinstance(vals[1], A))*2+(isinstance(vals[0],A))
res = self.interpret(func, [])
assert res == 3
def test_dict_inst_itervalues(self):
class A:
pass
def func():
dic = self.newdict()
dic[1] = A()
dic[2] = A()
a = 0
for v in dic.itervalues():
a += isinstance(v, A)
return a
res = self.interpret(func, [])
assert res == 2
def test_dict_inst_items(self):
class Empty:
pass
class A:
pass
class B(Empty):
pass
def func():
dic0 = self.newdict()
dic0[Empty()] = A()
dic = self.newdict()
dic[B()] = A()
dic[B()] = A()
items = dic.items()
b = 0
a = 0
for k, v in items:
b += isinstance(k, B)
a += isinstance(v, A)
return 3*b+a
res = self.interpret(func, [])
assert res == 8
def test_dict_inst_iteritems(self):
class Empty:
pass
class A:
pass
class B(Empty):
pass
def func():
dic0 = self.newdict()
dic0[Empty()] = A()
dic = self.newdict()
dic[B()] = A()
dic[B()] = A()
b = 0
a = 0
for k, v in dic.iteritems():
b += isinstance(k, B)
a += isinstance(v, A)
return 3*b+a
res = self.interpret(func, [])
assert res == 8
def test_dict_items(self):
def func():
dic = self.newdict()
dic[' 4'] = 1000
dic[' 8'] = 200
items = dic.items()
res = len(items)
for key, value in items:
res += ord(key[1]) - ord('0') + value
return res
res = self.interpret(func, ())
assert res == 1214
def test_dict_contains(self):
def func():
dic = self.newdict()
dic[' 4'] = 1000
dic[' 8'] = 200
return ' 4' in dic and ' 9' not in dic
res = self.interpret(func, ())
assert res is True
def test_dict_contains_with_constant_dict(self):
dic = self.newdict()
dic['4'] = 1000
dic['8'] = 200
def func(i):
return chr(i) in dic
res = self.interpret(func, [ord('4')])
assert res is True
res = self.interpret(func, [1])
assert res is False
def test_dict_or_none(self):
class A:
pass
def negate(d):
return not d
def func(n):
a = A()
a.d = None
if n > 0:
a.d = self.newdict()
a.d[str(n)] = 1
a.d["42"] = 2
del a.d["42"]
return negate(a.d)
res = self.interpret(func, [10])
assert res is False
res = self.interpret(func, [0])
assert res is True
res = self.interpret(func, [42])
assert res is True
def test_int_dict(self):
def func(a, b):
dic = self.newdict()
dic[12] = 34
dic[a] = 1000
return dic.get(b, -123)
res = self.interpret(func, [12, 12])
assert res == 1000
res = self.interpret(func, [12, 13])
assert res == -123
res = self.interpret(func, [524, 12])
assert res == 34
res = self.interpret(func, [524, 524])
assert res == 1000
res = self.interpret(func, [524, 1036])
assert res == -123
def test_id_instances_keys(self):
class A:
pass
class B(A):
pass
def f():
a = A()
b = B()
d = self.newdict()
d[b] = 7
d[a] = 3
return len(d) + d[a] + d[b]
res = self.interpret(f, [])
assert res == 12
    def test_captured_get(self):
        # A bound method (d.get) captured in a prebuilt global must stay
        # callable after rtyping, both called directly ...
        d = self.newdict()
        d[1] = 2
        get = d.get
        def f():
            return get(1, 3)+get(2, 4)
        res = self.interpret(f, [])
        assert res == 6
        # ... and when passed around as a first-class callable.
        # (The second `f` deliberately shadows the first.)
        def g(h):
            return h(1, 3)
        def f():
            return g(get)
        res = self.interpret(f, [])
        assert res == 2
    def test_specific_obscure_bug(self):
        # NOTE(review): the test name suggests this is a regression test
        # for a specific historical rtyper bug triggered by mixing a
        # list-membership check on instances of one class with a dict
        # keyed by instances of an unrelated class; the original bug is
        # not documented here -- confirm against the VCS history.
        class A: pass
        class B: pass # unrelated kinds of instances
        def f():
            lst = [A()]
            res1 = A() in lst      # False: a fresh A() is not in lst
            d2 = self.newdict()
            d2[B()] = None
            d2[B()] = None         # two distinct B() keys -> len == 2
            return res1+len(d2)
        res = self.interpret(f, [])
        assert res == 2
def test_identity_hash_is_fast(self):
class A(object):
pass
def f():
d = self.newdict()
d[A()] = 1
return d
t = TranslationContext()
s = t.buildannotator().build_types(f, [])
rtyper = t.buildrtyper()
rtyper.specialize()
r_dict = rtyper.getrepr(s)
assert not hasattr(r_dict.lowleveltype.TO.entries.TO.OF, "f_hash")
def test_tuple_dict(self):
def f(i):
d = self.newdict()
d[(1, 4.5, (str(i), 2), 2)] = 4
d[(1, 4.5, (str(i), 2), 3)] = 6
return d[(1, 4.5, (str(i), 2), i)]
res = self.interpret(f, [2])
assert res == f(2)
def test_dict_of_dict(self):
def f(n):
d = self.newdict()
d[5] = d
d[6] = self.newdict()
return len(d[n])
res = self.interpret(f, [5])
assert res == 2
res = self.interpret(f, [6])
assert res == 0
def test_cls_dict(self):
class A(object):
pass
class B(A):
pass
def f(i):
d = self.newdict()
d[A] = 3
d[B] = 4
if i:
cls = A
else:
cls = B
return d[cls]
res = self.interpret(f, [1])
assert res == 3
res = self.interpret(f, [0])
assert res == 4
def test_prebuilt_cls_dict(self):
class A(object):
pass
class B(A):
pass
d = self.newdict()
d[(A, 3)] = 3
d[(B, 0)] = 4
def f(i):
if i:
cls = A
else:
cls = B
try:
return d[cls, i]
except KeyError:
return -99
res = self.interpret(f, [0])
assert res == 4
res = self.interpret(f, [3])
assert res == 3
res = self.interpret(f, [10])
assert res == -99
def test_access_in_try(self):
def f(d):
try:
return d[2]
except ZeroDivisionError:
return 42
return -1
def g(n):
d = self.newdict()
d[1] = n
d[2] = 2*n
return f(d)
res = self.interpret(g, [3])
assert res == 6
def test_access_in_try_set(self):
def f(d):
try:
d[2] = 77
except ZeroDivisionError:
return 42
return -1
def g(n):
d = self.newdict()
d[1] = n
f(d)
return d[2]
res = self.interpret(g, [3])
assert res == 77
def test_resize_during_iteration(self):
def func():
d = self.newdict()
d[5] = 1
d[6] = 2
d[7] = 3
try:
for key, value in d.iteritems():
d[key^16] = value*2
except RuntimeError:
pass
total = 0
for key in d:
total += key
return total
res = self.interpret(func, [])
assert 5 + 6 + 7 <= res <= 5 + 6 + 7 + (5^16) + (6^16) + (7^16)
def test_change_during_iteration(self):
def func():
d = self.newdict()
d['a'] = 1
d['b'] = 2
for key in d:
d[key] = 42
return d['a']
assert self.interpret(func, []) == 42
def test_dict_of_floats(self):
d = self.newdict()
d[3.0] = 42
d[3.1] = 43
d[3.2] = 44
d[3.3] = 45
d[3.4] = 46
def fn(f):
return d[f]
res = self.interpret(fn, [3.0])
assert res == 42
def test_dict_of_r_uint(self):
for r_t in [r_uint, r_longlong, r_ulonglong]:
if r_t is r_int:
continue # for 64-bit platforms: skip r_longlong
d = self.newdict()
d[r_t(2)] = 3
d[r_t(4)] = 5
def fn(x, y):
d[r_t(x)] = 123
return d[r_t(y)]
res = self.interpret(fn, [4, 2])
assert res == 3
res = self.interpret(fn, [3, 3])
assert res == 123
    def test_dict_popitem(self):
        def func():
            d = self.newdict()
            d[5] = 2
            d[6] = 3
            k1, v1 = d.popitem()
            assert len(d) == 1
            k2, v2 = d.popitem()
            # a third popitem() on the now-empty dict must raise KeyError
            try:
                d.popitem()
            except KeyError:
                pass
            else:
                assert 0, "should have raised KeyError"
            assert len(d) == 0
            # encode both popped pairs into one integer for checking
            return k1*1000 + v1*100 + k2*10 + v2
        res = self.interpret(func, [])
        # popitem() order is unspecified: accept either removal order
        assert res in [5263, 6352]
def test_dict_pop(self):
def f(n, default):
d = self.newdict()
d[2] = 3
d[4] = 5
if default == -1:
try:
x = d.pop(n)
except KeyError:
x = -1
else:
x = d.pop(n, default)
return x * 10 + len(d)
res = self.interpret(f, [2, -1])
assert res == 31
res = self.interpret(f, [3, -1])
assert res == -8
res = self.interpret(f, [2, 5])
assert res == 31
def test_dict_pop_instance(self):
class A(object):
pass
def f(n):
d = self.newdict()
d[2] = A()
x = d.pop(n, None)
if x is None:
return 12
else:
return 15
res = self.interpret(f, [2])
assert res == 15
res = self.interpret(f, [700])
assert res == 12
def test_dict_but_not_with_char_keys(self):
def func(i):
d = self.newdict()
d['h'] = i
try:
return d['hello']
except KeyError:
return 0
res = self.interpret(func, [6])
assert res == 0
def test_dict_valid_resize(self):
# see if we find our keys after resize
def func():
d = self.newdict()
# fill it up
for i in range(10):
d[str(i)] = 0
# delete again
for i in range(10):
del d[str(i)]
res = 0
# if it does not crash, we are fine. It crashes if you forget the hash field.
self.interpret(func, [])
# ____________________________________________________________
def test_dict_of_addresses(self):
from rpython.rtyper.lltypesystem import llmemory
TP = lltype.Struct('x')
a = lltype.malloc(TP, flavor='raw', immortal=True)
b = lltype.malloc(TP, flavor='raw', immortal=True)
def func(i):
d = self.newdict()
d[llmemory.cast_ptr_to_adr(a)] = 123
d[llmemory.cast_ptr_to_adr(b)] = 456
if i > 5:
key = llmemory.cast_ptr_to_adr(a)
else:
key = llmemory.cast_ptr_to_adr(b)
return d[key]
assert self.interpret(func, [3]) == 456
def test_prebuilt_list_of_addresses(self):
from rpython.rtyper.lltypesystem import llmemory
TP = lltype.Struct('x', ('y', lltype.Signed))
a = lltype.malloc(TP, flavor='raw', immortal=True)
b = lltype.malloc(TP, flavor='raw', immortal=True)
c = lltype.malloc(TP, flavor='raw', immortal=True)
a_a = llmemory.cast_ptr_to_adr(a)
a0 = llmemory.cast_ptr_to_adr(a)
assert a_a is not a0
assert a_a == a0
a_b = llmemory.cast_ptr_to_adr(b)
a_c = llmemory.cast_ptr_to_adr(c)
d = self.newdict()
d[a_a] = 3
d[a_b] = 4
d[a_c] = 5
d[a0] = 8
def func(i):
if i == 0:
ptr = a
else:
ptr = b
return d[llmemory.cast_ptr_to_adr(ptr)]
py.test.raises(TypeError, self.interpret, func, [0])
def test_dict_of_voidp(self):
def func():
d = self.newdict()
handle = lltype.nullptr(rffi.VOIDP.TO)
# Use a negative key, so the dict implementation uses
# the value as a marker for empty entries
d[-1] = handle
return len(d)
assert self.interpret(func, []) == 1
from rpython.translator.c.test.test_genc import compile
f = compile(func, [])
res = f()
assert res == 1
def test_dict_with_SHORT_keys(self):
def func(x):
d = self.newdict()
d[rffi.cast(rffi.SHORT, 42)] = 123
d[rffi.cast(rffi.SHORT, -43)] = 321
return d[rffi.cast(rffi.SHORT, x)]
assert self.interpret(func, [42]) == 123
assert self.interpret(func, [2**16 - 43]) == 321
def test_dict_with_bool_keys(self):
def func(x):
d = self.newdict()
d[False] = 123
d[True] = 321
return d[x == 42]
assert self.interpret(func, [5]) == 123
assert self.interpret(func, [42]) == 321
def test_memoryerror_should_not_insert(self):
# This shows a misbehaviour that also exists in CPython 2.7, but not
# any more in CPython 3.3. The behaviour is that even if a dict
# insertion raises MemoryError, the new item is still inserted.
# If we catch the MemoryError, we can keep inserting new items until
# the dict table is completely full. Then the next insertion loops
# forever. This test only checks that after a MemoryError the
# new item was not inserted.
def _check_small_range(self, n):
if n >= 128:
raise MemoryError
return range(n)
original_check_range = lltype._array._check_range
try:
lltype._array._check_range = _check_small_range
#
def do_insert(d, i):
d[i] = i
def func():
d = self.newdict()
i = 0
while True:
try:
do_insert(d, i)
except MemoryError:
return (i in d)
i += 1
res = self.interpret(func, [])
assert res == 0
#
finally:
lltype._array._check_range = original_check_range
def test_dict_with_none_key(self):
def func(i):
d = self.newdict()
d[None] = i
return d[None]
res = self.interpret(func, [42])
assert res == 42
def test_externalvsinternal(self):
class A: pass
class B: pass
class C: pass
class D: pass
def func():
d1 = self.newdict(); d1[A()] = B()
d2 = self.newdict2(); d2[C()] = D()
return (d1, d2)
res = self.interpret(func, [])
assert lltype.typeOf(res.item0) == lltype.typeOf(res.item1)
def test_r_dict(self):
class FooError(Exception):
pass
def myeq(n, m):
return n == m
def myhash(n):
if n < 0:
raise FooError
return -n
def f(n):
d = self.new_r_dict(myeq, myhash)
for i in range(10):
d[i] = i*i
try:
value1 = d[n]
except FooError:
value1 = 99
try:
value2 = n in d
except FooError:
value2 = 99
try:
value3 = d[-n]
except FooError:
value3 = 99
try:
value4 = (-n) in d
except FooError:
value4 = 99
return (value1 * 1000000 +
value2 * 10000 +
value3 * 100 +
value4)
res = self.interpret(f, [5])
assert res == 25019999
def test_r_dict_popitem_hash(self):
def deq(n, m):
return n == m
def dhash(n):
return ~n
def func():
d = self.new_r_dict(deq, dhash)
d[5] = 2
d[6] = 3
k1, v1 = d.popitem()
assert len(d) == 1
k2, v2 = d.popitem()
try:
d.popitem()
except KeyError:
pass
else:
assert 0, "should have raised KeyError"
assert len(d) == 0
return k1*1000 + v1*100 + k2*10 + v2
res = self.interpret(func, [])
assert res in [5263, 6352]
def test_prebuilt_r_dict(self):
def deq(n, m):
return (n & 3) == (m & 3)
def dhash(n):
return n & 3
d = self.new_r_dict(deq, dhash)
d[0x123] = "abcd"
d[0x231] = "efgh"
def func():
return d[0x348973] + d[0x12981]
res = self.interpret(func, [])
res = self.ll_to_string(res)
assert res == "abcdefgh"
class TestRDict(BaseTestRDict):
@staticmethod
def newdict():
return {}
@staticmethod
def newdict2():
return {}
@staticmethod
def new_r_dict(myeq, myhash):
return r_dict(myeq, myhash)
def test_two_dicts_with_different_value_types(self):
def func(i):
d1 = {}
d1['hello'] = i + 1
d2 = {}
d2['world'] = d1
return d2['world']['hello']
res = self.interpret(func, [5])
assert res == 6
def test_type_erase(self):
class A(object):
pass
class B(object):
pass
def f():
d = {}
d[A()] = B()
d2 = {}
d2[B()] = A()
return d, d2
t = TranslationContext()
s = t.buildannotator().build_types(f, [])
rtyper = t.buildrtyper()
rtyper.specialize()
s_AB_dic = s.items[0]
s_BA_dic = s.items[1]
r_AB_dic = rtyper.getrepr(s_AB_dic)
r_BA_dic = rtyper.getrepr(s_AB_dic)
assert r_AB_dic.lowleveltype == r_BA_dic.lowleveltype
def test_dict_resize(self):
py.test.skip("test written for non-ordered dicts, update or kill")
# XXX we no longer automatically resize on 'del'. We need to
# hack a bit in this test to trigger a resize by continuing to
# fill the dict's table while keeping the actual size very low
# in order to force a resize to shrink the table back
def func(want_empty):
d = self.newdict()
for i in range(rdict.DICT_INITSIZE << 1):
d[chr(ord('a') + i)] = i
if want_empty:
for i in range(rdict.DICT_INITSIZE << 1):
del d[chr(ord('a') + i)]
for i in range(rdict.DICT_INITSIZE << 3):
d[chr(ord('A') - i)] = i
del d[chr(ord('A') - i)]
return d
res = self.interpret(func, [0])
assert len(res.entries) > rdict.DICT_INITSIZE
res = self.interpret(func, [1])
assert len(res.entries) == rdict.DICT_INITSIZE
def test_opt_dummykeymarker(self):
def f():
d = {"hello": None}
d["world"] = None
return "hello" in d, d
res = self.interpret(f, [])
assert res.item0 == True
DICT = lltype.typeOf(res.item1).TO
assert not hasattr(DICT.entries.TO.OF, 'f_valid') # strs have a dummy
def test_opt_dummyvaluemarker(self):
def f(n):
d = {-5: "abcd"}
d[123] = "def"
return len(d[n]), d
res = self.interpret(f, [-5])
assert res.item0 == 4
DICT = lltype.typeOf(res.item1).TO
assert not hasattr(DICT.entries.TO.OF, 'f_valid') # strs have a dummy
def test_opt_nonnegint_dummy(self):
def f(n):
d = {n: 12}
d[-87] = 24
del d[n]
return len(d.copy()), d[-87], d
res = self.interpret(f, [5])
assert res.item0 == 1
assert res.item1 == 24
DICT = lltype.typeOf(res.item2).TO
assert not hasattr(DICT.entries.TO.OF, 'f_valid')# nonneg int: dummy -1
def test_opt_no_dummy(self):
def f(n):
d = {n: 12}
d[-87] = -24
del d[n]
return len(d.copy()), d[-87], d
res = self.interpret(f, [5])
assert res.item0 == 1
assert res.item1 == -24
DICT = lltype.typeOf(res.item2).TO
assert hasattr(DICT.entries.TO.OF, 'f_valid') # no dummy available
def test_opt_boolean_has_no_dummy(self):
def f(n):
d = {n: True}
d[-87] = True
del d[n]
return len(d.copy()), d[-87], d
res = self.interpret(f, [5])
assert res.item0 == 1
assert res.item1 is True
DICT = lltype.typeOf(res.item2).TO
assert hasattr(DICT.entries.TO.OF, 'f_valid') # no dummy available
def test_opt_multiple_identical_dicts(self):
def f(n):
s = "x" * n
d1 = {s: 12}
d2 = {s: 24}
d3 = {s: 36}
d1["a"] = d2[s] # 24
d3[s] += d1["a"] # 60
d2["bc"] = d3[s] # 60
return d2["bc"], d1, d2, d3
res = self.interpret(f, [5])
assert res.item0 == 60
# all three dicts should use the same low-level type
assert lltype.typeOf(res.item1) == lltype.typeOf(res.item2)
assert lltype.typeOf(res.item1) == lltype.typeOf(res.item3)
def test_nonnull_hint(self):
def eq(a, b):
return a == b
def rhash(a):
return 3
def func(i):
d = r_dict(eq, rhash, force_non_null=True)
if not i:
d[None] = i
else:
d[str(i)] = i
return "12" in d, d
llres = self.interpret(func, [12])
assert llres.item0 == 1
DICT = lltype.typeOf(llres.item1)
assert sorted(DICT.TO.entries.TO.OF._flds) == ['f_hash', 'key', 'value']
def test_deleted_entry_reusage_with_colliding_hashes(self):
py.test.skip("test written for non-ordered dicts, update or kill")
def lowlevelhash(value):
p = rstr.mallocstr(len(value))
for i in range(len(value)):
p.chars[i] = value[i]
return rstr.LLHelpers.ll_strhash(p)
def func(c1, c2):
c1 = chr(c1)
c2 = chr(c2)
d = self.newdict()
d[c1] = 1
d[c2] = 2
del d[c1]
return d[c2]
char_by_hash = {}
base = rdict.DICT_INITSIZE
for y in range(0, 256):
y = chr(y)
y_hash = lowlevelhash(y) % base
char_by_hash.setdefault(y_hash, []).append(y)
x, y = char_by_hash[0][:2] # find a collision
res = self.interpret(func, [ord(x), ord(y)])
assert res == 2
def func2(c1, c2):
c1 = chr(c1)
c2 = chr(c2)
d = self.newdict()
d[c1] = 1
d[c2] = 2
del d[c1]
d[c1] = 3
return d
res = self.interpret(func2, [ord(x), ord(y)])
for i in range(len(res.entries)):
assert not (res.entries.everused(i) and not res.entries.valid(i))
def func3(c0, c1, c2, c3, c4, c5, c6, c7):
d = self.newdict()
c0 = chr(c0) ; d[c0] = 1; del d[c0]
c1 = chr(c1) ; d[c1] = 1; del d[c1]
c2 = chr(c2) ; d[c2] = 1; del d[c2]
c3 = chr(c3) ; d[c3] = 1; del d[c3]
c4 = chr(c4) ; d[c4] = 1; del d[c4]
c5 = chr(c5) ; d[c5] = 1; del d[c5]
c6 = chr(c6) ; d[c6] = 1; del d[c6]
c7 = chr(c7) ; d[c7] = 1; del d[c7]
return d
if rdict.DICT_INITSIZE != 8:
py.test.skip("make dict tests more indepdent from initsize")
res = self.interpret(func3, [ord(char_by_hash[i][0])
for i in range(rdict.DICT_INITSIZE)])
count_frees = 0
for i in range(len(res.entries)):
if not res.entries.everused(i):
count_frees += 1
assert count_frees >= 3
class TestStress:
def test_stress(self):
from rpython.annotator.dictdef import DictKey, DictValue
from rpython.annotator import model as annmodel
dictrepr = rdict.DictRepr(None, rint.signed_repr, rint.signed_repr,
DictKey(None, annmodel.SomeInteger()),
DictValue(None, annmodel.SomeInteger()))
dictrepr.setup()
l_dict = rdict.ll_newdict(dictrepr.DICT)
referencetable = [None] * 400
referencelength = 0
value = 0
def complete_check():
for n, refvalue in zip(range(len(referencetable)), referencetable):
try:
gotvalue = rdict.ll_dict_getitem(l_dict, n)
except KeyError:
assert refvalue is None
else:
assert gotvalue == refvalue
for x in not_really_random():
n = int(x*100.0) # 0 <= x < 400
op = repr(x)[-1]
if op <= '2' and referencetable[n] is not None:
rdict.ll_dict_delitem(l_dict, n)
referencetable[n] = None
referencelength -= 1
elif op <= '6':
rdict.ll_dict_setitem(l_dict, n, value)
if referencetable[n] is None:
referencelength += 1
referencetable[n] = value
value += 1
else:
try:
gotvalue = rdict.ll_dict_getitem(l_dict, n)
except KeyError:
assert referencetable[n] is None
else:
assert gotvalue == referencetable[n]
if 1.38 <= x <= 1.39:
complete_check()
print 'current dict length:', referencelength
assert l_dict.num_items == referencelength
complete_check()
def test_stress_2(self):
yield self.stress_combination, True, False
yield self.stress_combination, False, True
yield self.stress_combination, False, False
yield self.stress_combination, True, True
def stress_combination(self, key_can_be_none, value_can_be_none):
from rpython.rtyper.lltypesystem.rstr import string_repr
from rpython.annotator.dictdef import DictKey, DictValue
from rpython.annotator import model as annmodel
print
print "Testing combination with can_be_None: keys %s, values %s" % (
key_can_be_none, value_can_be_none)
class PseudoRTyper:
cache_dummy_values = {}
dictrepr = rdict.DictRepr(PseudoRTyper(), string_repr, string_repr,
DictKey(None, annmodel.SomeString(key_can_be_none)),
DictValue(None, annmodel.SomeString(value_can_be_none)))
dictrepr.setup()
print dictrepr.lowleveltype
for key, value in dictrepr.DICTENTRY._adtmeths.items():
print ' %s = %s' % (key, value)
l_dict = rdict.ll_newdict(dictrepr.DICT)
referencetable = [None] * 400
referencelength = 0
values = not_really_random()
keytable = [string_repr.convert_const("foo%d" % n)
for n in range(len(referencetable))]
def complete_check():
for n, refvalue in zip(range(len(referencetable)), referencetable):
try:
gotvalue = rdict.ll_dict_getitem(l_dict, keytable[n])
except KeyError:
assert refvalue is None
else:
assert gotvalue == refvalue
for x in not_really_random():
n = int(x*100.0) # 0 <= x < 400
op = repr(x)[-1]
if op <= '2' and referencetable[n] is not None:
rdict.ll_dict_delitem(l_dict, keytable[n])
referencetable[n] = None
referencelength -= 1
elif op <= '6':
ll_value = string_repr.convert_const(str(values.next()))
rdict.ll_dict_setitem(l_dict, keytable[n], ll_value)
if referencetable[n] is None:
referencelength += 1
referencetable[n] = ll_value
else:
try:
gotvalue = rdict.ll_dict_getitem(l_dict, keytable[n])
except KeyError:
assert referencetable[n] is None
else:
assert gotvalue == referencetable[n]
if 1.38 <= x <= 1.39:
complete_check()
print 'current dict length:', referencelength
assert l_dict.num_items == referencelength
complete_check()
| 29.824615
| 85
| 0.464562
|
from rpython.translator.translator import TranslationContext
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rtyper import rint
from rpython.rtyper.lltypesystem import rdict, rstr
from rpython.rtyper.test.tool import BaseRtypingTest
from rpython.rlib.objectmodel import r_dict
from rpython.rlib.rarithmetic import r_int, r_uint, r_longlong, r_ulonglong
import py
py.log.setconsumer("rtyper", py.log.STDOUT)
def not_really_random():
    """A random-ish generator, which also generates nice patterns from time to time.
    Could be useful to detect problems associated with specific usage patterns."""
    import random
    x = random.random()
    # print the seed so a failing stress run can be reproduced by hand
    print 'random seed: %r' % (x,)
    for i in range(12000):
        # iterate x -> r*x - x*x (a logistic-map-style recurrence) while
        # slowly ramping r from 3.4 towards 4.0: low r yields regular
        # patterns, higher r yields chaotic values, all within [0, 4)
        r = 3.4 + i/20000.0
        x = r*x - x*x
        assert 0 <= x < 4
        yield x
class BaseTestRDict(BaseRtypingTest):
def test_dict_creation(self):
def createdict(i):
d = self.newdict()
d['hello'] = i
return d['hello']
res = self.interpret(createdict, [42])
assert res == 42
def test_dict_getitem_setitem(self):
def func(i):
d = self.newdict()
d['hello'] = i
d['world'] = i + 1
return d['hello'] * d['world']
res = self.interpret(func, [6])
assert res == 42
def test_dict_getitem_keyerror(self):
def func(i):
d = self.newdict()
d['hello'] = i
try:
return d['world']
except KeyError:
return 0
res = self.interpret(func, [6])
assert res == 0
def test_dict_del_simple(self):
def func(i):
d = self.newdict()
d['hello'] = i
d['world'] = i + 1
del d['hello']
return len(d)
res = self.interpret(func, [6])
assert res == 1
def test_dict_clear(self):
def func(i):
d = self.newdict()
d['abc'] = i
d['def'] = i+1
d.clear()
d['ghi'] = i+2
return ('abc' not in d and 'def' not in d
and d['ghi'] == i+2 and len(d) == 1)
res = self.interpret(func, [7])
assert res == True
def test_empty_strings(self):
def func(i):
d = self.newdict()
d[''] = i
del d['']
try:
d['']
return 0
except KeyError:
pass
return 1
res = self.interpret(func, [6])
assert res == 1
def func(i):
d = self.newdict()
d[''] = i
del d['']
d[''] = i + 1
return len(d)
res = self.interpret(func, [6])
assert res == 1
def test_dict_bool(self):
def func(i):
if i:
d = self.newdict()
else:
d = self.newdict()
d[i] = i+1
if d:
return i
else:
return i+1
assert self.interpret(func, [42]) == 43
assert self.interpret(func, [0]) == 0
def test_contains(self):
def func(x, y):
d = self.newdict()
d[x] = x+1
return y in d
assert self.interpret(func, [42, 0]) == False
assert self.interpret(func, [42, 42]) == True
def test_contains_2(self):
d = self.newdict()
d['5'] = None
d['7'] = None
def func(x):
return chr(x) in d
assert self.interpret(func, [ord('5')]) == True
assert self.interpret(func, [ord('6')]) == False
def func(n):
return str(n) in d
assert self.interpret(func, [512]) == False
def test_dict_iteration(self):
def func(i, j):
d = self.newdict()
d['hello'] = i
d['world'] = j
k = 1
for key in d:
k = k * d[key]
return k
res = self.interpret(func, [6, 7])
assert res == 42
def test_dict_itermethods(self):
def func():
d = self.newdict()
d['hello'] = 6
d['world'] = 7
k1 = k2 = k3 = 1
for key in d.iterkeys():
k1 = k1 * d[key]
for value in d.itervalues():
k2 = k2 * value
for key, value in d.iteritems():
assert d[key] == value
k3 = k3 * value
return k1 + k2 + k3
res = self.interpret(func, [])
assert res == 42 + 42 + 42
def test_dict_get(self):
def func():
dic = self.newdict()
x1 = dic.get('hi', 42)
dic['blah'] = 1
x2 = dic.get('blah', 2)
return x1 * 10 + x2
res = self.interpret(func, ())
assert res == 421
def test_dict_get_empty(self):
def func():
dic = self.newdict()
x1 = dic.get('hi', 42)
x2 = dic.get('blah', 2)
return x1 * 10 + x2
res = self.interpret(func, ())
assert res == 422
def test_dict_setdefault(self):
def f():
d = self.newdict()
d.setdefault('a', 2)
return d['a']
res = self.interpret(f, ())
assert res == 2
def f():
d = self.newdict()
d.setdefault('a', 2)
x = d.setdefault('a', -3)
return x
res = self.interpret(f, ())
assert res == 2
def test_dict_copy(self):
def func():
dic = self.newdict()
dic['ab'] = 1
dic['b'] = 2
d2 = dic.copy()
ok = 1
for key in d2:
if dic[key] != d2[key]:
ok = 0
ok &= len(dic) == len(d2)
d2['c'] = 3
ok &= len(dic) == len(d2) - 1
return ok
res = self.interpret(func, ())
assert res == 1
def test_dict_update(self):
def func():
dic = self.newdict()
dic['ab'] = 1000
dic['b'] = 200
d2 = self.newdict()
d2['b'] = 30
d2['cb'] = 4
dic.update(d2)
ok = len(dic) == 3
sum = ok
for key in dic:
sum += dic[key]
return sum
res = self.interpret(func, ())
assert res == 1035
def test_dict_keys(self):
def func():
dic = self.newdict()
dic[' 4'] = 1000
dic[' 8'] = 200
keys = dic.keys()
return ord(keys[0][1]) + ord(keys[1][1]) - 2*ord('0') + len(keys)
res = self.interpret(func, ())
assert res == 14
def test_list_dict(self):
def func():
dic = self.newdict()
dic[' 4'] = 1000
dic[' 8'] = 200
keys = list(dic)
return ord(keys[0][1]) + ord(keys[1][1]) - 2*ord('0') + len(keys)
res = self.interpret(func, ())
assert res == 14
def test_dict_inst_keys(self):
class Empty:
pass
class A(Empty):
pass
def func():
dic0 = self.newdict()
dic0[Empty()] = 2
dic = self.newdict()
dic[A()] = 1
dic[A()] = 2
keys = dic.keys()
return (isinstance(keys[1], A))*2+(isinstance(keys[0],A))
res = self.interpret(func, [])
assert res == 3
def test_dict_inst_iterkeys(self):
class Empty:
pass
class A(Empty):
pass
def func():
dic0 = self.newdict()
dic0[Empty()] = 2
dic = self.newdict()
dic[A()] = 1
dic[A()] = 2
a = 0
for k in dic.iterkeys():
a += isinstance(k, A)
return a
res = self.interpret(func, [])
assert res == 2
def test_dict_values(self):
def func():
dic = self.newdict()
dic[' 4'] = 1000
dic[' 8'] = 200
values = dic.values()
return values[0] + values[1] + len(values)
res = self.interpret(func, ())
assert res == 1202
def test_dict_inst_values(self):
class A:
pass
def func():
dic = self.newdict()
dic[1] = A()
dic[2] = A()
vals = dic.values()
return (isinstance(vals[1], A))*2+(isinstance(vals[0],A))
res = self.interpret(func, [])
assert res == 3
def test_dict_inst_itervalues(self):
class A:
pass
def func():
dic = self.newdict()
dic[1] = A()
dic[2] = A()
a = 0
for v in dic.itervalues():
a += isinstance(v, A)
return a
res = self.interpret(func, [])
assert res == 2
def test_dict_inst_items(self):
class Empty:
pass
class A:
pass
class B(Empty):
pass
def func():
dic0 = self.newdict()
dic0[Empty()] = A()
dic = self.newdict()
dic[B()] = A()
dic[B()] = A()
items = dic.items()
b = 0
a = 0
for k, v in items:
b += isinstance(k, B)
a += isinstance(v, A)
return 3*b+a
res = self.interpret(func, [])
assert res == 8
def test_dict_inst_iteritems(self):
class Empty:
pass
class A:
pass
class B(Empty):
pass
def func():
dic0 = self.newdict()
dic0[Empty()] = A()
dic = self.newdict()
dic[B()] = A()
dic[B()] = A()
b = 0
a = 0
for k, v in dic.iteritems():
b += isinstance(k, B)
a += isinstance(v, A)
return 3*b+a
res = self.interpret(func, [])
assert res == 8
def test_dict_items(self):
def func():
dic = self.newdict()
dic[' 4'] = 1000
dic[' 8'] = 200
items = dic.items()
res = len(items)
for key, value in items:
res += ord(key[1]) - ord('0') + value
return res
res = self.interpret(func, ())
assert res == 1214
def test_dict_contains(self):
def func():
dic = self.newdict()
dic[' 4'] = 1000
dic[' 8'] = 200
return ' 4' in dic and ' 9' not in dic
res = self.interpret(func, ())
assert res is True
def test_dict_contains_with_constant_dict(self):
dic = self.newdict()
dic['4'] = 1000
dic['8'] = 200
def func(i):
return chr(i) in dic
res = self.interpret(func, [ord('4')])
assert res is True
res = self.interpret(func, [1])
assert res is False
def test_dict_or_none(self):
class A:
pass
def negate(d):
return not d
def func(n):
a = A()
a.d = None
if n > 0:
a.d = self.newdict()
a.d[str(n)] = 1
a.d["42"] = 2
del a.d["42"]
return negate(a.d)
res = self.interpret(func, [10])
assert res is False
res = self.interpret(func, [0])
assert res is True
res = self.interpret(func, [42])
assert res is True
def test_int_dict(self):
def func(a, b):
dic = self.newdict()
dic[12] = 34
dic[a] = 1000
return dic.get(b, -123)
res = self.interpret(func, [12, 12])
assert res == 1000
res = self.interpret(func, [12, 13])
assert res == -123
res = self.interpret(func, [524, 12])
assert res == 34
res = self.interpret(func, [524, 524])
assert res == 1000
res = self.interpret(func, [524, 1036])
assert res == -123
def test_id_instances_keys(self):
class A:
pass
class B(A):
pass
def f():
a = A()
b = B()
d = self.newdict()
d[b] = 7
d[a] = 3
return len(d) + d[a] + d[b]
res = self.interpret(f, [])
assert res == 12
def test_captured_get(self):
d = self.newdict()
d[1] = 2
get = d.get
def f():
return get(1, 3)+get(2, 4)
res = self.interpret(f, [])
assert res == 6
def g(h):
return h(1, 3)
def f():
return g(get)
res = self.interpret(f, [])
assert res == 2
    def test_specific_obscure_bug(self):
        """Regression test: list membership of instances combined with a dict
        keyed by instances of an unrelated class."""
        class A: pass
        class B: pass   # unrelated kinds of instances
        def f():
            lst = [A()]
            res1 = A() in lst
            d2 = self.newdict()
            d2[B()] = None
            d2[B()] = None
            return res1+len(d2)
        res = self.interpret(f, [])
        assert res == 2
    def test_identity_hash_is_fast(self):
        """Identity-hashed keys must not store a hash field in the entries."""
        class A(object):
            pass
        def f():
            d = self.newdict()
            d[A()] = 1
            return d
        t = TranslationContext()
        s = t.buildannotator().build_types(f, [])
        rtyper = t.buildrtyper()
        rtyper.specialize()
        r_dict = rtyper.getrepr(s)
        # no f_hash field: the hash is recomputed (cheaply) from the object identity
        assert not hasattr(r_dict.lowleveltype.TO.entries.TO.OF, "f_hash")
    def test_tuple_dict(self):
        """Nested-tuple keys (mixing int, float, str) hash and compare correctly."""
        def f(i):
            d = self.newdict()
            d[(1, 4.5, (str(i), 2), 2)] = 4
            d[(1, 4.5, (str(i), 2), 3)] = 6
            return d[(1, 4.5, (str(i), 2), i)]
        res = self.interpret(f, [2])
        assert res == f(2)
    def test_dict_of_dict(self):
        """Dicts as values, including a self-referencing entry."""
        def f(n):
            d = self.newdict()
            d[5] = d        # cyclic reference: d contains itself
            d[6] = self.newdict()
            return len(d[n])
        res = self.interpret(f, [5])
        assert res == 2
        res = self.interpret(f, [6])
        assert res == 0
    def test_cls_dict(self):
        """Class objects (not instances) as dict keys."""
        class A(object):
            pass
        class B(A):
            pass
        def f(i):
            d = self.newdict()
            d[A] = 3
            d[B] = 4
            if i:
                cls = A
            else:
                cls = B
            return d[cls]
        res = self.interpret(f, [1])
        assert res == 3
        res = self.interpret(f, [0])
        assert res == 4
    def test_prebuilt_cls_dict(self):
        """Prebuilt dict keyed by (class, int) tuples, with a KeyError path."""
        class A(object):
            pass
        class B(A):
            pass
        d = self.newdict()
        d[(A, 3)] = 3
        d[(B, 0)] = 4
        def f(i):
            if i:
                cls = A
            else:
                cls = B
            try:
                return d[cls, i]
            except KeyError:
                return -99
        res = self.interpret(f, [0])
        assert res == 4
        res = self.interpret(f, [3])
        assert res == 3
        res = self.interpret(f, [10])
        assert res == -99
    def test_access_in_try(self):
        """Dict read inside a try block with an unrelated exception handler."""
        def f(d):
            try:
                return d[2]
            except ZeroDivisionError:
                return 42
            return -1
        def g(n):
            d = self.newdict()
            d[1] = n
            d[2] = 2*n
            return f(d)
        res = self.interpret(g, [3])
        assert res == 6
    def test_access_in_try_set(self):
        """Dict write inside a try block with an unrelated exception handler."""
        def f(d):
            try:
                d[2] = 77
            except ZeroDivisionError:
                return 42
            return -1
        def g(n):
            d = self.newdict()
            d[1] = n
            f(d)
            return d[2]
        res = self.interpret(g, [3])
        assert res == 77
    def test_resize_during_iteration(self):
        """Mutating (growing) the dict while iterating must raise RuntimeError;
        entries inserted before the error is raised are kept."""
        def func():
            d = self.newdict()
            d[5] = 1
            d[6] = 2
            d[7] = 3
            try:
                for key, value in d.iteritems():
                    d[key^16] = value*2
            except RuntimeError:
                pass
            total = 0
            for key in d:
                total += key
            return total
        res = self.interpret(func, [])
        # at least the original keys survive; at most all shifted keys were added too
        assert 5 + 6 + 7 <= res <= 5 + 6 + 7 + (5^16) + (6^16) + (7^16)
    def test_change_during_iteration(self):
        """Overwriting existing keys during iteration (no resize) is allowed."""
        def func():
            d = self.newdict()
            d['a'] = 1
            d['b'] = 2
            for key in d:
                d[key] = 42
            return d['a']
        assert self.interpret(func, []) == 42
    def test_dict_of_floats(self):
        """Prebuilt dict with float keys."""
        d = self.newdict()
        d[3.0] = 42
        d[3.1] = 43
        d[3.2] = 44
        d[3.3] = 45
        d[3.4] = 46
        def fn(f):
            return d[f]
        res = self.interpret(fn, [3.0])
        assert res == 42
    def test_dict_of_r_uint(self):
        """Dicts keyed by the non-native integer types r_uint/r_longlong/r_ulonglong."""
        for r_t in [r_uint, r_longlong, r_ulonglong]:
            if r_t is r_int:
                continue      # for 64-bit platforms: skip r_longlong
            d = self.newdict()
            d[r_t(2)] = 3
            d[r_t(4)] = 5
            def fn(x, y):
                d[r_t(x)] = 123
                return d[r_t(y)]
            res = self.interpret(fn, [4, 2])
            assert res == 3
            res = self.interpret(fn, [3, 3])
            assert res == 123
    def test_dict_popitem(self):
        """popitem() removes and returns pairs; empty dict raises KeyError."""
        def func():
            d = self.newdict()
            d[5] = 2
            d[6] = 3
            k1, v1 = d.popitem()
            assert len(d) == 1
            k2, v2 = d.popitem()
            try:
                d.popitem()
            except KeyError:
                pass
            else:
                assert 0, "should have raised KeyError"
            assert len(d) == 0
            return k1*1000 + v1*100 + k2*10 + v2
        res = self.interpret(func, [])
        # either removal order is acceptable
        assert res in [5263, 6352]
    def test_dict_pop(self):
        """pop() in both forms: raising KeyError (default == -1 branch) and
        with an explicit default value."""
        def f(n, default):
            d = self.newdict()
            d[2] = 3
            d[4] = 5
            if default == -1:
                try:
                    x = d.pop(n)
                except KeyError:
                    x = -1
            else:
                x = d.pop(n, default)
            return x * 10 + len(d)
        res = self.interpret(f, [2, -1])
        assert res == 31
        res = self.interpret(f, [3, -1])
        assert res == -8
        res = self.interpret(f, [2, 5])
        assert res == 31
    def test_dict_pop_instance(self):
        """pop() with None default on a dict whose values are instances."""
        class A(object):
            pass
        def f(n):
            d = self.newdict()
            d[2] = A()
            x = d.pop(n, None)
            if x is None:
                return 12
            else:
                return 15
        res = self.interpret(f, [2])
        assert res == 15
        res = self.interpret(f, [700])
        assert res == 12
    def test_dict_but_not_with_char_keys(self):
        """Lookup of a multi-char string in a dict specialized to 1-char keys."""
        def func(i):
            d = self.newdict()
            d['h'] = i
            try:
                return d['hello']
            except KeyError:
                return 0
        res = self.interpret(func, [6])
        assert res == 0
def test_dict_valid_resize(self):
def func():
d = self.newdict()
for i in range(10):
d[str(i)] = 0
for i in range(10):
del d[str(i)]
res = 0
self.interpret(func, [])
    def test_dict_of_addresses(self):
        """Raw llmemory addresses as dict keys."""
        from rpython.rtyper.lltypesystem import llmemory
        TP = lltype.Struct('x')
        a = lltype.malloc(TP, flavor='raw', immortal=True)
        b = lltype.malloc(TP, flavor='raw', immortal=True)
        def func(i):
            d = self.newdict()
            d[llmemory.cast_ptr_to_adr(a)] = 123
            d[llmemory.cast_ptr_to_adr(b)] = 456
            if i > 5:
                key = llmemory.cast_ptr_to_adr(a)
            else:
                key = llmemory.cast_ptr_to_adr(b)
            return d[key]
        assert self.interpret(func, [3]) == 456
    def test_prebuilt_list_of_addresses(self):
        """Prebuilt dicts keyed by addresses cannot be translated: rtyping
        must fail with a TypeError."""
        from rpython.rtyper.lltypesystem import llmemory
        TP = lltype.Struct('x', ('y', lltype.Signed))
        a = lltype.malloc(TP, flavor='raw', immortal=True)
        b = lltype.malloc(TP, flavor='raw', immortal=True)
        c = lltype.malloc(TP, flavor='raw', immortal=True)
        a_a = llmemory.cast_ptr_to_adr(a)
        a0 = llmemory.cast_ptr_to_adr(a)
        # two distinct address objects for the same pointer compare equal
        assert a_a is not a0
        assert a_a == a0
        a_b = llmemory.cast_ptr_to_adr(b)
        a_c = llmemory.cast_ptr_to_adr(c)
        d = self.newdict()
        d[a_a] = 3
        d[a_b] = 4
        d[a_c] = 5
        d[a0] = 8
        def func(i):
            if i == 0:
                ptr = a
            else:
                ptr = b
            return d[llmemory.cast_ptr_to_adr(ptr)]
        py.test.raises(TypeError, self.interpret, func, [0])
    def test_dict_of_voidp(self):
        """VOIDP null pointers as dict values; also compiled through genc."""
        def func():
            d = self.newdict()
            handle = lltype.nullptr(rffi.VOIDP.TO)
            d[-1] = handle
            return len(d)
        assert self.interpret(func, []) == 1
        from rpython.translator.c.test.test_genc import compile
        f = compile(func, [])
        res = f()
        assert res == 1
    def test_dict_with_SHORT_keys(self):
        """rffi.SHORT keys: values are truncated to 16 bits before hashing."""
        def func(x):
            d = self.newdict()
            d[rffi.cast(rffi.SHORT, 42)] = 123
            d[rffi.cast(rffi.SHORT, -43)] = 321
            return d[rffi.cast(rffi.SHORT, x)]
        assert self.interpret(func, [42]) == 123
        # 2**16 - 43 casts to the same SHORT as -43
        assert self.interpret(func, [2**16 - 43]) == 321
    def test_dict_with_bool_keys(self):
        """Boolean keys."""
        def func(x):
            d = self.newdict()
            d[False] = 123
            d[True] = 321
            return d[x == 42]
        assert self.interpret(func, [5]) == 123
        assert self.interpret(func, [42]) == 321
    def test_memoryerror_should_not_insert(self):
        """If a resize raises MemoryError, the key being inserted must NOT
        end up in the dict.  The allocation limit is forced artificially by
        monkeypatching lltype._array._check_range for the test's duration."""
        def _check_small_range(self, n):
            if n >= 128:
                raise MemoryError
            return range(n)
        original_check_range = lltype._array._check_range
        try:
            lltype._array._check_range = _check_small_range
            def do_insert(d, i):
                d[i] = i
            def func():
                d = self.newdict()
                i = 0
                while True:
                    try:
                        do_insert(d, i)
                    except MemoryError:
                        return (i in d)
                    i += 1
            res = self.interpret(func, [])
            assert res == 0
        finally:
            lltype._array._check_range = original_check_range
    def test_dict_with_none_key(self):
        """None is a valid key."""
        def func(i):
            d = self.newdict()
            d[None] = i
            return d[None]
        res = self.interpret(func, [42])
        assert res == 42
    def test_externalvsinternal(self):
        """Two dicts with different (instance) key/value classes still share
        the same low-level representation."""
        class A: pass
        class B: pass
        class C: pass
        class D: pass
        def func():
            d1 = self.newdict(); d1[A()] = B()
            d2 = self.newdict2(); d2[C()] = D()
            return (d1, d2)
        res = self.interpret(func, [])
        assert lltype.typeOf(res.item0) == lltype.typeOf(res.item1)
    def test_r_dict(self):
        """r_dict with custom eq/hash: exceptions raised by the hash function
        propagate out of getitem and 'in' (here mapped to the 99 marker)."""
        class FooError(Exception):
            pass
        def myeq(n, m):
            return n == m
        def myhash(n):
            if n < 0:
                raise FooError
            return -n
        def f(n):
            d = self.new_r_dict(myeq, myhash)
            for i in range(10):
                d[i] = i*i
            try:
                value1 = d[n]
            except FooError:
                value1 = 99
            try:
                value2 = n in d
            except FooError:
                value2 = 99
            try:
                value3 = d[-n]
            except FooError:
                value3 = 99
            try:
                value4 = (-n) in d
            except FooError:
                value4 = 99
            return (value1 * 1000000 +
                    value2 * 10000 +
                    value3 * 100 +
                    value4)
        res = self.interpret(f, [5])
        # 25 (5*5), 1 (contained), then 99/99 from the raising hash of -5
        assert res == 25019999
    def test_r_dict_popitem_hash(self):
        """popitem() on an r_dict with a custom hash function."""
        def deq(n, m):
            return n == m
        def dhash(n):
            return ~n
        def func():
            d = self.new_r_dict(deq, dhash)
            d[5] = 2
            d[6] = 3
            k1, v1 = d.popitem()
            assert len(d) == 1
            k2, v2 = d.popitem()
            try:
                d.popitem()
            except KeyError:
                pass
            else:
                assert 0, "should have raised KeyError"
            assert len(d) == 0
            return k1*1000 + v1*100 + k2*10 + v2
        res = self.interpret(func, [])
        # either removal order is acceptable
        assert res in [5263, 6352]
    def test_prebuilt_r_dict(self):
        """Prebuilt r_dict: lookups use the custom eq/hash (here, mod-4
        equivalence classes), so unrelated integers find the stored entries."""
        def deq(n, m):
            return (n & 3) == (m & 3)
        def dhash(n):
            return n & 3
        d = self.new_r_dict(deq, dhash)
        d[0x123] = "abcd"
        d[0x231] = "efgh"
        def func():
            return d[0x348973] + d[0x12981]
        res = self.interpret(func, [])
        res = self.ll_to_string(res)
        assert res == "abcdefgh"
class TestRDict(BaseTestRDict):
    """Concrete dict tests: plain {} literals and rlib r_dict."""
    @staticmethod
    def newdict():
        return {}
    @staticmethod
    def newdict2():
        return {}
    @staticmethod
    def new_r_dict(myeq, myhash):
        return r_dict(myeq, myhash)
    def test_two_dicts_with_different_value_types(self):
        """Nested dicts whose value types differ."""
        def func(i):
            d1 = {}
            d1['hello'] = i + 1
            d2 = {}
            d2['world'] = d1
            return d2['world']['hello']
        res = self.interpret(func, [5])
        assert res == 6
    def test_type_erase(self):
        """Dicts of A->B and B->A instances must erase to one low-level type."""
        class A(object):
            pass
        class B(object):
            pass
        def f():
            d = {}
            d[A()] = B()
            d2 = {}
            d2[B()] = A()
            return d, d2
        t = TranslationContext()
        s = t.buildannotator().build_types(f, [])
        rtyper = t.buildrtyper()
        rtyper.specialize()
        s_AB_dic = s.items[0]
        s_BA_dic = s.items[1]
        r_AB_dic = rtyper.getrepr(s_AB_dic)
        # bug fix: this used to pass s_AB_dic again, comparing the repr
        # with itself and making the assertion below vacuous
        r_BA_dic = rtyper.getrepr(s_BA_dic)
        assert r_AB_dic.lowleveltype == r_BA_dic.lowleveltype
    def test_dict_resize(self):
        """Check that the table grows on insert and shrinks back on delete."""
        py.test.skip("test written for non-ordered dicts, update or kill")
        # in order to force a resize to shrink the table back
        def func(want_empty):
            d = self.newdict()
            for i in range(rdict.DICT_INITSIZE << 1):
                d[chr(ord('a') + i)] = i
            if want_empty:
                for i in range(rdict.DICT_INITSIZE << 1):
                    del d[chr(ord('a') + i)]
                for i in range(rdict.DICT_INITSIZE << 3):
                    d[chr(ord('A') - i)] = i
                    del d[chr(ord('A') - i)]
            return d
        res = self.interpret(func, [0])
        assert len(res.entries) > rdict.DICT_INITSIZE
        res = self.interpret(func, [1])
        assert len(res.entries) == rdict.DICT_INITSIZE
    def test_opt_dummykeymarker(self):
        """String keys provide a dummy marker, so no f_valid field is needed."""
        def f():
            d = {"hello": None}
            d["world"] = None
            return "hello" in d, d
        res = self.interpret(f, [])
        assert res.item0 == True
        DICT = lltype.typeOf(res.item1).TO
        assert not hasattr(DICT.entries.TO.OF, 'f_valid')   # strs have a dummy
    def test_opt_dummyvaluemarker(self):
        """String values provide a dummy marker, so no f_valid field is needed."""
        def f(n):
            d = {-5: "abcd"}
            d[123] = "def"
            return len(d[n]), d
        res = self.interpret(f, [-5])
        assert res.item0 == 4
        DICT = lltype.typeOf(res.item1).TO
        assert not hasattr(DICT.entries.TO.OF, 'f_valid')   # strs have a dummy
    def test_opt_nonnegint_dummy(self):
        """Keys proven non-negative can use -1 as the dummy marker."""
        def f(n):
            d = {n: 12}
            d[-87] = 24
            del d[n]
            return len(d.copy()), d[-87], d
        res = self.interpret(f, [5])
        assert res.item0 == 1
        assert res.item1 == 24
        DICT = lltype.typeOf(res.item2).TO
        assert not hasattr(DICT.entries.TO.OF, 'f_valid')   # nonneg int: dummy -1
    def test_opt_no_dummy(self):
        """Signed int keys have no spare value, so an f_valid flag is required."""
        def f(n):
            d = {n: 12}
            d[-87] = -24
            del d[n]
            return len(d.copy()), d[-87], d
        res = self.interpret(f, [5])
        assert res.item0 == 1
        assert res.item1 == -24
        DICT = lltype.typeOf(res.item2).TO
        assert hasattr(DICT.entries.TO.OF, 'f_valid')   # no dummy available
    def test_opt_boolean_has_no_dummy(self):
        """Boolean values cannot spare a dummy either: f_valid is kept."""
        def f(n):
            d = {n: True}
            d[-87] = True
            del d[n]
            return len(d.copy()), d[-87], d
        res = self.interpret(f, [5])
        assert res.item0 == 1
        assert res.item1 is True
        DICT = lltype.typeOf(res.item2).TO
        assert hasattr(DICT.entries.TO.OF, 'f_valid')   # no dummy available
    def test_opt_multiple_identical_dicts(self):
        """Several str->int dicts in one function share one low-level type."""
        def f(n):
            s = "x" * n
            d1 = {s: 12}
            d2 = {s: 24}
            d3 = {s: 36}
            d1["a"] = d2[s]    # 24
            d3[s] += d1["a"]   # 60
            d2["bc"] = d3[s]   # 60
            return d2["bc"], d1, d2, d3
        res = self.interpret(f, [5])
        assert res.item0 == 60
        # all three dicts should use the same low-level type
        assert lltype.typeOf(res.item1) == lltype.typeOf(res.item2)
        assert lltype.typeOf(res.item1) == lltype.typeOf(res.item3)
    def test_nonnull_hint(self):
        """force_non_null=True drops the f_valid/f_everused bookkeeping fields."""
        def eq(a, b):
            return a == b
        def rhash(a):
            return 3
        def func(i):
            d = r_dict(eq, rhash, force_non_null=True)
            if not i:
                d[None] = i
            else:
                d[str(i)] = i
            return "12" in d, d
        llres = self.interpret(func, [12])
        assert llres.item0 == 1
        DICT = lltype.typeOf(llres.item1)
        assert sorted(DICT.TO.entries.TO.OF._flds) == ['f_hash', 'key', 'value']
    def test_deleted_entry_reusage_with_colliding_hashes(self):
        """After a delete, a slot must be reusable for a colliding key and the
        probing chain must still find later entries."""
        py.test.skip("test written for non-ordered dicts, update or kill")
        def lowlevelhash(value):
            p = rstr.mallocstr(len(value))
            for i in range(len(value)):
                p.chars[i] = value[i]
            return rstr.LLHelpers.ll_strhash(p)
        def func(c1, c2):
            c1 = chr(c1)
            c2 = chr(c2)
            d = self.newdict()
            d[c1] = 1
            d[c2] = 2
            del d[c1]
            return d[c2]
        char_by_hash = {}
        base = rdict.DICT_INITSIZE
        for y in range(0, 256):
            y = chr(y)
            y_hash = lowlevelhash(y) % base
            char_by_hash.setdefault(y_hash, []).append(y)
        x, y = char_by_hash[0][:2]   # find a collision
        res = self.interpret(func, [ord(x), ord(y)])
        assert res == 2
        def func2(c1, c2):
            c1 = chr(c1)
            c2 = chr(c2)
            d = self.newdict()
            d[c1] = 1
            d[c2] = 2
            del d[c1]
            d[c1] = 3
            return d
        res = self.interpret(func2, [ord(x), ord(y)])
        for i in range(len(res.entries)):
            assert not (res.entries.everused(i) and not res.entries.valid(i))
        def func3(c0, c1, c2, c3, c4, c5, c6, c7):
            d = self.newdict()
            c0 = chr(c0) ; d[c0] = 1; del d[c0]
            c1 = chr(c1) ; d[c1] = 1; del d[c1]
            c2 = chr(c2) ; d[c2] = 1; del d[c2]
            c3 = chr(c3) ; d[c3] = 1; del d[c3]
            c4 = chr(c4) ; d[c4] = 1; del d[c4]
            c5 = chr(c5) ; d[c5] = 1; del d[c5]
            c6 = chr(c6) ; d[c6] = 1; del d[c6]
            c7 = chr(c7) ; d[c7] = 1; del d[c7]
            return d
        if rdict.DICT_INITSIZE != 8:
            py.test.skip("make dict tests more indepdent from initsize")
        res = self.interpret(func3, [ord(char_by_hash[i][0])
                                     for i in range(rdict.DICT_INITSIZE)])
        count_frees = 0
        for i in range(len(res.entries)):
            if not res.entries.everused(i):
                count_frees += 1
        assert count_frees >= 3
class TestStress:
    """Randomized stress tests driving the low-level ll_dict_* operations
    directly and checking them against a plain reference table."""
    def test_stress(self):
        """Random get/set/del on a Signed->Signed low-level dict, cross-checked
        against a 400-slot reference table."""
        from rpython.annotator.dictdef import DictKey, DictValue
        from rpython.annotator import model as annmodel
        dictrepr = rdict.DictRepr(None, rint.signed_repr, rint.signed_repr,
                                  DictKey(None, annmodel.SomeInteger()),
                                  DictValue(None, annmodel.SomeInteger()))
        dictrepr.setup()
        l_dict = rdict.ll_newdict(dictrepr.DICT)
        referencetable = [None] * 400
        referencelength = 0
        value = 0
        def complete_check():
            # every slot of the reference table must agree with the ll dict
            for n, refvalue in zip(range(len(referencetable)), referencetable):
                try:
                    gotvalue = rdict.ll_dict_getitem(l_dict, n)
                except KeyError:
                    assert refvalue is None
                else:
                    assert gotvalue == refvalue
        for x in not_really_random():
            n = int(x*100.0)    # 0 <= n < 400
            op = repr(x)[-1]    # last digit of repr(x) picks the operation
            if op <= '2' and referencetable[n] is not None:
                rdict.ll_dict_delitem(l_dict, n)
                referencetable[n] = None
                referencelength -= 1
            elif op <= '6':
                rdict.ll_dict_setitem(l_dict, n, value)
                if referencetable[n] is None:
                    referencelength += 1
                referencetable[n] = value
                value += 1
            else:
                try:
                    gotvalue = rdict.ll_dict_getitem(l_dict, n)
                except KeyError:
                    assert referencetable[n] is None
                else:
                    assert gotvalue == referencetable[n]
            if 1.38 <= x <= 1.39:
                complete_check()
                print 'current dict length:', referencelength
        assert l_dict.num_items == referencelength
        complete_check()
    def test_stress_2(self):
        # yield each can_be_None combination as a separate test case
        yield self.stress_combination, True, False
        yield self.stress_combination, False, True
        yield self.stress_combination, False, False
        yield self.stress_combination, True, True
    def stress_combination(self, key_can_be_none, value_can_be_none):
        """Same stress loop as test_stress, but with string keys/values and
        the given can_be_None annotations."""
        from rpython.rtyper.lltypesystem.rstr import string_repr
        from rpython.annotator.dictdef import DictKey, DictValue
        from rpython.annotator import model as annmodel
        print
        print "Testing combination with can_be_None: keys %s, values %s" % (
            key_can_be_none, value_can_be_none)
        class PseudoRTyper:
            cache_dummy_values = {}
        dictrepr = rdict.DictRepr(PseudoRTyper(), string_repr, string_repr,
                       DictKey(None, annmodel.SomeString(key_can_be_none)),
                       DictValue(None, annmodel.SomeString(value_can_be_none)))
        dictrepr.setup()
        print dictrepr.lowleveltype
        for key, value in dictrepr.DICTENTRY._adtmeths.items():
            print '    %s = %s' % (key, value)
        l_dict = rdict.ll_newdict(dictrepr.DICT)
        referencetable = [None] * 400
        referencelength = 0
        values = not_really_random()
        keytable = [string_repr.convert_const("foo%d" % n)
                    for n in range(len(referencetable))]
        def complete_check():
            # every slot of the reference table must agree with the ll dict
            for n, refvalue in zip(range(len(referencetable)), referencetable):
                try:
                    gotvalue = rdict.ll_dict_getitem(l_dict, keytable[n])
                except KeyError:
                    assert refvalue is None
                else:
                    assert gotvalue == refvalue
        for x in not_really_random():
            n = int(x*100.0)    # 0 <= n < 400
            op = repr(x)[-1]    # last digit of repr(x) picks the operation
            if op <= '2' and referencetable[n] is not None:
                rdict.ll_dict_delitem(l_dict, keytable[n])
                referencetable[n] = None
                referencelength -= 1
            elif op <= '6':
                ll_value = string_repr.convert_const(str(values.next()))
                rdict.ll_dict_setitem(l_dict, keytable[n], ll_value)
                if referencetable[n] is None:
                    referencelength += 1
                referencetable[n] = ll_value
            else:
                try:
                    gotvalue = rdict.ll_dict_getitem(l_dict, keytable[n])
                except KeyError:
                    assert referencetable[n] is None
                else:
                    assert gotvalue == referencetable[n]
            if 1.38 <= x <= 1.39:
                complete_check()
                print 'current dict length:', referencelength
        assert l_dict.num_items == referencelength
        complete_check()
| false
| true
|
f71ab0cfdecb4656998e375e331065ba5d5988ae
| 15,809
|
py
|
Python
|
fkie_iop_rqt_access_control/src/fkie_iop_rqt_access_control/robot.py
|
fkie/iop_gui
|
918353b5767c6ff4a42b294316a03e08501fed28
|
[
"BSD-3-Clause"
] | null | null | null |
fkie_iop_rqt_access_control/src/fkie_iop_rqt_access_control/robot.py
|
fkie/iop_gui
|
918353b5767c6ff4a42b294316a03e08501fed28
|
[
"BSD-3-Clause"
] | null | null | null |
fkie_iop_rqt_access_control/src/fkie_iop_rqt_access_control/robot.py
|
fkie/iop_gui
|
918353b5767c6ff4a42b294316a03e08501fed28
|
[
"BSD-3-Clause"
] | 1
|
2018-11-27T03:39:41.000Z
|
2018-11-27T03:39:41.000Z
|
# ROS/IOP Bridge
# Copyright (c) 2017 Fraunhofer
#
# This program is dual licensed; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# version 2 as published by the Free Software Foundation, or
# enter into a proprietary license agreement with the copyright
# holder.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; or you can read the full license at
# <http://www.gnu.de/documents/gpl-2.0.html>
#
# :author: Alexander Tiderko
import os
from python_qt_binding import loadUi
from python_qt_binding.QtCore import QObject, Signal, Qt
from python_qt_binding.QtGui import QIcon
try:
    # Qt4 bindings keep the widget classes in QtGui
    from python_qt_binding.QtGui import QWidget, QDialog, QTreeWidget, QTreeWidgetItem
except ImportError:
    # Qt5 bindings moved them to QtWidgets; catch only ImportError so other
    # startup errors are not silently swallowed by a bare except
    from python_qt_binding.QtWidgets import QWidget, QDialog, QTreeWidget, QTreeWidgetItem
import rospy
from .address import Address
from fkie_iop_msgs.msg import OcuCmdEntry, JausAddress
from .handoff_dialog import HandoffDialog
class Robot(QObject):
    '''
    Qt widget wrapper for one IOP subsystem (a robot): exposes view/control
    buttons, forwards state changes as Qt signals, and shows warning,
    detail and handoff dialogs.
    '''
    # seconds without an identification message before is_old() reports stale
    MAX_AGE = 30
    control_activated = Signal(Address)
    control_deactivated = Signal(Address)
    view_activated = Signal(Address)
    view_deactivated = Signal(Address)
    def __init__(self, subsystem, settings, authority=205):
        '''
        :param subsystem: fkie_iop_msgs subsystem description (ident, nodes)
        :param settings: shared settings object; state_to_cmd() reads
                         settings.authority from here, not from the
                         `authority` parameter below
        :param authority: stored in self._authority; NOTE(review): not read
                          anywhere else in this class — confirm it is still needed
        '''
        QObject.__init__(self)
        self._subsystem = subsystem
        self._settings = settings
        self._authority = authority
        ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'robot.ui')
        self._widget = QWidget()
        loadUi(ui_file, self._widget)
        self._last_update = rospy.Time.now()
        # Address -> component name, filled by update_ident()
        self._component_names = dict()
        self._warnings = []
        self._feedback_warnings = dict()
        self._ocu_client = None
        # address reported by access control client
        self._control_addr = Address(JausAddress())
        self._warning_dialog = self._create_warning_dialog()
        self._detailed_dialog = self._create_detailed_dialog()
        self.handoff_dialog = HandoffDialog(self.name, self.subsystem_id, self._settings, self._widget)
        self.handoff_dialog.button_blink.connect(self._widget.button_handoff.setEnabled)
        self._widget.button_view.clicked.connect(self._on_robot_view)
        self._widget.button_control.setText("%s - %d" % (subsystem.ident.name, self._subsystem.ident.address.subsystem_id))
        self._widget.button_control.clicked.connect(self._on_robot_control)
        self._widget.button_control.setObjectName(subsystem.ident.name)
        self._widget.button_handoff.setEnabled(False)
        self._widget.button_handoff.clicked.connect(self.on_show_handoff)
        self._widget.button_warnings.setEnabled(False)
        self._widget.button_warnings.clicked.connect(self.on_show_warnings)
        self._widget.button_details.clicked.connect(self.on_show_details)
    def __del__(self):
        # release dialogs and clear caches on garbage collection
        self.handoff_dialog.setParent(None)
        self.handoff_dialog.shutdown()
        self.handoff_dialog = None
        self._detailed_dialog = None
        self._warning_dialog = None
        self._ocu_client = None
        self._feedback_warnings.clear()
        self._component_names.clear()
        del self._warnings[:]
    @property
    def name(self):
        '''
        :return: the name of the subsystem
        '''
        return self._subsystem.ident.name
    @property
    def subsystem_id(self):
        '''
        :return: the subsystem_id of the robot
        '''
        return self._subsystem.ident.address.subsystem_id
    @property
    def ocu_client(self):
        '''
        :return: the OCU client assigned to this robot, or None
        '''
        return self._ocu_client
    @ocu_client.setter
    def ocu_client(self, ocu_client):
        '''
        Assigns (or clears) the OCU client: detaches the old client, attaches
        the new one to this subsystem and refreshes warnings and the handoff
        button visibility.
        '''
        self.set_warnings([])
        if self._ocu_client is not None:
            self._ocu_client.control_subsystem = -1
        self._ocu_client = ocu_client
        if self._ocu_client is not None:
            self._ocu_client.control_subsystem = self.subsystem_id
            if ocu_client.subsystem_restricted == self.subsystem_id:
                self._widget.button_control.setEnabled(not ocu_client.only_monitor)
            self.handoff_dialog.set_client(self._ocu_client)
            self.update_feedback_warnings()
        elif self.has_view() or self.has_control():
            self.set_warnings(["No free OCU client available!", "Start an ocu_client with different nodeID to be able to listen for sensors on second robot."])
            self.handoff_dialog.set_client(None)
        if self._ocu_client is not None:
            self._widget.button_handoff.setVisible(self._ocu_client.has_handoff_publisher())
        else:
            self._widget.button_handoff.setVisible(True)
    @property
    def ocu_client_restricted(self):
        '''
        :return: the OCU client only if it is restricted to this subsystem, else None
        '''
        if self._ocu_client is not None:
            if self._ocu_client.subsystem_restricted == self.subsystem_id:
                return self._ocu_client
        return None
    @property
    def control_addr(self):
        '''
        :return: the address reported by the access control client
        '''
        return self._control_addr
    @control_addr.setter
    def control_addr(self, address):
        self._control_addr = address
        self._update_warnings_button()
    def set_control_active(self, state):
        # enable/disable the control button
        self._widget.button_control.setEnabled(state)
    def _on_robot_control(self, checked=False):
        '''
        Click on control robot button. Change to controlled or monitor state.
        Publishes the signals: control_activated or view_activated.
        '''
        # 255/255 node/component id addresses the whole subsystem
        addr = Address(JausAddress(self._subsystem.ident.address.subsystem_id, 255, 255))
        if checked:
            self._widget.button_view.setChecked(checked)
            self.control_activated.emit(addr)
            self.handoff_dialog.on_access = True
        else:
            self.release_control()
            self.control_deactivated.emit(addr)
            self.handoff_dialog.cancel_handoff()
            self.handoff_dialog.on_access = False
            # if self.has_view():
            #     self.view_activated.emit(addr)
    def _on_robot_view(self, checked=False):
        '''
        Click on view robot button. Change to monitor or not controlled state.
        Publishes the signals: view_activated or control_deactivated.
        '''
        addr = Address(JausAddress(self._subsystem.ident.address.subsystem_id, 255, 255))
        if checked:
            self._widget.button_view.setChecked(checked)
            self.view_activated.emit(addr)
        else:
            # leaving view also drops control, if any
            if self.has_control():
                self._widget.button_control.setChecked(False)
                self.control_deactivated.emit(addr)
            self.view_deactivated.emit(addr)
    def has_control(self):
        # True if the control button is currently checked
        return self._widget.button_control.isChecked()
    def has_view(self):
        # True if the view button is currently checked
        return self._widget.button_view.isChecked()
    def release_control(self):
        # uncheck both buttons without emitting signals
        self._widget.button_view.setChecked(False)
        self._widget.button_control.setChecked(False)
    def activate_view(self):
        # check the view button without emitting signals
        self._widget.button_view.setChecked(True)
    def state_to_cmd(self):
        '''
        :return: an OcuCmdEntry describing the current button state;
                 access_control is 12 (control), 11 (view) or 10 (none).
                 NOTE(review): these constants presumably mirror the
                 access-control enum of the IOP bridge — confirm.
        '''
        cmd = OcuCmdEntry()
        cmd.authority = self._settings.authority
        cmd.name = self.name
        cmd.address.subsystem_id = self._subsystem.ident.address.subsystem_id
        cmd.address.node_id = 255
        cmd.address.component_id = 255
        if self._widget.button_control.isChecked():
            cmd.access_control = 12
        elif self._widget.button_view.isChecked():
            cmd.access_control = 11
        else:
            cmd.access_control = 10
        if self.ocu_client is not None:
            cmd.ocu_client = self.ocu_client.address
        else:
            # 65535/255/255 marks "no OCU client assigned"
            cmd.ocu_client.subsystem_id = 65535
            cmd.ocu_client.node_id = 255
            cmd.ocu_client.component_id = 255
        return cmd
    def update(self, subsystem):
        '''
        Applies the updated description of the subsystem.
        :type subsystem: fkie_iop_msgs/System
        :return: False if the given description belongs to another subsystem
        '''
        if self._subsystem.ident.address.subsystem_id != subsystem.ident.address.subsystem_id:
            return False
        # if self._subsystem.ident.node_id != subsystem.ident.node_id:
        #     return False
        if self._subsystem.ident.name != subsystem.ident.name:
            return False
        self._subsystem = subsystem
        # self._last_update = rospy.Time.now()
        return True
    def on_show_handoff(self):
        # toggle visibility of the handoff dialog
        self.handoff_dialog.setVisible(not self.handoff_dialog.isVisible())
    def on_show_details(self):
        '''
        Shows the subsystem in a new dialog as tree view.
        '''
        twc = self._detailed_dialog.treewidget_components
        twc.clear()
        client_info = "OCU client: ---"
        if self._ocu_client is not None:
            add_info = ''
            if self.ocu_client.subsystem_restricted == self.subsystem_id:
                if self.ocu_client.only_monitor:
                    add_info = ' [restricted, only monitor]'
                else:
                    add_info = ' [restricted]'
            client_info = "OCU client: %s%s" % (self.ocu_client.address, add_info)
        elif self.control_addr.subsystem_id != 0:
            client_info = 'Controlled by other OCU: %s' % self.control_addr
        self._detailed_dialog.label_info.setText(client_info)
        if self.name == self._subsystem.ident.name:
            # build the tree: node -> component -> service
            for node in self._subsystem.nodes:
                node_item = QTreeWidgetItem(twc)
                node_name = node.ident.name if node.ident.name else "NODE"
                node_item.setText(0, "%s [id: %d]" % (node_name, node.ident.address.node_id))
                for comp in node.components:
                    cmp_item = QTreeWidgetItem(node_item)
                    cmp_name = self._get_component_name(comp.address)
                    cmp_item.setText(0, "%s [%d.%d.%d]" % (cmp_name, comp.address.subsystem_id, comp.address.node_id, comp.address.component_id))
                    twc.expandItem(node_item)
                    for srv in comp.services:
                        srv_item = QTreeWidgetItem(cmp_item)
                        srv_item.setText(0, "%s v%d.%d" % (srv.uri, srv.major_version, srv.minor_version))
        if self._detailed_dialog.isVisible():
            self._detailed_dialog.setFocus(Qt.ActiveWindowFocusReason)
        else:
            self._detailed_dialog.show()
    def on_show_warnings(self):
        '''
        Shows warning received by feedback.
        '''
        text_browser = self._warning_dialog.warnings
        text_browser.clear()
        if not self._warnings and not self._feedback_warnings:
            text_browser.append('No known warnings!')
        else:
            for msg in self._warnings:
                text_browser.append(msg)
            if self._feedback_warnings:
                text_browser.append('Services with warning state:')
                for client, service_infos in self._feedback_warnings.items():
                    text_browser.append("Client %s:" % client)
                    for service_info in service_infos:
                        text_browser.append("    %s[%s]: %s" % (service_info.uri, Address(service_info.addr_control), self.access_state_to_str(service_info.access_state)))
        self._warning_dialog.show()
    def update_feedback_warnings(self):
        '''
        Refreshes self._feedback_warnings from the current OCU client and
        updates the handoff dialog with insufficient-authority reports.
        '''
        # get all warnings for each subsystem
        warnings = dict()
        if self._ocu_client is not None:
            cw = self._ocu_client.get_warnings(self.subsystem_id, self.has_control())
            warnings.update(cw)
            # get insufficient authority reports to update handoff state button
            insathority = dict()
            cw = self._ocu_client.get_srvs_ins_authority(self.subsystem_id)
            insathority.update(cw)
            # update insufficient authority to activate handoff dialog
            self.handoff_dialog.update_authority_problems(insathority)
        self._feedback_warnings = warnings
        self._update_warnings_button()
    def set_warnings(self, warnings):
        '''
        :type warnings: list of strings
        '''
        self._warnings = warnings
        self._update_warnings_button()
    def _update_warnings_button(self):
        # color-code view/control buttons: orange = controlled with warnings,
        # green = active, grey = controlled by another OCU, default = idle
        has_warning = (len(self._warnings) + len(self._feedback_warnings)) > 0
        if has_warning and self.has_control():
            self._widget.button_control.setStyleSheet("QPushButton { background-color: #FE9A2E;}")
        elif self.has_control():
            self._widget.button_control.setStyleSheet("QPushButton { background-color: #98FB98;}")
            self._widget.button_view.setStyleSheet("QPushButton { background-color: #98FB98;}")
        elif self.has_view():
            self._widget.button_control.setStyleSheet("QPushButton { background-color: None;}")
            self._widget.button_view.setStyleSheet("QPushButton { background-color: #98FB98;}")
        elif self.control_addr.subsystem_id != 0 and (self._ocu_client is None or self.control_addr.subsystem_id != self._ocu_client.subsystem_id):
            self._widget.button_control.setStyleSheet("QPushButton { background-color: #A9A9A9;}")
            self._widget.button_view.setStyleSheet("QPushButton { background-color: None;}")
        else:
            self._widget.button_control.setStyleSheet("QPushButton { background-color: None;}")
            self._widget.button_view.setStyleSheet("QPushButton { background-color: None;}")
        self._widget.button_warnings.setEnabled(has_warning)
    def update_ident(self, ident):
        '''
        Records the last-seen time and caches component names from
        identification messages.
        NOTE(review): always returns False — confirm callers ignore the result.
        '''
        if Address(ident.address) == Address(self._subsystem.ident.address):
            self._last_update = rospy.Time.now()
        # system_type 60001 / request_type 4: message carries a component name
        if ident.system_type == 60001 or ident.request_type == 4:
            if ident.address.subsystem_id == self._subsystem.ident.address.subsystem_id:
                self._component_names[Address(ident.address)] = ident.name
        return False
    def _get_component_name(self, msg_address):
        # cached component name, or the generic fallback "Component"
        addr = Address(msg_address)
        try:
            return self._component_names[addr]
        except Exception:
            pass
        return "Component"
    def is_old(self):
        # True if no identification was received within MAX_AGE seconds
        return rospy.Time.now() - self._last_update > rospy.Duration(self.MAX_AGE)
    def get_widget(self):
        # the Qt widget representing this robot in the GUI
        return self._widget
    def _create_warning_dialog(self):
        # build the (initially hidden) warnings dialog from warning_info.ui
        diag = QDialog(self._widget)
        ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'warning_info.ui')
        loadUi(ui_file, diag)
        diag.resize(600, 250)
        diag.setWindowTitle("Warning for %s[%d]" % (self.name, self.subsystem_id))
        diag.setWindowIcon(QIcon.fromTheme("dialog-warning"))
        return diag
    def _create_detailed_dialog(self):
        # build the (initially hidden) system details dialog from system_info.ui
        diag = QDialog(self._widget)
        ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'system_info.ui')
        loadUi(ui_file, diag)
        diag.treewidget_components.setHeaderLabel("%s [%d]" % (self.name, self.subsystem_id))
        diag.resize(500, 300)
        diag.setWindowTitle("subsystem %s[%d]" % (self.name, self.subsystem_id))
        diag.setWindowIcon(QIcon.fromTheme("help-about"))
        return diag
    def access_state_to_str(self, state):
        # map numeric access state (service_info.access_state) to display text
        if state == 0:
            return 'NOT_AVAILABLE'
        if state == 1:
            return 'NOT_CONTROLLED'
        if state == 2:
            return 'CONTROL_RELEASED'
        if state == 3:
            return 'CONTROL_ACCEPTED'
        if state == 4:
            return 'TIMEOUT'
        if state == 5:
            return 'INSUFFICIENT_AUTHORITY'
        if state == 6:
            return 'MONITORING'
        return 'UNKNOWN'
| 41.712401
| 171
| 0.656335
|
import os
from python_qt_binding import loadUi
from python_qt_binding.QtCore import QObject, Signal, Qt
from python_qt_binding.QtGui import QIcon
try:
from python_qt_binding.QtGui import QWidget, QDialog, QTreeWidget, QTreeWidgetItem
except:
from python_qt_binding.QtWidgets import QWidget, QDialog, QTreeWidget, QTreeWidgetItem
import rospy
from .address import Address
from fkie_iop_msgs.msg import OcuCmdEntry, JausAddress
from .handoff_dialog import HandoffDialog
class Robot(QObject):
    """Widget and state holder for one discovered JAUS subsystem (robot).

    Wraps a ``robot.ui`` widget with view/control buttons, tracks the OCU
    client currently assigned to this subsystem, and owns the warning,
    detail and handoff dialogs.  The ``*_activated``/``*_deactivated``
    signals announce button-driven access changes to the owning view.
    """

    # Seconds without an ident update before is_old() reports the
    # subsystem as stale.
    MAX_AGE = 30
    control_activated = Signal(Address)
    control_deactivated = Signal(Address)
    view_activated = Signal(Address)
    view_deactivated = Signal(Address)

    def __init__(self, subsystem, settings, authority=205):
        """:param subsystem: JAUS subsystem identification (ident + nodes).
        :param settings: shared settings object (provides ``.authority``).
        :param authority: requested JAUS access-control authority value.
        """
        QObject.__init__(self)
        self._subsystem = subsystem
        self._settings = settings
        self._authority = authority
        # Load the per-robot widget from the Qt Designer file that lives
        # next to this module.
        ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'robot.ui')
        self._widget = QWidget()
        loadUi(ui_file, self._widget)
        self._last_update = rospy.Time.now()
        # Address -> human-readable name of discovered components.
        self._component_names = dict()
        # Plain warning strings shown in the warning dialog.
        self._warnings = []
        # client name -> list of service infos with warning state.
        self._feedback_warnings = dict()
        self._ocu_client = None
        # Address of the OCU currently controlling this robot (zero
        # subsystem_id means "nobody").
        self._control_addr = Address(JausAddress())
        self._warning_dialog = self._create_warning_dialog()
        self._detailed_dialog = self._create_detailed_dialog()
        self.handoff_dialog = HandoffDialog(self.name, self.subsystem_id, self._settings, self._widget)
        self.handoff_dialog.button_blink.connect(self._widget.button_handoff.setEnabled)
        self._widget.button_view.clicked.connect(self._on_robot_view)
        self._widget.button_control.setText("%s - %d" % (subsystem.ident.name, self._subsystem.ident.address.subsystem_id))
        self._widget.button_control.clicked.connect(self._on_robot_control)
        self._widget.button_control.setObjectName(subsystem.ident.name)
        self._widget.button_handoff.setEnabled(False)
        self._widget.button_handoff.clicked.connect(self.on_show_handoff)
        self._widget.button_warnings.setEnabled(False)
        self._widget.button_warnings.clicked.connect(self.on_show_warnings)
        self._widget.button_details.clicked.connect(self.on_show_details)

    def __del__(self):
        # Break the Qt parent link and drop references so the dialogs
        # and the OCU client can be collected.
        self.handoff_dialog.setParent(None)
        self.handoff_dialog.shutdown()
        self.handoff_dialog = None
        self._detailed_dialog = None
        self._warning_dialog = None
        self._ocu_client = None
        self._feedback_warnings.clear()
        self._component_names.clear()
        del self._warnings[:]

    @property
    def name(self):
        # Human-readable subsystem name from the ident message.
        return self._subsystem.ident.name

    @property
    def subsystem_id(self):
        return self._subsystem.ident.address.subsystem_id

    @property
    def ocu_client(self):
        # OCU client object assigned to this robot, or None.
        return self._ocu_client

    @ocu_client.setter
    def ocu_client(self, ocu_client):
        # Reset warnings and detach the previous client before switching.
        self.set_warnings([])
        if self._ocu_client is not None:
            self._ocu_client.control_subsystem = -1
        self._ocu_client = ocu_client
        if self._ocu_client is not None:
            self._ocu_client.control_subsystem = self.subsystem_id
            # A client restricted to this subsystem may forbid control
            # (monitor-only mode).
            if ocu_client.subsystem_restricted == self.subsystem_id:
                self._widget.button_control.setEnabled(not ocu_client.only_monitor)
            self.handoff_dialog.set_client(self._ocu_client)
            self.update_feedback_warnings()
        elif self.has_view() or self.has_control():
            # Access is requested but no client can serve it; tell the user.
            self.set_warnings(["No free OCU client available!", "Start an ocu_client with different nodeID to be able to listen for sensors on second robot."])
            self.handoff_dialog.set_client(None)
        # Handoff is only offered if the client publishes handoff requests.
        if self._ocu_client is not None:
            self._widget.button_handoff.setVisible(self._ocu_client.has_handoff_publisher())
        else:
            self._widget.button_handoff.setVisible(True)

    @property
    def ocu_client_restricted(self):
        # The assigned client, but only if it is restricted to exactly
        # this subsystem; None otherwise.
        if self._ocu_client is not None:
            if self._ocu_client.subsystem_restricted == self.subsystem_id:
                return self._ocu_client
        return None

    @property
    def control_addr(self):
        return self._control_addr

    @control_addr.setter
    def control_addr(self, address):
        self._control_addr = address
        # Controlling OCU changed -> button colors may change.
        self._update_warnings_button()

    def set_control_active(self, state):
        self._widget.button_control.setEnabled(state)

    def _on_robot_control(self, checked=False):
        """Toggle full control access for the whole subsystem (255.255)."""
        addr = Address(JausAddress(self._subsystem.ident.address.subsystem_id, 255, 255))
        if checked:
            # Control implies view access.
            self._widget.button_view.setChecked(checked)
            self.control_activated.emit(addr)
            self.handoff_dialog.on_access = True
        else:
            self.release_control()
            self.control_deactivated.emit(addr)
            self.handoff_dialog.cancel_handoff()
            self.handoff_dialog.on_access = False

    def _on_robot_view(self, checked=False):
        """Toggle view (monitor) access for the whole subsystem."""
        addr = Address(JausAddress(self._subsystem.ident.address.subsystem_id, 255, 255))
        if checked:
            self._widget.button_view.setChecked(checked)
            self.view_activated.emit(addr)
        else:
            # Dropping view also drops control, if held.
            if self.has_control():
                self._widget.button_control.setChecked(False)
                self.control_deactivated.emit(addr)
            self.view_deactivated.emit(addr)

    def has_control(self):
        return self._widget.button_control.isChecked()

    def has_view(self):
        return self._widget.button_view.isChecked()

    def release_control(self):
        # Uncheck both buttons without emitting signals.
        self._widget.button_view.setChecked(False)
        self._widget.button_control.setChecked(False)

    def activate_view(self):
        self._widget.button_view.setChecked(True)

    def state_to_cmd(self):
        """Translate the current button state into an OcuCmdEntry."""
        cmd = OcuCmdEntry()
        cmd.authority = self._settings.authority
        cmd.name = self.name
        cmd.address.subsystem_id = self._subsystem.ident.address.subsystem_id
        cmd.address.node_id = 255
        cmd.address.component_id = 255
        # NOTE(review): access_control codes appear to be 12=request
        # control, 11=monitor only, 10=release — confirm against the
        # OcuCmdEntry message definition.
        if self._widget.button_control.isChecked():
            cmd.access_control = 12
        elif self._widget.button_view.isChecked():
            cmd.access_control = 11
        else:
            cmd.access_control = 10
        if self.ocu_client is not None:
            cmd.ocu_client = self.ocu_client.address
        else:
            # No client assigned: use the broadcast/invalid address.
            cmd.ocu_client.subsystem_id = 65535
            cmd.ocu_client.node_id = 255
            cmd.ocu_client.component_id = 255
        return cmd

    def update(self, subsystem):
        """Replace the stored subsystem info if id and name match.

        :return: True if the update was applied, False otherwise.
        """
        if self._subsystem.ident.address.subsystem_id != subsystem.ident.address.subsystem_id:
            return False
        if self._subsystem.ident.name != subsystem.ident.name:
            return False
        self._subsystem = subsystem
        return True

    def on_show_handoff(self):
        # Toggle visibility of the handoff dialog.
        self.handoff_dialog.setVisible(not self.handoff_dialog.isVisible())

    def on_show_details(self):
        """Fill and show the detail dialog with nodes/components/services."""
        twc = self._detailed_dialog.treewidget_components
        twc.clear()
        client_info = "OCU client: ---"
        if self._ocu_client is not None:
            add_info = ''
            if self.ocu_client.subsystem_restricted == self.subsystem_id:
                if self.ocu_client.only_monitor:
                    add_info = ' [restricted, only monitor]'
                else:
                    add_info = ' [restricted]'
            client_info = "OCU client: %s%s" % (self.ocu_client.address, add_info)
        elif self.control_addr.subsystem_id != 0:
            # Some other OCU holds control over this robot.
            client_info = 'Controlled by other OCU: %s' % self.control_addr
        self._detailed_dialog.label_info.setText(client_info)
        if self.name == self._subsystem.ident.name:
            # Build the node -> component -> service tree.
            for node in self._subsystem.nodes:
                node_item = QTreeWidgetItem(twc)
                node_name = node.ident.name if node.ident.name else "NODE"
                node_item.setText(0, "%s [id: %d]" % (node_name, node.ident.address.node_id))
                for comp in node.components:
                    cmp_item = QTreeWidgetItem(node_item)
                    cmp_name = self._get_component_name(comp.address)
                    cmp_item.setText(0, "%s [%d.%d.%d]" % (cmp_name, comp.address.subsystem_id, comp.address.node_id, comp.address.component_id))
                    twc.expandItem(node_item)
                    for srv in comp.services:
                        srv_item = QTreeWidgetItem(cmp_item)
                        srv_item.setText(0, "%s v%d.%d" % (srv.uri, srv.major_version, srv.minor_version))
        if self._detailed_dialog.isVisible():
            self._detailed_dialog.setFocus(Qt.ActiveWindowFocusReason)
        else:
            self._detailed_dialog.show()

    def on_show_warnings(self):
        """Fill and show the warning dialog with current warning texts."""
        text_browser = self._warning_dialog.warnings
        text_browser.clear()
        if not self._warnings and not self._feedback_warnings:
            text_browser.append('No known warnings!')
        else:
            for msg in self._warnings:
                text_browser.append(msg)
            if self._feedback_warnings:
                text_browser.append('Services with warning state:')
                for client, service_infos in self._feedback_warnings.items():
                    text_browser.append("Client %s:" % client)
                    for service_info in service_infos:
                        text_browser.append("  %s[%s]: %s" % (service_info.uri, Address(service_info.addr_control), self.access_state_to_str(service_info.access_state)))
        self._warning_dialog.show()

    def update_feedback_warnings(self):
        """Refresh service warnings reported by the assigned OCU client."""
        warnings = dict()
        if self._ocu_client is not None:
            cw = self._ocu_client.get_warnings(self.subsystem_id, self.has_control())
            warnings.update(cw)
            # Services rejected because of insufficient authority feed
            # the handoff dialog.
            insathority = dict()
            cw = self._ocu_client.get_srvs_ins_authority(self.subsystem_id)
            insathority.update(cw)
            self.handoff_dialog.update_authority_problems(insathority)
        self._feedback_warnings = warnings
        self._update_warnings_button()

    def set_warnings(self, warnings):
        self._warnings = warnings
        self._update_warnings_button()

    def _update_warnings_button(self):
        """Recolor view/control buttons to reflect access/warning state."""
        has_warning = (len(self._warnings) + len(self._feedback_warnings)) > 0
        if has_warning and self.has_control():
            # Orange: we have control but there are warnings.
            self._widget.button_control.setStyleSheet("QPushButton { background-color: #FE9A2E;}")
        elif self.has_control():
            # Green: control and view granted.
            self._widget.button_control.setStyleSheet("QPushButton { background-color: #98FB98;}")
            self._widget.button_view.setStyleSheet("QPushButton { background-color: #98FB98;}")
        elif self.has_view():
            self._widget.button_control.setStyleSheet("QPushButton { background-color: None;}")
            self._widget.button_view.setStyleSheet("QPushButton { background-color: #98FB98;}")
        elif self.control_addr.subsystem_id != 0 and (self._ocu_client is None or self.control_addr.subsystem_id != self._ocu_client.subsystem_id):
            # Grey: another OCU controls this robot.
            self._widget.button_control.setStyleSheet("QPushButton { background-color: #A9A9A9;}")
            self._widget.button_view.setStyleSheet("QPushButton { background-color: None;}")
        else:
            self._widget.button_control.setStyleSheet("QPushButton { background-color: None;}")
            self._widget.button_view.setStyleSheet("QPushButton { background-color: None;}")
        self._widget.button_warnings.setEnabled(has_warning)

    def update_ident(self, ident):
        """Process an identification message; refresh timestamps/names.

        :return: always False (presumably "not consumed" — TODO confirm
            how the caller uses this value).
        """
        if Address(ident.address) == Address(self._subsystem.ident.address):
            self._last_update = rospy.Time.now()
        # NOTE(review): 60001 / request_type 4 seem to mark idents that
        # carry component names — confirm against the message spec.
        if ident.system_type == 60001 or ident.request_type == 4:
            if ident.address.subsystem_id == self._subsystem.ident.address.subsystem_id:
                self._component_names[Address(ident.address)] = ident.name
        return False

    def _get_component_name(self, msg_address):
        # Fall back to a generic label for unknown components.
        addr = Address(msg_address)
        try:
            return self._component_names[addr]
        except Exception:
            pass
        return "Component"

    def is_old(self):
        # True if no ident was received for more than MAX_AGE seconds.
        return rospy.Time.now() - self._last_update > rospy.Duration(self.MAX_AGE)

    def get_widget(self):
        return self._widget

    def _create_warning_dialog(self):
        # Dialog listing textual warnings for this robot.
        diag = QDialog(self._widget)
        ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'warning_info.ui')
        loadUi(ui_file, diag)
        diag.resize(600, 250)
        diag.setWindowTitle("Warning for %s[%d]" % (self.name, self.subsystem_id))
        diag.setWindowIcon(QIcon.fromTheme("dialog-warning"))
        return diag

    def _create_detailed_dialog(self):
        # Dialog with the node/component/service tree of the subsystem.
        diag = QDialog(self._widget)
        ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'system_info.ui')
        loadUi(ui_file, diag)
        diag.treewidget_components.setHeaderLabel("%s [%d]" % (self.name, self.subsystem_id))
        diag.resize(500, 300)
        diag.setWindowTitle("subsystem %s[%d]" % (self.name, self.subsystem_id))
        diag.setWindowIcon(QIcon.fromTheme("help-about"))
        return diag

    def access_state_to_str(self, state):
        """Map a numeric JAUS access state to its symbolic name."""
        if state == 0:
            return 'NOT_AVAILABLE'
        if state == 1:
            return 'NOT_CONTROLLED'
        if state == 2:
            return 'CONTROL_RELEASED'
        if state == 3:
            return 'CONTROL_ACCEPTED'
        if state == 4:
            return 'TIMEOUT'
        if state == 5:
            return 'INSUFFICIENT_AUTHORITY'
        if state == 6:
            return 'MONITORING'
        return 'UNKNOWN'
| true
| true
|
f71ab0e75e50d66af2bfe69ef2fd8400a56a4fd4
| 1,903
|
py
|
Python
|
Assessments 1-8/Ass8/Q2_b_1.py
|
ZHANG-CAIQI/COMP1001
|
abfad8101b4b58697dfbc8599eebf466beebb9ec
|
[
"MIT"
] | 1
|
2020-05-17T03:28:17.000Z
|
2020-05-17T03:28:17.000Z
|
Assessments 1-8/Ass8/Q2_b_1.py
|
ZHANG-CAIQI/COMP1001
|
abfad8101b4b58697dfbc8599eebf466beebb9ec
|
[
"MIT"
] | null | null | null |
Assessments 1-8/Ass8/Q2_b_1.py
|
ZHANG-CAIQI/COMP1001
|
abfad8101b4b58697dfbc8599eebf466beebb9ec
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import numpy as np
def stockUp(priceFile):
    """Plot closing prices and their "up periods" from a CSV price file.

    The up period of day ``k`` is the number of consecutive days, ending
    with and including day ``k``, on which the closing price was not
    higher than on day ``k``.

    :param priceFile: path to a CSV file with a header row where column 0
        is the date and column 4 the closing price (Yahoo Finance layout).
    """
    date = []
    stock = []
    # Read the file with a context manager so it is closed even on error.
    # A synthetic day counter replaces the date string so the x-axis is
    # numeric.
    with open(priceFile, "r") as infile:
        day = 1
        firstLine = True
        for line in infile:
            if firstLine:
                firstLine = False  # skip the CSV header row
            elif line.strip():  # ignore blank lines (e.g. trailing newline)
                fields = line.split(",")
                date.append(day)
                stock.append(float(fields[4]))  # column 4 = closing price
                day += 1

    # up[k]: walk backwards from day k while the price stays at or below
    # stock[k]; day 0 is left at zero as in the original assignment.
    up = len(date) * [0]
    for k in range(1, len(stock)):
        i = k
        while (i > 0) and stock[k] >= stock[i]:
            up[k] += 1
            i -= 1

    # Prices on the left axis, up periods on the right, shared x-axis.
    fig, ax1 = plt.subplots()
    color = 'tab:red'
    ax1.set_xlabel('Days started from 11/13/2017 and end on 11/12/2018')
    ax1.set_ylabel('Stock prices', color=color)
    ax1.plot(date, stock, color=color)
    ax1.tick_params(axis='y', labelcolor=color)
    ax2 = ax1.twinx()  # second axes sharing the same x-axis
    color = 'tab:blue'
    ax2.set_ylabel('Up periods', color=color)  # x-label handled by ax1
    ax2.plot(date, up, color=color)
    ax2.tick_params(axis='y', labelcolor=color)
    fig.tight_layout()  # otherwise the right y-label is slightly clipped
    plt.show()
    return


stockUp("GOOGL.csv")
| 27.185714
| 89
| 0.543878
|
import matplotlib.pyplot as plt
import numpy as np
def stockUp(priceFile):
    """Plot closing prices and their "up periods" from a CSV price file.

    :param priceFile: path to a CSV file with a header row where column 0
        is the date and column 4 the closing price.
    """
    # Read the file, skipping the header; the day counter stands in for
    # the date string so the x-axis is numeric.
    infile = open(priceFile, "r")
    date = []
    stock = []
    day = 1
    firstLine = True
    for line in infile:
        if firstLine:
            firstLine = False  # skip the CSV header row
        else:
            count_item = 0
            for item in line.split(","):
                if count_item == 0:
                    date.append(day)
                elif count_item == 4:
                    stock.append(float(item))  # column 4 = closing price
                count_item += 1
            day += 1
    infile.close()
    # up[k]: consecutive days (including day k itself) on which the
    # closing price was at or below stock[k]; day 0 stays at zero.
    up = len(date)*[0]
    for k in range(1,len(stock)):
        i = k
        while ((i>0) and float(stock[k])>=float(stock[i])):
            up[k] += 1
            i -= 1
    # Prices on the left axis, up periods on the right, shared x-axis.
    fig, ax1 = plt.subplots()
    color = 'tab:red'
    ax1.set_xlabel('Days started from 11/13/2017 and end on 11/12/2018')
    ax1.set_ylabel('Stock prices', color=color)
    ax1.plot(date, stock, color=color)
    ax1.tick_params(axis='y', labelcolor=color)
    ax2 = ax1.twinx()  # second axes sharing the same x-axis
    color = 'tab:blue'
    ax2.set_ylabel('Up periods', color=color)
    ax2.plot(date, up, color=color)
    ax2.tick_params(axis='y', labelcolor=color)
    fig.tight_layout()  # otherwise the right y-label is slightly clipped
    plt.show()
    return


stockUp("GOOGL.csv")
| true
| true
|
f71ab0f98895a9582d987bf35cfa556cbf1224e1
| 694
|
py
|
Python
|
GENERAL/slots_manager.py
|
Couso99/EEG-Environment
|
d67de00c08c5892baebe5bf993cac0a5db6e70b1
|
[
"MIT"
] | null | null | null |
GENERAL/slots_manager.py
|
Couso99/EEG-Environment
|
d67de00c08c5892baebe5bf993cac0a5db6e70b1
|
[
"MIT"
] | null | null | null |
GENERAL/slots_manager.py
|
Couso99/EEG-Environment
|
d67de00c08c5892baebe5bf993cac0a5db6e70b1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
@author: %(Mikel Val Calvo)s
@email: %(mikel1982mail@gmail.com)
@institution: %(Dpto. de Inteligencia Artificial, Universidad Nacional de Educación a Distancia (UNED))
@DOI: 10.5281/zenodo.3759306
"""
#%%
class SlotsManager:
    """Maintain a list of callback slots and invoke them on demand."""

    def __init__(self):
        # Registered callbacks, invoked in insertion order.
        self.callbacks = []

    def trigger(self):
        """Invoke every registered callback, printing each for tracing."""
        for cb in self.callbacks:
            cb()
            print(cb)

    def append(self, slot):
        """Register *slot* as a callback, printing it for tracing."""
        self.callbacks.append(slot)
        print(slot)
| 23.931034
| 103
| 0.628242
|
class SlotsManager:
    """Maintain a list of callback slots and invoke them on demand."""

    def __init__(self):
        # Registered callbacks, invoked in insertion order.
        self.callbacks = []

    def trigger(self):
        """Invoke every registered callback, printing each for tracing."""
        for callback in self.callbacks:
            callback()
            print(callback)

    def append(self, slot):
        """Register *slot* as a callback, printing it for tracing."""
        self.callbacks.append(slot)
        print(slot)
| true
| true
|
f71ab3032781cd41199cec50632738defd8f52ca
| 116,626
|
py
|
Python
|
test/orm/test_joins.py
|
petit87/sqlalchemy
|
67d674bd63ca36ac32b23f96e2b19e9dac6b0863
|
[
"MIT"
] | null | null | null |
test/orm/test_joins.py
|
petit87/sqlalchemy
|
67d674bd63ca36ac32b23f96e2b19e9dac6b0863
|
[
"MIT"
] | null | null | null |
test/orm/test_joins.py
|
petit87/sqlalchemy
|
67d674bd63ca36ac32b23f96e2b19e9dac6b0863
|
[
"MIT"
] | null | null | null |
import itertools
import sqlalchemy as sa
from sqlalchemy import and_
from sqlalchemy import desc
from sqlalchemy import exc as sa_exc
from sqlalchemy import ForeignKey
from sqlalchemy import func
from sqlalchemy import inspect
from sqlalchemy import Integer
from sqlalchemy import lateral
from sqlalchemy import literal_column
from sqlalchemy import MetaData
from sqlalchemy import not_
from sqlalchemy import or_
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import testing
from sqlalchemy import true
from sqlalchemy import union
from sqlalchemy.engine import default
from sqlalchemy.orm import aliased
from sqlalchemy.orm import backref
from sqlalchemy.orm import join
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import outerjoin
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
from sqlalchemy.orm import synonym
from sqlalchemy.sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
from sqlalchemy.testing import assert_raises
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.assertions import expect_raises_message
from sqlalchemy.testing.fixtures import fixture_session
from sqlalchemy.testing.schema import Column
from test.orm import _fixtures
from .inheritance import _poly_fixtures
from .test_query import QueryTest
class InheritedTest(_poly_fixtures._Polymorphic):
    """Base for join tests against the polymorphic Company fixture."""

    # Map classes once for the whole test class instead of per test.
    run_setup_mappers = "once"
class InheritedJoinTest(InheritedTest, AssertsCompiledSQL):
    """SQL-compilation tests for ORM joins over inherited mappings."""

    def test_single_prop(self):
        # Joining along Company.employees renders a join to the base
        # people table only.
        Company = self.classes.Company

        sess = fixture_session()
        self.assert_compile(
            sess.query(Company).join(Company.employees),
            "SELECT companies.company_id AS companies_company_id, "
            "companies.name AS companies_name "
            "FROM companies JOIN people "
            "ON companies.company_id = people.company_id",
            use_default_dialect=True,
        )

    def test_force_via_select_from(self):
        # Without select_from(), filtering across Company and Engineer
        # produces a comma-separated FROM list; with select_from() the
        # Engineer entity brings in the people->engineers join.
        Company, Engineer = self.classes.Company, self.classes.Engineer

        sess = fixture_session()
        self.assert_compile(
            sess.query(Company)
            .filter(Company.company_id == Engineer.company_id)
            .filter(Engineer.primary_language == "java"),
            "SELECT companies.company_id AS companies_company_id, "
            "companies.name AS companies_name "
            "FROM companies, people, engineers "
            "WHERE companies.company_id = people.company_id "
            "AND engineers.primary_language "
            "= :primary_language_1",
            use_default_dialect=True,
        )

        self.assert_compile(
            sess.query(Company)
            .select_from(Company, Engineer)
            .filter(Company.company_id == Engineer.company_id)
            .filter(Engineer.primary_language == "java"),
            "SELECT companies.company_id AS companies_company_id, "
            "companies.name AS companies_name "
            "FROM companies, people JOIN engineers "
            "ON people.person_id = engineers.person_id "
            "WHERE companies.company_id = people.company_id "
            "AND engineers.primary_language ="
            " :primary_language_1",
            use_default_dialect=True,
        )

    def test_single_prop_of_type(self):
        # of_type(Engineer) joins to the (people JOIN engineers) subjoin.
        Company, Engineer = self.classes.Company, self.classes.Engineer

        sess = fixture_session()
        self.assert_compile(
            sess.query(Company).join(Company.employees.of_type(Engineer)),
            "SELECT companies.company_id AS companies_company_id, "
            "companies.name AS companies_name "
            "FROM companies JOIN "
            "(people JOIN engineers "
            "ON people.person_id = engineers.person_id) "
            "ON companies.company_id = people.company_id",
            use_default_dialect=True,
        )

    def test_explicit_polymorphic_join_one(self):
        # Joining directly to the Engineer subclass infers the onclause.
        Company, Engineer = self.classes.Company, self.classes.Engineer

        sess = fixture_session()
        self.assert_compile(
            sess.query(Company)
            .join(Engineer)
            .filter(Engineer.engineer_name == "vlad"),
            "SELECT companies.company_id AS companies_company_id, "
            "companies.name AS companies_name "
            "FROM companies JOIN (people JOIN engineers "
            "ON people.person_id = engineers.person_id) "
            "ON "
            "companies.company_id = people.company_id "
            "WHERE engineers.engineer_name = :engineer_name_1",
            use_default_dialect=True,
        )

    def test_explicit_polymorphic_join_two(self):
        # Same as above, but with an explicit onclause supplied.
        Company, Engineer = self.classes.Company, self.classes.Engineer

        sess = fixture_session()
        self.assert_compile(
            sess.query(Company)
            .join(Engineer, Company.company_id == Engineer.company_id)
            .filter(Engineer.engineer_name == "vlad"),
            "SELECT companies.company_id AS companies_company_id, "
            "companies.name AS companies_name "
            "FROM companies JOIN "
            "(people JOIN engineers "
            "ON people.person_id = engineers.person_id) "
            "ON "
            "companies.company_id = people.company_id "
            "WHERE engineers.engineer_name = :engineer_name_1",
            use_default_dialect=True,
        )

    def test_auto_aliasing_multi_link(self):
        # test [ticket:2903]
        # Multiple joins along the same relationship to different
        # subclasses are automatically aliased, with a warning emitted
        # for each overlapping join.
        sess = fixture_session()
        Company, Engineer, Manager, Boss = (
            self.classes.Company,
            self.classes.Engineer,
            self.classes.Manager,
            self.classes.Boss,
        )

        q = (
            sess.query(Company)
            .join(Company.employees.of_type(Engineer))
            .join(Company.employees.of_type(Manager))
            .join(Company.employees.of_type(Boss))
        )

        with testing.expect_warnings(
            "An alias is being generated automatically against joined entity "
            r"Mapper\[Manager\(managers\)\] due to overlapping",
            "An alias is being generated automatically against joined entity "
            r"Mapper\[Boss\(boss\)\] due to overlapping",
            raise_on_any_unexpected=True,
        ):
            self.assert_compile(
                q,
                "SELECT companies.company_id AS companies_company_id, "
                "companies.name AS companies_name FROM companies "
                "JOIN (people JOIN engineers "
                "ON people.person_id = engineers.person_id) "
                "ON companies.company_id = people.company_id "
                "JOIN (people AS people_1 JOIN managers AS managers_1 "
                "ON people_1.person_id = managers_1.person_id) "
                "ON companies.company_id = people_1.company_id "
                "JOIN (people AS people_2 JOIN managers AS managers_2 "
                "ON people_2.person_id = managers_2.person_id "
                "JOIN boss AS boss_1 "
                "ON managers_2.person_id = boss_1.boss_id) "
                "ON companies.company_id = people_2.company_id",
                use_default_dialect=True,
            )
class JoinOnSynonymTest(_fixtures.FixtureTest, AssertsCompiledSQL):
    """join() on a synonym proxies through to the real relationship."""

    __dialect__ = "default"

    @classmethod
    def setup_mappers(cls):
        # Map User with a real "addresses" relationship plus an "ad_syn"
        # synonym pointing at it.
        User = cls.classes.User
        Address = cls.classes.Address
        users, addresses = (cls.tables.users, cls.tables.addresses)
        cls.mapper_registry.map_imperatively(
            User,
            users,
            properties={
                "addresses": relationship(Address),
                "ad_syn": synonym("addresses"),
            },
        )
        cls.mapper_registry.map_imperatively(Address, addresses)

    def test_join_on_synonym(self):
        # Joining via the synonym compiles identically to joining via
        # the underlying relationship.
        User = self.classes.User
        self.assert_compile(
            fixture_session().query(User).join(User.ad_syn),
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM users JOIN addresses ON users.id = addresses.user_id",
        )
class JoinTest(QueryTest, AssertsCompiledSQL):
__dialect__ = "default"
@testing.combinations_list(
set(
itertools.product(
[
"relationship",
"relationship_only",
"none",
"explicit",
"table_none",
"table_explicit",
],
[True, False],
)
),
argnames="onclause_type, use_legacy",
)
def test_filter_by_from_join(self, onclause_type, use_legacy):
User, Address = self.classes("User", "Address")
(address_table,) = self.tables("addresses")
(user_table,) = self.tables("users")
if use_legacy:
sess = fixture_session()
q = sess.query(User)
else:
q = select(User).set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
if onclause_type == "relationship":
q = q.join(Address, User.addresses)
elif onclause_type == "relationship_only":
q = q.join(User.addresses)
elif onclause_type == "none":
q = q.join(Address)
elif onclause_type == "explicit":
q = q.join(Address, User.id == Address.user_id)
elif onclause_type == "table_none":
q = q.join(address_table)
elif onclause_type == "table_explicit":
q = q.join(
address_table, user_table.c.id == address_table.c.user_id
)
else:
assert False
q2 = q.filter_by(email_address="foo")
self.assert_compile(
q2,
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN addresses ON users.id = addresses.user_id "
"WHERE addresses.email_address = :email_address_1",
)
if use_legacy:
q2 = q.reset_joinpoint().filter_by(name="user")
self.assert_compile(
q2,
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN addresses ON users.id = addresses.user_id "
"WHERE users.name = :name_1",
)
def test_join_relationship_propagate_attrs(self):
"""test #6558"""
User = self.classes.User
users = self.tables.users
stmt = select(users).join(User.addresses)
eq_(
stmt._propagate_attrs,
{"compile_state_plugin": "orm", "plugin_subject": inspect(User)},
)
self.assert_compile(
stmt,
"SELECT users.id, users.name FROM users "
"JOIN addresses ON users.id = addresses.user_id",
)
@testing.combinations((True,), (False,), argnames="legacy")
@testing.combinations((True,), (False,), argnames="threelevel")
def test_join_with_entities(self, legacy, threelevel):
"""test issue #6503"""
User, Address, Dingaling = self.classes("User", "Address", "Dingaling")
if legacy:
sess = fixture_session()
stmt = sess.query(User).join(Address).with_entities(Address.id)
else:
stmt = select(User).join(Address).with_only_columns(Address.id)
stmt = stmt.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
if threelevel:
if legacy:
stmt = stmt.join(Address.dingaling).with_entities(Dingaling.id)
else:
stmt = stmt.join(Address.dingaling).with_only_columns(
Dingaling.id
)
if threelevel:
self.assert_compile(
stmt,
"SELECT dingalings.id AS dingalings_id "
"FROM users JOIN addresses ON users.id = addresses.user_id "
"JOIN dingalings ON addresses.id = dingalings.address_id",
)
else:
self.assert_compile(
stmt,
"SELECT addresses.id AS addresses_id FROM users "
"JOIN addresses ON users.id = addresses.user_id",
)
@testing.combinations((True,), (False,), argnames="legacy")
@testing.combinations((True,), (False,), argnames="threelevel")
def test_join_and_union_with_entities(self, legacy, threelevel):
"""test issue #6698, regression caused by #6503"""
User, Address, Dingaling = self.classes("User", "Address", "Dingaling")
if legacy:
sess = fixture_session()
stmt = sess.query(User).join(Address).with_entities(Address.id)
else:
stmt = select(User).join(Address).with_only_columns(Address.id)
stmt = stmt.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
if threelevel:
if legacy:
stmt = stmt.join(Address.dingaling).with_entities(Dingaling.id)
to_union = sess.query(Dingaling.id)
else:
stmt = stmt.join(Address.dingaling).with_only_columns(
Dingaling.id
)
to_union = select(Dingaling.id).set_label_style(
LABEL_STYLE_TABLENAME_PLUS_COL
)
else:
if legacy:
to_union = sess.query(Address.id)
else:
to_union = select(Address.id).set_label_style(
LABEL_STYLE_TABLENAME_PLUS_COL
)
if legacy:
stmt = stmt.union(to_union)
else:
stmt = (
union(stmt, to_union)
.subquery()
.select()
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
)
if threelevel:
self.assert_compile(
stmt,
"SELECT anon_1.dingalings_id AS anon_1_dingalings_id FROM "
"(SELECT dingalings.id AS dingalings_id "
"FROM users JOIN addresses ON users.id = addresses.user_id "
"JOIN dingalings ON addresses.id = dingalings.address_id "
"UNION "
"SELECT dingalings.id AS dingalings_id FROM dingalings) "
"AS anon_1",
)
else:
self.assert_compile(
stmt,
"SELECT anon_1.addresses_id AS anon_1_addresses_id FROM "
"(SELECT addresses.id AS addresses_id FROM users "
"JOIN addresses ON users.id = addresses.user_id "
"UNION "
"SELECT addresses.id AS addresses_id FROM addresses) "
"AS anon_1",
)
def test_invalid_kwarg_join(self):
User = self.classes.User
sess = fixture_session()
assert_raises_message(
TypeError,
r".*join\(\) .*unexpected .*keyword",
sess.query(User).join,
"address",
foob="bar",
bar="bat",
)
assert_raises_message(
TypeError,
r".*outerjoin\(\) .*unexpected .*keyword",
sess.query(User).outerjoin,
"address",
foob="bar",
bar="bat",
)
def test_left_w_no_entity(self):
User = self.classes.User
Address = self.classes.Address
sess = fixture_session()
self.assert_compile(
sess.query(User, literal_column("x")).join(Address),
"SELECT users.id AS users_id, users.name AS users_name, x "
"FROM users JOIN addresses ON users.id = addresses.user_id",
)
self.assert_compile(
sess.query(literal_column("x"), User).join(Address),
"SELECT x, users.id AS users_id, users.name AS users_name "
"FROM users JOIN addresses ON users.id = addresses.user_id",
)
def test_left_is_none_and_query_has_no_entities(self):
Address = self.classes.Address
sess = fixture_session()
assert_raises_message(
sa_exc.InvalidRequestError,
r"No entities to join from; please use select_from\(\) to "
r"establish the left entity/selectable of this join",
sess.query().join(Address)._compile_context,
)
def test_isouter_flag(self):
User = self.classes.User
self.assert_compile(
fixture_session().query(User).join(User.orders, isouter=True),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users LEFT OUTER JOIN orders ON users.id = orders.user_id",
)
def test_full_flag(self):
User = self.classes.User
self.assert_compile(
fixture_session().query(User).outerjoin(User.orders, full=True),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users FULL OUTER JOIN orders ON users.id = orders.user_id",
)
def test_single_prop_1(self):
User = self.classes.User
sess = fixture_session()
self.assert_compile(
sess.query(User).join(User.orders),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN orders ON users.id = orders.user_id",
)
def test_single_prop_2(self):
Order, User = (self.classes.Order, self.classes.User)
sess = fixture_session()
self.assert_compile(
sess.query(User).join(Order.user),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM orders JOIN users ON users.id = orders.user_id",
)
def test_single_prop_3(self):
Order, User = (self.classes.Order, self.classes.User)
sess = fixture_session()
oalias1 = aliased(Order)
self.assert_compile(
sess.query(User).join(oalias1.user),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM orders AS orders_1 JOIN users "
"ON users.id = orders_1.user_id",
)
def test_single_prop_4(self):
(
Order,
User,
) = (self.classes.Order, self.classes.User)
sess = fixture_session()
oalias1 = aliased(Order)
oalias2 = aliased(Order)
# another nonsensical query. (from [ticket:1537]).
# in this case, the contract of "left to right" is honored
self.assert_compile(
sess.query(User).join(oalias1.user).join(oalias2.user),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM orders AS orders_1 JOIN users "
"ON users.id = orders_1.user_id, "
"orders AS orders_2 JOIN users ON users.id = orders_2.user_id",
)
def test_single_prop_6(self):
User = self.classes.User
sess = fixture_session()
ualias = aliased(User)
self.assert_compile(
sess.query(ualias).join(ualias.orders),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users AS users_1 JOIN orders ON users_1.id = orders.user_id",
)
def test_single_prop_9(self):
User = self.classes.User
sess = fixture_session()
subq = (
sess.query(User)
.filter(User.name == "ed")
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
.subquery()
)
ua = aliased(User, subq)
self.assert_compile(
sess.query(ua).join(ua.orders),
"SELECT anon_1.users_id AS anon_1_users_id, "
"anon_1.users_name AS anon_1_users_name "
"FROM (SELECT users.id AS users_id, users.name AS users_name "
"FROM users "
"WHERE users.name = :name_1) AS anon_1 JOIN orders "
"ON anon_1.users_id = orders.user_id",
)
def test_single_prop_12(self):
Order, User, Address = (
self.classes.Order,
self.classes.User,
self.classes.Address,
)
sess = fixture_session()
oalias1 = aliased(Order)
# test #1 for [ticket:1706]
ualias = aliased(User)
self.assert_compile(
sess.query(ualias)
.join(oalias1, ualias.orders)
.join(Address, ualias.addresses),
"SELECT users_1.id AS users_1_id, users_1.name AS "
"users_1_name FROM users AS users_1 JOIN orders AS orders_1 "
"ON users_1.id = orders_1.user_id JOIN addresses ON users_1.id "
"= addresses.user_id",
)
def test_single_prop_13(self):
Order, User, Address = (
self.classes.Order,
self.classes.User,
self.classes.Address,
)
sess = fixture_session()
# test #2 for [ticket:1706]
ualias = aliased(User)
ualias2 = aliased(User)
self.assert_compile(
sess.query(ualias)
.join(Address, ualias.addresses)
.join(ualias2, Address.user)
.join(Order, ualias.orders),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users "
"AS users_1 JOIN addresses ON users_1.id = addresses.user_id "
"JOIN users AS users_2 "
"ON users_2.id = addresses.user_id JOIN orders "
"ON users_1.id = orders.user_id",
)
def test_overlapping_paths_one_legacy(self):
User = self.classes.User
Order = self.classes.Order
sess = fixture_session()
# test overlapping paths. User->orders is used by both joins, but
# rendered once.
self.assert_compile(
sess.query(User)
.join(User.orders)
.join(Order.items)
.join(User.orders)
.join(Order.address),
"SELECT users.id AS users_id, users.name AS users_name FROM users "
"JOIN orders "
"ON users.id = orders.user_id "
"JOIN order_items AS order_items_1 "
"ON orders.id = order_items_1.order_id "
"JOIN items ON items.id = order_items_1.item_id JOIN addresses "
"ON addresses.id = orders.address_id",
)
def test_overlapping_paths_multilevel_legacy(self):
User = self.classes.User
Order = self.classes.Order
Address = self.classes.Address
s = fixture_session()
q = (
s.query(User)
.join(User.orders)
.join(User.addresses)
.join(User.orders)
.join(Order.items)
.join(User.addresses)
.join(Address.dingaling)
)
self.assert_compile(
q,
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN orders ON users.id = orders.user_id "
"JOIN addresses ON users.id = addresses.user_id "
"JOIN order_items AS order_items_1 ON orders.id = "
"order_items_1.order_id "
"JOIN items ON items.id = order_items_1.item_id "
"JOIN dingalings ON addresses.id = dingalings.address_id",
)
def test_overlapping_paths_one_modern(self):
User = self.classes.User
Order = self.classes.Order
# test overlapping paths. User->orders is used by both joins, but
# rendered once.
# label style is for comparison to legacy version. 1.4 version
# of select().join() did not behave the same as Query.join()
self.assert_compile(
select(User)
.join(User.orders)
.join(Order.items)
.join(User.orders)
.join(Order.address)
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL),
"SELECT users.id AS users_id, users.name AS users_name FROM users "
"JOIN orders "
"ON users.id = orders.user_id "
"JOIN order_items AS order_items_1 "
"ON orders.id = order_items_1.order_id "
"JOIN items ON items.id = order_items_1.item_id JOIN addresses "
"ON addresses.id = orders.address_id",
)
def test_overlapping_paths_multilevel_modern(self):
    """Multi-level overlapping paths in 2.0 style via select().join()."""
    User = self.classes.User
    Order = self.classes.Order
    Address = self.classes.Address

    # label style is for comparison to legacy version. 1.4 version
    # of select().join() did not behave the same as Query.join()
    q = (
        select(User)
        .join(User.orders)
        .join(User.addresses)
        .join(User.orders)
        .join(Order.items)
        .join(User.addresses)
        .join(Address.dingaling)
        .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
    )
    self.assert_compile(
        q,
        "SELECT users.id AS users_id, users.name AS users_name "
        "FROM users JOIN orders ON users.id = orders.user_id "
        "JOIN addresses ON users.id = addresses.user_id "
        "JOIN order_items AS order_items_1 ON orders.id = "
        "order_items_1.order_id "
        "JOIN items ON items.id = order_items_1.item_id "
        "JOIN dingalings ON addresses.id = dingalings.address_id",
    )
def test_join_nonmapped_column(self):
    """test that the search for a 'left' doesn't trip on non-mapped cols"""
    Order = self.classes.Order
    User = self.classes.User

    session = fixture_session()

    # intentionally join() with a non-existent "left" side
    query = session.query(User.id, literal_column("foo")).join(Order.user)
    expected = (
        "SELECT users.id AS users_id, foo FROM "
        "orders JOIN users ON users.id = orders.user_id"
    )
    self.assert_compile(query, expected)
def test_backwards_join(self):
    """Join User->Address using Address.user as the onclause (a
    "backwards" join), including the error cases for its misuse."""
    User, Address = self.classes.User, self.classes.Address

    session = fixture_session()

    # a more controversial feature.  join from
    # User->Address, but the onclause is Address.user.
    rows = (
        session.query(User)
        .join(Address.user)
        .filter(Address.email_address == "ed@wood.com")
        .all()
    )
    eq_(rows, [User(id=8, name="ed")])

    # its actually not so controversial if you view it in terms
    # of multiple entities.
    rows = (
        session.query(User, Address)
        .join(Address.user)
        .filter(Address.email_address == "ed@wood.com")
        .all()
    )
    eq_(
        rows,
        [(User(id=8, name="ed"), Address(email_address="ed@wood.com"))],
    )

    # this was the controversial part.  now, raise an error if the feature
    # is abused.
    # before the error raise was added, this would silently work.....
    assert_raises(
        sa_exc.InvalidRequestError,
        session.query(User).join(Address, Address.user)._compile_context,
    )

    # but this one would silently fail
    adalias = aliased(Address)
    assert_raises(
        sa_exc.InvalidRequestError,
        session.query(User).join(adalias, Address.user)._compile_context,
    )
def test_multiple_with_aliases(self):
    """Join two distinct aliases of Order from an aliased User; each
    alias renders as its own JOIN with its own anon name."""
    Order, User = self.classes.Order, self.classes.User

    sess = fixture_session()

    ualias = aliased(User)
    oalias1 = aliased(Order)
    oalias2 = aliased(Order)
    self.assert_compile(
        sess.query(ualias)
        .join(oalias1, ualias.orders)
        .join(oalias2, ualias.orders)
        .filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)),
        "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
        "FROM users AS users_1 "
        "JOIN orders AS orders_1 ON users_1.id = orders_1.user_id "
        "JOIN orders AS orders_2 ON "
        "users_1.id = orders_2.user_id "
        "WHERE orders_1.user_id = :user_id_1 "
        "OR orders_2.user_id = :user_id_2",
        use_default_dialect=True,
    )
def test_select_from_orm_joins(self):
    """Exercise standalone orm.join() in its inner/outer/full variants,
    chained joins, and as an argument to Query.select_from()."""
    User, Order = self.classes.User, self.classes.Order

    sess = fixture_session()

    ualias = aliased(User)
    oalias1 = aliased(Order)
    oalias2 = aliased(Order)

    # plain inner join with explicit ON clause
    self.assert_compile(
        join(User, oalias2, User.id == oalias2.user_id),
        "users JOIN orders AS orders_1 ON users.id = orders_1.user_id",
        use_default_dialect=True,
    )

    self.assert_compile(
        join(User, oalias2, User.id == oalias2.user_id, full=True),
        "users FULL OUTER JOIN orders AS orders_1 "
        "ON users.id = orders_1.user_id",
        use_default_dialect=True,
    )

    self.assert_compile(
        join(User, oalias2, User.id == oalias2.user_id, isouter=True),
        "users LEFT OUTER JOIN orders AS orders_1 "
        "ON users.id = orders_1.user_id",
        use_default_dialect=True,
    )

    # full=True takes precedence when combined with isouter=True
    self.assert_compile(
        join(
            User,
            oalias2,
            User.id == oalias2.user_id,
            isouter=True,
            full=True,
        ),
        "users FULL OUTER JOIN orders AS orders_1 "
        "ON users.id = orders_1.user_id",
        use_default_dialect=True,
    )

    # chained joins; ON clauses are derived from the relationship
    self.assert_compile(
        join(User, oalias1).join(oalias2),
        "users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
        "JOIN orders AS orders_2 ON users.id = orders_2.user_id",
        use_default_dialect=True,
    )

    self.assert_compile(
        join(User, oalias1).join(oalias2, isouter=True),
        "users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
        "LEFT OUTER JOIN orders AS orders_2 "
        "ON users.id = orders_2.user_id",
        use_default_dialect=True,
    )

    self.assert_compile(
        join(User, oalias1).join(oalias2, full=True),
        "users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
        "FULL OUTER JOIN orders AS orders_2 "
        "ON users.id = orders_2.user_id",
        use_default_dialect=True,
    )

    self.assert_compile(
        join(User, oalias1).join(oalias2, full=True, isouter=True),
        "users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
        "FULL OUTER JOIN orders AS orders_2 "
        "ON users.id = orders_2.user_id",
        use_default_dialect=True,
    )

    # aliased left side with a relationship onclause
    self.assert_compile(
        join(ualias, oalias1, ualias.orders),
        "users AS users_1 JOIN orders AS orders_1 "
        "ON users_1.id = orders_1.user_id",
        use_default_dialect=True,
    )

    self.assert_compile(
        sess.query(ualias).select_from(
            join(ualias, oalias1, ualias.orders)
        ),
        "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
        "FROM users AS users_1 "
        "JOIN orders AS orders_1 ON users_1.id = orders_1.user_id",
        use_default_dialect=True,
    )

    # select_from() join coexists with the plain "users" FROM entry
    self.assert_compile(
        sess.query(User, ualias).select_from(
            join(ualias, oalias1, ualias.orders)
        ),
        "SELECT users.id AS users_id, users.name AS users_name, "
        "users_1.id AS users_1_id, "
        "users_1.name AS users_1_name FROM users, users AS users_1 "
        "JOIN orders AS orders_1 ON users_1.id = orders_1.user_id",
        use_default_dialect=True,
    )

    # this fails (and we can't quite fix right now).
    if False:
        self.assert_compile(
            sess.query(User, ualias)
            .join(oalias1, ualias.orders)
            .join(oalias2, User.id == oalias2.user_id)
            .filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)),
            "SELECT users.id AS users_id, users.name AS users_name, "
            "users_1.id AS users_1_id, users_1.name AS "
            "users_1_name FROM users JOIN orders AS orders_2 "
            "ON users.id = orders_2.user_id, "
            "users AS users_1 JOIN orders AS orders_1 "
            "ON users_1.id = orders_1.user_id "
            "WHERE orders_1.user_id = :user_id_1 "
            "OR orders_2.user_id = :user_id_2",
            use_default_dialect=True,
        )

    # this is the same thing using explicit orm.join() (which now offers
    # multiple again)
    self.assert_compile(
        sess.query(User, ualias)
        .select_from(
            join(ualias, oalias1, ualias.orders),
            join(User, oalias2, User.id == oalias2.user_id),
        )
        .filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)),
        "SELECT users.id AS users_id, users.name AS users_name, "
        "users_1.id AS users_1_id, users_1.name AS "
        "users_1_name FROM users AS users_1 JOIN orders AS orders_1 "
        "ON users_1.id = orders_1.user_id, "
        "users JOIN orders AS orders_2 ON users.id = orders_2.user_id "
        "WHERE orders_1.user_id = :user_id_1 "
        "OR orders_2.user_id = :user_id_2",
        use_default_dialect=True,
    )
def test_overlapping_backwards_joins(self):
    """Two backwards joins from different Order aliases to User each
    produce their own FROM entry, as requested."""
    User, Order = self.classes.User, self.classes.Order

    sess = fixture_session()

    oalias1 = aliased(Order)
    oalias2 = aliased(Order)

    # this is invalid SQL - joins from orders_1/orders_2 to User twice.
    # but that is what was asked for so they get it !
    self.assert_compile(
        sess.query(User).join(oalias1.user).join(oalias2.user),
        "SELECT users.id AS users_id, users.name AS users_name "
        "FROM orders AS orders_1 "
        "JOIN users ON users.id = orders_1.user_id, orders AS orders_2 "
        "JOIN users ON users.id = orders_2.user_id",
        use_default_dialect=True,
    )
def test_replace_multiple_from_clause(self):
    """test adding joins onto multiple FROM clauses

    Address.dingaling extends the addresses FROM while User.orders /
    Order.items extend the separate users FROM.
    """
    User, Order, Address = (
        self.classes.User,
        self.classes.Order,
        self.classes.Address,
    )

    sess = fixture_session()

    self.assert_compile(
        sess.query(Address, User)
        .join(Address.dingaling)
        .join(User.orders)
        .join(Order.items),
        "SELECT addresses.id AS addresses_id, "
        "addresses.user_id AS addresses_user_id, "
        "addresses.email_address AS addresses_email_address, "
        "users.id AS users_id, "
        "users.name AS users_name FROM addresses JOIN dingalings "
        "ON addresses.id = dingalings.address_id, "
        "users JOIN orders ON users.id = orders.user_id "
        "JOIN order_items AS order_items_1 "
        "ON orders.id = order_items_1.order_id JOIN items "
        "ON items.id = order_items_1.item_id",
        use_default_dialect=True,
    )
def test_invalid_join_entity_from_single_from_clause(self):
    """Joining to an unrelated entity with a single explicit FROM
    raises InvalidRequestError."""
    Address = self.classes.Address
    Item = self.classes.Item

    session = fixture_session()
    query = session.query(Address).select_from(Address)

    expected_message = (
        "Don't know how to join to .*Item.*. "
        r"Please use the .select_from\(\) "
        "method to establish an explicit left side, as well as"
    )
    assert_raises_message(
        sa.exc.InvalidRequestError,
        expected_message,
        query.join(Item)._compile_context,
    )
def test_invalid_join_entity_from_no_from_clause(self):
    """Joining to an unrelated entity with no explicit FROM raises
    the same InvalidRequestError as the single-FROM case."""
    Address, Item = (self.classes.Address, self.classes.Item)

    sess = fixture_session()
    q = sess.query(Address)

    assert_raises_message(
        sa.exc.InvalidRequestError,
        "Don't know how to join to .*Item.*. "
        r"Please use the .select_from\(\) "
        "method to establish an explicit left side, as well as",
        q.join(Item)._compile_context,
    )
def test_invalid_join_entity_from_multiple_from_clause(self):
    """test adding joins onto multiple FROM clauses where
    we still need to say there's nothing to JOIN from

    Item relates to neither of the two existing FROMs, so the join
    raises rather than guessing a left side.
    """
    User, Address, Item = (
        self.classes.User,
        self.classes.Address,
        self.classes.Item,
    )

    sess = fixture_session()

    q = sess.query(Address, User).join(Address.dingaling).join(User.orders)

    assert_raises_message(
        sa.exc.InvalidRequestError,
        "Don't know how to join to .*Item.*. "
        r"Please use the .select_from\(\) "
        "method to establish an explicit left side, as well as",
        q.join(Item)._compile_context,
    )
def test_join_explicit_left_multiple_from_clause(self):
    """test adding joins onto multiple FROM clauses where
    it is ambiguous which FROM should be used when an
    ON clause is given

    The relationship attribute used (User.addresses vs. u1.addresses)
    disambiguates which FROM entry the JOIN attaches to.
    """

    User = self.classes.User

    sess = fixture_session()
    u1 = aliased(User)

    # in this case, two FROM objects, one
    # is users, the other is u1_alias.
    # User.addresses looks for the "users" table and can match
    # to both u1_alias and users if the match is not specific enough
    q = sess.query(User, u1).select_from(User, u1).join(User.addresses)

    self.assert_compile(
        q,
        "SELECT users.id AS users_id, users.name AS users_name, "
        "users_1.id AS users_1_id, users_1.name AS users_1_name "
        "FROM users AS users_1, "
        "users JOIN addresses ON users.id = addresses.user_id",
    )

    q = sess.query(User, u1).select_from(User, u1).join(u1.addresses)

    self.assert_compile(
        q,
        "SELECT users.id AS users_id, users.name AS users_name, "
        "users_1.id AS users_1_id, users_1.name AS users_1_name "
        "FROM users, "
        "users AS users_1 JOIN addresses "
        "ON users_1.id = addresses.user_id",
    )
def test_join_explicit_left_multiple_adapted(self):
    """test adding joins onto multiple FROM clauses where
    it is ambiguous which FROM should be used when an
    ON clause is given

    With two adapted (aliased) FROMs, User.addresses cannot pick a
    left side and raises; an alias-specific attribute resolves it.
    """

    User = self.classes.User

    sess = fixture_session()
    u1 = aliased(User)
    u2 = aliased(User)

    # in this case, two FROM objects, one
    # is users, the other is u1_alias.
    # User.addresses looks for the "users" table and can match
    # to both u1_alias and users if the match is not specific enough
    assert_raises_message(
        sa_exc.InvalidRequestError,
        "Can't identify which entity in which to assign the "
        "left side of this join.",
        sess.query(u1, u2)
        .select_from(u1, u2)
        .join(User.addresses)
        ._compile_context,
    )

    # more specific ON clause
    self.assert_compile(
        sess.query(u1, u2).select_from(u1, u2).join(u2.addresses),
        "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name, "
        "users_2.id AS users_2_id, users_2.name AS users_2_name "
        "FROM users AS users_1, "
        "users AS users_2 JOIN addresses "
        "ON users_2.id = addresses.user_id",
    )
def test_join_entity_from_multiple_from_clause(self):
    """test adding joins onto multiple FROM clauses where
    it is ambiguous which FROM should be used

    Joining to aliased(Address) with no ON clause raises; adding an
    ON clause referencing Order or Dingaling selects which FROM is
    extended.
    """

    User, Order, Address, Dingaling = (
        self.classes.User,
        self.classes.Order,
        self.classes.Address,
        self.classes.Dingaling,
    )

    sess = fixture_session()

    q = sess.query(Address, User).join(Address.dingaling).join(User.orders)

    a1 = aliased(Address)

    assert_raises_message(
        sa.exc.InvalidRequestError,
        "Can't determine which FROM clause to join from, there are "
        "multiple FROMS which can join to this entity. "
        r"Please use the .select_from\(\) "
        "method to establish an explicit left side, as well as",
        q.join(a1)._compile_context,
    )

    # to resolve, add an ON clause

    # the user->orders join is chosen to join to a1
    self.assert_compile(
        q.join(a1, Order.address_id == a1.id),
        "SELECT addresses.id AS addresses_id, "
        "addresses.user_id AS addresses_user_id, "
        "addresses.email_address AS addresses_email_address, "
        "users.id AS users_id, users.name AS users_name "
        "FROM addresses JOIN dingalings "
        "ON addresses.id = dingalings.address_id, "
        "users JOIN orders "
        "ON users.id = orders.user_id "
        "JOIN addresses AS addresses_1 "
        "ON orders.address_id = addresses_1.id",
    )

    # the address->dingalings join is chosen to join to a1
    self.assert_compile(
        q.join(a1, Dingaling.address_id == a1.id),
        "SELECT addresses.id AS addresses_id, "
        "addresses.user_id AS addresses_user_id, "
        "addresses.email_address AS addresses_email_address, "
        "users.id AS users_id, users.name AS users_name "
        "FROM addresses JOIN dingalings "
        "ON addresses.id = dingalings.address_id "
        "JOIN addresses AS addresses_1 "
        "ON dingalings.address_id = addresses_1.id, "
        "users JOIN orders ON users.id = orders.user_id",
    )
def test_join_entity_from_multiple_entities(self):
    """test adding joins onto multiple FROM clauses where
    it is ambiguous which FROM should be used

    Same pattern as the multiple-FROM test above but the ambiguity
    comes from two query entities (Order, Dingaling) rather than
    joined FROMs.
    """

    Order, Address, Dingaling = (
        self.classes.Order,
        self.classes.Address,
        self.classes.Dingaling,
    )

    sess = fixture_session()

    q = sess.query(Order, Dingaling)

    a1 = aliased(Address)

    assert_raises_message(
        sa.exc.InvalidRequestError,
        "Can't determine which FROM clause to join from, there are "
        "multiple FROMS which can join to this entity. "
        r"Please use the .select_from\(\) "
        "method to establish an explicit left side, as well as",
        q.join(a1)._compile_context,
    )

    # to resolve, add an ON clause

    # Order is chosen to join to a1
    self.assert_compile(
        q.join(a1, Order.address_id == a1.id),
        "SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, "
        "orders.address_id AS orders_address_id, "
        "orders.description AS orders_description, "
        "orders.isopen AS orders_isopen, dingalings.id AS dingalings_id, "
        "dingalings.address_id AS dingalings_address_id, "
        "dingalings.data AS dingalings_data "
        "FROM dingalings, orders "
        "JOIN addresses AS addresses_1 "
        "ON orders.address_id = addresses_1.id",
    )

    # Dingaling is chosen to join to a1
    self.assert_compile(
        q.join(a1, Dingaling.address_id == a1.id),
        "SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, "
        "orders.address_id AS orders_address_id, "
        "orders.description AS orders_description, "
        "orders.isopen AS orders_isopen, dingalings.id AS dingalings_id, "
        "dingalings.address_id AS dingalings_address_id, "
        "dingalings.data AS dingalings_data "
        "FROM orders, dingalings JOIN addresses AS addresses_1 "
        "ON dingalings.address_id = addresses_1.id",
    )
def test_clause_present_in_froms_twice_w_onclause(self):
    """An alias present both standalone and inside a join in _from_obj
    is still usable as a join target when an ON clause is given."""
    # test [ticket:4584]
    Order, Address, User = (
        self.classes.Order,
        self.classes.Address,
        self.classes.User,
    )

    sess = fixture_session()

    a1 = aliased(Address)

    q = sess.query(Order).select_from(Order, a1, User)
    assert_raises_message(
        sa.exc.InvalidRequestError,
        "Can't determine which FROM clause to join from, there are "
        "multiple FROMS which can join to this entity. "
        r"Please use the .select_from\(\) "
        "method to establish an explicit left side, as well as",
        q.outerjoin(a1)._compile_context,
    )

    # the condition which occurs here is: Query._from_obj contains both
    # "a1" by itself as well as a join that "a1" is part of.
    # find_left_clause_to_join_from() needs to include removal of froms
    # that are in the _hide_froms of joins the same way
    # Selectable._get_display_froms does.
    q = sess.query(Order).select_from(Order, a1, User)
    q = q.outerjoin(a1, a1.id == Order.address_id)
    q = q.outerjoin(User, a1.user_id == User.id)

    self.assert_compile(
        q,
        "SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, "
        "orders.address_id AS orders_address_id, "
        "orders.description AS orders_description, "
        "orders.isopen AS orders_isopen "
        "FROM orders "
        "LEFT OUTER JOIN addresses AS addresses_1 "
        "ON addresses_1.id = orders.address_id "
        "LEFT OUTER JOIN users ON addresses_1.user_id = users.id",
    )
def test_clause_present_in_froms_twice_wo_onclause(self):
    """Same [ticket:4584] scenario without an explicit ON clause on
    the second outerjoin; the relationship supplies it."""
    # test [ticket:4584]
    Address, Dingaling, User = (
        self.classes.Address,
        self.classes.Dingaling,
        self.classes.User,
    )

    sess = fixture_session()

    a1 = aliased(Address)

    # the condition which occurs here is: Query._from_obj contains both
    # "a1" by itself as well as a join that "a1" is part of.
    # find_left_clause_to_join_from() needs to include removal of froms
    # that are in the _hide_froms of joins the same way
    # Selectable._get_display_froms does.
    q = sess.query(User).select_from(Dingaling, a1, User)
    q = q.outerjoin(a1, User.id == a1.user_id)
    q = q.outerjoin(Dingaling)

    self.assert_compile(
        q,
        "SELECT users.id AS users_id, users.name AS users_name "
        "FROM users LEFT OUTER JOIN addresses AS addresses_1 "
        "ON users.id = addresses_1.user_id "
        "LEFT OUTER JOIN dingalings "
        "ON addresses_1.id = dingalings.address_id",
    )
def test_pure_expression(self):
    """A query against plain Table objects joins via the FK between
    users and addresses."""
    # this was actually false-passing due to the assertions
    # fixture not following the regular codepath for Query
    users = self.tables.users
    addresses = self.tables.addresses

    session = fixture_session()

    query = session.query(users).join(addresses)
    self.assert_compile(
        query,
        "SELECT users.id AS users_id, users.name AS users_name "
        "FROM users JOIN addresses ON users.id = addresses.user_id",
    )
def test_no_onclause(self):
    """Joins with no explicit ON clause derive it from foreign keys /
    relationships, both via select_from() and chained Query.join()."""
    Item, User, Order = (
        self.classes.Item,
        self.classes.User,
        self.classes.Order,
    )

    sess = fixture_session()

    eq_(
        sess.query(User)
        .select_from(join(User, Order).join(Item, Order.items))
        .filter(Item.description == "item 4")
        .all(),
        [User(name="jack")],
    )

    eq_(
        sess.query(User.name)
        .select_from(join(User, Order).join(Item, Order.items))
        .filter(Item.description == "item 4")
        .all(),
        [("jack",)],
    )

    eq_(
        sess.query(User)
        .join(Order)
        .join(Item, Order.items)
        .filter(Item.description == "item 4")
        .all(),
        [User(name="jack")],
    )
def test_clause_onclause(self):
    """Explicit SQL-expression ON clauses against mapped classes, a
    plain association table, an alias, and a subquery entity."""
    Item, Order, order_items, User = (
        self.classes.Item,
        self.classes.Order,
        self.tables.order_items,
        self.classes.User,
    )

    sess = fixture_session()

    eq_(
        sess.query(User)
        .join(Order, User.id == Order.user_id)
        .join(order_items, Order.id == order_items.c.order_id)
        .join(Item, order_items.c.item_id == Item.id)
        .filter(Item.description == "item 4")
        .all(),
        [User(name="jack")],
    )

    eq_(
        sess.query(User.name)
        .join(Order, User.id == Order.user_id)
        .join(order_items, Order.id == order_items.c.order_id)
        .join(Item, order_items.c.item_id == Item.id)
        .filter(Item.description == "item 4")
        .all(),
        [("jack",)],
    )

    ualias = aliased(User)
    eq_(
        sess.query(ualias.name)
        .join(Order, ualias.id == Order.user_id)
        .join(order_items, Order.id == order_items.c.order_id)
        .join(Item, order_items.c.item_id == Item.id)
        .filter(Item.description == "item 4")
        .all(),
        [("jack",)],
    )

    # explicit onclause with from_self(), means
    # the onclause must be aliased against the query's custom
    # FROM object
    subq = sess.query(User).order_by(User.id).offset(2).subquery()
    ua = aliased(User, subq)
    eq_(
        sess.query(ua).join(Order, ua.id == Order.user_id).all(),
        [User(name="fred")],
    )
def test_aliased_classes(self):
    """Exercise aliased(Address) as an added entity, in select_from()
    outerjoins with various onclause forms, and in "backwards" joins
    from the alias to User."""
    User, Address = self.classes.User, self.classes.Address

    sess = fixture_session()

    (user7, user8, user9, user10) = sess.query(User).all()
    (address1, address2, address3, address4, address5) = sess.query(
        Address
    ).all()
    # expected outer-join pairing; user10 has no address -> None
    expected = [
        (user7, address1),
        (user8, address2),
        (user8, address3),
        (user8, address4),
        (user9, address5),
        (user10, None),
    ]

    q = sess.query(User)
    AdAlias = aliased(Address)
    q = q.add_entity(AdAlias).select_from(outerjoin(User, AdAlias))
    result = q.order_by(User.id, AdAlias.id).all()
    eq_(result, expected)

    sess.expunge_all()

    q = sess.query(User).add_entity(AdAlias)
    result = (
        q.select_from(outerjoin(User, AdAlias))
        .filter(AdAlias.email_address == "ed@bettyboop.com")
        .all()
    )
    eq_(result, [(user8, address3)])

    result = (
        q.select_from(outerjoin(User, AdAlias, "addresses"))
        .filter(AdAlias.email_address == "ed@bettyboop.com")
        .all()
    )
    eq_(result, [(user8, address3)])

    result = (
        q.select_from(outerjoin(User, AdAlias, User.id == AdAlias.user_id))
        .filter(AdAlias.email_address == "ed@bettyboop.com")
        .all()
    )
    eq_(result, [(user8, address3)])

    # this is the first test where we are joining "backwards" - from
    # AdAlias to User even though
    # the query is against User
    q = sess.query(User, AdAlias)
    result = (
        q.join(AdAlias.user)
        .filter(User.name == "ed")
        .order_by(User.id, AdAlias.id)
    )
    eq_(
        result.all(),
        [(user8, address2), (user8, address3), (user8, address4)],
    )

    # bug fix: the original asserted ``result.all()`` again here,
    # leaving this select_from() query dead code.  Execute the query
    # itself, ordered so the comparison is deterministic.
    q = (
        sess.query(User, AdAlias)
        .select_from(join(AdAlias, User, AdAlias.user))
        .filter(User.name == "ed")
        .order_by(User.id, AdAlias.id)
    )
    eq_(
        q.all(),
        [(user8, address2), (user8, address3), (user8, address4)],
    )
def test_expression_onclauses(self):
    """SQL-expression ON clauses joining to subqueries and to a
    mapped class render as expected."""
    Order, User = self.classes.Order, self.classes.User

    sess = fixture_session()

    subq = sess.query(User).subquery()

    self.assert_compile(
        sess.query(User).join(subq, User.name == subq.c.name),
        "SELECT users.id AS users_id, users.name AS users_name "
        "FROM users JOIN (SELECT users.id AS id, users.name "
        "AS name FROM users) AS anon_1 ON users.name = anon_1.name",
        use_default_dialect=True,
    )

    subq = sess.query(Order).subquery()
    self.assert_compile(
        sess.query(User).join(subq, User.id == subq.c.user_id),
        "SELECT users.id AS users_id, users.name AS users_name FROM "
        "users JOIN (SELECT orders.id AS id, orders.user_id AS user_id, "
        "orders.address_id AS address_id, orders.description AS "
        "description, orders.isopen AS isopen FROM orders) AS "
        "anon_1 ON users.id = anon_1.user_id",
        use_default_dialect=True,
    )

    self.assert_compile(
        sess.query(User).join(Order, User.id == Order.user_id),
        "SELECT users.id AS users_id, users.name AS users_name "
        "FROM users JOIN orders ON users.id = orders.user_id",
        use_default_dialect=True,
    )
def test_aliased_classes_m2m(self):
    """Many-to-many join Order<->Item via select_from(), both with the
    plain class and with aliased(Item)."""
    Item, Order = self.classes.Item, self.classes.Order

    sess = fixture_session()

    (order1, order2, order3, order4, order5) = sess.query(Order).all()
    (item1, item2, item3, item4, item5) = sess.query(Item).all()
    expected = [
        (order1, item1),
        (order1, item2),
        (order1, item3),
        (order2, item1),
        (order2, item2),
        (order2, item3),
        (order3, item3),
        (order3, item4),
        (order3, item5),
        (order4, item1),
        (order4, item5),
        (order5, item5),
    ]

    q = sess.query(Order)
    q = (
        q.add_entity(Item)
        .select_from(join(Order, Item, "items"))
        .order_by(Order.id, Item.id)
    )
    result = q.all()
    eq_(result, expected)

    IAlias = aliased(Item)
    q = (
        sess.query(Order, IAlias)
        .select_from(join(Order, IAlias, "items"))
        .filter(IAlias.description == "item 3")
    )
    result = q.all()
    eq_(result, [(order1, item3), (order2, item3), (order3, item3)])
def test_joins_from_adapted_entities(self):
    """outerjoin() from a union()-adapted query onto a subquery.

    Regression test for #1853.
    """
    User = self.classes.User

    session = fixture_session()
    first = session.query(User)
    second = session.query(User)
    unioned = first.union(second)
    subquery = session.query(User.id).subquery()
    # fix: the original bound this tuple to a local named ``join``,
    # shadowing the module-level orm.join() for the rest of the function
    join_args = (subquery, subquery.c.id == User.id)
    joined = unioned.outerjoin(*join_args)
    self.assert_compile(
        joined,
        "SELECT anon_1.users_id AS "
        "anon_1_users_id, anon_1.users_name AS "
        "anon_1_users_name FROM (SELECT users.id "
        "AS users_id, users.name AS users_name "
        "FROM users UNION SELECT users.id AS "
        "users_id, users.name AS users_name FROM "
        "users) AS anon_1 LEFT OUTER JOIN (SELECT "
        "users.id AS id FROM users) AS anon_2 ON "
        "anon_2.id = anon_1.users_id",
        use_default_dialect=True,
    )

    # same pattern, but the union is against a single-column query
    first = session.query(User.id)
    second = session.query(User.id)
    unioned = first.union(second)
    subquery = session.query(User.id).subquery()
    join_args = (subquery, subquery.c.id == User.id)
    joined = unioned.outerjoin(*join_args)
    self.assert_compile(
        joined,
        "SELECT anon_1.users_id AS anon_1_users_id "
        "FROM (SELECT users.id AS users_id FROM "
        "users UNION SELECT users.id AS users_id "
        "FROM users) AS anon_1 LEFT OUTER JOIN "
        "(SELECT users.id AS id FROM users) AS "
        "anon_2 ON anon_2.id = anon_1.users_id",
        use_default_dialect=True,
    )
def test_joins_from_adapted_entities_isouter(self):
    """Same as test_joins_from_adapted_entities but spelled as
    join(..., isouter=True) rather than outerjoin()."""
    User = self.classes.User

    # test for #1853

    session = fixture_session()
    first = session.query(User)
    second = session.query(User)
    unioned = first.union(second)
    subquery = session.query(User.id).subquery()
    # NOTE(review): this local named ``join`` shadows the module-level
    # orm.join() for the remainder of this function
    join = subquery, subquery.c.id == User.id
    joined = unioned.join(*join, isouter=True)
    self.assert_compile(
        joined,
        "SELECT anon_1.users_id AS "
        "anon_1_users_id, anon_1.users_name AS "
        "anon_1_users_name FROM (SELECT users.id "
        "AS users_id, users.name AS users_name "
        "FROM users UNION SELECT users.id AS "
        "users_id, users.name AS users_name FROM "
        "users) AS anon_1 LEFT OUTER JOIN (SELECT "
        "users.id AS id FROM users) AS anon_2 ON "
        "anon_2.id = anon_1.users_id",
        use_default_dialect=True,
    )

    first = session.query(User.id)
    second = session.query(User.id)
    unioned = first.union(second)
    subquery = session.query(User.id).subquery()
    join = subquery, subquery.c.id == User.id
    joined = unioned.join(*join, isouter=True)
    self.assert_compile(
        joined,
        "SELECT anon_1.users_id AS anon_1_users_id "
        "FROM (SELECT users.id AS users_id FROM "
        "users UNION SELECT users.id AS users_id "
        "FROM users) AS anon_1 LEFT OUTER JOIN "
        "(SELECT users.id AS id FROM users) AS "
        "anon_2 ON anon_2.id = anon_1.users_id",
        use_default_dialect=True,
    )
def test_overlap_with_aliases(self):
    """A Core table alias in select_from() coexists with relationship
    joins that target the same orders table."""
    orders, User, users = (
        self.tables.orders,
        self.classes.User,
        self.tables.users,
    )

    Order = self.classes.Order

    oalias = orders.alias("oalias")

    result = (
        fixture_session()
        .query(User)
        .select_from(users.join(oalias))
        .filter(
            oalias.c.description.in_(["order 1", "order 2", "order 3"])
        )
        .join(User.orders)
        .join(Order.items)
        .order_by(User.id)
        .all()
    )
    assert [User(id=7, name="jack"), User(id=9, name="fred")] == result

    result = (
        fixture_session()
        .query(User)
        .select_from(users.join(oalias))
        .filter(
            oalias.c.description.in_(["order 1", "order 2", "order 3"])
        )
        .join(User.orders)
        .join(Order.items)
        .filter_by(id=4)
        .all()
    )
    assert [User(id=7, name="jack")] == result
def test_aliased_order_by(self):
    """ORDER BY expressions against an aliased entity are honored in
    a self-join of User."""
    User = self.classes.User

    session = fixture_session()
    other_user = aliased(User)

    rows = (
        session.query(User, other_user)
        .filter(User.id > other_user.id)
        .order_by(desc(other_user.id), User.name)
        .all()
    )
    eq_(
        rows,
        [
            (User(id=10, name="chuck"), User(id=9, name="fred")),
            (User(id=10, name="chuck"), User(id=8, name="ed")),
            (User(id=9, name="fred"), User(id=8, name="ed")),
            (User(id=10, name="chuck"), User(id=7, name="jack")),
            (User(id=8, name="ed"), User(id=7, name="jack")),
            (User(id=9, name="fred"), User(id=7, name="jack")),
        ],
    )
def test_plain_table(self):
    """An ORM entity joins directly to a Core Table given an explicit
    onclause."""
    addresses = self.tables.addresses
    User = self.classes.User

    session = fixture_session()

    names = (
        session.query(User.name)
        .join(addresses, User.id == addresses.c.user_id)
        .order_by(User.id)
        .all()
    )
    eq_(names, [("jack",), ("ed",), ("ed",), ("ed",), ("fred",)])
def test_no_joinpoint_expr(self):
    """Querying a plain column then joining to a mapped class raises,
    with or without select_from()."""
    User, users = self.classes.User, self.tables.users

    sess = fixture_session()

    # these are consistent regardless of
    # select_from() being present.

    assert_raises_message(
        sa_exc.InvalidRequestError,
        "Don't know how to join to .*User.*. "
        r"Please use the .select_from\(\) "
        "method to establish an explicit left side, as well as",
        sess.query(users.c.id).join(User)._compile_context,
    )

    assert_raises_message(
        sa_exc.InvalidRequestError,
        "Don't know how to join to .*User.* "
        r"Please use the .select_from\(\) "
        "method to establish an explicit left side, as well as",
        sess.query(users.c.id)
        .select_from(users)
        .join(User)
        ._compile_context,
    )
def test_on_clause_no_right_side_one(self):
    """Passing only an ON-clause expression to Query.join() (no join
    target) raises ArgumentError."""
    User = self.classes.User
    Address = self.classes.Address
    sess = fixture_session()

    # coercions does not catch this due to the
    # legacy=True flag for JoinTargetRole
    with expect_raises_message(
        sa_exc.ArgumentError,
        "Join target, typically a FROM expression, or ORM relationship "
        "attribute expected, got",
    ):
        sess.query(User).join(User.id == Address.user_id)
def test_on_clause_no_right_side_one_future(self):
    """2.0-style select().join() with only an ON-clause expression
    raises at the coercions level."""
    User = self.classes.User
    Address = self.classes.Address

    # future mode can raise a more specific error at the coercions level
    assert_raises_message(
        sa_exc.ArgumentError,
        "Join target, typically a FROM expression, "
        "or ORM relationship attribute expected",
        select(User).join,
        User.id == Address.user_id,
    )
def test_no_legacy_multi_join_two_element(self):
    """Query.join(rel, rel) — two relationship paths in one call — is
    rejected; the second argument is treated as an onclause."""
    User = self.classes.User
    Order = self.classes.Order

    sess = fixture_session()

    with expect_raises_message(
        sa_exc.InvalidRequestError,
        "No 'on clause' argument may be passed when joining to a "
        "relationship path as a target",
    ):
        sess.query(User).join(User.orders, Order.items)._compile_context()
def test_no_modern_multi_join_two_element(self):
    """Same two-relationship rejection in 2.0-style select().join()."""
    User = self.classes.User
    Order = self.classes.Order

    sess = fixture_session()

    with expect_raises_message(
        sa_exc.InvalidRequestError,
        "No 'on clause' argument may be passed when joining to a "
        "relationship path as a target",
    ):
        sess.execute(select(User).join(User.orders, Order.items))
def test_kw_only_blocks_legacy_multi_join(self):
    """The join() signature itself rejects three positional targets
    (legacy multi-join form) with a TypeError."""
    User = self.classes.User
    Order = self.classes.Order
    Item = self.classes.Item

    sess = fixture_session()

    with expect_raises_message(
        TypeError,
        r".*join\(\) takes from 2 to 3 positional arguments but "
        "4 were given",
    ):
        sess.query(User).join(User.orders, Order.items, Item.keywords)
def test_on_clause_no_right_side_two(self):
    """Joining to a plain column attribute (not an entity) raises in
    legacy Query.join()."""
    User = self.classes.User
    Address = self.classes.Address
    sess = fixture_session()

    assert_raises_message(
        sa_exc.ArgumentError,
        "Join target Address.user_id does not refer to a mapped entity",
        sess.query(User).join(Address.user_id)._compile_context,
    )
def test_on_clause_no_right_side_two_future(self):
    """2.0-style: joining to a plain column attribute raises when the
    statement is compiled."""
    User = self.classes.User
    Address = self.classes.Address

    statement = select(User).join(Address.user_id)

    expected = (
        "Join target Address.user_id does not refer to a mapped entity"
    )
    assert_raises_message(sa_exc.ArgumentError, expected, statement.compile)
def test_no_strings_for_single_onclause_legacy_query(self):
    """A string relationship name is no longer accepted as a join
    target in legacy Query.join()."""
    User = self.classes.User
    sess = fixture_session()

    with expect_raises_message(
        sa_exc.ArgumentError,
        "Join target, typically a FROM expression, or ORM relationship "
        "attribute expected, got 'addresses'",
    ):
        sess.query(User).join("addresses")
def test_no_strings_for_single_onclause_newstyle(self):
    """2.0-style select().join() rejects a string relationship name
    as the join target."""
    User = self.classes.User

    expected = (
        "Join target, typically a FROM expression, or ORM relationship "
        "attribute expected, got 'addresses'"
    )
    with expect_raises_message(sa_exc.ArgumentError, expected):
        select(User).join("addresses")
def test_no_strings_for_dual_onclause_legacy_query(self):
    """A string is no longer accepted as the ON clause in legacy
    Query.join(target, onclause)."""
    User = self.classes.User
    Address = self.classes.Address
    sess = fixture_session()

    with expect_raises_message(
        sa_exc.ArgumentError,
        "ON clause, typically a SQL expression or ORM relationship "
        "attribute expected, got 'addresses'",
    ):
        sess.query(User).join(Address, "addresses")
def test_no_strings_for_dual_onclause_newstyle(self):
    """2.0-style select().join() rejects a string ON clause."""
    User = self.classes.User
    Address = self.classes.Address

    with expect_raises_message(
        sa_exc.ArgumentError,
        "ON clause, typically a SQL expression or ORM relationship "
        "attribute expected, got 'addresses'.",
    ):
        select(User).join(Address, "addresses")
def test_select_from(self):
    """Test that the left edge of the join can be set reliably with
    select_from()."""

    Item, Order, User = (
        self.classes.Item,
        self.classes.Order,
        self.classes.User,
    )

    sess = fixture_session()
    self.assert_compile(
        sess.query(Item.id)
        .select_from(User)
        .join(User.orders)
        .join(Order.items),
        "SELECT items.id AS items_id FROM users JOIN orders ON "
        "users.id = orders.user_id JOIN order_items AS order_items_1 "
        "ON orders.id = order_items_1.order_id JOIN items ON items.id = "
        "order_items_1.item_id",
        use_default_dialect=True,
    )

    # here, the join really wants to add a second FROM clause
    # for "Item".  but select_from disallows that
    self.assert_compile(
        sess.query(Item.id)
        .select_from(User)
        .join(Item, User.id == Item.id),
        "SELECT items.id AS items_id FROM users JOIN items "
        "ON users.id = items.id",
        use_default_dialect=True,
    )
class JoinFromSelectableTest(fixtures.MappedTest, AssertsCompiledSQL):
    """Test joins where one side is a plain subquery (an aliased
    SELECT) rather than a mapped entity.

    Each test exercises a combination of:

    * which side is the subquery vs. the mapped class
    * whether the left side of the join is set explicitly with
      select_from() or inferred implicitly from the columns clause
    """

    __dialect__ = "default"
    run_setup_mappers = "once"

    @classmethod
    def define_tables(cls, metadata):
        # note: table2.t1_id has no ForeignKey, so every join in these
        # tests must supply an explicit ON clause
        Table("table1", metadata, Column("id", Integer, primary_key=True))
        Table(
            "table2",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("t1_id", Integer),
        )

    @classmethod
    def setup_classes(cls):
        class T1(cls.Comparable):
            pass

        class T2(cls.Comparable):
            pass

    @classmethod
    def setup_mappers(cls):
        table1, table2 = cls.tables.table1, cls.tables.table2
        T1, T2 = cls.classes("T1", "T2")
        cls.mapper_registry.map_imperatively(T1, table1)
        cls.mapper_registry.map_imperatively(T2, table2)

    def test_select_mapped_to_mapped_explicit_left(self):
        # subquery in columns clause, select_from(subq), join to T1
        T1, T2 = self.classes.T1, self.classes.T2
        sess = fixture_session()
        subq = (
            sess.query(T2.t1_id, func.count(T2.id).label("count"))
            .group_by(T2.t1_id)
            .subquery()
        )
        self.assert_compile(
            sess.query(subq.c.count, T1.id)
            .select_from(subq)
            .join(T1, subq.c.t1_id == T1.id),
            "SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
            "FROM (SELECT table2.t1_id AS t1_id, "
            "count(table2.id) AS count FROM table2 "
            "GROUP BY table2.t1_id) AS anon_1 JOIN table1 "
            "ON anon_1.t1_id = table1.id",
        )

    def test_select_mapped_to_mapped_implicit_left(self):
        # same as explicit_left but the subquery becomes the left side
        # implicitly, since it appears first in the columns clause
        T1, T2 = self.classes.T1, self.classes.T2
        sess = fixture_session()
        subq = (
            sess.query(T2.t1_id, func.count(T2.id).label("count"))
            .group_by(T2.t1_id)
            .subquery()
        )
        self.assert_compile(
            sess.query(subq.c.count, T1.id).join(T1, subq.c.t1_id == T1.id),
            "SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
            "FROM (SELECT table2.t1_id AS t1_id, "
            "count(table2.id) AS count FROM table2 "
            "GROUP BY table2.t1_id) AS anon_1 JOIN table1 "
            "ON anon_1.t1_id = table1.id",
        )

    def test_select_mapped_to_select_explicit_left(self):
        # select_from(T1) makes table1 the left side; the subquery is
        # the right side of the JOIN
        T1, T2 = self.classes.T1, self.classes.T2
        sess = fixture_session()
        subq = (
            sess.query(T2.t1_id, func.count(T2.id).label("count"))
            .group_by(T2.t1_id)
            .subquery()
        )
        self.assert_compile(
            sess.query(subq.c.count, T1.id)
            .select_from(T1)
            .join(subq, subq.c.t1_id == T1.id),
            "SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
            "FROM table1 JOIN (SELECT table2.t1_id AS t1_id, "
            "count(table2.id) AS count FROM table2 GROUP BY table2.t1_id) "
            "AS anon_1 ON anon_1.t1_id = table1.id",
        )

    def test_select_mapped_to_select_implicit_left(self):
        # joining *to* the subquery makes table1 the implicit left side,
        # even though the subquery's column is listed first
        T1, T2 = self.classes.T1, self.classes.T2
        sess = fixture_session()
        subq = (
            sess.query(T2.t1_id, func.count(T2.id).label("count"))
            .group_by(T2.t1_id)
            .subquery()
        )
        # without select_from
        self.assert_compile(
            sess.query(subq.c.count, T1.id).join(subq, subq.c.t1_id == T1.id),
            "SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
            "FROM table1 JOIN "
            "(SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
            "FROM table2 GROUP BY table2.t1_id) "
            "AS anon_1 ON anon_1.t1_id = table1.id",
        )
        # with select_from, same query
        self.assert_compile(
            sess.query(subq.c.count, T1.id)
            .select_from(T1)
            .join(subq, subq.c.t1_id == T1.id),
            "SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
            "FROM table1 JOIN "
            "(SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
            "FROM table2 GROUP BY table2.t1_id) "
            "AS anon_1 ON anon_1.t1_id = table1.id",
        )

    def test_mapped_select_to_mapped_implicit_left(self):
        # T1.id listed first; joining to T1 still makes the subquery the
        # left side of the JOIN
        T1, T2 = self.classes.T1, self.classes.T2
        sess = fixture_session()
        subq = (
            sess.query(T2.t1_id, func.count(T2.id).label("count"))
            .group_by(T2.t1_id)
            .subquery()
        )
        # without select_from
        self.assert_compile(
            sess.query(T1.id, subq.c.count).join(T1, subq.c.t1_id == T1.id),
            "SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
            "FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
            "FROM table2 GROUP BY table2.t1_id) AS anon_1 "
            "JOIN table1 ON anon_1.t1_id = table1.id",
        )
        # with select_from, same query
        self.assert_compile(
            sess.query(T1.id, subq.c.count)
            .select_from(subq)
            .join(T1, subq.c.t1_id == T1.id),
            "SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
            "FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
            "FROM table2 GROUP BY table2.t1_id) AS anon_1 "
            "JOIN table1 ON anon_1.t1_id = table1.id",
        )

    def test_mapped_select_to_mapped_explicit_left(self):
        # select_from(subq) pins the subquery as the left side
        T1, T2 = self.classes.T1, self.classes.T2
        sess = fixture_session()
        subq = (
            sess.query(T2.t1_id, func.count(T2.id).label("count"))
            .group_by(T2.t1_id)
            .subquery()
        )
        self.assert_compile(
            sess.query(T1.id, subq.c.count)
            .select_from(subq)
            .join(T1, subq.c.t1_id == T1.id),
            "SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
            "FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
            "FROM table2 GROUP BY table2.t1_id) AS anon_1 JOIN table1 "
            "ON anon_1.t1_id = table1.id",
        )

    def test_mapped_select_to_select_explicit_left(self):
        # select_from(T1) pins table1 as the left side; subquery joined on
        T1, T2 = self.classes.T1, self.classes.T2
        sess = fixture_session()
        subq = (
            sess.query(T2.t1_id, func.count(T2.id).label("count"))
            .group_by(T2.t1_id)
            .subquery()
        )
        self.assert_compile(
            sess.query(T1.id, subq.c.count)
            .select_from(T1)
            .join(subq, subq.c.t1_id == T1.id),
            "SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
            "FROM table1 JOIN (SELECT table2.t1_id AS t1_id, "
            "count(table2.id) AS count "
            "FROM table2 GROUP BY table2.t1_id) AS anon_1 "
            "ON anon_1.t1_id = table1.id",
        )

    def test_mapped_select_to_select_implicit_left(self):
        # no select_from; joining to the subquery implies table1 on the left
        T1, T2 = self.classes.T1, self.classes.T2
        sess = fixture_session()
        subq = (
            sess.query(T2.t1_id, func.count(T2.id).label("count"))
            .group_by(T2.t1_id)
            .subquery()
        )
        self.assert_compile(
            sess.query(T1.id, subq.c.count).join(subq, subq.c.t1_id == T1.id),
            "SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
            "FROM table1 JOIN (SELECT table2.t1_id AS t1_id, "
            "count(table2.id) AS count "
            "FROM table2 GROUP BY table2.t1_id) AS anon_1 "
            "ON anon_1.t1_id = table1.id",
        )
class SelfRefMixedTest(fixtures.MappedTest, AssertsCompiledSQL):
    """Test join chains that mix a self-referential relationship
    (aliased) with a join to an unrelated entity (Sub), checking that a
    subsequent join can target either the alias or the base entity.
    """

    run_setup_mappers = "once"
    __dialect__ = default.DefaultDialect()

    @classmethod
    def define_tables(cls, metadata):
        # self-referential adjacency-list table
        Table(
            "nodes",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("parent_id", Integer, ForeignKey("nodes.id")),
        )
        # one-to-many child of nodes
        Table(
            "sub_table",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("node_id", Integer, ForeignKey("nodes.id")),
        )
        # association table for the self-referential many-to-many
        Table(
            "assoc_table",
            metadata,
            Column("left_id", Integer, ForeignKey("nodes.id")),
            Column("right_id", Integer, ForeignKey("nodes.id")),
        )

    @classmethod
    def setup_classes(cls):
        class Node(cls.Comparable):
            pass

        class Sub(cls.Comparable):
            pass

    @classmethod
    def setup_mappers(cls):
        nodes, assoc_table, sub_table = (
            cls.tables.nodes,
            cls.tables.assoc_table,
            cls.tables.sub_table,
        )
        Node, Sub = cls.classes("Node", "Sub")
        cls.mapper_registry.map_imperatively(
            Node,
            nodes,
            properties={
                # self-referential o2m with backref'ed parent
                "children": relationship(
                    Node,
                    lazy="select",
                    join_depth=3,
                    backref=backref("parent", remote_side=[nodes.c.id]),
                ),
                # plain o2m to Sub
                "subs": relationship(Sub),
                # self-referential m2m through assoc_table
                "assoc": relationship(
                    Node,
                    secondary=assoc_table,
                    primaryjoin=nodes.c.id == assoc_table.c.left_id,
                    secondaryjoin=nodes.c.id == assoc_table.c.right_id,
                ),
            },
        )
        cls.mapper_registry.map_imperatively(Sub, sub_table)

    def test_o2m_aliased_plus_o2m(self):
        Node, Sub = self.classes.Node, self.classes.Sub
        sess = fixture_session()
        n1 = aliased(Node)
        # joining Sub via the alias n1.subs renders against nodes_1 ...
        self.assert_compile(
            sess.query(Node).join(n1, Node.children).join(Sub, n1.subs),
            "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
            "FROM nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
            "JOIN sub_table ON nodes_1.id = sub_table.node_id",
        )
        # ... while Node.subs renders against the unaliased nodes table
        self.assert_compile(
            sess.query(Node).join(n1, Node.children).join(Sub, Node.subs),
            "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
            "FROM nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
            "JOIN sub_table ON nodes.id = sub_table.node_id",
        )

    def test_m2m_aliased_plus_o2m(self):
        Node, Sub = self.classes.Node, self.classes.Sub
        sess = fixture_session()
        n1 = aliased(Node)
        # as above but the first hop is the m2m "assoc" relationship,
        # which brings in the aliased association table as well
        self.assert_compile(
            sess.query(Node).join(n1, Node.assoc).join(Sub, n1.subs),
            "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
            "FROM nodes JOIN assoc_table AS assoc_table_1 ON nodes.id = "
            "assoc_table_1.left_id JOIN nodes AS nodes_1 ON nodes_1.id = "
            "assoc_table_1.right_id JOIN sub_table "
            "ON nodes_1.id = sub_table.node_id",
        )
        self.assert_compile(
            sess.query(Node).join(n1, Node.assoc).join(Sub, Node.subs),
            "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
            "FROM nodes JOIN assoc_table AS assoc_table_1 ON nodes.id = "
            "assoc_table_1.left_id JOIN nodes AS nodes_1 ON nodes_1.id = "
            "assoc_table_1.right_id JOIN sub_table "
            "ON nodes.id = sub_table.node_id",
        )
class CreateJoinsTest(fixtures.MappedTest, AssertsCompiledSQL):
    """Test rendering of nested EXISTS criteria (has()/any()) against a
    joined-inheritance hierarchy built inline via _inherits_fixture().
    """

    __dialect__ = "default"

    def _inherits_fixture(self):
        """Build and map a three-subclass joined-inheritance hierarchy
        (A -> B -> C chained via FK relationships) on a fresh MetaData;
        return (A, B, C, Base).
        """
        m = MetaData()
        base = Table("base", m, Column("id", Integer, primary_key=True))
        a = Table(
            "a",
            m,
            Column("id", Integer, ForeignKey("base.id"), primary_key=True),
            Column("b_id", Integer, ForeignKey("b.id")),
        )
        b = Table(
            "b",
            m,
            Column("id", Integer, ForeignKey("base.id"), primary_key=True),
            Column("c_id", Integer, ForeignKey("c.id")),
        )
        c = Table(
            "c",
            m,
            Column("id", Integer, ForeignKey("base.id"), primary_key=True),
        )

        class Base:
            pass

        class A(Base):
            pass

        class B(Base):
            pass

        class C(Base):
            pass

        self.mapper_registry.map_imperatively(Base, base)
        self.mapper_registry.map_imperatively(
            A,
            a,
            inherits=Base,
            properties={"b": relationship(B, primaryjoin=a.c.b_id == b.c.id)},
        )
        self.mapper_registry.map_imperatively(
            B,
            b,
            inherits=Base,
            properties={"c": relationship(C, primaryjoin=b.c.c_id == c.c.id)},
        )
        self.mapper_registry.map_imperatively(C, c, inherits=Base)
        return A, B, C, Base

    def test_double_level_aliased_exists(self):
        # nested has() -> has() should produce two nested EXISTS
        # subqueries, each selecting from the joined-inheritance subquery
        # aliased as anon_1 / anon_2
        A, B, C, Base = self._inherits_fixture()
        s = fixture_session()
        self.assert_compile(
            s.query(A).filter(A.b.has(B.c.has(C.id == 5))),
            "SELECT a.id AS a_id, base.id AS base_id, a.b_id AS a_b_id "
            "FROM base JOIN a ON base.id = a.id WHERE "
            "EXISTS (SELECT 1 FROM (SELECT base.id AS base_id, b.id AS "
            "b_id, b.c_id AS b_c_id FROM base JOIN b ON base.id = b.id) "
            "AS anon_1 WHERE a.b_id = anon_1.b_id AND (EXISTS "
            "(SELECT 1 FROM (SELECT base.id AS base_id, c.id AS c_id "
            "FROM base JOIN c ON base.id = c.id) AS anon_2 "
            "WHERE anon_1.b_c_id = anon_2.c_id AND anon_2.c_id = :id_1"
            ")))",
        )
class JoinToNonPolyAliasesTest(fixtures.MappedTest, AssertsCompiledSQL):
    """test joins to an aliased selectable and that we can refer to that
    aliased selectable in filter criteria.

    Basically testing that the aliasing Query applies to with_polymorphic
    targets doesn't leak into non-polymorphic mappers.

    """

    __dialect__ = "default"
    # compile-only tests; no tables created or rows deleted
    run_create_tables = None
    run_deletes = None

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "parent",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("data", String(50)),
        )
        Table(
            "child",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("parent_id", Integer, ForeignKey("parent.id")),
            Column("data", String(50)),
        )

    @classmethod
    def setup_mappers(cls):
        parent, child = cls.tables.parent, cls.tables.child

        class Parent(cls.Comparable):
            pass

        class Child(cls.Comparable):
            pass

        mp = cls.mapper_registry.map_imperatively(Parent, parent)
        cls.mapper_registry.map_imperatively(Child, child)

        # map the "npc" relationship against an aliased SELECT of child,
        # not the child table itself; tests refer to cls.derived columns
        # directly in filter criteria
        derived = select(child).alias()
        npc = aliased(Child, derived)
        cls.npc = npc
        cls.derived = derived
        mp.add_property("npc", relationship(npc))

    def test_join_parent_child(self):
        # querying Parent, joining the relationship renders the derived
        # SELECT as anon_1 and the filter targets its columns
        Parent = self.classes.Parent
        sess = fixture_session()
        self.assert_compile(
            sess.query(Parent)
            .join(Parent.npc)
            .filter(self.derived.c.data == "x"),
            "SELECT parent.id AS parent_id, parent.data AS parent_data "
            "FROM parent JOIN (SELECT child.id AS id, "
            "child.parent_id AS parent_id, "
            "child.data AS data "
            "FROM child) AS anon_1 ON parent.id = anon_1.parent_id "
            "WHERE anon_1.data = :data_1",
        )

    def test_join_parent_child_select_from(self):
        # querying the aliased entity itself, with select_from(Parent)
        # setting the left side of the join
        Parent = self.classes.Parent
        npc = self.npc
        sess = fixture_session()
        self.assert_compile(
            sess.query(npc)
            .select_from(Parent)
            .join(Parent.npc)
            .filter(self.derived.c.data == "x"),
            "SELECT anon_1.id AS anon_1_id, anon_1.parent_id "
            "AS anon_1_parent_id, anon_1.data AS anon_1_data "
            "FROM parent JOIN (SELECT child.id AS id, child.parent_id AS "
            "parent_id, child.data AS data FROM child) AS anon_1 ON "
            "parent.id = anon_1.parent_id WHERE anon_1.data = :data_1",
        )

    def test_join_select_parent_child(self):
        # both Parent and the aliased entity in the columns clause
        Parent = self.classes.Parent
        npc = self.npc
        sess = fixture_session()
        self.assert_compile(
            sess.query(Parent, npc)
            .join(Parent.npc)
            .filter(self.derived.c.data == "x"),
            "SELECT parent.id AS parent_id, parent.data AS parent_data, "
            "anon_1.id AS anon_1_id, anon_1.parent_id AS anon_1_parent_id, "
            "anon_1.data AS anon_1_data FROM parent JOIN "
            "(SELECT child.id AS id, child.parent_id AS parent_id, "
            "child.data AS data FROM child) AS anon_1 ON parent.id = "
            "anon_1.parent_id WHERE anon_1.data = :data_1",
        )
class SelfReferentialTest(fixtures.MappedTest, AssertsCompiledSQL):
    """Test joins, comparators (any/has/contains/==) and explicit
    aliasing against a self-referential adjacency-list Node mapping.

    A small fixed tree is inserted once: n1 -> (n11, n12, n13),
    n12 -> (n121, n122, n123).
    """

    run_setup_mappers = "once"
    run_inserts = "once"
    run_deletes = None
    __dialect__ = "default"

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "nodes",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("parent_id", Integer, ForeignKey("nodes.id")),
            Column("data", String(30)),
        )

    @classmethod
    def setup_classes(cls):
        class Node(cls.Comparable):
            # convenience helper used by insert_data to build the tree
            def append(self, node):
                self.children.append(node)

    @classmethod
    def setup_mappers(cls):
        Node, nodes = cls.classes.Node, cls.tables.nodes
        cls.mapper_registry.map_imperatively(
            Node,
            nodes,
            properties={
                "children": relationship(
                    Node,
                    lazy="select",
                    join_depth=3,
                    backref=backref("parent", remote_side=[nodes.c.id]),
                )
            },
        )

    @classmethod
    def insert_data(cls, connection):
        # build the two-level tree described in the class docstring
        Node = cls.classes.Node
        sess = Session(connection)
        n1 = Node(data="n1")
        n1.append(Node(data="n11"))
        n1.append(Node(data="n12"))
        n1.append(Node(data="n13"))
        n1.children[1].append(Node(data="n121"))
        n1.children[1].append(Node(data="n122"))
        n1.children[1].append(Node(data="n123"))
        sess.add(n1)
        sess.flush()
        sess.close()

    def test_join_4_explicit_join(self):
        Node = self.classes.Node
        sess = fixture_session()

        na = aliased(Node)
        na2 = aliased(Node)

        # this one is a great example of how to show how the API changes;
        # while it requires the explicitness of aliased(Node), the whole
        # guesswork of joinpoint / aliased goes away and the whole thing
        # is simpler
        #
        # .join("parent", aliased=True)
        # .filter(Node.data == "n12")
        # .join("parent", aliased=True, from_joinpoint=True)
        # .filter(Node.data == "n1")
        #
        # becomes:
        #
        # na = aliased(Node)
        # na2 = aliased(Node)
        #
        # ...
        # .join(na, Node.parent)
        # .filter(na.data == "n12")
        # .join(na2, na.parent)
        # .filter(na2.data == "n1")
        #
        q = (
            sess.query(Node)
            .filter(Node.data == "n122")
            .join(na, Node.parent)
            .filter(na.data == "n12")
            .join(na2, na.parent)
            .filter(na2.data == "n1")
        )

        self.assert_compile(
            q,
            "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, "
            "nodes.data AS nodes_data FROM nodes JOIN nodes AS nodes_1 "
            "ON nodes_1.id = nodes.parent_id JOIN nodes AS nodes_2 "
            "ON nodes_2.id = nodes_1.parent_id WHERE nodes.data = :data_1 "
            "AND nodes_1.data = :data_2 AND nodes_2.data = :data_3",
            checkparams={"data_1": "n122", "data_2": "n12", "data_3": "n1"},
        )

        # also executes against the inserted tree
        node = q.first()
        eq_(node.data, "n122")

    def test_from_self_inside_excludes_outside(self):
        """test the propagation of aliased() from inside to outside
        on a from_self()..

        """

        Node = self.classes.Node

        sess = fixture_session()

        n1 = aliased(Node)

        # n1 is not inside the from_self(), so all cols must be maintained
        # on the outside
        subq = (
            sess.query(Node)
            .filter(Node.data == "n122")
            .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
            .subquery()
        )

        na = aliased(Node, subq)

        self.assert_compile(
            sess.query(n1, na.id),
            "SELECT nodes_1.id AS nodes_1_id, "
            "nodes_1.parent_id AS nodes_1_parent_id, "
            "nodes_1.data AS nodes_1_data, anon_1.nodes_id AS anon_1_nodes_id "
            "FROM nodes AS nodes_1, (SELECT nodes.id AS nodes_id, "
            "nodes.parent_id AS nodes_parent_id, "
            "nodes.data AS nodes_data FROM "
            "nodes WHERE nodes.data = :data_1) AS anon_1",
            use_default_dialect=True,
        )

        parent = aliased(Node)
        grandparent = aliased(Node)
        subq = (
            sess.query(Node, parent, grandparent)
            .join(parent, Node.parent)
            .join(grandparent, parent.parent)
            .filter(Node.data == "n122")
            .filter(parent.data == "n12")
            .filter(grandparent.data == "n1")
            .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
            .subquery()
        )

        na = aliased(Node, subq)
        pa = aliased(parent, subq)
        ga = aliased(grandparent, subq)

        q = sess.query(na, pa, ga).limit(1)

        # parent, grandparent *are* inside the from_self(), so they
        # should get aliased to the outside.
        self.assert_compile(
            q,
            "SELECT anon_1.nodes_id AS anon_1_nodes_id, "
            "anon_1.nodes_parent_id AS anon_1_nodes_parent_id, "
            "anon_1.nodes_data AS anon_1_nodes_data, "
            "anon_1.nodes_1_id AS anon_1_nodes_1_id, "
            "anon_1.nodes_1_parent_id AS anon_1_nodes_1_parent_id, "
            "anon_1.nodes_1_data AS anon_1_nodes_1_data, "
            "anon_1.nodes_2_id AS anon_1_nodes_2_id, "
            "anon_1.nodes_2_parent_id AS anon_1_nodes_2_parent_id, "
            "anon_1.nodes_2_data AS anon_1_nodes_2_data "
            "FROM (SELECT nodes.id AS nodes_id, nodes.parent_id "
            "AS nodes_parent_id, nodes.data AS nodes_data, "
            "nodes_1.id AS nodes_1_id, "
            "nodes_1.parent_id AS nodes_1_parent_id, "
            "nodes_1.data AS nodes_1_data, nodes_2.id AS nodes_2_id, "
            "nodes_2.parent_id AS nodes_2_parent_id, nodes_2.data AS "
            "nodes_2_data FROM nodes JOIN nodes AS nodes_1 ON "
            "nodes_1.id = nodes.parent_id JOIN nodes AS nodes_2 "
            "ON nodes_2.id = nodes_1.parent_id "
            "WHERE nodes.data = :data_1 AND nodes_1.data = :data_2 AND "
            "nodes_2.data = :data_3) AS anon_1 LIMIT :param_1",
            {"param_1": 1},
            use_default_dialect=True,
        )

    def test_join_to_self_no_aliases_raises(self):
        # a self-referential join without aliased() is ambiguous and
        # must raise at compile time
        Node = self.classes.Node

        s = fixture_session()
        assert_raises_message(
            sa.exc.InvalidRequestError,
            r"Can't construct a join from Mapper\[Node\(nodes\)\] to "
            r"Mapper\[Node\(nodes\)\], they are the same entity",
            s.query(Node).join(Node.children)._compile_context,
        )

    def test_explicit_join_1(self):
        # core join() with string relationship names
        Node = self.classes.Node
        n1 = aliased(Node)
        n2 = aliased(Node)

        self.assert_compile(
            join(Node, n1, "children").join(n2, "children"),
            "nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
            "JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id",
            use_default_dialect=True,
        )

    def test_explicit_join_2(self):
        # same as _1 but with attribute onclauses
        Node = self.classes.Node
        n1 = aliased(Node)
        n2 = aliased(Node)

        self.assert_compile(
            join(Node, n1, Node.children).join(n2, n1.children),
            "nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
            "JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id",
            use_default_dialect=True,
        )

    def test_explicit_join_3(self):
        Node = self.classes.Node
        n1 = aliased(Node)
        n2 = aliased(Node)

        # the join_to_left=False here is unfortunate.  the default on this
        # flag should be False.
        self.assert_compile(
            join(Node, n1, Node.children).join(
                n2, Node.children, join_to_left=False
            ),
            "nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
            "JOIN nodes AS nodes_2 ON nodes.id = nodes_2.parent_id",
            use_default_dialect=True,
        )

    def test_explicit_join_4(self):
        # Query.join() chaining alias to alias
        Node = self.classes.Node
        sess = fixture_session()
        n1 = aliased(Node)
        n2 = aliased(Node)

        self.assert_compile(
            sess.query(Node).join(n1, Node.children).join(n2, n1.children),
            "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, "
            "nodes.data AS nodes_data FROM nodes JOIN nodes AS nodes_1 "
            "ON nodes.id = nodes_1.parent_id "
            "JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id",
            use_default_dialect=True,
        )

    def test_explicit_join_5(self):
        # second join uses Node.children, so it joins from the base
        # "nodes" table rather than from nodes_1
        Node = self.classes.Node
        sess = fixture_session()
        n1 = aliased(Node)
        n2 = aliased(Node)

        self.assert_compile(
            sess.query(Node).join(n1, Node.children).join(n2, Node.children),
            "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, "
            "nodes.data AS nodes_data FROM nodes JOIN nodes AS nodes_1 "
            "ON nodes.id = nodes_1.parent_id "
            "JOIN nodes AS nodes_2 ON nodes.id = nodes_2.parent_id",
            use_default_dialect=True,
        )

    def test_explicit_join_6(self):
        # select_from(join(...)) then filter on the alias; runs against
        # the inserted tree
        Node = self.classes.Node
        sess = fixture_session()
        n1 = aliased(Node)

        node = (
            sess.query(Node)
            .select_from(join(Node, n1, "children"))
            .filter(n1.data == "n122")
            .first()
        )
        assert node.data == "n12"

    def test_explicit_join_7(self):
        Node = self.classes.Node
        sess = fixture_session()
        n1 = aliased(Node)
        n2 = aliased(Node)

        node = (
            sess.query(Node)
            .select_from(join(Node, n1, "children").join(n2, "children"))
            .filter(n2.data == "n122")
            .first()
        )
        assert node.data == "n1"

    def test_explicit_join_8(self):
        Node = self.classes.Node
        sess = fixture_session()
        n1 = aliased(Node)
        n2 = aliased(Node)

        # mix explicit and named onclauses
        node = (
            sess.query(Node)
            .select_from(
                join(Node, n1, Node.id == n1.parent_id).join(n2, "children")
            )
            .filter(n2.data == "n122")
            .first()
        )
        assert node.data == "n1"

    def test_explicit_join_9(self):
        # walk upward twice via "parent"
        Node = self.classes.Node
        sess = fixture_session()
        n1 = aliased(Node)
        n2 = aliased(Node)

        node = (
            sess.query(Node)
            .select_from(join(Node, n1, "parent").join(n2, "parent"))
            .filter(
                and_(Node.data == "n122", n1.data == "n12", n2.data == "n1")
            )
            .first()
        )
        assert node.data == "n122"

    def test_explicit_join_10(self):
        # same as _9 but selecting columns from all three levels
        Node = self.classes.Node
        sess = fixture_session()
        n1 = aliased(Node)
        n2 = aliased(Node)

        eq_(
            list(
                sess.query(Node)
                .select_from(join(Node, n1, "parent").join(n2, "parent"))
                .filter(
                    and_(
                        Node.data == "n122", n1.data == "n12", n2.data == "n1"
                    )
                )
                .with_entities(Node.data, n1.data, n2.data)
            ),
            [("n122", "n12", "n1")],
        )

    def test_join_to_nonaliased(self):
        Node = self.classes.Node

        sess = fixture_session()

        n1 = aliased(Node)

        # using 'n1.parent' implicitly joins to unaliased Node
        eq_(
            sess.query(n1).join(n1.parent).filter(Node.data == "n1").all(),
            [
                Node(parent_id=1, data="n11", id=2),
                Node(parent_id=1, data="n12", id=3),
                Node(parent_id=1, data="n13", id=4),
            ],
        )

        # explicit (new syntax)
        eq_(
            sess.query(n1)
            .join(Node, n1.parent)
            .filter(Node.data == "n1")
            .all(),
            [
                Node(parent_id=1, data="n11", id=2),
                Node(parent_id=1, data="n12", id=3),
                Node(parent_id=1, data="n13", id=4),
            ],
        )

    def test_multiple_explicit_entities_one(self):
        # select three levels of the tree as a row tuple
        Node = self.classes.Node

        sess = fixture_session()

        parent = aliased(Node)
        grandparent = aliased(Node)
        eq_(
            sess.query(Node, parent, grandparent)
            .join(parent, Node.parent)
            .join(grandparent, parent.parent)
            .filter(Node.data == "n122")
            .filter(parent.data == "n12")
            .filter(grandparent.data == "n1")
            .first(),
            (Node(data="n122"), Node(data="n12"), Node(data="n1")),
        )

    def test_multiple_explicit_entities_two(self):
        # same query wrapped in a subquery; entities re-adapted via
        # aliased(entity, subq)
        Node = self.classes.Node

        sess = fixture_session()

        parent = aliased(Node)
        grandparent = aliased(Node)

        subq = (
            sess.query(Node, parent, grandparent)
            .join(parent, Node.parent)
            .join(grandparent, parent.parent)
            .filter(Node.data == "n122")
            .filter(parent.data == "n12")
            .filter(grandparent.data == "n1")
            .subquery()
        )

        na = aliased(Node, subq)
        pa = aliased(parent, subq)
        ga = aliased(grandparent, subq)

        eq_(
            sess.query(na, pa, ga).first(),
            (Node(data="n122"), Node(data="n12"), Node(data="n1")),
        )

    def test_multiple_explicit_entities_three(self):
        Node = self.classes.Node

        sess = fixture_session()

        parent = aliased(Node)
        grandparent = aliased(Node)

        # same, change order around
        subq = (
            sess.query(parent, grandparent, Node)
            .join(parent, Node.parent)
            .join(grandparent, parent.parent)
            .filter(Node.data == "n122")
            .filter(parent.data == "n12")
            .filter(grandparent.data == "n1")
            .subquery()
        )

        na = aliased(Node, subq)
        pa = aliased(parent, subq)
        ga = aliased(grandparent, subq)

        eq_(
            sess.query(pa, ga, na).first(),
            (Node(data="n12"), Node(data="n1"), Node(data="n122")),
        )

    def test_multiple_explicit_entities_four(self):
        # as _one but with an eager-load option added
        Node = self.classes.Node

        sess = fixture_session()

        parent = aliased(Node)
        grandparent = aliased(Node)
        eq_(
            sess.query(Node, parent, grandparent)
            .join(parent, Node.parent)
            .join(grandparent, parent.parent)
            .filter(Node.data == "n122")
            .filter(parent.data == "n12")
            .filter(grandparent.data == "n1")
            .options(joinedload(Node.children))
            .first(),
            (Node(data="n122"), Node(data="n12"), Node(data="n1")),
        )

    def test_multiple_explicit_entities_five(self):
        # as _two but with joinedload() against the subquery entity
        Node = self.classes.Node

        sess = fixture_session()

        parent = aliased(Node)
        grandparent = aliased(Node)

        subq = (
            sess.query(Node, parent, grandparent)
            .join(parent, Node.parent)
            .join(grandparent, parent.parent)
            .filter(Node.data == "n122")
            .filter(parent.data == "n12")
            .filter(grandparent.data == "n1")
            .subquery()
        )

        na = aliased(Node, subq)
        pa = aliased(parent, subq)
        ga = aliased(grandparent, subq)

        eq_(
            sess.query(na, pa, ga).options(joinedload(na.children)).first(),
            (Node(data="n122"), Node(data="n12"), Node(data="n1")),
        )

    def test_any(self):
        # Node.children.any() over the self-referential relationship
        Node = self.classes.Node

        sess = fixture_session()
        eq_(
            sess.query(Node)
            .filter(Node.children.any(Node.data == "n1"))
            .all(),
            [],
        )
        eq_(
            sess.query(Node)
            .filter(Node.children.any(Node.data == "n12"))
            .all(),
            [Node(data="n1")],
        )
        # ~any() == leaf nodes
        eq_(
            sess.query(Node)
            .filter(~Node.children.any())
            .order_by(Node.id)
            .all(),
            [
                Node(data="n11"),
                Node(data="n13"),
                Node(data="n121"),
                Node(data="n122"),
                Node(data="n123"),
            ],
        )

    def test_has(self):
        # Node.parent.has() over the backref'ed many-to-one
        Node = self.classes.Node

        sess = fixture_session()

        eq_(
            sess.query(Node)
            .filter(Node.parent.has(Node.data == "n12"))
            .order_by(Node.id)
            .all(),
            [Node(data="n121"), Node(data="n122"), Node(data="n123")],
        )
        eq_(
            sess.query(Node)
            .filter(Node.parent.has(Node.data == "n122"))
            .all(),
            [],
        )
        # ~has() == the root node
        eq_(
            sess.query(Node).filter(~Node.parent.has()).all(),
            [Node(data="n1")],
        )

    def test_contains(self):
        # collection contains() with a loaded instance
        Node = self.classes.Node

        sess = fixture_session()

        n122 = sess.query(Node).filter(Node.data == "n122").one()
        eq_(
            sess.query(Node).filter(Node.children.contains(n122)).all(),
            [Node(data="n12")],
        )

        n13 = sess.query(Node).filter(Node.data == "n13").one()
        eq_(
            sess.query(Node).filter(Node.children.contains(n13)).all(),
            [Node(data="n1")],
        )

    def test_eq_ne(self):
        # many-to-one == / != against a loaded instance
        Node = self.classes.Node

        sess = fixture_session()

        n12 = sess.query(Node).filter(Node.data == "n12").one()
        eq_(
            sess.query(Node).filter(Node.parent == n12).all(),
            [Node(data="n121"), Node(data="n122"), Node(data="n123")],
        )

        eq_(
            sess.query(Node).filter(Node.parent != n12).all(),
            [
                Node(data="n1"),
                Node(data="n11"),
                Node(data="n12"),
                Node(data="n13"),
            ],
        )
class SelfReferentialM2MTest(fixtures.MappedTest):
    """Test any()/contains()/explicit joins against a self-referential
    many-to-many Node.children relationship through node_to_nodes.
    """

    run_setup_mappers = "once"
    run_inserts = "once"
    run_deletes = None

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "nodes",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("data", String(30)),
        )
        # m2m association: both columns point back at nodes.id
        Table(
            "node_to_nodes",
            metadata,
            Column(
                "left_node_id",
                Integer,
                ForeignKey("nodes.id"),
                primary_key=True,
            ),
            Column(
                "right_node_id",
                Integer,
                ForeignKey("nodes.id"),
                primary_key=True,
            ),
        )

    @classmethod
    def setup_classes(cls):
        class Node(cls.Comparable):
            pass

    @classmethod
    def insert_data(cls, connection):
        # mapping is set up here (rather than setup_mappers) together
        # with the fixture graph:
        #   n1 -> [n2, n3, n4], n2 -> [n3, n6, n7], n3 -> [n5, n4]
        Node, nodes, node_to_nodes = (
            cls.classes.Node,
            cls.tables.nodes,
            cls.tables.node_to_nodes,
        )
        cls.mapper_registry.map_imperatively(
            Node,
            nodes,
            properties={
                "children": relationship(
                    Node,
                    lazy="select",
                    secondary=node_to_nodes,
                    primaryjoin=nodes.c.id == node_to_nodes.c.left_node_id,
                    secondaryjoin=nodes.c.id == node_to_nodes.c.right_node_id,
                )
            },
        )
        sess = Session(connection)
        n1 = Node(data="n1")
        n2 = Node(data="n2")
        n3 = Node(data="n3")
        n4 = Node(data="n4")
        n5 = Node(data="n5")
        n6 = Node(data="n6")
        n7 = Node(data="n7")

        n1.children = [n2, n3, n4]
        n2.children = [n3, n6, n7]
        n3.children = [n5, n4]

        sess.add(n1)
        sess.add(n2)
        sess.add(n3)
        sess.add(n4)
        sess.flush()
        sess.close()

    def test_any(self):
        # nodes with n3 among their children: n1 and n2
        Node = self.classes.Node

        sess = fixture_session()
        eq_(
            sess.query(Node)
            .filter(Node.children.any(Node.data == "n3"))
            .order_by(Node.data)
            .all(),
            [Node(data="n1"), Node(data="n2")],
        )

    def test_contains(self):
        Node = self.classes.Node

        sess = fixture_session()
        n4 = sess.query(Node).filter_by(data="n4").one()

        # n4 appears in the children of n1 and n3
        eq_(
            sess.query(Node)
            .filter(Node.children.contains(n4))
            .order_by(Node.data)
            .all(),
            [Node(data="n1"), Node(data="n3")],
        )
        # negation returns everything else, including n4 itself
        eq_(
            sess.query(Node)
            .filter(not_(Node.children.contains(n4)))
            .order_by(Node.data)
            .all(),
            [
                Node(data="n2"),
                Node(data="n4"),
                Node(data="n5"),
                Node(data="n6"),
                Node(data="n7"),
            ],
        )

    def test_explicit_join(self):
        # join through the m2m with an explicit alias and a string
        # relationship name
        Node = self.classes.Node

        sess = fixture_session()

        n1 = aliased(Node)
        eq_(
            sess.query(Node)
            .select_from(join(Node, n1, "children"))
            .filter(n1.data.in_(["n3", "n7"]))
            .order_by(Node.id)
            .all(),
            [Node(data="n1"), Node(data="n2")],
        )
class JoinLateralTest(fixtures.MappedTest, AssertsCompiledSQL):
__dialect__ = default.DefaultDialect(supports_native_boolean=True)
run_setup_bind = None
run_setup_mappers = "once"
run_create_tables = None
    @classmethod
    def define_tables(cls, metadata):
        # people <- bookcases (bookcase_owner_id) <- books (bookcase_id);
        # books also refers to people directly via book_owner_id
        Table(
            "people",
            metadata,
            Column("people_id", Integer, primary_key=True),
            Column("age", Integer),
            Column("name", String(30)),
        )
        Table(
            "bookcases",
            metadata,
            Column("bookcase_id", Integer, primary_key=True),
            Column(
                "bookcase_owner_id", Integer, ForeignKey("people.people_id")
            ),
            Column("bookcase_shelves", Integer),
            Column("bookcase_width", Integer),
        )
        Table(
            "books",
            metadata,
            Column("book_id", Integer, primary_key=True),
            Column(
                "bookcase_id", Integer, ForeignKey("bookcases.bookcase_id")
            ),
            Column("book_owner_id", Integer, ForeignKey("people.people_id")),
            Column("book_weight", Integer),
        )
    @classmethod
    def setup_classes(cls):
        # plain comparable entities mapped imperatively in setup_mappers
        class Person(cls.Comparable):
            pass

        class Bookcase(cls.Comparable):
            pass

        class Book(cls.Comparable):
            pass
    @classmethod
    def setup_mappers(cls):
        Person, Bookcase, Book = cls.classes("Person", "Bookcase", "Book")
        people, bookcases, books = cls.tables("people", "bookcases", "books")
        cls.mapper_registry.map_imperatively(Person, people)
        cls.mapper_registry.map_imperatively(
            Bookcase,
            bookcases,
            properties={
                "owner": relationship(Person),
                "books": relationship(Book),
            },
        )
        cls.mapper_registry.map_imperatively(Book, books)
    def test_select_subquery(self):
        # a correlated subquery marked .lateral() joins with
        # "JOIN LATERAL ... ON true" and may reference the left table
        Person, Book = self.classes("Person", "Book")
        s = fixture_session()

        subq = (
            s.query(Book.book_id)
            .correlate(Person)
            .filter(Person.people_id == Book.book_owner_id)
            .subquery()
            .lateral()
        )

        stmt = s.query(Person, subq.c.book_id).join(subq, true())
        self.assert_compile(
            stmt,
            "SELECT people.people_id AS people_people_id, "
            "people.age AS people_age, people.name AS people_name, "
            "anon_1.book_id AS anon_1_book_id "
            "FROM people JOIN LATERAL "
            "(SELECT books.book_id AS book_id FROM books "
            "WHERE people.people_id = books.book_owner_id) AS anon_1 ON true",
        )
# "aas" == "aliased against select"
def test_select_subquery_aas_implicit_correlate(self):
Person, Book = self.classes("Person", "Book")
s = fixture_session()
stmt = s.query(Person).subquery()
pa = aliased(Person, stmt)
subq = (
s.query(Book.book_id)
.filter(pa.people_id == Book.book_owner_id)
.subquery()
.lateral()
)
stmt = s.query(pa, subq.c.book_id).join(subq, true())
self.assert_compile(
stmt,
"SELECT anon_1.people_id AS anon_1_people_id, "
"anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, "
"anon_2.book_id AS anon_2_book_id "
"FROM "
"(SELECT people.people_id AS people_id, people.age AS age, "
"people.name AS name FROM people) AS anon_1 "
"JOIN LATERAL "
"(SELECT books.book_id AS book_id FROM books "
"WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true",
)
    def test_select_subquery_aas_implicit_correlate_coreonly(self):
        # same as the implicit_correlate test but the lateral subquery is
        # built with Core select() rather than Query; expected SQL is
        # identical
        Person, Book = self.classes("Person", "Book")
        s = fixture_session()

        stmt = s.query(Person).subquery()

        pa = aliased(Person, stmt)

        subq = (
            select(Book.book_id)
            .where(pa.people_id == Book.book_owner_id)
            .subquery()
            .lateral()
        )

        stmt = s.query(pa, subq.c.book_id).join(subq, true())

        self.assert_compile(
            stmt,
            "SELECT anon_1.people_id AS anon_1_people_id, "
            "anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, "
            "anon_2.book_id AS anon_2_book_id "
            "FROM "
            "(SELECT people.people_id AS people_id, people.age AS age, "
            "people.name AS name FROM people) AS anon_1 "
            "JOIN LATERAL "
            "(SELECT books.book_id AS book_id FROM books "
            "WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true",
        )
    def test_select_subquery_aas_explicit_correlate_coreonly(self):
        """Core select() lateral with an explicit correlate() against the
        subquery-aliased entity; rendered SQL matches the implicit form."""
        Person, Book = self.classes("Person", "Book")
        s = fixture_session()
        stmt = s.query(Person).subquery()
        pa = aliased(Person, stmt)
        subq = (
            select(Book.book_id)
            .correlate(pa)
            .where(pa.people_id == Book.book_owner_id)
            .subquery()
            .lateral()
        )
        stmt = s.query(pa, subq.c.book_id).join(subq, true())
        self.assert_compile(
            stmt,
            "SELECT anon_1.people_id AS anon_1_people_id, "
            "anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, "
            "anon_2.book_id AS anon_2_book_id "
            "FROM "
            "(SELECT people.people_id AS people_id, people.age AS age, "
            "people.name AS name FROM people) AS anon_1 "
            "JOIN LATERAL "
            "(SELECT books.book_id AS book_id FROM books "
            "WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true",
        )
    def test_select_subquery_aas_explicit_correlate(self):
        """Legacy Query lateral with an explicit correlate() against the
        subquery-aliased entity; rendered SQL matches the implicit form."""
        Person, Book = self.classes("Person", "Book")
        s = fixture_session()
        stmt = s.query(Person).subquery()
        pa = aliased(Person, stmt)
        subq = (
            s.query(Book.book_id)
            .correlate(pa)
            .filter(pa.people_id == Book.book_owner_id)
            .subquery()
            .lateral()
        )
        stmt = s.query(pa, subq.c.book_id).join(subq, true())
        self.assert_compile(
            stmt,
            "SELECT anon_1.people_id AS anon_1_people_id, "
            "anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, "
            "anon_2.book_id AS anon_2_book_id "
            "FROM "
            "(SELECT people.people_id AS people_id, people.age AS age, "
            "people.name AS name FROM people) AS anon_1 "
            "JOIN LATERAL "
            "(SELECT books.book_id AS book_id FROM books "
            "WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true",
        )
    def test_from_function(self):
        """lateral() wrapping a table-valued function, joined ON true."""
        Bookcase = self.classes.Bookcase
        s = fixture_session()
        # srf == "set-returning function" per the test below; TODO confirm
        srf = lateral(func.generate_series(1, Bookcase.bookcase_shelves))
        self.assert_compile(
            s.query(Bookcase).join(srf, true()),
            "SELECT bookcases.bookcase_id AS bookcases_bookcase_id, "
            "bookcases.bookcase_owner_id AS bookcases_bookcase_owner_id, "
            "bookcases.bookcase_shelves AS bookcases_bookcase_shelves, "
            "bookcases.bookcase_width AS bookcases_bookcase_width "
            "FROM bookcases JOIN "
            "LATERAL generate_series(:generate_series_1, "
            "bookcases.bookcase_shelves) AS anon_1 ON true",
        )
    def test_from_function_aas(self):
        """lateral() function correlating to a subquery-aliased entity."""
        Bookcase = self.classes.Bookcase
        s = fixture_session()
        subq = s.query(Bookcase).subquery()
        ba = aliased(Bookcase, subq)
        srf = lateral(func.generate_series(1, ba.bookcase_shelves))
        self.assert_compile(
            s.query(ba).join(srf, true()),
            "SELECT anon_1.bookcase_id AS anon_1_bookcase_id, "
            "anon_1.bookcase_owner_id AS anon_1_bookcase_owner_id, "
            "anon_1.bookcase_shelves AS anon_1_bookcase_shelves, "
            "anon_1.bookcase_width AS anon_1_bookcase_width "
            "FROM (SELECT bookcases.bookcase_id AS bookcase_id, "
            "bookcases.bookcase_owner_id AS bookcase_owner_id, "
            "bookcases.bookcase_shelves AS bookcase_shelves, "
            "bookcases.bookcase_width AS bookcase_width FROM bookcases) "
            "AS anon_1 "
            "JOIN LATERAL "
            "generate_series(:generate_series_1, anon_1.bookcase_shelves) "
            "AS anon_2 ON true",
        )
class JoinRawTablesWLegacyTest(QueryTest, AssertsCompiledSQL):
    """test issue 6003 where creating a legacy query with only Core elements
    fails to accommodate for the ORM context thus producing a query
    that ignores the "legacy" joins
    """
    __dialect__ = "default"
    @testing.combinations(
        (
            lambda sess, User, Address: sess.query(User).join(Address),
            "SELECT users.id AS users_id, users.name AS users_name FROM "
            "users JOIN addresses ON users.id = addresses.user_id",
        ),
        (
            lambda sess, user_table, address_table: sess.query(
                user_table
            ).join(address_table),
            "SELECT users.id AS users_id, users.name AS users_name FROM "
            "users JOIN addresses ON users.id = addresses.user_id",
        ),
        (
            lambda sess, User, Address, Order: sess.query(User)
            .outerjoin(Order)
            .join(Address),
            "SELECT users.id AS users_id, users.name AS users_name FROM "
            "users LEFT OUTER JOIN orders ON users.id = orders.user_id "
            "JOIN addresses ON addresses.id = orders.address_id",
        ),
        (
            lambda sess, user_table, address_table, order_table: sess.query(
                user_table
            )
            .outerjoin(order_table)
            .join(address_table),
            "SELECT users.id AS users_id, users.name AS users_name FROM "
            "users LEFT OUTER JOIN orders ON users.id = orders.user_id "
            "JOIN addresses ON addresses.id = orders.address_id",
        ),
    )
    def test_join_render(self, spec, expected):
        """Each spec builds equivalent joins from ORM or raw Core elements;
        both the Query and its underlying statement must render them."""
        User, Address, Order = self.classes("User", "Address", "Order")
        user_table, address_table, order_table = self.tables(
            "users", "addresses", "orders"
        )
        sess = fixture_session()
        q = testing.resolve_lambda(spec, **locals())
        self.assert_compile(q, expected)
        # the .statement accessor must carry the same joins
        self.assert_compile(
            q.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL).statement,
            expected,
        )
    def test_core_round_trip(self):
        """Core tables in a legacy Query join and execute correctly."""
        user_table, address_table = self.tables("users", "addresses")
        sess = fixture_session()
        q = (
            sess.query(user_table)
            .join(address_table)
            .where(address_table.c.email_address.startswith("ed"))
        )
        eq_(q.all(), [(8, "ed"), (8, "ed"), (8, "ed")])
| 34.041448
| 79
| 0.563099
|
import itertools
import sqlalchemy as sa
from sqlalchemy import and_
from sqlalchemy import desc
from sqlalchemy import exc as sa_exc
from sqlalchemy import ForeignKey
from sqlalchemy import func
from sqlalchemy import inspect
from sqlalchemy import Integer
from sqlalchemy import lateral
from sqlalchemy import literal_column
from sqlalchemy import MetaData
from sqlalchemy import not_
from sqlalchemy import or_
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import testing
from sqlalchemy import true
from sqlalchemy import union
from sqlalchemy.engine import default
from sqlalchemy.orm import aliased
from sqlalchemy.orm import backref
from sqlalchemy.orm import join
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import outerjoin
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
from sqlalchemy.orm import synonym
from sqlalchemy.sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
from sqlalchemy.testing import assert_raises
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.assertions import expect_raises_message
from sqlalchemy.testing.fixtures import fixture_session
from sqlalchemy.testing.schema import Column
from test.orm import _fixtures
from .inheritance import _poly_fixtures
from .test_query import QueryTest
class InheritedTest(_poly_fixtures._Polymorphic):
    """Base for join tests against the standard polymorphic fixture."""
    # configure the polymorphic mappers once for the whole class
    run_setup_mappers = "once"
class InheritedJoinTest(InheritedTest, AssertsCompiledSQL):
    """Joins involving joined-table-inheritance entities."""
    def test_single_prop(self):
        """Joining along Company.employees renders a plain join to the
        polymorphic base table."""
        Company = self.classes.Company
        sess = fixture_session()
        self.assert_compile(
            sess.query(Company).join(Company.employees),
            "SELECT companies.company_id AS companies_company_id, "
            "companies.name AS companies_name "
            "FROM companies JOIN people "
            "ON companies.company_id = people.company_id",
            use_default_dialect=True,
        )
    def test_force_via_select_from(self):
        """Without select_from(), filtering on a subclass produces an
        implicit cartesian FROM list; select_from() forces the inheritance
        join."""
        Company, Engineer = self.classes.Company, self.classes.Engineer
        sess = fixture_session()
        self.assert_compile(
            sess.query(Company)
            .filter(Company.company_id == Engineer.company_id)
            .filter(Engineer.primary_language == "java"),
            "SELECT companies.company_id AS companies_company_id, "
            "companies.name AS companies_name "
            "FROM companies, people, engineers "
            "WHERE companies.company_id = people.company_id "
            "AND engineers.primary_language "
            "= :primary_language_1",
            use_default_dialect=True,
        )
        self.assert_compile(
            sess.query(Company)
            .select_from(Company, Engineer)
            .filter(Company.company_id == Engineer.company_id)
            .filter(Engineer.primary_language == "java"),
            "SELECT companies.company_id AS companies_company_id, "
            "companies.name AS companies_name "
            "FROM companies, people JOIN engineers "
            "ON people.person_id = engineers.person_id "
            "WHERE companies.company_id = people.company_id "
            "AND engineers.primary_language ="
            " :primary_language_1",
            use_default_dialect=True,
        )
    def test_single_prop_of_type(self):
        """of_type() joins to the (people JOIN engineers) composite."""
        Company, Engineer = self.classes.Company, self.classes.Engineer
        sess = fixture_session()
        self.assert_compile(
            sess.query(Company).join(Company.employees.of_type(Engineer)),
            "SELECT companies.company_id AS companies_company_id, "
            "companies.name AS companies_name "
            "FROM companies JOIN "
            "(people JOIN engineers "
            "ON people.person_id = engineers.person_id) "
            "ON companies.company_id = people.company_id",
            use_default_dialect=True,
        )
    def test_explicit_polymorphic_join_one(self):
        """join(Engineer) with no ON clause infers the relationship and
        the inheritance join."""
        Company, Engineer = self.classes.Company, self.classes.Engineer
        sess = fixture_session()
        self.assert_compile(
            sess.query(Company)
            .join(Engineer)
            .filter(Engineer.engineer_name == "vlad"),
            "SELECT companies.company_id AS companies_company_id, "
            "companies.name AS companies_name "
            "FROM companies JOIN (people JOIN engineers "
            "ON people.person_id = engineers.person_id) "
            "ON "
            "companies.company_id = people.company_id "
            "WHERE engineers.engineer_name = :engineer_name_1",
            use_default_dialect=True,
        )
    def test_explicit_polymorphic_join_two(self):
        """Same as above but with an explicit ON criterion."""
        Company, Engineer = self.classes.Company, self.classes.Engineer
        sess = fixture_session()
        self.assert_compile(
            sess.query(Company)
            .join(Engineer, Company.company_id == Engineer.company_id)
            .filter(Engineer.engineer_name == "vlad"),
            "SELECT companies.company_id AS companies_company_id, "
            "companies.name AS companies_name "
            "FROM companies JOIN "
            "(people JOIN engineers "
            "ON people.person_id = engineers.person_id) "
            "ON "
            "companies.company_id = people.company_id "
            "WHERE engineers.engineer_name = :engineer_name_1",
            use_default_dialect=True,
        )
    def test_auto_aliasing_multi_link(self):
        """Joining the same relationship repeatedly with different
        of_type() targets auto-aliases the later joins, with warnings."""
        sess = fixture_session()
        Company, Engineer, Manager, Boss = (
            self.classes.Company,
            self.classes.Engineer,
            self.classes.Manager,
            self.classes.Boss,
        )
        q = (
            sess.query(Company)
            .join(Company.employees.of_type(Engineer))
            .join(Company.employees.of_type(Manager))
            .join(Company.employees.of_type(Boss))
        )
        with testing.expect_warnings(
            "An alias is being generated automatically against joined entity "
            r"Mapper\[Manager\(managers\)\] due to overlapping",
            "An alias is being generated automatically against joined entity "
            r"Mapper\[Boss\(boss\)\] due to overlapping",
            raise_on_any_unexpected=True,
        ):
            self.assert_compile(
                q,
                "SELECT companies.company_id AS companies_company_id, "
                "companies.name AS companies_name FROM companies "
                "JOIN (people JOIN engineers "
                "ON people.person_id = engineers.person_id) "
                "ON companies.company_id = people.company_id "
                "JOIN (people AS people_1 JOIN managers AS managers_1 "
                "ON people_1.person_id = managers_1.person_id) "
                "ON companies.company_id = people_1.company_id "
                "JOIN (people AS people_2 JOIN managers AS managers_2 "
                "ON people_2.person_id = managers_2.person_id "
                "JOIN boss AS boss_1 "
                "ON managers_2.person_id = boss_1.boss_id) "
                "ON companies.company_id = people_2.company_id",
                use_default_dialect=True,
            )
class JoinOnSynonymTest(_fixtures.FixtureTest, AssertsCompiledSQL):
    """join() accepts a synonym() attribute as the join target."""
    __dialect__ = "default"
    @classmethod
    def setup_mappers(cls):
        # map User with "ad_syn" as a synonym of "addresses"
        User = cls.classes.User
        Address = cls.classes.Address
        users, addresses = (cls.tables.users, cls.tables.addresses)
        cls.mapper_registry.map_imperatively(
            User,
            users,
            properties={
                "addresses": relationship(Address),
                "ad_syn": synonym("addresses"),
            },
        )
        cls.mapper_registry.map_imperatively(Address, addresses)
    def test_join_on_synonym(self):
        """Joining on the synonym renders the underlying relationship."""
        User = self.classes.User
        self.assert_compile(
            fixture_session().query(User).join(User.ad_syn),
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM users JOIN addresses ON users.id = addresses.user_id",
        )
class JoinTest(QueryTest, AssertsCompiledSQL):
__dialect__ = "default"
    @testing.combinations_list(
        set(
            itertools.product(
                [
                    "relationship",
                    "relationship_only",
                    "none",
                    "explicit",
                    "table_none",
                    "table_explicit",
                ],
                [True, False],
            )
        ),
        argnames="onclause_type, use_legacy",
    )
    def test_filter_by_from_join(self, onclause_type, use_legacy):
        """filter_by() after join() targets the most recent join entity,
        regardless of how the join was spelled (relationship, implicit,
        explicit ON, or raw table)."""
        User, Address = self.classes("User", "Address")
        (address_table,) = self.tables("addresses")
        (user_table,) = self.tables("users")
        if use_legacy:
            sess = fixture_session()
            q = sess.query(User)
        else:
            q = select(User).set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
        if onclause_type == "relationship":
            q = q.join(Address, User.addresses)
        elif onclause_type == "relationship_only":
            q = q.join(User.addresses)
        elif onclause_type == "none":
            q = q.join(Address)
        elif onclause_type == "explicit":
            q = q.join(Address, User.id == Address.user_id)
        elif onclause_type == "table_none":
            q = q.join(address_table)
        elif onclause_type == "table_explicit":
            q = q.join(
                address_table, user_table.c.id == address_table.c.user_id
            )
        else:
            assert False
        q2 = q.filter_by(email_address="foo")
        self.assert_compile(
            q2,
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM users JOIN addresses ON users.id = addresses.user_id "
            "WHERE addresses.email_address = :email_address_1",
        )
        if use_legacy:
            # legacy-only: reset_joinpoint() restores filter_by() to User
            q2 = q.reset_joinpoint().filter_by(name="user")
            self.assert_compile(
                q2,
                "SELECT users.id AS users_id, users.name AS users_name "
                "FROM users JOIN addresses ON users.id = addresses.user_id "
                "WHERE users.name = :name_1",
            )
    def test_join_relationship_propagate_attrs(self):
        """A Core select() joined along an ORM relationship picks up the
        ORM compile-state plugin via _propagate_attrs."""
        User = self.classes.User
        users = self.tables.users
        stmt = select(users).join(User.addresses)
        eq_(
            stmt._propagate_attrs,
            {"compile_state_plugin": "orm", "plugin_subject": inspect(User)},
        )
        self.assert_compile(
            stmt,
            "SELECT users.id, users.name FROM users "
            "JOIN addresses ON users.id = addresses.user_id",
        )
    @testing.combinations((True,), (False,), argnames="legacy")
    @testing.combinations((True,), (False,), argnames="threelevel")
    def test_join_with_entities(self, legacy, threelevel):
        """Replacing the selected columns after join() (with_entities /
        with_only_columns) keeps the established joins."""
        User, Address, Dingaling = self.classes("User", "Address", "Dingaling")
        if legacy:
            sess = fixture_session()
            stmt = sess.query(User).join(Address).with_entities(Address.id)
        else:
            stmt = select(User).join(Address).with_only_columns(Address.id)
            stmt = stmt.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
        if threelevel:
            if legacy:
                stmt = stmt.join(Address.dingaling).with_entities(Dingaling.id)
            else:
                stmt = stmt.join(Address.dingaling).with_only_columns(
                    Dingaling.id
                )
        if threelevel:
            self.assert_compile(
                stmt,
                "SELECT dingalings.id AS dingalings_id "
                "FROM users JOIN addresses ON users.id = addresses.user_id "
                "JOIN dingalings ON addresses.id = dingalings.address_id",
            )
        else:
            self.assert_compile(
                stmt,
                "SELECT addresses.id AS addresses_id FROM users "
                "JOIN addresses ON users.id = addresses.user_id",
            )
    @testing.combinations((True,), (False,), argnames="legacy")
    @testing.combinations((True,), (False,), argnames="threelevel")
    def test_join_and_union_with_entities(self, legacy, threelevel):
        """Joins plus column replacement survive being combined in a
        UNION, in both legacy Query and Core select forms."""
        User, Address, Dingaling = self.classes("User", "Address", "Dingaling")
        if legacy:
            sess = fixture_session()
            stmt = sess.query(User).join(Address).with_entities(Address.id)
        else:
            stmt = select(User).join(Address).with_only_columns(Address.id)
            stmt = stmt.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
        if threelevel:
            if legacy:
                stmt = stmt.join(Address.dingaling).with_entities(Dingaling.id)
                to_union = sess.query(Dingaling.id)
            else:
                stmt = stmt.join(Address.dingaling).with_only_columns(
                    Dingaling.id
                )
                to_union = select(Dingaling.id).set_label_style(
                    LABEL_STYLE_TABLENAME_PLUS_COL
                )
        else:
            if legacy:
                to_union = sess.query(Address.id)
            else:
                to_union = select(Address.id).set_label_style(
                    LABEL_STYLE_TABLENAME_PLUS_COL
                )
        if legacy:
            stmt = stmt.union(to_union)
        else:
            # Core form: wrap the union in a subquery-select to mirror
            # the legacy union() rendering
            stmt = (
                union(stmt, to_union)
                .subquery()
                .select()
                .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
            )
        if threelevel:
            self.assert_compile(
                stmt,
                "SELECT anon_1.dingalings_id AS anon_1_dingalings_id FROM "
                "(SELECT dingalings.id AS dingalings_id "
                "FROM users JOIN addresses ON users.id = addresses.user_id "
                "JOIN dingalings ON addresses.id = dingalings.address_id "
                "UNION "
                "SELECT dingalings.id AS dingalings_id FROM dingalings) "
                "AS anon_1",
            )
        else:
            self.assert_compile(
                stmt,
                "SELECT anon_1.addresses_id AS anon_1_addresses_id FROM "
                "(SELECT addresses.id AS addresses_id FROM users "
                "JOIN addresses ON users.id = addresses.user_id "
                "UNION "
                "SELECT addresses.id AS addresses_id FROM addresses) "
                "AS anon_1",
            )
    def test_invalid_kwarg_join(self):
        """join()/outerjoin() reject unknown keyword arguments."""
        User = self.classes.User
        sess = fixture_session()
        assert_raises_message(
            TypeError,
            r".*join\(\) .*unexpected .*keyword",
            sess.query(User).join,
            "address",
            foob="bar",
            bar="bat",
        )
        assert_raises_message(
            TypeError,
            r".*outerjoin\(\) .*unexpected .*keyword",
            sess.query(User).outerjoin,
            "address",
            foob="bar",
            bar="bat",
        )
    def test_left_w_no_entity(self):
        """A non-entity column in the entity list does not disturb which
        entity is used as the left side of the join."""
        User = self.classes.User
        Address = self.classes.Address
        sess = fixture_session()
        self.assert_compile(
            sess.query(User, literal_column("x")).join(Address),
            "SELECT users.id AS users_id, users.name AS users_name, x "
            "FROM users JOIN addresses ON users.id = addresses.user_id",
        )
        self.assert_compile(
            sess.query(literal_column("x"), User).join(Address),
            "SELECT x, users.id AS users_id, users.name AS users_name "
            "FROM users JOIN addresses ON users.id = addresses.user_id",
        )
    def test_left_is_none_and_query_has_no_entities(self):
        """Joining from an empty query raises InvalidRequestError."""
        Address = self.classes.Address
        sess = fixture_session()
        assert_raises_message(
            sa_exc.InvalidRequestError,
            r"No entities to join from; please use select_from\(\) to "
            r"establish the left entity/selectable of this join",
            sess.query().join(Address)._compile_context,
        )
    def test_isouter_flag(self):
        """join(..., isouter=True) renders LEFT OUTER JOIN."""
        User = self.classes.User
        self.assert_compile(
            fixture_session().query(User).join(User.orders, isouter=True),
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM users LEFT OUTER JOIN orders ON users.id = orders.user_id",
        )
    def test_full_flag(self):
        """outerjoin(..., full=True) renders FULL OUTER JOIN."""
        User = self.classes.User
        self.assert_compile(
            fixture_session().query(User).outerjoin(User.orders, full=True),
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM users FULL OUTER JOIN orders ON users.id = orders.user_id",
        )
    def test_single_prop_1(self):
        """join() on a forward relationship attribute."""
        User = self.classes.User
        sess = fixture_session()
        self.assert_compile(
            sess.query(User).join(User.orders),
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM users JOIN orders ON users.id = orders.user_id",
        )
    def test_single_prop_2(self):
        """join() on a backref relationship flips the FROM list."""
        Order, User = (self.classes.Order, self.classes.User)
        sess = fixture_session()
        self.assert_compile(
            sess.query(User).join(Order.user),
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM orders JOIN users ON users.id = orders.user_id",
        )
    def test_single_prop_3(self):
        """join() on a relationship of an aliased entity."""
        Order, User = (self.classes.Order, self.classes.User)
        sess = fixture_session()
        oalias1 = aliased(Order)
        self.assert_compile(
            sess.query(User).join(oalias1.user),
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM orders AS orders_1 JOIN users "
            "ON users.id = orders_1.user_id",
        )
    def test_single_prop_4(self):
        """Two aliased backref joins produce two separate FROM elements."""
        (
            Order,
            User,
        ) = (self.classes.Order, self.classes.User)
        sess = fixture_session()
        oalias1 = aliased(Order)
        oalias2 = aliased(Order)
        # each join targets users from a distinct orders alias, so the
        # FROM clause contains two comma-separated join expressions
        self.assert_compile(
            sess.query(User).join(oalias1.user).join(oalias2.user),
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM orders AS orders_1 JOIN users "
            "ON users.id = orders_1.user_id, "
            "orders AS orders_2 JOIN users ON users.id = orders_2.user_id",
        )
    def test_single_prop_6(self):
        """join() from an aliased left-hand entity."""
        User = self.classes.User
        sess = fixture_session()
        ualias = aliased(User)
        self.assert_compile(
            sess.query(ualias).join(ualias.orders),
            "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
            "FROM users AS users_1 JOIN orders ON users_1.id = orders.user_id",
        )
    def test_single_prop_9(self):
        """join() from an entity aliased to a labeled subquery."""
        User = self.classes.User
        sess = fixture_session()
        subq = (
            sess.query(User)
            .filter(User.name == "ed")
            .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
            .subquery()
        )
        ua = aliased(User, subq)
        self.assert_compile(
            sess.query(ua).join(ua.orders),
            "SELECT anon_1.users_id AS anon_1_users_id, "
            "anon_1.users_name AS anon_1_users_name "
            "FROM (SELECT users.id AS users_id, users.name AS users_name "
            "FROM users "
            "WHERE users.name = :name_1) AS anon_1 JOIN orders "
            "ON anon_1.users_id = orders.user_id",
        )
def test_single_prop_12(self):
Order, User, Address = (
self.classes.Order,
self.classes.User,
self.classes.Address,
)
sess = fixture_session()
oalias1 = aliased(Order)
iased(User)
self.assert_compile(
sess.query(ualias)
.join(oalias1, ualias.orders)
.join(Address, ualias.addresses),
"SELECT users_1.id AS users_1_id, users_1.name AS "
"users_1_name FROM users AS users_1 JOIN orders AS orders_1 "
"ON users_1.id = orders_1.user_id JOIN addresses ON users_1.id "
"= addresses.user_id",
)
def test_single_prop_13(self):
Order, User, Address = (
self.classes.Order,
self.classes.User,
self.classes.Address,
)
sess = fixture_session()
iased(User)
ualias2 = aliased(User)
self.assert_compile(
sess.query(ualias)
.join(Address, ualias.addresses)
.join(ualias2, Address.user)
.join(Order, ualias.orders),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users "
"AS users_1 JOIN addresses ON users_1.id = addresses.user_id "
"JOIN users AS users_2 "
"ON users_2.id = addresses.user_id JOIN orders "
"ON users_1.id = orders.user_id",
)
    def test_overlapping_paths_one_legacy(self):
        """Re-joining User.orders a second time is a no-op in legacy
        Query; subsequent joins continue from the existing orders join."""
        User = self.classes.User
        Order = self.classes.Order
        sess = fixture_session()
        self.assert_compile(
            sess.query(User)
            .join(User.orders)
            .join(Order.items)
            .join(User.orders)
            .join(Order.address),
            "SELECT users.id AS users_id, users.name AS users_name FROM users "
            "JOIN orders "
            "ON users.id = orders.user_id "
            "JOIN order_items AS order_items_1 "
            "ON orders.id = order_items_1.order_id "
            "JOIN items ON items.id = order_items_1.item_id JOIN addresses "
            "ON addresses.id = orders.address_id",
        )
    def test_overlapping_paths_multilevel_legacy(self):
        """Repeated relationship joins at multiple levels render each
        target only once (legacy Query form)."""
        User = self.classes.User
        Order = self.classes.Order
        Address = self.classes.Address
        s = fixture_session()
        q = (
            s.query(User)
            .join(User.orders)
            .join(User.addresses)
            .join(User.orders)
            .join(Order.items)
            .join(User.addresses)
            .join(Address.dingaling)
        )
        self.assert_compile(
            q,
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM users JOIN orders ON users.id = orders.user_id "
            "JOIN addresses ON users.id = addresses.user_id "
            "JOIN order_items AS order_items_1 ON orders.id = "
            "order_items_1.order_id "
            "JOIN items ON items.id = order_items_1.item_id "
            "JOIN dingalings ON addresses.id = dingalings.address_id",
        )
    def test_overlapping_paths_one_modern(self):
        """Same overlapping-join dedup behavior via Core select()."""
        User = self.classes.User
        Order = self.classes.Order
        self.assert_compile(
            select(User)
            .join(User.orders)
            .join(Order.items)
            .join(User.orders)
            .join(Order.address)
            .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL),
            "SELECT users.id AS users_id, users.name AS users_name FROM users "
            "JOIN orders "
            "ON users.id = orders.user_id "
            "JOIN order_items AS order_items_1 "
            "ON orders.id = order_items_1.order_id "
            "JOIN items ON items.id = order_items_1.item_id JOIN addresses "
            "ON addresses.id = orders.address_id",
        )
    def test_overlapping_paths_multilevel_modern(self):
        """Same multi-level overlapping-join dedup via Core select()."""
        User = self.classes.User
        Order = self.classes.Order
        Address = self.classes.Address
        q = (
            select(User)
            .join(User.orders)
            .join(User.addresses)
            .join(User.orders)
            .join(Order.items)
            .join(User.addresses)
            .join(Address.dingaling)
            .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
        )
        self.assert_compile(
            q,
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM users JOIN orders ON users.id = orders.user_id "
            "JOIN addresses ON users.id = addresses.user_id "
            "JOIN order_items AS order_items_1 ON orders.id = "
            "order_items_1.order_id "
            "JOIN items ON items.id = order_items_1.item_id "
            "JOIN dingalings ON addresses.id = dingalings.address_id",
        )
    def test_join_nonmapped_column(self):
        """A literal column among the entities does not affect joining."""
        Order, User = self.classes.Order, self.classes.User
        sess = fixture_session()
        self.assert_compile(
            sess.query(User.id, literal_column("foo")).join(Order.user),
            "SELECT users.id AS users_id, foo FROM "
            "orders JOIN users ON users.id = orders.user_id",
        )
    def test_backwards_join(self):
        """Joining via the backref (Address.user) works and returns rows;
        supplying a mismatched target + onclause raises."""
        User, Address = self.classes.User, self.classes.Address
        sess = fixture_session()
        eq_(
            sess.query(User)
            .join(Address.user)
            .filter(Address.email_address == "ed@wood.com")
            .all(),
            [User(id=8, name="ed")],
        )
        eq_(
            sess.query(User, Address)
            .join(Address.user)
            .filter(Address.email_address == "ed@wood.com")
            .all(),
            [(User(id=8, name="ed"), Address(email_address="ed@wood.com"))],
        )
        # Address / adalias as explicit targets conflict with the
        # Address.user onclause, which points back to User
        assert_raises(
            sa_exc.InvalidRequestError,
            sess.query(User).join(Address, Address.user)._compile_context,
        )
        adalias = aliased(Address)
        assert_raises(
            sa_exc.InvalidRequestError,
            sess.query(User).join(adalias, Address.user)._compile_context,
        )
    def test_multiple_with_aliases(self):
        """Two aliased targets for the same relationship join separately
        and may both be referenced in criteria."""
        Order, User = self.classes.Order, self.classes.User
        sess = fixture_session()
        ualias = aliased(User)
        oalias1 = aliased(Order)
        oalias2 = aliased(Order)
        self.assert_compile(
            sess.query(ualias)
            .join(oalias1, ualias.orders)
            .join(oalias2, ualias.orders)
            .filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)),
            "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
            "FROM users AS users_1 "
            "JOIN orders AS orders_1 ON users_1.id = orders_1.user_id "
            "JOIN orders AS orders_2 ON "
            "users_1.id = orders_2.user_id "
            "WHERE orders_1.user_id = :user_id_1 "
            "OR orders_2.user_id = :user_id_2",
            use_default_dialect=True,
        )
    def test_select_from_orm_joins(self):
        """orm.join() standalone constructs with flags, chaining, and use
        as select_from() targets."""
        User, Order = self.classes.User, self.classes.Order
        sess = fixture_session()
        ualias = aliased(User)
        oalias1 = aliased(Order)
        oalias2 = aliased(Order)
        self.assert_compile(
            join(User, oalias2, User.id == oalias2.user_id),
            "users JOIN orders AS orders_1 ON users.id = orders_1.user_id",
            use_default_dialect=True,
        )
        self.assert_compile(
            join(User, oalias2, User.id == oalias2.user_id, full=True),
            "users FULL OUTER JOIN orders AS orders_1 "
            "ON users.id = orders_1.user_id",
            use_default_dialect=True,
        )
        self.assert_compile(
            join(User, oalias2, User.id == oalias2.user_id, isouter=True),
            "users LEFT OUTER JOIN orders AS orders_1 "
            "ON users.id = orders_1.user_id",
            use_default_dialect=True,
        )
        # full=True takes precedence over isouter=True
        self.assert_compile(
            join(
                User,
                oalias2,
                User.id == oalias2.user_id,
                isouter=True,
                full=True,
            ),
            "users FULL OUTER JOIN orders AS orders_1 "
            "ON users.id = orders_1.user_id",
            use_default_dialect=True,
        )
        self.assert_compile(
            join(User, oalias1).join(oalias2),
            "users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
            "JOIN orders AS orders_2 ON users.id = orders_2.user_id",
            use_default_dialect=True,
        )
        self.assert_compile(
            join(User, oalias1).join(oalias2, isouter=True),
            "users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
            "LEFT OUTER JOIN orders AS orders_2 "
            "ON users.id = orders_2.user_id",
            use_default_dialect=True,
        )
        self.assert_compile(
            join(User, oalias1).join(oalias2, full=True),
            "users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
            "FULL OUTER JOIN orders AS orders_2 "
            "ON users.id = orders_2.user_id",
            use_default_dialect=True,
        )
        self.assert_compile(
            join(User, oalias1).join(oalias2, full=True, isouter=True),
            "users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
            "FULL OUTER JOIN orders AS orders_2 "
            "ON users.id = orders_2.user_id",
            use_default_dialect=True,
        )
        self.assert_compile(
            join(ualias, oalias1, ualias.orders),
            "users AS users_1 JOIN orders AS orders_1 "
            "ON users_1.id = orders_1.user_id",
            use_default_dialect=True,
        )
        self.assert_compile(
            sess.query(ualias).select_from(
                join(ualias, oalias1, ualias.orders)
            ),
            "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
            "FROM users AS users_1 "
            "JOIN orders AS orders_1 ON users_1.id = orders_1.user_id",
            use_default_dialect=True,
        )
        self.assert_compile(
            sess.query(User, ualias).select_from(
                join(ualias, oalias1, ualias.orders)
            ),
            "SELECT users.id AS users_id, users.name AS users_name, "
            "users_1.id AS users_1_id, "
            "users_1.name AS users_1_name FROM users, users AS users_1 "
            "JOIN orders AS orders_1 ON users_1.id = orders_1.user_id",
            use_default_dialect=True,
        )
        # NOTE: intentionally disabled assertion block retained below
        if False:
            self.assert_compile(
                sess.query(User, ualias)
                .join(oalias1, ualias.orders)
                .join(oalias2, User.id == oalias2.user_id)
                .filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)),
                "SELECT users.id AS users_id, users.name AS users_name, "
                "users_1.id AS users_1_id, users_1.name AS "
                "users_1_name FROM users JOIN orders AS orders_2 "
                "ON users.id = orders_2.user_id, "
                "users AS users_1 JOIN orders AS orders_1 "
                "ON users_1.id = orders_1.user_id "
                "WHERE orders_1.user_id = :user_id_1 "
                "OR orders_2.user_id = :user_id_2",
                use_default_dialect=True,
            )
        # this is the same thing using explicit orm.join() (which now offers
        # multiple again)
        self.assert_compile(
            sess.query(User, ualias)
            .select_from(
                join(ualias, oalias1, ualias.orders),
                join(User, oalias2, User.id == oalias2.user_id),
            )
            .filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)),
            "SELECT users.id AS users_id, users.name AS users_name, "
            "users_1.id AS users_1_id, users_1.name AS "
            "users_1_name FROM users AS users_1 JOIN orders AS orders_1 "
            "ON users_1.id = orders_1.user_id, "
            "users JOIN orders AS orders_2 ON users.id = orders_2.user_id "
            "WHERE orders_1.user_id = :user_id_1 "
            "OR orders_2.user_id = :user_id_2",
            use_default_dialect=True,
        )
    def test_overlapping_backwards_joins(self):
        """Two backref joins from distinct aliases both render, even
        though the resulting SQL joins to users twice."""
        User, Order = self.classes.User, self.classes.Order
        sess = fixture_session()
        oalias1 = aliased(Order)
        oalias2 = aliased(Order)
        # this is invalid SQL - joins from orders_1/orders_2 to User twice.
        # but that is what was asked for so they get it !
        self.assert_compile(
            sess.query(User).join(oalias1.user).join(oalias2.user),
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM orders AS orders_1 "
            "JOIN users ON users.id = orders_1.user_id, orders AS orders_2 "
            "JOIN users ON users.id = orders_2.user_id",
            use_default_dialect=True,
        )
    def test_replace_multiple_from_clause(self):
        """Joins distribute to the correct FROM element when the query
        has two entities, each starting its own join chain."""
        User, Order, Address = (
            self.classes.User,
            self.classes.Order,
            self.classes.Address,
        )
        sess = fixture_session()
        self.assert_compile(
            sess.query(Address, User)
            .join(Address.dingaling)
            .join(User.orders)
            .join(Order.items),
            "SELECT addresses.id AS addresses_id, "
            "addresses.user_id AS addresses_user_id, "
            "addresses.email_address AS addresses_email_address, "
            "users.id AS users_id, "
            "users.name AS users_name FROM addresses JOIN dingalings "
            "ON addresses.id = dingalings.address_id, "
            "users JOIN orders ON users.id = orders.user_id "
            "JOIN order_items AS order_items_1 "
            "ON orders.id = order_items_1.order_id JOIN items "
            "ON items.id = order_items_1.item_id",
            use_default_dialect=True,
        )
    def test_invalid_join_entity_from_single_from_clause(self):
        """Joining to an unrelated entity from a single explicit FROM
        raises InvalidRequestError."""
        Address, Item = (self.classes.Address, self.classes.Item)
        sess = fixture_session()
        q = sess.query(Address).select_from(Address)
        assert_raises_message(
            sa.exc.InvalidRequestError,
            "Don't know how to join to .*Item.*. "
            r"Please use the .select_from\(\) "
            "method to establish an explicit left side, as well as",
            q.join(Item)._compile_context,
        )
    def test_invalid_join_entity_from_no_from_clause(self):
        """Joining to an unrelated entity with no explicit FROM also
        raises InvalidRequestError."""
        Address, Item = (self.classes.Address, self.classes.Item)
        sess = fixture_session()
        q = sess.query(Address)
        assert_raises_message(
            sa.exc.InvalidRequestError,
            "Don't know how to join to .*Item.*. "
            r"Please use the .select_from\(\) "
            "method to establish an explicit left side, as well as",
            q.join(Item)._compile_context,
        )
    def test_invalid_join_entity_from_multiple_from_clause(self):
        """Unrelated join target with multiple candidate FROMs raises."""
        User, Address, Item = (
            self.classes.User,
            self.classes.Address,
            self.classes.Item,
        )
        sess = fixture_session()
        q = sess.query(Address, User).join(Address.dingaling).join(User.orders)
        assert_raises_message(
            sa.exc.InvalidRequestError,
            "Don't know how to join to .*Item.*. "
            r"Please use the .select_from\(\) "
            "method to establish an explicit left side, as well as",
            q.join(Item)._compile_context,
        )
    def test_join_explicit_left_multiple_from_clause(self):
        """With two FROM candidates, the relationship attribute's owning
        entity (plain vs. aliased) selects the join's left side."""
        User = self.classes.User
        sess = fixture_session()
        u1 = aliased(User)
        q = sess.query(User, u1).select_from(User, u1).join(User.addresses)
        self.assert_compile(
            q,
            "SELECT users.id AS users_id, users.name AS users_name, "
            "users_1.id AS users_1_id, users_1.name AS users_1_name "
            "FROM users AS users_1, "
            "users JOIN addresses ON users.id = addresses.user_id",
        )
        q = sess.query(User, u1).select_from(User, u1).join(u1.addresses)
        self.assert_compile(
            q,
            "SELECT users.id AS users_id, users.name AS users_name, "
            "users_1.id AS users_1_id, users_1.name AS users_1_name "
            "FROM users, "
            "users AS users_1 JOIN addresses "
            "ON users_1.id = addresses.user_id",
        )
def test_join_explicit_left_multiple_adapted(self):
    """Two aliases of the same class are ambiguous when joining on the
    un-aliased relationship; an alias-bound attribute disambiguates."""
    User = self.classes.User
    sess = fixture_session()
    u1 = aliased(User)
    u2 = aliased(User)
    # User.addresses could adapt to either u1 or u2 -> error
    assert_raises_message(
        sa_exc.InvalidRequestError,
        "Can't identify which entity in which to assign the "
        "left side of this join.",
        sess.query(u1, u2)
        .select_from(u1, u2)
        .join(User.addresses)
        ._compile_context,
    )
    # more specific ON clause
    self.assert_compile(
        sess.query(u1, u2).select_from(u1, u2).join(u2.addresses),
        "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name, "
        "users_2.id AS users_2_id, users_2.name AS users_2_name "
        "FROM users AS users_1, "
        "users AS users_2 JOIN addresses "
        "ON users_2.id = addresses.user_id",
    )
def test_join_entity_from_multiple_from_clause(self):
    """When several FROM elements could join to the target, join()
    without an ON clause raises; an explicit ON clause picks the side."""
    User, Order, Address, Dingaling = (
        self.classes.User,
        self.classes.Order,
        self.classes.Address,
        self.classes.Dingaling,
    )
    sess = fixture_session()
    q = sess.query(Address, User).join(Address.dingaling).join(User.orders)
    a1 = aliased(Address)
    # both FROM elements (addresses-join and users-join) can reach a1
    assert_raises_message(
        sa.exc.InvalidRequestError,
        "Can't determine which FROM clause to join from, there are "
        "multiple FROMS which can join to this entity. "
        r"Please use the .select_from\(\) "
        "method to establish an explicit left side, as well as",
        q.join(a1)._compile_context,
    )
    # ON clause referencing Order joins from the users/orders element
    self.assert_compile(
        q.join(a1, Order.address_id == a1.id),
        "SELECT addresses.id AS addresses_id, "
        "addresses.user_id AS addresses_user_id, "
        "addresses.email_address AS addresses_email_address, "
        "users.id AS users_id, users.name AS users_name "
        "FROM addresses JOIN dingalings "
        "ON addresses.id = dingalings.address_id, "
        "users JOIN orders "
        "ON users.id = orders.user_id "
        "JOIN addresses AS addresses_1 "
        "ON orders.address_id = addresses_1.id",
    )
    # ON clause referencing Dingaling joins from the addresses element
    self.assert_compile(
        q.join(a1, Dingaling.address_id == a1.id),
        "SELECT addresses.id AS addresses_id, "
        "addresses.user_id AS addresses_user_id, "
        "addresses.email_address AS addresses_email_address, "
        "users.id AS users_id, users.name AS users_name "
        "FROM addresses JOIN dingalings "
        "ON addresses.id = dingalings.address_id "
        "JOIN addresses AS addresses_1 "
        "ON dingalings.address_id = addresses_1.id, "
        "users JOIN orders ON users.id = orders.user_id",
    )
def test_join_entity_from_multiple_entities(self):
    """Same ambiguity as above but arising from two query entities
    rather than two joined FROM elements."""
    Order, Address, Dingaling = (
        self.classes.Order,
        self.classes.Address,
        self.classes.Dingaling,
    )
    sess = fixture_session()
    q = sess.query(Order, Dingaling)
    a1 = aliased(Address)
    assert_raises_message(
        sa.exc.InvalidRequestError,
        "Can't determine which FROM clause to join from, there are "
        "multiple FROMS which can join to this entity. "
        r"Please use the .select_from\(\) "
        "method to establish an explicit left side, as well as",
        q.join(a1)._compile_context,
    )
    # to resolve, add an ON clause
    # Order is chosen to join to a1
    self.assert_compile(
        q.join(a1, Order.address_id == a1.id),
        "SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, "
        "orders.address_id AS orders_address_id, "
        "orders.description AS orders_description, "
        "orders.isopen AS orders_isopen, dingalings.id AS dingalings_id, "
        "dingalings.address_id AS dingalings_address_id, "
        "dingalings.data AS dingalings_data "
        "FROM dingalings, orders "
        "JOIN addresses AS addresses_1 "
        "ON orders.address_id = addresses_1.id",
    )
    # Dingaling is chosen to join to a1
    self.assert_compile(
        q.join(a1, Dingaling.address_id == a1.id),
        "SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, "
        "orders.address_id AS orders_address_id, "
        "orders.description AS orders_description, "
        "orders.isopen AS orders_isopen, dingalings.id AS dingalings_id, "
        "dingalings.address_id AS dingalings_address_id, "
        "dingalings.data AS dingalings_data "
        "FROM orders, dingalings JOIN addresses AS addresses_1 "
        "ON dingalings.address_id = addresses_1.id",
    )
def test_clause_present_in_froms_twice_w_onclause(self):
    # test [ticket:4584]
    """Entity listed in select_from() that is joinable from more than
    one FROM raises without an ON clause; explicit ON clauses compile."""
    Order, Address, User = (
        self.classes.Order,
        self.classes.Address,
        self.classes.User,
    )
    sess = fixture_session()
    a1 = aliased(Address)
    q = sess.query(Order).select_from(Order, a1, User)
    assert_raises_message(
        sa.exc.InvalidRequestError,
        "Can't determine which FROM clause to join from, there are "
        "multiple FROMS which can join to this entity. "
        r"Please use the .select_from\(\) "
        "method to establish an explicit left side, as well as",
        q.outerjoin(a1)._compile_context,
    )
    # with ON clauses the a1 and User FROMs are absorbed into the joins
    q = sess.query(Order).select_from(Order, a1, User)
    q = q.outerjoin(a1, a1.id == Order.address_id)
    q = q.outerjoin(User, a1.user_id == User.id)
    self.assert_compile(
        q,
        "SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, "
        "orders.address_id AS orders_address_id, "
        "orders.description AS orders_description, "
        "orders.isopen AS orders_isopen "
        "FROM orders "
        "LEFT OUTER JOIN addresses AS addresses_1 "
        "ON addresses_1.id = orders.address_id "
        "LEFT OUTER JOIN users ON addresses_1.user_id = users.id",
    )
def test_clause_present_in_froms_twice_wo_onclause(self):
    """Follow-up to [ticket:4584]: the second outerjoin() infers its ON
    clause from the relationship FKs once the first join fixes the path."""
    Address, Dingaling, User = (
        self.classes.Address,
        self.classes.Dingaling,
        self.classes.User,
    )
    sess = fixture_session()
    a1 = aliased(Address)
    q = sess.query(User).select_from(Dingaling, a1, User)
    q = q.outerjoin(a1, User.id == a1.user_id)
    # no ON clause needed; Dingaling joins to the aliased addresses
    q = q.outerjoin(Dingaling)
    self.assert_compile(
        q,
        "SELECT users.id AS users_id, users.name AS users_name "
        "FROM users LEFT OUTER JOIN addresses AS addresses_1 "
        "ON users.id = addresses_1.user_id "
        "LEFT OUTER JOIN dingalings "
        "ON addresses_1.id = dingalings.address_id",
    )
def test_pure_expression(self):
    """A Query built against plain Table objects (no mapped classes)
    still derives the JOIN's ON clause from the foreign key."""
    users_t = self.tables.users
    addresses_t = self.tables.addresses
    session = fixture_session()

    query = session.query(users_t).join(addresses_t)
    expected_sql = (
        "SELECT users.id AS users_id, users.name AS users_name "
        "FROM users JOIN addresses ON users.id = addresses.user_id"
    )
    self.assert_compile(query, expected_sql)
def test_no_onclause(self):
    """join() with no ON clause infers it from FKs / relationships, both
    inside select_from(join(...)) and via chained Query.join() calls."""
    Item, User, Order = (
        self.classes.Item,
        self.classes.User,
        self.classes.Order,
    )
    sess = fixture_session()
    # core join() inside select_from(), full-entity result
    eq_(
        sess.query(User)
        .select_from(join(User, Order).join(Item, Order.items))
        .filter(Item.description == "item 4")
        .all(),
        [User(name="jack")],
    )
    # same join, scalar column result
    eq_(
        sess.query(User.name)
        .select_from(join(User, Order).join(Item, Order.items))
        .filter(Item.description == "item 4")
        .all(),
        [("jack",)],
    )
    # equivalent chained Query.join() form
    eq_(
        sess.query(User)
        .join(Order)
        .join(Item, Order.items)
        .filter(Item.description == "item 4")
        .all(),
        [User(name="jack")],
    )
def test_clause_onclause(self):
    """Explicit SQL-expression ON clauses work through a chain of joins,
    including through the plain order_items association table."""
    Item, Order, order_items, User = (
        self.classes.Item,
        self.classes.Order,
        self.tables.order_items,
        self.classes.User,
    )
    sess = fixture_session()
    eq_(
        sess.query(User)
        .join(Order, User.id == Order.user_id)
        .join(order_items, Order.id == order_items.c.order_id)
        .join(Item, order_items.c.item_id == Item.id)
        .filter(Item.description == "item 4")
        .all(),
        [User(name="jack")],
    )
    # same chain, scalar column entity
    eq_(
        sess.query(User.name)
        .join(Order, User.id == Order.user_id)
        .join(order_items, Order.id == order_items.c.order_id)
        .join(Item, order_items.c.item_id == Item.id)
        .filter(Item.description == "item 4")
        .all(),
        [("jack",)],
    )
    # same chain starting from an aliased entity
    ualias = aliased(User)
    eq_(
        sess.query(ualias.name)
        .join(Order, ualias.id == Order.user_id)
        .join(order_items, Order.id == order_items.c.order_id)
        .join(Item, order_items.c.item_id == Item.id)
        .filter(Item.description == "item 4")
        .all(),
        [("jack",)],
    )
    # FROM object
    subq = sess.query(User).order_by(User.id).offset(2).subquery()
    ua = aliased(User, subq)
    eq_(
        sess.query(ua).join(Order, ua.id == Order.user_id).all(),
        [User(name="fred")],
    )
def test_aliased_classes(self):
    """Exercise aliased(Address) in outerjoin()/join() forms, including
    joining "backwards" from the alias to User.

    Bug fix: the final ``eq_`` previously re-asserted the stale
    ``result`` from the preceding query instead of executing the newly
    built ``q``, so that query was never actually tested.  It now runs
    ``q`` (with a deterministic order_by added, matching the preceding
    assertion's ordering).
    """
    User, Address = self.classes.User, self.classes.Address
    sess = fixture_session()
    (user7, user8, user9, user10) = sess.query(User).all()
    (address1, address2, address3, address4, address5) = sess.query(
        Address
    ).all()
    # full LEFT OUTER JOIN pairing; user10 has no addresses -> None
    expected = [
        (user7, address1),
        (user8, address2),
        (user8, address3),
        (user8, address4),
        (user9, address5),
        (user10, None),
    ]
    q = sess.query(User)
    AdAlias = aliased(Address)
    q = q.add_entity(AdAlias).select_from(outerjoin(User, AdAlias))
    result = q.order_by(User.id, AdAlias.id).all()
    eq_(result, expected)
    sess.expunge_all()

    # three equivalent spellings of the ON clause for the outerjoin:
    # implicit (FK), relationship name string, explicit expression
    q = sess.query(User).add_entity(AdAlias)
    result = (
        q.select_from(outerjoin(User, AdAlias))
        .filter(AdAlias.email_address == "ed@bettyboop.com")
        .all()
    )
    eq_(result, [(user8, address3)])
    result = (
        q.select_from(outerjoin(User, AdAlias, "addresses"))
        .filter(AdAlias.email_address == "ed@bettyboop.com")
        .all()
    )
    eq_(result, [(user8, address3)])
    result = (
        q.select_from(outerjoin(User, AdAlias, User.id == AdAlias.user_id))
        .filter(AdAlias.email_address == "ed@bettyboop.com")
        .all()
    )
    eq_(result, [(user8, address3)])

    # this is the first test where we are joining "backwards" - from
    # AdAlias to User even though
    # the query is against User
    q = sess.query(User, AdAlias)
    result = (
        q.join(AdAlias.user)
        .filter(User.name == "ed")
        .order_by(User.id, AdAlias.id)
    )
    eq_(
        result.all(),
        [(user8, address2), (user8, address3), (user8, address4)],
    )
    # same backwards join expressed via select_from(join(...));
    # order_by added so the result comparison is deterministic
    q = (
        sess.query(User, AdAlias)
        .select_from(join(AdAlias, User, AdAlias.user))
        .filter(User.name == "ed")
        .order_by(User.id, AdAlias.id)
    )
    eq_(
        # was eq_(result.all(), ...) which re-tested the previous query
        q.all(),
        [(user8, address2), (user8, address3), (user8, address4)],
    )
def test_expression_onclauses(self):
    """Arbitrary SQL expressions as ON clauses, against subqueries and
    against a mapped class."""
    Order, User = self.classes.Order, self.classes.User
    sess = fixture_session()
    # join to a subquery of the same table on a non-FK column
    subq = sess.query(User).subquery()
    self.assert_compile(
        sess.query(User).join(subq, User.name == subq.c.name),
        "SELECT users.id AS users_id, users.name AS users_name "
        "FROM users JOIN (SELECT users.id AS id, users.name "
        "AS name FROM users) AS anon_1 ON users.name = anon_1.name",
        use_default_dialect=True,
    )
    # join to a subquery of a related table
    subq = sess.query(Order).subquery()
    self.assert_compile(
        sess.query(User).join(subq, User.id == subq.c.user_id),
        "SELECT users.id AS users_id, users.name AS users_name FROM "
        "users JOIN (SELECT orders.id AS id, orders.user_id AS user_id, "
        "orders.address_id AS address_id, orders.description AS "
        "description, orders.isopen AS isopen FROM orders) AS "
        "anon_1 ON users.id = anon_1.user_id",
        use_default_dialect=True,
    )
    # join to a mapped class with an explicit expression
    self.assert_compile(
        sess.query(User).join(Order, User.id == Order.user_id),
        "SELECT users.id AS users_id, users.name AS users_name "
        "FROM users JOIN orders ON users.id = orders.user_id",
        use_default_dialect=True,
    )
def test_aliased_classes_m2m(self):
    """Joining across the Order<->Item many-to-many, both plain and
    through an aliased Item."""
    Item, Order = self.classes.Item, self.classes.Order
    sess = fixture_session()
    (order1, order2, order3, order4, order5) = sess.query(Order).all()
    (item1, item2, item3, item4, item5) = sess.query(Item).all()
    # full cartesian pairing through the association table
    expected = [
        (order1, item1),
        (order1, item2),
        (order1, item3),
        (order2, item1),
        (order2, item2),
        (order2, item3),
        (order3, item3),
        (order3, item4),
        (order3, item5),
        (order4, item1),
        (order4, item5),
        (order5, item5),
    ]
    q = sess.query(Order)
    q = (
        q.add_entity(Item)
        .select_from(join(Order, Item, "items"))
        .order_by(Order.id, Item.id)
    )
    result = q.all()
    eq_(result, expected)
    # the same m2m join through an aliased Item
    IAlias = aliased(Item)
    q = (
        sess.query(Order, IAlias)
        .select_from(join(Order, IAlias, "items"))
        .filter(IAlias.description == "item 3")
    )
    result = q.all()
    eq_(result, [(order1, item3), (order2, item3), (order3, item3)])
def test_joins_from_adapted_entities(self):
    """outerjoin() against a union()-ed query adapts the ON clause to
    the union's anonymous subquery (regression test for #1853)."""
    User = self.classes.User
    session = fixture_session()

    # entity form: union of two full-entity queries
    base_q = session.query(User)
    unioned = base_q.union(session.query(User))
    id_subq = session.query(User.id).subquery()
    join_args = (id_subq, id_subq.c.id == User.id)
    self.assert_compile(
        unioned.outerjoin(*join_args),
        "SELECT anon_1.users_id AS "
        "anon_1_users_id, anon_1.users_name AS "
        "anon_1_users_name FROM (SELECT users.id "
        "AS users_id, users.name AS users_name "
        "FROM users UNION SELECT users.id AS "
        "users_id, users.name AS users_name FROM "
        "users) AS anon_1 LEFT OUTER JOIN (SELECT "
        "users.id AS id FROM users) AS anon_2 ON "
        "anon_2.id = anon_1.users_id",
        use_default_dialect=True,
    )

    # column form: union of two single-column queries
    base_q = session.query(User.id)
    unioned = base_q.union(session.query(User.id))
    id_subq = session.query(User.id).subquery()
    join_args = (id_subq, id_subq.c.id == User.id)
    self.assert_compile(
        unioned.outerjoin(*join_args),
        "SELECT anon_1.users_id AS anon_1_users_id "
        "FROM (SELECT users.id AS users_id FROM "
        "users UNION SELECT users.id AS users_id "
        "FROM users) AS anon_1 LEFT OUTER JOIN "
        "(SELECT users.id AS id FROM users) AS "
        "anon_2 ON anon_2.id = anon_1.users_id",
        use_default_dialect=True,
    )
def test_joins_from_adapted_entities_isouter(self):
    """Same as test_joins_from_adapted_entities but spelled as
    join(..., isouter=True) instead of outerjoin()."""
    User = self.classes.User
    # test for #1853
    session = fixture_session()
    first = session.query(User)
    second = session.query(User)
    unioned = first.union(second)
    subquery = session.query(User.id).subquery()
    # NOTE: local name shadows the module-level join() within this test
    join = subquery, subquery.c.id == User.id
    joined = unioned.join(*join, isouter=True)
    self.assert_compile(
        joined,
        "SELECT anon_1.users_id AS "
        "anon_1_users_id, anon_1.users_name AS "
        "anon_1_users_name FROM (SELECT users.id "
        "AS users_id, users.name AS users_name "
        "FROM users UNION SELECT users.id AS "
        "users_id, users.name AS users_name FROM "
        "users) AS anon_1 LEFT OUTER JOIN (SELECT "
        "users.id AS id FROM users) AS anon_2 ON "
        "anon_2.id = anon_1.users_id",
        use_default_dialect=True,
    )
    # column-entity variant
    first = session.query(User.id)
    second = session.query(User.id)
    unioned = first.union(second)
    subquery = session.query(User.id).subquery()
    join = subquery, subquery.c.id == User.id
    joined = unioned.join(*join, isouter=True)
    self.assert_compile(
        joined,
        "SELECT anon_1.users_id AS anon_1_users_id "
        "FROM (SELECT users.id AS users_id FROM "
        "users UNION SELECT users.id AS users_id "
        "FROM users) AS anon_1 LEFT OUTER JOIN "
        "(SELECT users.id AS id FROM users) AS "
        "anon_2 ON anon_2.id = anon_1.users_id",
        use_default_dialect=True,
    )
def test_overlap_with_aliases(self):
    """A core alias of "orders" used in select_from() coexists with
    ORM-level joins through User.orders / Order.items."""
    orders, User, users = (
        self.tables.orders,
        self.classes.User,
        self.tables.users,
    )
    Order = self.classes.Order
    oalias = orders.alias("oalias")
    result = (
        fixture_session()
        .query(User)
        .select_from(users.join(oalias))
        .filter(
            oalias.c.description.in_(["order 1", "order 2", "order 3"])
        )
        .join(User.orders)
        .join(Order.items)
        .order_by(User.id)
        .all()
    )
    assert [User(id=7, name="jack"), User(id=9, name="fred")] == result
    # same query with an extra filter on the joined items
    result = (
        fixture_session()
        .query(User)
        .select_from(users.join(oalias))
        .filter(
            oalias.c.description.in_(["order 1", "order 2", "order 3"])
        )
        .join(User.orders)
        .join(Order.items)
        .filter_by(id=4)
        .all()
    )
    assert [User(id=7, name="jack")] == result
def test_aliased_order_by(self):
    """order_by() on an aliased entity's column, combined with a
    self-join filter producing all (greater, lesser) user pairs."""
    User = self.classes.User
    sess = fixture_session()
    ualias = aliased(User)
    eq_(
        sess.query(User, ualias)
        .filter(User.id > ualias.id)
        .order_by(desc(ualias.id), User.name)
        .all(),
        [
            (User(id=10, name="chuck"), User(id=9, name="fred")),
            (User(id=10, name="chuck"), User(id=8, name="ed")),
            (User(id=9, name="fred"), User(id=8, name="ed")),
            (User(id=10, name="chuck"), User(id=7, name="jack")),
            (User(id=8, name="ed"), User(id=7, name="jack")),
            (User(id=9, name="fred"), User(id=7, name="jack")),
        ],
    )
def test_plain_table(self):
    """A mapped entity can join() directly to a plain Table object
    given an explicit ON clause."""
    User = self.classes.User
    addresses_t = self.tables.addresses
    sess = fixture_session()

    rows = (
        sess.query(User.name)
        .join(addresses_t, User.id == addresses_t.c.user_id)
        .order_by(User.id)
        .all()
    )
    eq_(rows, [("jack",), ("ed",), ("ed",), ("ed",), ("fred",)])
def test_no_joinpoint_expr(self):
    """A query of plain columns has no ORM join point; joining to a
    mapped class raises whether or not select_from() is used."""
    User, users = self.classes.User, self.tables.users
    sess = fixture_session()
    # these are consistent regardless of
    # select_from() being present.
    assert_raises_message(
        sa_exc.InvalidRequestError,
        "Don't know how to join to .*User.*. "
        r"Please use the .select_from\(\) "
        "method to establish an explicit left side, as well as",
        sess.query(users.c.id).join(User)._compile_context,
    )
    assert_raises_message(
        sa_exc.InvalidRequestError,
        "Don't know how to join to .*User.* "
        r"Please use the .select_from\(\) "
        "method to establish an explicit left side, as well as",
        sess.query(users.c.id)
        .select_from(users)
        .join(User)
        ._compile_context,
    )
def test_on_clause_no_right_side_one(self):
    """Passing a bare SQL expression as the join *target* (not an
    entity) raises ArgumentError in legacy Query.join()."""
    User = self.classes.User
    Address = self.classes.Address
    sess = fixture_session()
    # coercions does not catch this due to the
    # legacy=True flag for JoinTargetRole
    with expect_raises_message(
        sa_exc.ArgumentError,
        "Join target, typically a FROM expression, or ORM relationship "
        "attribute expected, got",
    ):
        sess.query(User).join(User.id == Address.user_id)
def test_on_clause_no_right_side_one_future(self):
    """Same invalid join target against 2.0-style select().join()."""
    User = self.classes.User
    Address = self.classes.Address
    # future mode can raise a more specific error at the coercions level
    assert_raises_message(
        sa_exc.ArgumentError,
        "Join target, typically a FROM expression, "
        "or ORM relationship attribute expected",
        select(User).join,
        User.id == Address.user_id,
    )
def test_no_legacy_multi_join_two_element(self):
    """join(rel_a, rel_b) multi-element form is rejected: a second
    positional arg is treated as an ON clause, invalid for a path."""
    User = self.classes.User
    Order = self.classes.Order
    sess = fixture_session()
    with expect_raises_message(
        sa_exc.InvalidRequestError,
        "No 'on clause' argument may be passed when joining to a "
        "relationship path as a target",
    ):
        sess.query(User).join(User.orders, Order.items)._compile_context()
def test_no_modern_multi_join_two_element(self):
    """Same rejection of the two-relationship join() form in the
    2.0-style select() API."""
    User = self.classes.User
    Order = self.classes.Order
    sess = fixture_session()
    with expect_raises_message(
        sa_exc.InvalidRequestError,
        "No 'on clause' argument may be passed when joining to a "
        "relationship path as a target",
    ):
        sess.execute(select(User).join(User.orders, Order.items))
def test_kw_only_blocks_legacy_multi_join(self):
    """join() accepts at most target + onclause positionally; three
    relationship args fail at the signature level with TypeError."""
    User = self.classes.User
    Order = self.classes.Order
    Item = self.classes.Item
    sess = fixture_session()
    with expect_raises_message(
        TypeError,
        r".*join\(\) takes from 2 to 3 positional arguments but "
        "4 were given",
    ):
        sess.query(User).join(User.orders, Order.items, Item.keywords)
def test_on_clause_no_right_side_two(self):
    """A plain column attribute is not a valid join target in legacy
    Query.join()."""
    User = self.classes.User
    Address = self.classes.Address
    sess = fixture_session()
    assert_raises_message(
        sa_exc.ArgumentError,
        "Join target Address.user_id does not refer to a mapped entity",
        sess.query(User).join(Address.user_id)._compile_context,
    )
def test_on_clause_no_right_side_two_future(self):
    """Same column-attribute join target rejected at statement compile
    time in the 2.0-style API."""
    User = self.classes.User
    Address = self.classes.Address
    stmt = select(User).join(Address.user_id)
    assert_raises_message(
        sa_exc.ArgumentError,
        "Join target Address.user_id does not refer to a mapped entity",
        stmt.compile,
    )
def test_no_strings_for_single_onclause_legacy_query(self):
    """String relationship names are no longer accepted as a join
    target in Query.join()."""
    User = self.classes.User
    sess = fixture_session()
    with expect_raises_message(
        sa_exc.ArgumentError,
        "Join target, typically a FROM expression, or ORM relationship "
        "attribute expected, got 'addresses'",
    ):
        sess.query(User).join("addresses")
def test_no_strings_for_single_onclause_newstyle(self):
    """Same string-target rejection in 2.0-style select().join()."""
    User = self.classes.User
    with expect_raises_message(
        sa_exc.ArgumentError,
        "Join target, typically a FROM expression, or ORM relationship "
        "attribute expected, got 'addresses'",
    ):
        select(User).join("addresses")
def test_no_strings_for_dual_onclause_legacy_query(self):
    """A string is also rejected as the ON-clause argument of
    Query.join()."""
    User = self.classes.User
    Address = self.classes.Address
    sess = fixture_session()
    with expect_raises_message(
        sa_exc.ArgumentError,
        "ON clause, typically a SQL expression or ORM relationship "
        "attribute expected, got 'addresses'",
    ):
        sess.query(User).join(Address, "addresses")
def test_no_strings_for_dual_onclause_newstyle(self):
    """Same string ON-clause rejection in 2.0-style select().join()."""
    User = self.classes.User
    Address = self.classes.Address
    with expect_raises_message(
        sa_exc.ArgumentError,
        "ON clause, typically a SQL expression or ORM relationship "
        "attribute expected, got 'addresses'.",
    ):
        select(User).join(Address, "addresses")
def test_select_from(self):
    """select_from(User) fixes the left side so subsequent joins do not
    add Item as a second FROM element."""
    Item, Order, User = (
        self.classes.Item,
        self.classes.Order,
        self.classes.User,
    )
    sess = fixture_session()
    self.assert_compile(
        sess.query(Item.id)
        .select_from(User)
        .join(User.orders)
        .join(Order.items),
        "SELECT items.id AS items_id FROM users JOIN orders ON "
        "users.id = orders.user_id JOIN order_items AS order_items_1 "
        "ON orders.id = order_items_1.order_id JOIN items ON items.id = "
        "order_items_1.item_id",
        use_default_dialect=True,
    )
    # here, the join really wants to add a second FROM clause
    # for "Item". but select_from disallows that
    self.assert_compile(
        sess.query(Item.id)
        .select_from(User)
        .join(Item, User.id == Item.id),
        "SELECT items.id AS items_id FROM users JOIN items "
        "ON users.id = items.id",
        use_default_dialect=True,
    )
class JoinFromSelectableTest(fixtures.MappedTest, AssertsCompiledSQL):
    """Tests for join() where either side of the join is a subquery
    (selectable) rather than a mapped entity, in all four combinations
    of explicit/implicit left side."""

    __dialect__ = "default"
    run_setup_mappers = "once"

    @classmethod
    def define_tables(cls, metadata):
        # table2 references table1 via t1_id (no FK constraint needed
        # here; ON clauses in the tests are always explicit)
        Table("table1", metadata, Column("id", Integer, primary_key=True))
        Table(
            "table2",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("t1_id", Integer),
        )

    @classmethod
    def setup_classes(cls):
        class T1(cls.Comparable):
            pass

        class T2(cls.Comparable):
            pass

    @classmethod
    def setup_mappers(cls):
        table1, table2 = cls.tables.table1, cls.tables.table2
        T1, T2 = cls.classes("T1", "T2")
        cls.mapper_registry.map_imperatively(T1, table1)
        cls.mapper_registry.map_imperatively(T2, table2)

    def test_select_mapped_to_mapped_explicit_left(self):
        """subquery as explicit left side, joining to mapped T1."""
        T1, T2 = self.classes.T1, self.classes.T2
        sess = fixture_session()
        subq = (
            sess.query(T2.t1_id, func.count(T2.id).label("count"))
            .group_by(T2.t1_id)
            .subquery()
        )
        self.assert_compile(
            sess.query(subq.c.count, T1.id)
            .select_from(subq)
            .join(T1, subq.c.t1_id == T1.id),
            "SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
            "FROM (SELECT table2.t1_id AS t1_id, "
            "count(table2.id) AS count FROM table2 "
            "GROUP BY table2.t1_id) AS anon_1 JOIN table1 "
            "ON anon_1.t1_id = table1.id",
        )

    def test_select_mapped_to_mapped_implicit_left(self):
        """Same join; the subquery is inferred as left from the columns."""
        T1, T2 = self.classes.T1, self.classes.T2
        sess = fixture_session()
        subq = (
            sess.query(T2.t1_id, func.count(T2.id).label("count"))
            .group_by(T2.t1_id)
            .subquery()
        )
        self.assert_compile(
            sess.query(subq.c.count, T1.id).join(T1, subq.c.t1_id == T1.id),
            "SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
            "FROM (SELECT table2.t1_id AS t1_id, "
            "count(table2.id) AS count FROM table2 "
            "GROUP BY table2.t1_id) AS anon_1 JOIN table1 "
            "ON anon_1.t1_id = table1.id",
        )

    def test_select_mapped_to_select_explicit_left(self):
        """mapped T1 as explicit left side, joining to the subquery."""
        T1, T2 = self.classes.T1, self.classes.T2
        sess = fixture_session()
        subq = (
            sess.query(T2.t1_id, func.count(T2.id).label("count"))
            .group_by(T2.t1_id)
            .subquery()
        )
        self.assert_compile(
            sess.query(subq.c.count, T1.id)
            .select_from(T1)
            .join(subq, subq.c.t1_id == T1.id),
            "SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
            "FROM table1 JOIN (SELECT table2.t1_id AS t1_id, "
            "count(table2.id) AS count FROM table2 GROUP BY table2.t1_id) "
            "AS anon_1 ON anon_1.t1_id = table1.id",
        )

    def test_select_mapped_to_select_implicit_left(self):
        """T1 inferred as left when joining to the subquery; identical
        SQL with or without select_from(T1)."""
        T1, T2 = self.classes.T1, self.classes.T2
        sess = fixture_session()
        subq = (
            sess.query(T2.t1_id, func.count(T2.id).label("count"))
            .group_by(T2.t1_id)
            .subquery()
        )
        # without select_from
        self.assert_compile(
            sess.query(subq.c.count, T1.id).join(subq, subq.c.t1_id == T1.id),
            "SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
            "FROM table1 JOIN "
            "(SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
            "FROM table2 GROUP BY table2.t1_id) "
            "AS anon_1 ON anon_1.t1_id = table1.id",
        )
        # with select_from, same query
        self.assert_compile(
            sess.query(subq.c.count, T1.id)
            .select_from(T1)
            .join(subq, subq.c.t1_id == T1.id),
            "SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
            "FROM table1 JOIN "
            "(SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
            "FROM table2 GROUP BY table2.t1_id) "
            "AS anon_1 ON anon_1.t1_id = table1.id",
        )

    def test_mapped_select_to_mapped_implicit_left(self):
        """Column order reversed (T1.id first); subquery still chosen as
        the left side, with or without select_from(subq)."""
        T1, T2 = self.classes.T1, self.classes.T2
        sess = fixture_session()
        subq = (
            sess.query(T2.t1_id, func.count(T2.id).label("count"))
            .group_by(T2.t1_id)
            .subquery()
        )
        # without select_from
        self.assert_compile(
            sess.query(T1.id, subq.c.count).join(T1, subq.c.t1_id == T1.id),
            "SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
            "FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
            "FROM table2 GROUP BY table2.t1_id) AS anon_1 "
            "JOIN table1 ON anon_1.t1_id = table1.id",
        )
        # with select_from, same query
        self.assert_compile(
            sess.query(T1.id, subq.c.count)
            .select_from(subq)
            .join(T1, subq.c.t1_id == T1.id),
            "SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
            "FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
            "FROM table2 GROUP BY table2.t1_id) AS anon_1 "
            "JOIN table1 ON anon_1.t1_id = table1.id",
        )

    def test_mapped_select_to_mapped_explicit_left(self):
        """Explicit select_from(subq) with reversed column order."""
        T1, T2 = self.classes.T1, self.classes.T2
        sess = fixture_session()
        subq = (
            sess.query(T2.t1_id, func.count(T2.id).label("count"))
            .group_by(T2.t1_id)
            .subquery()
        )
        self.assert_compile(
            sess.query(T1.id, subq.c.count)
            .select_from(subq)
            .join(T1, subq.c.t1_id == T1.id),
            "SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
            "FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
            "FROM table2 GROUP BY table2.t1_id) AS anon_1 JOIN table1 "
            "ON anon_1.t1_id = table1.id",
        )

    def test_mapped_select_to_select_explicit_left(self):
        """Explicit select_from(T1) joining out to the subquery."""
        T1, T2 = self.classes.T1, self.classes.T2
        sess = fixture_session()
        subq = (
            sess.query(T2.t1_id, func.count(T2.id).label("count"))
            .group_by(T2.t1_id)
            .subquery()
        )
        self.assert_compile(
            sess.query(T1.id, subq.c.count)
            .select_from(T1)
            .join(subq, subq.c.t1_id == T1.id),
            "SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
            "FROM table1 JOIN (SELECT table2.t1_id AS t1_id, "
            "count(table2.id) AS count "
            "FROM table2 GROUP BY table2.t1_id) AS anon_1 "
            "ON anon_1.t1_id = table1.id",
        )

    def test_mapped_select_to_select_implicit_left(self):
        """T1 inferred as left side when joining out to the subquery."""
        T1, T2 = self.classes.T1, self.classes.T2
        sess = fixture_session()
        subq = (
            sess.query(T2.t1_id, func.count(T2.id).label("count"))
            .group_by(T2.t1_id)
            .subquery()
        )
        self.assert_compile(
            sess.query(T1.id, subq.c.count).join(subq, subq.c.t1_id == T1.id),
            "SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
            "FROM table1 JOIN (SELECT table2.t1_id AS t1_id, "
            "count(table2.id) AS count "
            "FROM table2 GROUP BY table2.t1_id) AS anon_1 "
            "ON anon_1.t1_id = table1.id",
        )
class SelfRefMixedTest(fixtures.MappedTest, AssertsCompiledSQL):
    """Joins mixing a self-referential relationship (Node->Node, both
    FK-based and m2m via assoc_table) with a plain o2m (Node->Sub)."""

    run_setup_mappers = "once"
    __dialect__ = default.DefaultDialect()

    @classmethod
    def define_tables(cls, metadata):
        # self-referential parent/child
        Table(
            "nodes",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("parent_id", Integer, ForeignKey("nodes.id")),
        )
        # one-to-many target of Node.subs
        Table(
            "sub_table",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("node_id", Integer, ForeignKey("nodes.id")),
        )
        # self-referential many-to-many association
        Table(
            "assoc_table",
            metadata,
            Column("left_id", Integer, ForeignKey("nodes.id")),
            Column("right_id", Integer, ForeignKey("nodes.id")),
        )

    @classmethod
    def setup_classes(cls):
        class Node(cls.Comparable):
            pass

        class Sub(cls.Comparable):
            pass

    @classmethod
    def setup_mappers(cls):
        nodes, assoc_table, sub_table = (
            cls.tables.nodes,
            cls.tables.assoc_table,
            cls.tables.sub_table,
        )
        Node, Sub = cls.classes("Node", "Sub")
        cls.mapper_registry.map_imperatively(
            Node,
            nodes,
            properties={
                # adjacency-list children/parent pair
                "children": relationship(
                    Node,
                    lazy="select",
                    join_depth=3,
                    backref=backref("parent", remote_side=[nodes.c.id]),
                ),
                "subs": relationship(Sub),
                # self-referential m2m; explicit join conditions required
                # since both FKs point at nodes.id
                "assoc": relationship(
                    Node,
                    secondary=assoc_table,
                    primaryjoin=nodes.c.id == assoc_table.c.left_id,
                    secondaryjoin=nodes.c.id == assoc_table.c.right_id,
                ),
            },
        )
        cls.mapper_registry.map_imperatively(Sub, sub_table)

    def test_o2m_aliased_plus_o2m(self):
        """After joining to aliased children, .subs joins from either the
        alias (n1.subs) or the original entity (Node.subs)."""
        Node, Sub = self.classes.Node, self.classes.Sub
        sess = fixture_session()
        n1 = aliased(Node)
        self.assert_compile(
            sess.query(Node).join(n1, Node.children).join(Sub, n1.subs),
            "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
            "FROM nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
            "JOIN sub_table ON nodes_1.id = sub_table.node_id",
        )
        self.assert_compile(
            sess.query(Node).join(n1, Node.children).join(Sub, Node.subs),
            "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
            "FROM nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
            "JOIN sub_table ON nodes.id = sub_table.node_id",
        )

    def test_m2m_aliased_plus_o2m(self):
        """Same pattern through the m2m assoc relationship."""
        Node, Sub = self.classes.Node, self.classes.Sub
        sess = fixture_session()
        n1 = aliased(Node)
        self.assert_compile(
            sess.query(Node).join(n1, Node.assoc).join(Sub, n1.subs),
            "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
            "FROM nodes JOIN assoc_table AS assoc_table_1 ON nodes.id = "
            "assoc_table_1.left_id JOIN nodes AS nodes_1 ON nodes_1.id = "
            "assoc_table_1.right_id JOIN sub_table "
            "ON nodes_1.id = sub_table.node_id",
        )
        self.assert_compile(
            sess.query(Node).join(n1, Node.assoc).join(Sub, Node.subs),
            "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
            "FROM nodes JOIN assoc_table AS assoc_table_1 ON nodes.id = "
            "assoc_table_1.left_id JOIN nodes AS nodes_1 ON nodes_1.id = "
            "assoc_table_1.right_id JOIN sub_table "
            "ON nodes.id = sub_table.node_id",
        )
class CreateJoinsTest(fixtures.MappedTest, AssertsCompiledSQL):
    """Join rendering through a joined-table inheritance hierarchy built
    inline by _inherits_fixture()."""

    __dialect__ = "default"

    def _inherits_fixture(self):
        """Map Base with joined-inheritance subclasses A, B, C where
        A.b -> B and B.c -> C via explicit primaryjoins; returns the
        four classes."""
        m = MetaData()
        base = Table("base", m, Column("id", Integer, primary_key=True))
        a = Table(
            "a",
            m,
            Column("id", Integer, ForeignKey("base.id"), primary_key=True),
            Column("b_id", Integer, ForeignKey("b.id")),
        )
        b = Table(
            "b",
            m,
            Column("id", Integer, ForeignKey("base.id"), primary_key=True),
            Column("c_id", Integer, ForeignKey("c.id")),
        )
        c = Table(
            "c",
            m,
            Column("id", Integer, ForeignKey("base.id"), primary_key=True),
        )

        class Base:
            pass

        class A(Base):
            pass

        class B(Base):
            pass

        class C(Base):
            pass

        self.mapper_registry.map_imperatively(Base, base)
        self.mapper_registry.map_imperatively(
            A,
            a,
            inherits=Base,
            properties={"b": relationship(B, primaryjoin=a.c.b_id == b.c.id)},
        )
        self.mapper_registry.map_imperatively(
            B,
            b,
            inherits=Base,
            properties={"c": relationship(C, primaryjoin=b.c.c_id == c.c.id)},
        )
        self.mapper_registry.map_imperatively(C, c, inherits=Base)
        return A, B, C, Base

    def test_double_level_aliased_exists(self):
        """Nested .has() across two inheritance levels renders nested
        EXISTS subqueries over the aliased base-joins."""
        A, B, C, Base = self._inherits_fixture()
        s = fixture_session()
        self.assert_compile(
            s.query(A).filter(A.b.has(B.c.has(C.id == 5))),
            "SELECT a.id AS a_id, base.id AS base_id, a.b_id AS a_b_id "
            "FROM base JOIN a ON base.id = a.id WHERE "
            "EXISTS (SELECT 1 FROM (SELECT base.id AS base_id, b.id AS "
            "b_id, b.c_id AS b_c_id FROM base JOIN b ON base.id = b.id) "
            "AS anon_1 WHERE a.b_id = anon_1.b_id AND (EXISTS "
            "(SELECT 1 FROM (SELECT base.id AS base_id, c.id AS c_id "
            "FROM base JOIN c ON base.id = c.id) AS anon_2 "
            "WHERE anon_1.b_c_id = anon_2.c_id AND anon_2.c_id = :id_1"
            ")))",
        )
class JoinToNonPolyAliasesTest(fixtures.MappedTest, AssertsCompiledSQL):
    """Joins to a relationship whose target is an aliased() of a plain
    (non-polymorphic) SELECT of the child table."""

    __dialect__ = "default"
    run_create_tables = None
    run_deletes = None

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "parent",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("data", String(50)),
        )
        Table(
            "child",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("parent_id", Integer, ForeignKey("parent.id")),
            Column("data", String(50)),
        )

    @classmethod
    def setup_mappers(cls):
        parent, child = cls.tables.parent, cls.tables.child

        class Parent(cls.Comparable):
            pass

        class Child(cls.Comparable):
            pass

        mp = cls.mapper_registry.map_imperatively(Parent, parent)
        cls.mapper_registry.map_imperatively(Child, child)
        # Parent.npc relates to Child through a derived SELECT alias,
        # so joins render an anonymous subquery rather than "child"
        derived = select(child).alias()
        npc = aliased(Child, derived)
        cls.npc = npc
        cls.derived = derived
        mp.add_property("npc", relationship(npc))

    def test_join_parent_child(self):
        """join(Parent.npc) renders the derived SELECT as the right side."""
        Parent = self.classes.Parent
        sess = fixture_session()
        self.assert_compile(
            sess.query(Parent)
            .join(Parent.npc)
            .filter(self.derived.c.data == "x"),
            "SELECT parent.id AS parent_id, parent.data AS parent_data "
            "FROM parent JOIN (SELECT child.id AS id, "
            "child.parent_id AS parent_id, "
            "child.data AS data "
            "FROM child) AS anon_1 ON parent.id = anon_1.parent_id "
            "WHERE anon_1.data = :data_1",
        )

    def test_join_parent_child_select_from(self):
        """Querying the alias itself with select_from(Parent)."""
        Parent = self.classes.Parent
        npc = self.npc
        sess = fixture_session()
        self.assert_compile(
            sess.query(npc)
            .select_from(Parent)
            .join(Parent.npc)
            .filter(self.derived.c.data == "x"),
            "SELECT anon_1.id AS anon_1_id, anon_1.parent_id "
            "AS anon_1_parent_id, anon_1.data AS anon_1_data "
            "FROM parent JOIN (SELECT child.id AS id, child.parent_id AS "
            "parent_id, child.data AS data FROM child) AS anon_1 ON "
            "parent.id = anon_1.parent_id WHERE anon_1.data = :data_1",
        )

    def test_join_select_parent_child(self):
        """Querying both Parent and the alias as entities."""
        Parent = self.classes.Parent
        npc = self.npc
        sess = fixture_session()
        self.assert_compile(
            sess.query(Parent, npc)
            .join(Parent.npc)
            .filter(self.derived.c.data == "x"),
            "SELECT parent.id AS parent_id, parent.data AS parent_data, "
            "anon_1.id AS anon_1_id, anon_1.parent_id AS anon_1_parent_id, "
            "anon_1.data AS anon_1_data FROM parent JOIN "
            "(SELECT child.id AS id, child.parent_id AS parent_id, "
            "child.data AS data FROM child) AS anon_1 ON parent.id = "
            "anon_1.parent_id WHERE anon_1.data = :data_1",
        )
class SelfReferentialTest(fixtures.MappedTest, AssertsCompiledSQL):
    """Joins, aliasing and relationship comparison operators on a
    self-referential ``Node`` mapping (adjacency-list pattern)."""

    run_setup_mappers = "once"
    run_inserts = "once"
    run_deletes = None
    __dialect__ = "default"

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "nodes",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("parent_id", Integer, ForeignKey("nodes.id")),
            Column("data", String(30)),
        )

    @classmethod
    def setup_classes(cls):
        class Node(cls.Comparable):
            def append(self, node):
                self.children.append(node)

    @classmethod
    def setup_mappers(cls):
        Node, nodes = cls.classes.Node, cls.tables.nodes

        cls.mapper_registry.map_imperatively(
            Node,
            nodes,
            properties={
                "children": relationship(
                    Node,
                    lazy="select",
                    join_depth=3,
                    backref=backref("parent", remote_side=[nodes.c.id]),
                )
            },
        )

    @classmethod
    def insert_data(cls, connection):
        # Fixture tree: n1 -> (n11, n12 -> (n121, n122, n123), n13)
        Node = cls.classes.Node

        sess = Session(connection)
        n1 = Node(data="n1")
        n1.append(Node(data="n11"))
        n1.append(Node(data="n12"))
        n1.append(Node(data="n13"))
        n1.children[1].append(Node(data="n121"))
        n1.children[1].append(Node(data="n122"))
        n1.children[1].append(Node(data="n123"))
        sess.add(n1)
        sess.flush()
        sess.close()

    def test_join_4_explicit_join(self):
        Node = self.classes.Node
        sess = fixture_session()

        na = aliased(Node)
        na2 = aliased(Node)

        # this one is a great example of how to show how the API changes;
        # while it requires the explicitness of aliased(Node), the whole
        # guesswork of joinpoint / aliased goes away and the whole thing
        # is simpler
        #
        # .join("parent", aliased=True)
        # .filter(Node.data == "n12")
        # .join("parent", aliased=True, from_joinpoint=True)
        # .filter(Node.data == "n1")
        #
        # becomes:
        #
        # na = aliased(Node)
        # na2 = aliased(Node)
        #
        # ...
        # .join(na, Node.parent)
        # .filter(na.data == "n12")
        # .join(na2, na.parent)
        # .filter(na2.data == "n1")
        #
        q = (
            sess.query(Node)
            .filter(Node.data == "n122")
            .join(na, Node.parent)
            .filter(na.data == "n12")
            .join(na2, na.parent)
            .filter(na2.data == "n1")
        )
        self.assert_compile(
            q,
            "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, "
            "nodes.data AS nodes_data FROM nodes JOIN nodes AS nodes_1 "
            "ON nodes_1.id = nodes.parent_id JOIN nodes AS nodes_2 "
            "ON nodes_2.id = nodes_1.parent_id WHERE nodes.data = :data_1 "
            "AND nodes_1.data = :data_2 AND nodes_2.data = :data_3",
            checkparams={"data_1": "n122", "data_2": "n12", "data_3": "n1"},
        )
        # Also verify the query round-trips against the fixture data.
        node = q.first()
        eq_(node.data, "n122")

    def test_from_self_inside_excludes_outside(self):
        Node = self.classes.Node
        sess = fixture_session()

        n1 = aliased(Node)

        # n1 is not inside the from_self(), so all cols must be maintained
        # on the outside
        subq = (
            sess.query(Node)
            .filter(Node.data == "n122")
            .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
            .subquery()
        )
        na = aliased(Node, subq)
        self.assert_compile(
            sess.query(n1, na.id),
            "SELECT nodes_1.id AS nodes_1_id, "
            "nodes_1.parent_id AS nodes_1_parent_id, "
            "nodes_1.data AS nodes_1_data, anon_1.nodes_id AS anon_1_nodes_id "
            "FROM nodes AS nodes_1, (SELECT nodes.id AS nodes_id, "
            "nodes.parent_id AS nodes_parent_id, "
            "nodes.data AS nodes_data FROM "
            "nodes WHERE nodes.data = :data_1) AS anon_1",
            use_default_dialect=True,
        )

        parent = aliased(Node)
        grandparent = aliased(Node)
        subq = (
            sess.query(Node, parent, grandparent)
            .join(parent, Node.parent)
            .join(grandparent, parent.parent)
            .filter(Node.data == "n122")
            .filter(parent.data == "n12")
            .filter(grandparent.data == "n1")
            .set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
            .subquery()
        )
        na = aliased(Node, subq)
        pa = aliased(parent, subq)
        ga = aliased(grandparent, subq)
        q = sess.query(na, pa, ga).limit(1)

        # parent, grandparent *are* inside the from_self(), so they
        # should get aliased to the outside.
        self.assert_compile(
            q,
            "SELECT anon_1.nodes_id AS anon_1_nodes_id, "
            "anon_1.nodes_parent_id AS anon_1_nodes_parent_id, "
            "anon_1.nodes_data AS anon_1_nodes_data, "
            "anon_1.nodes_1_id AS anon_1_nodes_1_id, "
            "anon_1.nodes_1_parent_id AS anon_1_nodes_1_parent_id, "
            "anon_1.nodes_1_data AS anon_1_nodes_1_data, "
            "anon_1.nodes_2_id AS anon_1_nodes_2_id, "
            "anon_1.nodes_2_parent_id AS anon_1_nodes_2_parent_id, "
            "anon_1.nodes_2_data AS anon_1_nodes_2_data "
            "FROM (SELECT nodes.id AS nodes_id, nodes.parent_id "
            "AS nodes_parent_id, nodes.data AS nodes_data, "
            "nodes_1.id AS nodes_1_id, "
            "nodes_1.parent_id AS nodes_1_parent_id, "
            "nodes_1.data AS nodes_1_data, nodes_2.id AS nodes_2_id, "
            "nodes_2.parent_id AS nodes_2_parent_id, nodes_2.data AS "
            "nodes_2_data FROM nodes JOIN nodes AS nodes_1 ON "
            "nodes_1.id = nodes.parent_id JOIN nodes AS nodes_2 "
            "ON nodes_2.id = nodes_1.parent_id "
            "WHERE nodes.data = :data_1 AND nodes_1.data = :data_2 AND "
            "nodes_2.data = :data_3) AS anon_1 LIMIT :param_1",
            {"param_1": 1},
            use_default_dialect=True,
        )

    def test_join_to_self_no_aliases_raises(self):
        # A self-join with no aliased() target has no way to disambiguate
        # the two sides and must raise.
        Node = self.classes.Node

        s = fixture_session()
        assert_raises_message(
            sa.exc.InvalidRequestError,
            r"Can't construct a join from Mapper\[Node\(nodes\)\] to "
            r"Mapper\[Node\(nodes\)\], they are the same entity",
            s.query(Node).join(Node.children)._compile_context,
        )

    def test_explicit_join_1(self):
        # String-based relationship name in join()
        Node = self.classes.Node
        n1 = aliased(Node)
        n2 = aliased(Node)

        self.assert_compile(
            join(Node, n1, "children").join(n2, "children"),
            "nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
            "JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id",
            use_default_dialect=True,
        )

    def test_explicit_join_2(self):
        # Attribute-based relationship in join(); same SQL as the
        # string form above.
        Node = self.classes.Node
        n1 = aliased(Node)
        n2 = aliased(Node)

        self.assert_compile(
            join(Node, n1, Node.children).join(n2, n1.children),
            "nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
            "JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id",
            use_default_dialect=True,
        )

    def test_explicit_join_3(self):
        Node = self.classes.Node
        n1 = aliased(Node)
        n2 = aliased(Node)

        # the join_to_left=False here is unfortunate.   the default
        # behavior of join() here is is totally reversed from that of
        # query.join().
        self.assert_compile(
            join(Node, n1, Node.children).join(
                n2, Node.children, join_to_left=False
            ),
            "nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
            "JOIN nodes AS nodes_2 ON nodes.id = nodes_2.parent_id",
            use_default_dialect=True,
        )

    def test_explicit_join_4(self):
        Node = self.classes.Node
        sess = fixture_session()
        n1 = aliased(Node)
        n2 = aliased(Node)

        self.assert_compile(
            sess.query(Node).join(n1, Node.children).join(n2, n1.children),
            "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, "
            "nodes.data AS nodes_data FROM nodes JOIN nodes AS nodes_1 "
            "ON nodes.id = nodes_1.parent_id "
            "JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id",
            use_default_dialect=True,
        )

    def test_explicit_join_5(self):
        # Both joins originate from Node (not chained through n1).
        Node = self.classes.Node
        sess = fixture_session()
        n1 = aliased(Node)
        n2 = aliased(Node)

        self.assert_compile(
            sess.query(Node).join(n1, Node.children).join(n2, Node.children),
            "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, "
            "nodes.data AS nodes_data FROM nodes JOIN nodes AS nodes_1 "
            "ON nodes.id = nodes_1.parent_id "
            "JOIN nodes AS nodes_2 ON nodes.id = nodes_2.parent_id",
            use_default_dialect=True,
        )

    def test_explicit_join_6(self):
        Node = self.classes.Node
        sess = fixture_session()
        n1 = aliased(Node)

        node = (
            sess.query(Node)
            .select_from(join(Node, n1, "children"))
            .filter(n1.data == "n122")
            .first()
        )
        assert node.data == "n12"

    def test_explicit_join_7(self):
        Node = self.classes.Node
        sess = fixture_session()
        n1 = aliased(Node)
        n2 = aliased(Node)

        node = (
            sess.query(Node)
            .select_from(join(Node, n1, "children").join(n2, "children"))
            .filter(n2.data == "n122")
            .first()
        )
        assert node.data == "n1"

    def test_explicit_join_8(self):
        Node = self.classes.Node
        sess = fixture_session()
        n1 = aliased(Node)
        n2 = aliased(Node)

        # mix explicit and named onclause
        node = (
            sess.query(Node)
            .select_from(
                join(Node, n1, Node.id == n1.parent_id).join(n2, "children")
            )
            .filter(n2.data == "n122")
            .first()
        )
        assert node.data == "n1"

    def test_explicit_join_9(self):
        Node = self.classes.Node
        sess = fixture_session()
        n1 = aliased(Node)
        n2 = aliased(Node)

        node = (
            sess.query(Node)
            .select_from(join(Node, n1, "parent").join(n2, "parent"))
            .filter(
                and_(Node.data == "n122", n1.data == "n12", n2.data == "n1")
            )
            .first()
        )
        assert node.data == "n122"

    def test_explicit_join_10(self):
        Node = self.classes.Node
        sess = fixture_session()
        n1 = aliased(Node)
        n2 = aliased(Node)

        eq_(
            list(
                sess.query(Node)
                .select_from(join(Node, n1, "parent").join(n2, "parent"))
                .filter(
                    and_(
                        Node.data == "n122", n1.data == "n12", n2.data == "n1"
                    )
                )
                .with_entities(Node.data, n1.data, n2.data)
            ),
            [("n122", "n12", "n1")],
        )

    def test_join_to_nonaliased(self):
        # The aliased entity is the "left" side; the plain Node entity is
        # joined to without aliasing.
        Node = self.classes.Node

        sess = fixture_session()

        n1 = aliased(Node)

        # using 'n1.parent' implicitly joins to unaliased Node
        eq_(
            sess.query(n1).join(n1.parent).filter(Node.data == "n1").all(),
            [
                Node(parent_id=1, data="n11", id=2),
                Node(parent_id=1, data="n12", id=3),
                Node(parent_id=1, data="n13", id=4),
            ],
        )

        # explicit (new syntax)
        eq_(
            sess.query(n1)
            .join(Node, n1.parent)
            .filter(Node.data == "n1")
            .all(),
            [
                Node(parent_id=1, data="n11", id=2),
                Node(parent_id=1, data="n12", id=3),
                Node(parent_id=1, data="n13", id=4),
            ],
        )

    def test_multiple_explicit_entities_one(self):
        Node = self.classes.Node

        sess = fixture_session()

        parent = aliased(Node)
        grandparent = aliased(Node)
        eq_(
            sess.query(Node, parent, grandparent)
            .join(parent, Node.parent)
            .join(grandparent, parent.parent)
            .filter(Node.data == "n122")
            .filter(parent.data == "n12")
            .filter(grandparent.data == "n1")
            .first(),
            (Node(data="n122"), Node(data="n12"), Node(data="n1")),
        )

    def test_multiple_explicit_entities_two(self):
        Node = self.classes.Node

        sess = fixture_session()

        parent = aliased(Node)
        grandparent = aliased(Node)
        subq = (
            sess.query(Node, parent, grandparent)
            .join(parent, Node.parent)
            .join(grandparent, parent.parent)
            .filter(Node.data == "n122")
            .filter(parent.data == "n12")
            .filter(grandparent.data == "n1")
            .subquery()
        )

        # aliasing each original entity against the subquery keeps the
        # three-tuple result shape.
        na = aliased(Node, subq)
        pa = aliased(parent, subq)
        ga = aliased(grandparent, subq)

        eq_(
            sess.query(na, pa, ga).first(),
            (Node(data="n122"), Node(data="n12"), Node(data="n1")),
        )

    def test_multiple_explicit_entities_three(self):
        Node = self.classes.Node

        sess = fixture_session()

        parent = aliased(Node)
        grandparent = aliased(Node)
        # same, change order around
        subq = (
            sess.query(parent, grandparent, Node)
            .join(parent, Node.parent)
            .join(grandparent, parent.parent)
            .filter(Node.data == "n122")
            .filter(parent.data == "n12")
            .filter(grandparent.data == "n1")
            .subquery()
        )

        na = aliased(Node, subq)
        pa = aliased(parent, subq)
        ga = aliased(grandparent, subq)

        eq_(
            sess.query(pa, ga, na).first(),
            (Node(data="n12"), Node(data="n1"), Node(data="n122")),
        )

    def test_multiple_explicit_entities_four(self):
        Node = self.classes.Node

        sess = fixture_session()

        parent = aliased(Node)
        grandparent = aliased(Node)
        # joinedload of children combined with the three-entity query.
        eq_(
            sess.query(Node, parent, grandparent)
            .join(parent, Node.parent)
            .join(grandparent, parent.parent)
            .filter(Node.data == "n122")
            .filter(parent.data == "n12")
            .filter(grandparent.data == "n1")
            .options(joinedload(Node.children))
            .first(),
            (Node(data="n122"), Node(data="n12"), Node(data="n1")),
        )

    def test_multiple_explicit_entities_five(self):
        Node = self.classes.Node

        sess = fixture_session()

        parent = aliased(Node)
        grandparent = aliased(Node)
        subq = (
            sess.query(Node, parent, grandparent)
            .join(parent, Node.parent)
            .join(grandparent, parent.parent)
            .filter(Node.data == "n122")
            .filter(parent.data == "n12")
            .filter(grandparent.data == "n1")
            .subquery()
        )

        na = aliased(Node, subq)
        pa = aliased(parent, subq)
        ga = aliased(grandparent, subq)

        # joinedload against the subquery-based alias.
        eq_(
            sess.query(na, pa, ga).options(joinedload(na.children)).first(),
            (Node(data="n122"), Node(data="n12"), Node(data="n1")),
        )

    def test_any(self):
        # relationship.any() on the self-referential "children" collection.
        Node = self.classes.Node

        sess = fixture_session()
        eq_(
            sess.query(Node)
            .filter(Node.children.any(Node.data == "n1"))
            .all(),
            [],
        )
        eq_(
            sess.query(Node)
            .filter(Node.children.any(Node.data == "n12"))
            .all(),
            [Node(data="n1")],
        )
        # ~any(): leaf nodes only.
        eq_(
            sess.query(Node)
            .filter(~Node.children.any())
            .order_by(Node.id)
            .all(),
            [
                Node(data="n11"),
                Node(data="n13"),
                Node(data="n121"),
                Node(data="n122"),
                Node(data="n123"),
            ],
        )

    def test_has(self):
        # relationship.has() on the scalar "parent" side.
        Node = self.classes.Node

        sess = fixture_session()

        eq_(
            sess.query(Node)
            .filter(Node.parent.has(Node.data == "n12"))
            .order_by(Node.id)
            .all(),
            [Node(data="n121"), Node(data="n122"), Node(data="n123")],
        )
        eq_(
            sess.query(Node)
            .filter(Node.parent.has(Node.data == "n122"))
            .all(),
            [],
        )
        # ~has(): the root node only.
        eq_(
            sess.query(Node).filter(~Node.parent.has()).all(),
            [Node(data="n1")],
        )

    def test_contains(self):
        # collection.contains(instance) against the self-referential
        # children relationship.
        Node = self.classes.Node

        sess = fixture_session()

        n122 = sess.query(Node).filter(Node.data == "n122").one()
        eq_(
            sess.query(Node).filter(Node.children.contains(n122)).all(),
            [Node(data="n12")],
        )

        n13 = sess.query(Node).filter(Node.data == "n13").one()
        eq_(
            sess.query(Node).filter(Node.children.contains(n13)).all(),
            [Node(data="n1")],
        )

    def test_eq_ne(self):
        # ==/!= comparison on the many-to-one "parent" relationship.
        Node = self.classes.Node

        sess = fixture_session()

        n12 = sess.query(Node).filter(Node.data == "n12").one()
        eq_(
            sess.query(Node).filter(Node.parent == n12).all(),
            [Node(data="n121"), Node(data="n122"), Node(data="n123")],
        )

        eq_(
            sess.query(Node).filter(Node.parent != n12).all(),
            [
                Node(data="n1"),
                Node(data="n11"),
                Node(data="n12"),
                Node(data="n13"),
            ],
        )
class SelfReferentialM2MTest(fixtures.MappedTest):
    """Self-referential many-to-many ``Node`` graph via the
    ``node_to_nodes`` association table."""

    run_setup_mappers = "once"
    run_inserts = "once"
    run_deletes = None

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "nodes",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("data", String(30)),
        )
        # Association table: composite PK of (left, right) node ids.
        Table(
            "node_to_nodes",
            metadata,
            Column(
                "left_node_id",
                Integer,
                ForeignKey("nodes.id"),
                primary_key=True,
            ),
            Column(
                "right_node_id",
                Integer,
                ForeignKey("nodes.id"),
                primary_key=True,
            ),
        )

    @classmethod
    def setup_classes(cls):
        class Node(cls.Comparable):
            pass

    @classmethod
    def insert_data(cls, connection):
        Node, nodes, node_to_nodes = (
            cls.classes.Node,
            cls.tables.nodes,
            cls.tables.node_to_nodes,
        )

        cls.mapper_registry.map_imperatively(
            Node,
            nodes,
            properties={
                "children": relationship(
                    Node,
                    lazy="select",
                    secondary=node_to_nodes,
                    primaryjoin=nodes.c.id == node_to_nodes.c.left_node_id,
                    secondaryjoin=nodes.c.id == node_to_nodes.c.right_node_id,
                )
            },
        )
        sess = Session(connection)
        n1 = Node(data="n1")
        n2 = Node(data="n2")
        n3 = Node(data="n3")
        n4 = Node(data="n4")
        n5 = Node(data="n5")
        n6 = Node(data="n6")
        n7 = Node(data="n7")

        # Directed graph: n1 -> {n2, n3, n4}; n2 -> {n3, n6, n7};
        # n3 -> {n5, n4}.
        n1.children = [n2, n3, n4]
        n2.children = [n3, n6, n7]
        n3.children = [n5, n4]
        sess.add(n1)
        sess.add(n2)
        sess.add(n3)
        sess.add(n4)
        sess.flush()
        sess.close()

    def test_any(self):
        Node = self.classes.Node

        sess = fixture_session()
        eq_(
            sess.query(Node)
            .filter(Node.children.any(Node.data == "n3"))
            .order_by(Node.data)
            .all(),
            [Node(data="n1"), Node(data="n2")],
        )

    def test_contains(self):
        Node = self.classes.Node

        sess = fixture_session()
        n4 = sess.query(Node).filter_by(data="n4").one()

        eq_(
            sess.query(Node)
            .filter(Node.children.contains(n4))
            .order_by(Node.data)
            .all(),
            [Node(data="n1"), Node(data="n3")],
        )
        # not_(contains): every node without an edge to n4.
        eq_(
            sess.query(Node)
            .filter(not_(Node.children.contains(n4)))
            .order_by(Node.data)
            .all(),
            [
                Node(data="n2"),
                Node(data="n4"),
                Node(data="n5"),
                Node(data="n6"),
                Node(data="n7"),
            ],
        )

    def test_explicit_join(self):
        Node = self.classes.Node

        sess = fixture_session()

        n1 = aliased(Node)
        eq_(
            sess.query(Node)
            .select_from(join(Node, n1, "children"))
            .filter(n1.data.in_(["n3", "n7"]))
            .order_by(Node.id)
            .all(),
            [Node(data="n1"), Node(data="n2")],
        )
class JoinLateralTest(fixtures.MappedTest, AssertsCompiledSQL):
    """SQL-compilation tests for JOIN LATERAL against ORM entities,
    subqueries and set-returning functions."""

    # native-boolean dialect so the lateral ON clause renders as "ON true"
    __dialect__ = default.DefaultDialect(supports_native_boolean=True)

    run_setup_bind = None
    run_setup_mappers = "once"

    run_create_tables = None

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "people",
            metadata,
            Column("people_id", Integer, primary_key=True),
            Column("age", Integer),
            Column("name", String(30)),
        )
        Table(
            "bookcases",
            metadata,
            Column("bookcase_id", Integer, primary_key=True),
            Column(
                "bookcase_owner_id", Integer, ForeignKey("people.people_id")
            ),
            Column("bookcase_shelves", Integer),
            Column("bookcase_width", Integer),
        )
        Table(
            "books",
            metadata,
            Column("book_id", Integer, primary_key=True),
            Column(
                "bookcase_id", Integer, ForeignKey("bookcases.bookcase_id")
            ),
            Column("book_owner_id", Integer, ForeignKey("people.people_id")),
            Column("book_weight", Integer),
        )

    @classmethod
    def setup_classes(cls):
        class Person(cls.Comparable):
            pass

        class Bookcase(cls.Comparable):
            pass

        class Book(cls.Comparable):
            pass

    @classmethod
    def setup_mappers(cls):
        Person, Bookcase, Book = cls.classes("Person", "Bookcase", "Book")
        people, bookcases, books = cls.tables("people", "bookcases", "books")

        cls.mapper_registry.map_imperatively(Person, people)
        cls.mapper_registry.map_imperatively(
            Bookcase,
            bookcases,
            properties={
                "owner": relationship(Person),
                "books": relationship(Book),
            },
        )
        cls.mapper_registry.map_imperatively(Book, books)

    def test_select_subquery(self):
        # LATERAL subquery correlating to Person explicitly.
        Person, Book = self.classes("Person", "Book")

        s = fixture_session()

        subq = (
            s.query(Book.book_id)
            .correlate(Person)
            .filter(Person.people_id == Book.book_owner_id)
            .subquery()
            .lateral()
        )

        stmt = s.query(Person, subq.c.book_id).join(subq, true())

        self.assert_compile(
            stmt,
            "SELECT people.people_id AS people_people_id, "
            "people.age AS people_age, people.name AS people_name, "
            "anon_1.book_id AS anon_1_book_id "
            "FROM people JOIN LATERAL "
            "(SELECT books.book_id AS book_id FROM books "
            "WHERE people.people_id = books.book_owner_id) AS anon_1 ON true",
        )

    # sef == select_entity_from

    def test_select_subquery_aas_implicit_correlate(self):
        # aliased(Person, subquery) as the correlated entity; correlation
        # to anon_1 happens implicitly.
        Person, Book = self.classes("Person", "Book")

        s = fixture_session()

        stmt = s.query(Person).subquery()

        pa = aliased(Person, stmt)

        subq = (
            s.query(Book.book_id)
            .filter(pa.people_id == Book.book_owner_id)
            .subquery()
            .lateral()
        )

        stmt = s.query(pa, subq.c.book_id).join(subq, true())

        self.assert_compile(
            stmt,
            "SELECT anon_1.people_id AS anon_1_people_id, "
            "anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, "
            "anon_2.book_id AS anon_2_book_id "
            "FROM "
            "(SELECT people.people_id AS people_id, people.age AS age, "
            "people.name AS name FROM people) AS anon_1 "
            "JOIN LATERAL "
            "(SELECT books.book_id AS book_id FROM books "
            "WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true",
        )

    def test_select_subquery_aas_implicit_correlate_coreonly(self):
        # same as above, but the inner statement is a Core select().
        Person, Book = self.classes("Person", "Book")

        s = fixture_session()

        stmt = s.query(Person).subquery()

        pa = aliased(Person, stmt)

        subq = (
            select(Book.book_id)
            .where(pa.people_id == Book.book_owner_id)
            .subquery()
            .lateral()
        )

        stmt = s.query(pa, subq.c.book_id).join(subq, true())

        self.assert_compile(
            stmt,
            "SELECT anon_1.people_id AS anon_1_people_id, "
            "anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, "
            "anon_2.book_id AS anon_2_book_id "
            "FROM "
            "(SELECT people.people_id AS people_id, people.age AS age, "
            "people.name AS name FROM people) AS anon_1 "
            "JOIN LATERAL "
            "(SELECT books.book_id AS book_id FROM books "
            "WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true",
        )

    def test_select_subquery_aas_explicit_correlate_coreonly(self):
        # explicit .correlate(pa) with a Core select(); same SQL.
        Person, Book = self.classes("Person", "Book")

        s = fixture_session()

        stmt = s.query(Person).subquery()

        pa = aliased(Person, stmt)

        subq = (
            select(Book.book_id)
            .correlate(pa)
            .where(pa.people_id == Book.book_owner_id)
            .subquery()
            .lateral()
        )

        stmt = s.query(pa, subq.c.book_id).join(subq, true())

        self.assert_compile(
            stmt,
            "SELECT anon_1.people_id AS anon_1_people_id, "
            "anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, "
            "anon_2.book_id AS anon_2_book_id "
            "FROM "
            "(SELECT people.people_id AS people_id, people.age AS age, "
            "people.name AS name FROM people) AS anon_1 "
            "JOIN LATERAL "
            "(SELECT books.book_id AS book_id FROM books "
            "WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true",
        )

    def test_select_subquery_aas_explicit_correlate(self):
        # explicit .correlate(pa) with an ORM query; same SQL.
        Person, Book = self.classes("Person", "Book")

        s = fixture_session()

        stmt = s.query(Person).subquery()

        pa = aliased(Person, stmt)

        subq = (
            s.query(Book.book_id)
            .correlate(pa)
            .filter(pa.people_id == Book.book_owner_id)
            .subquery()
            .lateral()
        )

        stmt = s.query(pa, subq.c.book_id).join(subq, true())

        self.assert_compile(
            stmt,
            "SELECT anon_1.people_id AS anon_1_people_id, "
            "anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, "
            "anon_2.book_id AS anon_2_book_id "
            "FROM "
            "(SELECT people.people_id AS people_id, people.age AS age, "
            "people.name AS name FROM people) AS anon_1 "
            "JOIN LATERAL "
            "(SELECT books.book_id AS book_id FROM books "
            "WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true",
        )

    def test_from_function(self):
        # LATERAL against a set-returning function (generate_series).
        Bookcase = self.classes.Bookcase

        s = fixture_session()

        srf = lateral(func.generate_series(1, Bookcase.bookcase_shelves))

        self.assert_compile(
            s.query(Bookcase).join(srf, true()),
            "SELECT bookcases.bookcase_id AS bookcases_bookcase_id, "
            "bookcases.bookcase_owner_id AS bookcases_bookcase_owner_id, "
            "bookcases.bookcase_shelves AS bookcases_bookcase_shelves, "
            "bookcases.bookcase_width AS bookcases_bookcase_width "
            "FROM bookcases JOIN "
            "LATERAL generate_series(:generate_series_1, "
            "bookcases.bookcase_shelves) AS anon_1 ON true",
        )

    def test_from_function_aas(self):
        # LATERAL function correlating to an aliased(Bookcase, subquery).
        Bookcase = self.classes.Bookcase

        s = fixture_session()

        subq = s.query(Bookcase).subquery()

        ba = aliased(Bookcase, subq)

        srf = lateral(func.generate_series(1, ba.bookcase_shelves))

        self.assert_compile(
            s.query(ba).join(srf, true()),
            "SELECT anon_1.bookcase_id AS anon_1_bookcase_id, "
            "anon_1.bookcase_owner_id AS anon_1_bookcase_owner_id, "
            "anon_1.bookcase_shelves AS anon_1_bookcase_shelves, "
            "anon_1.bookcase_width AS anon_1_bookcase_width "
            "FROM (SELECT bookcases.bookcase_id AS bookcase_id, "
            "bookcases.bookcase_owner_id AS bookcase_owner_id, "
            "bookcases.bookcase_shelves AS bookcase_shelves, "
            "bookcases.bookcase_width AS bookcase_width FROM bookcases) "
            "AS anon_1 "
            "JOIN LATERAL "
            "generate_series(:generate_series_1, anon_1.bookcase_shelves) "
            "AS anon_2 ON true",
        )
class JoinRawTablesWLegacyTest(QueryTest, AssertsCompiledSQL):
    """Join rendering when Core Table objects are used interchangeably
    with mapped entities in Query.join()."""

    __dialect__ = "default"

    @testing.combinations(
        # entity -> entity
        (
            lambda sess, User, Address: sess.query(User).join(Address),
            "SELECT users.id AS users_id, users.name AS users_name FROM "
            "users JOIN addresses ON users.id = addresses.user_id",
        ),
        # raw table -> raw table, same SQL
        (
            lambda sess, user_table, address_table: sess.query(
                user_table
            ).join(address_table),
            "SELECT users.id AS users_id, users.name AS users_name FROM "
            "users JOIN addresses ON users.id = addresses.user_id",
        ),
        # entity chain with an outer join
        (
            lambda sess, User, Address, Order: sess.query(User)
            .outerjoin(Order)
            .join(Address),
            "SELECT users.id AS users_id, users.name AS users_name FROM "
            "users LEFT OUTER JOIN orders ON users.id = orders.user_id "
            "JOIN addresses ON addresses.id = orders.address_id",
        ),
        # raw-table chain with an outer join, same SQL
        (
            lambda sess, user_table, address_table, order_table: sess.query(
                user_table
            )
            .outerjoin(order_table)
            .join(address_table),
            "SELECT users.id AS users_id, users.name AS users_name FROM "
            "users LEFT OUTER JOIN orders ON users.id = orders.user_id "
            "JOIN addresses ON addresses.id = orders.address_id",
        ),
    )
    def test_join_render(self, spec, expected):
        User, Address, Order = self.classes("User", "Address", "Order")
        user_table, address_table, order_table = self.tables(
            "users", "addresses", "orders"
        )
        sess = fixture_session()

        q = testing.resolve_lambda(spec, **locals())

        self.assert_compile(q, expected)

        # the underlying select() statement compiles identically.
        self.assert_compile(
            q.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL).statement,
            expected,
        )

    def test_core_round_trip(self):
        user_table, address_table = self.tables("users", "addresses")

        sess = fixture_session()

        q = (
            sess.query(user_table)
            .join(address_table)
            .where(address_table.c.email_address.startswith("ed"))
        )

        eq_(q.all(), [(8, "ed"), (8, "ed"), (8, "ed")])
| true
| true
|
f71ab4470632fb3e14e414c8dba8614f764a6ebe
| 8,218
|
py
|
Python
|
bokeh_root_cmd/main.py
|
ideonate/bokeh-root-cmd
|
c26eee1414d3305749a8724b8740d9a4eaca0cf7
|
[
"Apache-2.0"
] | 1
|
2021-06-29T03:57:26.000Z
|
2021-06-29T03:57:26.000Z
|
bokeh_root_cmd/main.py
|
ideonate/bokeh-root-cmd
|
c26eee1414d3305749a8724b8740d9a4eaca0cf7
|
[
"Apache-2.0"
] | 4
|
2021-06-18T10:45:03.000Z
|
2021-09-13T22:12:45.000Z
|
bokeh_root_cmd/main.py
|
ideonate/bokeh-root-cmd
|
c26eee1414d3305749a8724b8740d9a4eaca0cf7
|
[
"Apache-2.0"
] | 2
|
2021-04-29T03:27:19.000Z
|
2021-09-13T21:44:39.000Z
|
"""Command line wrapper to serve one or more named Bokeh scripts or folders."""
import logging
import os
import re
import pathlib
import tempfile
from typing import Any, Dict, Tuple
import bokeh.server.views
import click
from bokeh.application.application import Application
from bokeh.command.util import build_single_handler_application
from bokeh.server.server import Server as _BkServer
from bokeh.server.views.root_handler import RootHandler
import logging
from .readycheck import create_ready_app
FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
logging.basicConfig(format=FORMAT)
root_logger = logging.getLogger()
root_logger.setLevel(logging.INFO)
logger = logging.getLogger('bokeh_root_cmd')
class BokehServer:
    """Serve one or more Bokeh applications, optionally behind a URL prefix.

    When a non-empty ``prefix`` is supplied, a patched copy of the index
    template is generated so that index-page links include the prefix while
    the apps themselves stay at un-prefixed routes (see _get_index_html).
    """

    def __init__(self, prefix=''):
        self.prefix = prefix
        if self.prefix != '':
            # Temp file holding the prefix-substituted index template;
            # created lazily in _get_index_html.
            self.html_file = None

    def __del__(self):
        # NamedTemporaryFile is removed on close; tie its lifetime to ours.
        if self.prefix != '' and self.html_file is not None:
            self.html_file.close()

    def _get_default_index_html(self):
        """Return the path of Bokeh's bundled app_index.html template."""
        return str(pathlib.Path(bokeh.server.views.__file__).parent / "app_index.html")

    def _get_index_html(self):
        """
        Where there is a prefix (e.g. /user/dan/dash-test) supplied, Bokeh/Panel's server doesn't work for us.
        It doesn't distinguish between server-side and client-side URLs.
        We want it to serve sub-apps at the URL /PanelNotebook
        (so accessible at /user/dan/dash-test/PanelNotebook behind the cdsdashboards reverse proxy)
        but for URLs on the index page to point the browser to /user/dan/dash-test/PanelNotebook.
        Setting prefix in Bokeh results in correct client-side behavior, but unhelpfully also
        serves at the prefix (So, combined with cdsdashboards reverse proxy it is only accessible at
        /user/dan/dash-test/user/dan/dash-test/PanelNotebook).
        """
        if hasattr(self, 'html_file'):
            if self.html_file is None:
                # First use: copy the template, substituting {{ prefix }}.
                self.html_file = tempfile.NamedTemporaryFile("wt", suffix='.html')
                with open(self._get_default_index_html(), "rt") as f:
                    for r in f.readlines():
                        r = re.sub(r'\{\{\s*prefix\s*\}\}', self.prefix, r)
                        self.html_file.write(r)
                self.html_file.flush()
            return self.html_file.name
        return self._get_default_index_html()

    @staticmethod
    def _get_server_class():
        return _BkServer

    @staticmethod
    def _make_app(command: str, url: str = "/", debug: bool = False) -> Application:
        """Build a single Bokeh Application from a script/notebook/folder path."""
        cwd_original = os.getcwd()

        # Command can be absolute, or could be relative to cwd
        app_py_path = os.path.join(os.getcwd(), command)

        if os.path.isdir(app_py_path):
            dirname = app_py_path
        else:
            dirname = os.path.dirname(app_py_path)

        if app_py_path == dirname:
            logger.debug("Fetching folder {}".format(app_py_path))
        else:
            logger.debug("Fetching script {}".format(app_py_path))

        if os.path.isdir(dirname):
            # Build from within the app's folder so its relative resources
            # (templates, data files) resolve correctly.
            logger.debug("Changing working dir to {}".format(dirname))
            os.chdir(dirname)

        app = build_single_handler_application(app_py_path, [url])

        os.chdir(cwd_original)
        logger.debug("Changing working dir back to {}".format(cwd_original))

        return app

    @classmethod
    def _is_single_app(cls, cmd: str):
        """
        Return True if the path specified in `cmd` is exactly one app: either a single py/ipynb file
        or a folder containing a main.py or main.ipynb file.
        """
        cmd_path = pathlib.Path(cmd)
        return cmd_path.is_file() or (cmd_path / "main.py").is_file() or (cmd_path / "main.ipynb").is_file()

    @classmethod
    def _get_applications(cls, command: Tuple[str], debug=False) -> Dict[str, Application]:
        """Map URL routes to Applications for every path in ``command``.

        A single app is served at "/"; otherwise every script/notebook gets
        its own route derived from its filename.
        """
        if len(command) == 1 and cls._is_single_app(command[0]):
            # BUGFIX: pass debug by keyword -- positionally it was being
            # consumed by _make_app's ``url`` parameter.
            return {"/": cls._make_app(command[0], debug=debug)}

        apps = {}
        for cmd in command:
            if cls._is_single_app(cmd):
                cmds = [cmd]
            else:
                # A folder holding multiple apps: serve each .py/.ipynb.
                cmd_path = pathlib.Path(cmd)
                cmds = list(cmd_path.glob("*.ipynb")) + list(cmd_path.glob("*.py"))
            for singlecmd in cmds:
                # BUGFIX: same keyword fix as above.
                application = cls._make_app(singlecmd, debug=debug)
                route = application.handlers[0].url_path()
                apps[route] = application
        return apps

    def _get_server_kwargs(self, port, ip, allow_websocket_origin, is_single_app) -> Dict[str, Any]:
        """Assemble the keyword arguments for the underlying Server."""
        server_kwargs = {"port": port, "ip": ip}
        if allow_websocket_origin:
            server_kwargs["allow_websocket_origin"] = list(allow_websocket_origin)
        if not is_single_app:
            # Multiple apps: serve an index page listing them at "/".
            index_html = self._get_index_html()
            logger.debug("Using HTML template %s", index_html)
            server_kwargs.update(
                {"use_index": True, "redirect_root": True, "index": index_html}
            )
        return server_kwargs

    def run(self, port, ip, debug, allow_websocket_origin, prefix, command):
        """Build the applications and run the server until shutdown."""
        logger.info("Starting %s", type(self).__name__)
        if debug:
            root_logger.setLevel(logging.DEBUG)

        logger.debug("ip = %s", ip)
        logger.debug("port = %s", port)
        logger.debug("debug = %s", debug)
        logger.debug("allow_websocket_origin = %s", allow_websocket_origin)
        logger.debug("prefix = %s", prefix)
        logger.debug("command = %s", command)

        applications = self._get_applications(command, debug)
        applications["/ready-check"] = create_ready_app()
        logger.debug("applications = %s", list(applications.keys()))

        # <= 2 because the ready-check app has just been added alongside a
        # possible single user app.
        server_kwargs = self._get_server_kwargs(port, ip, allow_websocket_origin, len(applications) <= 2)
        if debug:
            server_kwargs["log_level"] = "debug"
            server_kwargs["log_format"] = FORMAT
        logger.debug("server_kwargs = %s", server_kwargs)

        server = self._get_server_class()(applications, **server_kwargs)
        server.run_until_shutdown()
class PanelServer(BokehServer):
    """BokehServer variant backed by Panel's server and index template."""

    @staticmethod
    def _get_server_class():
        # Imported lazily so plain-Bokeh usage never requires panel.
        from panel.io.server import Server
        return Server

    def _get_default_index_html(self):
        from panel.io.server import INDEX_HTML
        return INDEX_HTML
@click.command()
@click.option("--port", default=8888, type=click.INT, help="port for the proxy server to listen on")
@click.option("--ip", default=None, help="Address to listen on")
@click.option(
    "--allow-websocket-origin", default=None, multiple=True, help="Web socket origins allowed"
)
@click.option("--debug/--no-debug", default=False, help="To display debug level logs")
@click.option(
    "--server", default="bokeh", type=click.STRING, help="The server to use. One of bokeh or panel. Default is bokeh."
)
@click.option(
    # BUGFIX: help text was truncated ("URL prefix (for").
    "--prefix", default="", type=click.STRING, help="URL prefix (for serving behind a reverse proxy)"
)
@click.argument("command", nargs=-1, required=True)
def run(port, ip, debug, allow_websocket_origin, server, prefix, command):
    """Start a Bokeh or Panel server hosting the given script(s)/folder(s)."""
    # Anything other than "panel" falls back to the plain Bokeh server.
    if server == "panel":
        server = PanelServer(prefix)
    else:
        server = BokehServer(prefix)

    server.run(
        port=port,
        ip=ip,
        debug=debug,
        allow_websocket_origin=allow_websocket_origin,
        prefix=prefix,
        command=command,
    )
# Bokeh/ Panel can serve an index page with a list of applications at "/"
# The below is a workaround to avoid including the 'ready-check' application
def _root_handler_initialize_without_ready_check(self, *args, **kw):
kw["applications"]=kw["applications"].copy()
if "/ready-check" in kw["applications"]:
kw["applications"].pop("/ready-check")
self.applications = kw["applications"]
self.prefix = kw["prefix"]
self.index = kw["index"]
self.use_redirect = kw["use_redirect"]
# Monkeypatch Bokeh's RootHandler so the index page never lists /ready-check.
RootHandler.initialize = _root_handler_initialize_without_ready_check
if __name__ == "__main__":
try:
run()
except SystemExit as se:
logger.error("Caught SystemExit {}".format(se))
| 35.886463
| 118
| 0.643344
|
import logging
import os
import re
import pathlib
import tempfile
from typing import Any, Dict, Tuple
import bokeh.server.views
import click
from bokeh.application.application import Application
from bokeh.command.util import build_single_handler_application
from bokeh.server.server import Server as _BkServer
from bokeh.server.views.root_handler import RootHandler
import logging
from .readycheck import create_ready_app
FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
logging.basicConfig(format=FORMAT)
root_logger = logging.getLogger()
root_logger.setLevel(logging.INFO)
logger = logging.getLogger('bokeh_root_cmd')
class BokehServer:
    """Serves one or more Bokeh applications behind a single Tornado server.

    Parameters
    ----------
    prefix : str
        URL prefix under which the apps are served (e.g. behind a reverse
        proxy).  When non-empty, the bundled index template is copied to a
        temp file with its ``{{ prefix }}`` placeholders substituted.
    """

    def __init__(self, prefix=''):
        self.prefix = prefix
        # Lazily-created temp file holding the prefix-substituted index page.
        # Always defined, so __del__/_get_index_html need no hasattr checks.
        self.html_file = None

    def __del__(self):
        # Release the temp file backing the rewritten index template, if any.
        if self.html_file is not None:
            self.html_file.close()

    def _get_default_index_html(self):
        """Return the path of the stock multi-app index template shipped with Bokeh."""
        return str(pathlib.Path(bokeh.server.views.__file__).parent / "app_index.html")

    def _get_index_html(self):
        """Return the index template path, rewriting ``{{ prefix }}`` if a prefix is set."""
        if self.prefix:
            if self.html_file is None:
                self.html_file = tempfile.NamedTemporaryFile("wt", suffix='.html')
                with open(self._get_default_index_html(), "rt") as template:
                    for line in template.readlines():
                        line = re.sub(r'\{\{\s*prefix\s*\}\}', self.prefix, line)
                        self.html_file.write(line)
                self.html_file.flush()
            return self.html_file.name
        return self._get_default_index_html()

    @staticmethod
    def _get_server_class():
        return _BkServer

    @staticmethod
    def _make_app(command: str, url: str = "/", debug: bool = False) -> Application:
        """Build a single Bokeh Application from a script, notebook, or app folder.

        Temporarily switches the working directory to the app's folder so
        relative resources resolve, then restores it — even on failure.
        """
        cwd_original = os.getcwd()
        app_py_path = os.path.join(os.getcwd(), command)
        if os.path.isdir(app_py_path):
            dirname = app_py_path
        else:
            dirname = os.path.dirname(app_py_path)
        if app_py_path == dirname:
            logger.debug("Fetching folder {}".format(app_py_path))
        else:
            logger.debug("Fetching script {}".format(app_py_path))

        if os.path.isdir(dirname):
            logger.debug("Changing working dir to {}".format(dirname))
            os.chdir(dirname)
        try:
            app = build_single_handler_application(app_py_path, [url])
        finally:
            # BUG FIX: restore the cwd even if building the handler raises;
            # previously an exception left the process in the app directory.
            os.chdir(cwd_original)
            logger.debug("Changing working dir back to {}".format(cwd_original))
        return app

    @classmethod
    def _is_single_app(cls, cmd: str):
        """True if *cmd* points at one runnable app (script, notebook, or app dir)."""
        cmd_path = pathlib.Path(cmd)
        return cmd_path.is_file() or (cmd_path / "main.py").is_file() or (cmd_path / "main.ipynb").is_file()

    @classmethod
    def _get_applications(cls, command: Tuple[str], debug=False) -> Dict[str, Application]:
        """Map URL routes to Applications for every command/folder given."""
        if len(command) == 1 and cls._is_single_app(command[0]):
            # BUG FIX: `debug` was passed positionally and landed in the `url`
            # parameter of _make_app; pass it by keyword instead.
            return {"/": cls._make_app(command[0], debug=debug)}

        apps = {}
        for cmd in command:
            if cls._is_single_app(cmd):
                cmds = [cmd]
            else:
                # A folder of independent apps: serve every notebook/script in it.
                cmd_path = pathlib.Path(cmd)
                cmds = list(cmd_path.glob("*.ipynb")) + list(cmd_path.glob("*.py"))
            for singlecmd in cmds:
                application = cls._make_app(singlecmd, debug=debug)  # BUG FIX: keyword arg
                route = application.handlers[0].url_path()
                apps[route] = application
        return apps

    def _get_server_kwargs(self, port, ip, allow_websocket_origin, is_single_app) -> Dict[str, Any]:
        """Assemble the keyword arguments for the underlying Bokeh/Panel Server."""
        server_kwargs = {"port": port, "ip": ip}
        if allow_websocket_origin:
            server_kwargs["allow_websocket_origin"] = list(allow_websocket_origin)
        if not is_single_app:
            # Multiple apps: serve an index page listing them at "/".
            index_html = self._get_index_html()
            logger.debug("Using HTML template %s", index_html)
            server_kwargs.update(
                {"use_index": True, "redirect_root": True, "index": index_html}
            )
        return server_kwargs

    def run(self, port, ip, debug, allow_websocket_origin, prefix, command):
        """Build all applications (plus /ready-check) and serve until shutdown."""
        logger.info("Starting %s", type(self).__name__)
        if debug:
            root_logger.setLevel(logging.DEBUG)

        logger.debug("ip = %s", ip)
        logger.debug("port = %s", port)
        logger.debug("debug = %s", debug)
        logger.debug("allow_websocket_origin = %s", allow_websocket_origin)
        logger.debug("prefix = %s", prefix)
        logger.debug("command = %s", command)

        applications = self._get_applications(command, debug)
        applications["/ready-check"] = create_ready_app()
        logger.debug("applications = %s", list(applications.keys()))

        # <= 2 because the ready-check app always adds one extra route.
        server_kwargs = self._get_server_kwargs(port, ip, allow_websocket_origin, len(applications) <= 2)
        if debug:
            server_kwargs["log_level"] = "debug"
            server_kwargs["log_format"] = FORMAT
        logger.debug("server_kwargs = %s", server_kwargs)

        server = self._get_server_class()(applications, **server_kwargs)
        server.run_until_shutdown()
class PanelServer(BokehServer):
    """BokehServer variant that serves applications through Panel's server."""

    @staticmethod
    def _get_server_class():
        # Imported lazily so plain-Bokeh usage never requires panel.
        from panel.io.server import Server
        return Server

    def _get_default_index_html(self):
        # Panel ships its own multi-app index template.
        from panel.io.server import INDEX_HTML
        return INDEX_HTML
@click.command()
@click.option("--port", default=8888, type=click.INT, help="port for the proxy server to listen on")
@click.option("--ip", default=None, help="Address to listen on")
@click.option(
    "--allow-websocket-origin", default=None, multiple=True, help="Web socket origins allowed"
)
@click.option("--debug/--no-debug", default=False, help="To display debug level logs")
@click.option(
    "--server", default="bokeh", type=click.STRING, help="The server to use. One of bokeh or panel. Default is bokeh."
)
@click.option(
    # BUG FIX: the help text was truncated mid-sentence ("URL prefix (for").
    "--prefix", default="", type=click.STRING, help="URL prefix (for serving behind a reverse proxy)"
)
@click.argument("command", nargs=-1, required=True)
def run(port, ip, debug, allow_websocket_origin, server, prefix, command):
    """CLI entry point: serve COMMAND app(s) with a Bokeh or Panel server.

    Picks the server implementation from --server (anything other than
    "panel" falls back to plain Bokeh) and forwards the remaining options
    to its run() method.
    """
    if server == "panel":
        server = PanelServer(prefix)
    else:
        server = BokehServer(prefix)
    server.run(
        port=port,
        ip=ip,
        debug=debug,
        allow_websocket_origin=allow_websocket_origin,
        prefix=prefix,
        command=command,
    )
def _root_handler_initialize_without_ready_check(self, *args, **kw):
kw["applications"]=kw["applications"].copy()
if "/ready-check" in kw["applications"]:
kw["applications"].pop("/ready-check")
self.applications = kw["applications"]
self.prefix = kw["prefix"]
self.index = kw["index"]
self.use_redirect = kw["use_redirect"]
# Monkey-patch Bokeh's RootHandler so the "/" index page omits /ready-check.
RootHandler.initialize = _root_handler_initialize_without_ready_check
if __name__ == "__main__":
    try:
        run()
    except SystemExit as caught:
        # click raises SystemExit for usage errors and --help; log it so the
        # event is visible (note: it is swallowed, not re-raised).
        logger.error("Caught SystemExit {}".format(caught))
| true
| true
|
f71ab48c915466e77fb663ba45f13600446b8c5f
| 1,481
|
py
|
Python
|
invoices/api/viewsets.py
|
elcolie/zero-to-deploy
|
6191a33ef55af7c550c0e529a4e373bfe40bc014
|
[
"MIT"
] | null | null | null |
invoices/api/viewsets.py
|
elcolie/zero-to-deploy
|
6191a33ef55af7c550c0e529a4e373bfe40bc014
|
[
"MIT"
] | 6
|
2020-06-05T19:09:26.000Z
|
2022-01-13T00:54:56.000Z
|
invoices/api/viewsets.py
|
elcolie/zero-to-deploy
|
6191a33ef55af7c550c0e529a4e373bfe40bc014
|
[
"MIT"
] | null | null | null |
from django_filters import rest_framework as filters
from rest_framework import viewsets
from rest_framework.filters import SearchFilter, OrderingFilter
from rest_framework.permissions import IsAuthenticated, BasePermission
from invoices.api.serializers import InvoiceSerializer
from invoices.models import Invoice
class IsStaffPermission(BasePermission):
    """DRF permission that grants access only to staff users."""

    def has_permission(self, request, view):
        # Delegate entirely to the user's staff flag.
        requesting_user = request.user
        return requesting_user.is_staff
class InvoiceFilter(filters.FilterSet):
    """Filter set for invoice list endpoints.

    Offers case-insensitive partial matches on the ordering customer's
    username/first name and "on or after" filters for the timestamps.
    """
    # NOTE(review): django-filter 2.0 renamed `name=` to `field_name=`;
    # confirm the pinned django-filter version still accepts `name`.
    customer_username = filters.CharFilter(name='order__customer__username', lookup_expr='icontains')
    customer_first_name = filters.CharFilter(name='order__customer__first_name', lookup_expr='icontains')
    # 'gte': include invoices created/updated at or after the given datetime.
    created_at = filters.DateTimeFilter(name='created_at', lookup_expr='gte')
    updated_at = filters.DateTimeFilter(name='updated_at', lookup_expr='gte')
    class Meta:
        model = Invoice
        # Query parameters exposed to API clients.
        fields = [
            'customer_username',
            'customer_first_name',
            'created_at',
            'updated_at',
        ]
class InvoiceViewSet(viewsets.ModelViewSet):
    """CRUD API for invoices, restricted to authenticated staff users."""
    # Both checks must pass: the requester is logged in AND is_staff.
    permission_classes = (IsAuthenticated, IsStaffPermission)
    queryset = Invoice.objects.all()
    serializer_class = InvoiceSerializer
    # Field filtering, free-text search, and ?ordering= support.
    filter_backends = (filters.DjangoFilterBackend, SearchFilter, OrderingFilter)
    # NOTE(review): django-filter 2.0 renamed `filter_class` to
    # `filterset_class`; verify against the pinned version.
    filter_class = InvoiceFilter
    # ?search= matches against the ordering customer's name fields.
    search_fields = (
        'order__customer__username',
        'order__customer__first_name',
        'order__customer__last_name',
    )
| 35.261905
| 105
| 0.748143
|
from django_filters import rest_framework as filters
from rest_framework import viewsets
from rest_framework.filters import SearchFilter, OrderingFilter
from rest_framework.permissions import IsAuthenticated, BasePermission
from invoices.api.serializers import InvoiceSerializer
from invoices.models import Invoice
class IsStaffPermission(BasePermission):
    """Permission class allowing only staff members through."""

    def has_permission(self, request, view):
        # Access hinges solely on the staff flag of the requesting user.
        return bool(request.user.is_staff)
class InvoiceFilter(filters.FilterSet):
    """Filter set for invoice list endpoints (customer name + timestamp lower bounds)."""
    # NOTE(review): `name=` became `field_name=` in django-filter 2.0;
    # confirm the pinned version still accepts `name`.
    customer_username = filters.CharFilter(name='order__customer__username', lookup_expr='icontains')
    customer_first_name = filters.CharFilter(name='order__customer__first_name', lookup_expr='icontains')
    # 'gte': invoices created/updated at or after the supplied datetime.
    created_at = filters.DateTimeFilter(name='created_at', lookup_expr='gte')
    updated_at = filters.DateTimeFilter(name='updated_at', lookup_expr='gte')
    class Meta:
        model = Invoice
        # Query parameters exposed to API clients.
        fields = [
            'customer_username',
            'customer_first_name',
            'created_at',
            'updated_at',
        ]
class InvoiceViewSet(viewsets.ModelViewSet):
    """Invoice CRUD endpoints; access limited to authenticated staff."""
    # Requester must be logged in AND flagged as staff.
    permission_classes = (IsAuthenticated, IsStaffPermission)
    queryset = Invoice.objects.all()
    serializer_class = InvoiceSerializer
    # Enables field filters, ?search=, and ?ordering=.
    filter_backends = (filters.DjangoFilterBackend, SearchFilter, OrderingFilter)
    # NOTE(review): `filter_class` was renamed `filterset_class` in
    # django-filter 2.0; verify against the pinned version.
    filter_class = InvoiceFilter
    # Free-text search over the ordering customer's name fields.
    search_fields = (
        'order__customer__username',
        'order__customer__first_name',
        'order__customer__last_name',
    )
| true
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.