hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f71a4d41c019c5c22fe4a775dccecbf2510b5ece | 7,159 | py | Python | flash/image/classification/integrations/baal/loop.py | twsl/lightning-flash | 9927853ac23551b444dbe969e287879c69be4094 | [
"Apache-2.0"
] | null | null | null | flash/image/classification/integrations/baal/loop.py | twsl/lightning-flash | 9927853ac23551b444dbe969e287879c69be4094 | [
"Apache-2.0"
] | null | null | null | flash/image/classification/integrations/baal/loop.py | twsl/lightning-flash | 9927853ac23551b444dbe969e287879c69be4094 | [
"Apache-2.0"
] | null | null | null | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from copy import deepcopy
from typing import Any, Dict, Optional
import torch
from pytorch_lightning.loops import Loop
from pytorch_lightning.loops.fit_loop import FitLoop
from pytorch_lightning.trainer.connectors.data_connector import _PatchDataLoader
from pytorch_lightning.trainer.progress import Progress
from pytorch_lightning.trainer.states import TrainerFn, TrainerStatus
import flash
from flash.core.data.utils import _STAGES_PREFIX
from flash.core.utilities.imports import requires
from flash.core.utilities.stages import RunningStage
from flash.image.classification.integrations.baal.data import ActiveLearningDataModule
from flash.image.classification.integrations.baal.dropout import InferenceMCDropoutTask
class ActiveLearningLoop(Loop):
@requires("baal")
def __init__(self, label_epoch_frequency: int, inference_iteration: int = 2, should_reset_weights: bool = True):
"""The `ActiveLearning Loop` describes the following training procedure. This loop is connected with the
`ActiveLearningTrainer`
Example::
while unlabelled data or budget critera not reached:
if labelled data
trainer.fit(model, labelled data)
if unlabelled data:
predictions = trainer.predict(model, unlabelled data)
uncertainties = heuristic(predictions)
request labellelisation for the sample with highest uncertainties under a given budget
Args:
label_epoch_frequency: Number of epoch to train on before requesting labellisation.
inference_iteration: Number of inference to perform to compute uncertainty.
"""
super().__init__()
self.label_epoch_frequency = label_epoch_frequency
self.inference_iteration = inference_iteration
self.should_reset_weights = should_reset_weights
self.fit_loop: Optional[FitLoop] = None
self.progress = Progress()
self._model_state_dict: Optional[Dict[str, torch.Tensor]] = None
self._lightning_module: Optional[flash.Task] = None
@property
def done(self) -> bool:
return self.progress.current.completed >= self.max_epochs
def connect(self, fit_loop: FitLoop):
self.fit_loop = fit_loop
self.max_epochs = self.fit_loop.max_epochs
self.fit_loop.max_epochs = self.label_epoch_frequency
def on_run_start(self, *args: Any, **kwargs: Any) -> None:
assert isinstance(self.trainer.datamodule, ActiveLearningDataModule)
self.trainer.predict_loop._return_predictions = True
self._lightning_module = self.trainer.lightning_module
self._model_state_dict = deepcopy(self._lightning_module.state_dict())
self.inference_model = InferenceMCDropoutTask(self._lightning_module, self.inference_iteration)
def reset(self) -> None:
pass
def on_advance_start(self, *args: Any, **kwargs: Any) -> None:
if self.trainer.datamodule.has_labelled_data:
self._reset_dataloader_for_stage(RunningStage.TRAINING)
self._reset_dataloader_for_stage(RunningStage.VALIDATING)
if self.trainer.datamodule.has_test:
self._reset_dataloader_for_stage(RunningStage.TESTING)
if self.trainer.datamodule.has_unlabelled_data:
self._reset_dataloader_for_stage(RunningStage.PREDICTING)
self.progress.increment_ready()
def advance(self, *args: Any, **kwargs: Any) -> None:
self.progress.increment_started()
if self.trainer.datamodule.has_labelled_data:
self.fit_loop.run()
if self.trainer.datamodule.has_test:
self._reset_testing()
metrics = self.trainer.test_loop.run()
if metrics:
self.trainer.logger.log_metrics(metrics[0], step=self.trainer.global_step)
if self.trainer.datamodule.has_unlabelled_data:
self._reset_predicting()
probabilities = self.trainer.predict_loop.run()
self.trainer.datamodule.label(probabilities=probabilities)
else:
raise StopIteration
self._reset_fitting()
self.progress.increment_processed()
def on_advance_end(self) -> None:
if self.trainer.datamodule.has_unlabelled_data and self.should_reset_weights:
# reload the weights to retrain from scratch with the new labelled data.
self._lightning_module.load_state_dict(self._model_state_dict)
self.progress.increment_completed()
return super().on_advance_end()
def on_run_end(self):
self._reset_fitting()
return super().on_run_end()
def on_save_checkpoint(self) -> Dict:
return {"datamodule_state_dict": self.trainer.datamodule.state_dict()}
def on_load_checkpoint(self, state_dict) -> None:
self.trainer.datamodule.load_state_dict(state_dict.pop("datamodule_state_dict"))
def __getattr__(self, key):
if key not in self.__dict__:
return getattr(self.fit_loop, key)
return self.__dict__[key]
def _reset_fitting(self):
self.trainer.state.fn = TrainerFn.FITTING
self.trainer.training = True
self.trainer.lightning_module.on_train_dataloader()
self.trainer.accelerator.connect(self._lightning_module)
self.fit_loop.epoch_progress = Progress()
def _reset_predicting(self):
self.trainer.state.fn = TrainerFn.PREDICTING
self.trainer.predicting = True
self.trainer.lightning_module.on_predict_dataloader()
self.trainer.accelerator.connect(self.inference_model)
def _reset_testing(self):
self.trainer.state.fn = TrainerFn.TESTING
self.trainer.state.status = TrainerStatus.RUNNING
self.trainer.testing = True
self.trainer.lightning_module.on_test_dataloader()
self.trainer.accelerator.connect(self._lightning_module)
def _reset_dataloader_for_stage(self, running_state: RunningStage):
dataloader_name = f"{_STAGES_PREFIX[running_state]}_dataloader"
# If the dataloader exists, we reset it.
dataloader = getattr(self.trainer.datamodule, dataloader_name, None)
if dataloader:
setattr(
self.trainer.lightning_module,
dataloader_name,
_PatchDataLoader(dataloader(), running_state),
)
setattr(self.trainer, dataloader_name, None)
getattr(self.trainer, f"reset_{dataloader_name}")(self.trainer.lightning_module)
| 42.613095 | 116 | 0.70778 |
from copy import deepcopy
from typing import Any, Dict, Optional
import torch
from pytorch_lightning.loops import Loop
from pytorch_lightning.loops.fit_loop import FitLoop
from pytorch_lightning.trainer.connectors.data_connector import _PatchDataLoader
from pytorch_lightning.trainer.progress import Progress
from pytorch_lightning.trainer.states import TrainerFn, TrainerStatus
import flash
from flash.core.data.utils import _STAGES_PREFIX
from flash.core.utilities.imports import requires
from flash.core.utilities.stages import RunningStage
from flash.image.classification.integrations.baal.data import ActiveLearningDataModule
from flash.image.classification.integrations.baal.dropout import InferenceMCDropoutTask
class ActiveLearningLoop(Loop):
@requires("baal")
def __init__(self, label_epoch_frequency: int, inference_iteration: int = 2, should_reset_weights: bool = True):
super().__init__()
self.label_epoch_frequency = label_epoch_frequency
self.inference_iteration = inference_iteration
self.should_reset_weights = should_reset_weights
self.fit_loop: Optional[FitLoop] = None
self.progress = Progress()
self._model_state_dict: Optional[Dict[str, torch.Tensor]] = None
self._lightning_module: Optional[flash.Task] = None
@property
def done(self) -> bool:
return self.progress.current.completed >= self.max_epochs
def connect(self, fit_loop: FitLoop):
self.fit_loop = fit_loop
self.max_epochs = self.fit_loop.max_epochs
self.fit_loop.max_epochs = self.label_epoch_frequency
def on_run_start(self, *args: Any, **kwargs: Any) -> None:
assert isinstance(self.trainer.datamodule, ActiveLearningDataModule)
self.trainer.predict_loop._return_predictions = True
self._lightning_module = self.trainer.lightning_module
self._model_state_dict = deepcopy(self._lightning_module.state_dict())
self.inference_model = InferenceMCDropoutTask(self._lightning_module, self.inference_iteration)
def reset(self) -> None:
pass
def on_advance_start(self, *args: Any, **kwargs: Any) -> None:
if self.trainer.datamodule.has_labelled_data:
self._reset_dataloader_for_stage(RunningStage.TRAINING)
self._reset_dataloader_for_stage(RunningStage.VALIDATING)
if self.trainer.datamodule.has_test:
self._reset_dataloader_for_stage(RunningStage.TESTING)
if self.trainer.datamodule.has_unlabelled_data:
self._reset_dataloader_for_stage(RunningStage.PREDICTING)
self.progress.increment_ready()
def advance(self, *args: Any, **kwargs: Any) -> None:
self.progress.increment_started()
if self.trainer.datamodule.has_labelled_data:
self.fit_loop.run()
if self.trainer.datamodule.has_test:
self._reset_testing()
metrics = self.trainer.test_loop.run()
if metrics:
self.trainer.logger.log_metrics(metrics[0], step=self.trainer.global_step)
if self.trainer.datamodule.has_unlabelled_data:
self._reset_predicting()
probabilities = self.trainer.predict_loop.run()
self.trainer.datamodule.label(probabilities=probabilities)
else:
raise StopIteration
self._reset_fitting()
self.progress.increment_processed()
def on_advance_end(self) -> None:
if self.trainer.datamodule.has_unlabelled_data and self.should_reset_weights:
self._lightning_module.load_state_dict(self._model_state_dict)
self.progress.increment_completed()
return super().on_advance_end()
def on_run_end(self):
self._reset_fitting()
return super().on_run_end()
def on_save_checkpoint(self) -> Dict:
return {"datamodule_state_dict": self.trainer.datamodule.state_dict()}
def on_load_checkpoint(self, state_dict) -> None:
self.trainer.datamodule.load_state_dict(state_dict.pop("datamodule_state_dict"))
def __getattr__(self, key):
if key not in self.__dict__:
return getattr(self.fit_loop, key)
return self.__dict__[key]
def _reset_fitting(self):
self.trainer.state.fn = TrainerFn.FITTING
self.trainer.training = True
self.trainer.lightning_module.on_train_dataloader()
self.trainer.accelerator.connect(self._lightning_module)
self.fit_loop.epoch_progress = Progress()
def _reset_predicting(self):
self.trainer.state.fn = TrainerFn.PREDICTING
self.trainer.predicting = True
self.trainer.lightning_module.on_predict_dataloader()
self.trainer.accelerator.connect(self.inference_model)
def _reset_testing(self):
self.trainer.state.fn = TrainerFn.TESTING
self.trainer.state.status = TrainerStatus.RUNNING
self.trainer.testing = True
self.trainer.lightning_module.on_test_dataloader()
self.trainer.accelerator.connect(self._lightning_module)
def _reset_dataloader_for_stage(self, running_state: RunningStage):
dataloader_name = f"{_STAGES_PREFIX[running_state]}_dataloader"
dataloader = getattr(self.trainer.datamodule, dataloader_name, None)
if dataloader:
setattr(
self.trainer.lightning_module,
dataloader_name,
_PatchDataLoader(dataloader(), running_state),
)
setattr(self.trainer, dataloader_name, None)
getattr(self.trainer, f"reset_{dataloader_name}")(self.trainer.lightning_module)
| true | true |
f71a4e72b400b79bef2912f5ab1fa11a4cf0e50a | 15,580 | py | Python | GUI_functions/build_asp.py | AntonAlbertovich/Eusocial-Cluster-Utility | fef4f583b6151bb40e54d6825d65d668581c2121 | [
"MIT"
] | 2 | 2019-03-22T15:08:31.000Z | 2019-03-23T20:10:40.000Z | GUI_functions/build_asp.py | AntonAlbertovich/Eusocial-Cluster-Utility | fef4f583b6151bb40e54d6825d65d668581c2121 | [
"MIT"
] | 1 | 2019-03-23T20:08:12.000Z | 2019-03-23T20:08:12.000Z | GUI_functions/build_asp.py | AntonAlbertovich/Eusocial-Cluster-Utility | fef4f583b6151bb40e54d6825d65d668581c2121 | [
"MIT"
] | 1 | 2019-03-23T19:56:07.000Z | 2019-03-23T19:56:07.000Z | import pickle
# This is the script which builds a ASP model intended to be solved with clingo.
# This program has been test on Ubuntu 18.04 and CentOS 7.
# Using Clingo 5.3.0 installed via Conda
# Parsing the output of this program will require clyngor-with-clingo which may be installed via pip.
if __name__ == "__main__":
input_file= open("GUI_functions/Cluster_details.bin", "rb")
# Loads the data structure for the machines in the cluster.
all_macs= list(pickle.load(input_file))
input_file.close()
input_file= open("GUI_functions/Tasks_details.bin", "rb")
# Loads the data structure for the tasks of the cluster.
all_jobs= list(pickle.load(input_file))
input_file.close()
asp_file = open("GUI_functions/asp.lp", 'w')
# The program asp.lp is made.
asp_file.write("#include <incmode>. \n")
asp_file.write("#program base. \n")
asp_file.write("% A dynamically generated program.\n")
asp_file.write("% Made by build_asp.py using the data structures stored in Cluster_details.bin and Tasks_details.bin\n")
asp_file.write("% Define the fluents of the program. \n")
# this section writes a header to the asp.lp file.
asp_file.write("status(-done).\n")
asp_file.write("status(done).\n")
asp_file.write("location(home).\n")
asp_file.write("% location() describes the individual nodes/machines of a cluster. \n")
asp_file.write("% home is the ECU master directory on one machine in a given cluster. \n")
asp_file.write("% The machine which home is on is assumed to be directly connected to all other machines in the cluster. \n")
# Comment section detailing location and home.
for i in range(len(all_macs)):
mac = all_macs[i][0]
mac.replace(" ", "")
mac.lower()
asp_file.write("location("+mac+").\n")
for i in range(len(all_macs)):
# In the Cluster_details data structure there exists the detials pertaining to which machines are networked to other machines.
# In this loop this data is used to build a model of the cluster's network in asp.lp.
mac = all_macs[i][0]
mac.replace(" ", "")
mac.lower()
asp_file.write("connection(home, "+mac+").\n")
# Here home is connected to all the machines in the cluster.
for j in range(len(all_macs[i][2])):
mac1 = all_macs[i][2][j]
mac1.replace(" ", "")
mac1.lower()
asp_file.write("connection("+mac+", "+mac1+").\n")
# Here the connection for each machine is modeled.
# At this time ECU does not assume two way edge connection.
# The graph representing the network of a cluster is thus a directed graph.
# This is a core featur of ECU.
asp_file.write("holds(F,0) :- init(F).\n")
asp_file.write("#program step(t).\n")
asp_file.write("{ move(X,Y,t) : task(X), location(Y)} :- holds(on(X,M),t-1), connection(M, Y).\n")
asp_file.write("0{ turn(X,Y,t)}1 :- status(Y), task(X), holds(on(X,Z),t), valid_on(X, Z).\n")
asp_file.write(":- move(X,Y,t), holds(on(X,Y1),t-1), Y == home.\n")
asp_file.write("% Programs can not be moved back into the home directory.\n")
asp_file.write(":- turn(X,Y,t), holds(at(X,done),t-1).\n")
asp_file.write("% Programs can not be executed if they are already complete.\n")
asp_file.write(":- turn(X,Y,t), holds(on(X,M),t), depends_on(X, X1), not holds(on(X1,M),t).\n")
# Comments detailing limits of move and turn.
asp_file.write("moved(X,t) :- move(X,Y,t).\n")
asp_file.write("% moved() indicated what task X was moved at turn t.\n")
# Comment detailing moved()
asp_file.write("turned(X,t) :- turn(X, Y, t).\n")
asp_file.write("% turn() indicated what task X was executed at what turn t.\n")
# Comment detailing turn()
asp_file.write("turned_at(X, M, t) :- turned(X, t), holds(on(X,M),t).\n")
asp_file.write("% turned_at() indicated what task X was executed at Machine M at what turn t.\n")
# Comment detailing turned_at()
asp_file.write("turned_with_2(M, X, X1, Z, t) :- turned(X,t), holds(on(X,M),t), thread_cost(X, C), turned(X1,t), holds(on(X1,M),t), thread_cost(X1, C1), X != X1, Z = C + C1.\n")
asp_file.write("turned_with_3(M, X, X1, X2, Z, t) :- turned(X,t), holds(on(X,M),t), thread_cost(X, C), turned_with_2(M, X1, X2, C1, t), X != X1, X != X2, Z = C + C1.\n")
asp_file.write("turned_with_4(M, X, X1, X2, X3, Z, t) :- turned(X,t), holds(on(X,M),t), thread_cost(X, C), turned_with_3(M, X1, X2, X3, C1, t), X != X1, X != X2, X != X3, Z = C + C1.\n")
asp_file.write(":- turned_with_2(M, X, X1, Z, t), machine_threads(M, C), Z > C.\n")
asp_file.write(":- turned_with_3(M, X, X1, X2, Z, t), machine_threads(M, C), Z > C.\n")
asp_file.write(":- turned_with_4(M, X, X1, X2, X3, Z, t), machine_threads(M, C), Z > C.\n")
asp_file.write(":- turned(X,t), holds(on(X,M),t), thread_cost(X, C), turned_with_4(M, X1, X2, X3, X4, C1, t), X != X1, X != X2, X != X3, X != X4.\n")
asp_file.write("% These rules allow for up to 4 task to be ran in parrallel on any one machine at a time, \n")
asp_file.write("% if and only if the sum of the thread cost of said tasks does not add up to a number greater than \n")
asp_file.write("% the core count of said machine. \n")
# Comment section detailing the rules which allow for parrallel taks execution on a machine while preventing an overloading of a the machine's multi-threading capabilities.
asp_file.write(":- turned_at(X, M, t), cuda_not_on(M), cuda_needed(X).\n")
asp_file.write(":- turned_at(X, M, t), spacy_not_on(M), spacy_needed(X).\n")
asp_file.write(":- turned_at(X, M, t), psutil_not_on(M), psutil_needed(X).\n")
asp_file.write(":- turned_at(X, M, t), clingo_not_on(M), clingo_needed(X).\n")
asp_file.write("% This section will prevent a program which requires a given toolkit from being scheduled to run on a machine\n")
asp_file.write("% which does not have said toolkit.\n")
asp_file.write(":- move(X, Z, Y1), turned(X, Y2), Y1 == Y2.\n")
asp_file.write(":- move(X, Z, Y1), move(X, Z, Y2), Y1 != Y2.\n")
asp_file.write(":- move(X, Z, T1), turned(X,T2), T1 > T2, nobody_depends_on(X).\n")
asp_file.write("% A program can not be moved and executed at the same time.\n")
# This section may not needed as there is nothing wrong with creating duplicates of completed programs so long as they are needed.
#asp_file.write(":- move(X, Z1, Y), move(X, Z2, Y), Z1 != Z2.\n")
#asp_file.write("% A program can not be moved to two different locations at the same time.\n")
# By preventing a program from being moved to two different locations at the same time we prevent duplicates of programs from existing and proliferating throughout the system.
asp_file.write(":- turned(X1, T1), turned(X2, T2), depends_on(X2, X1), T1 >= T2, moved(X2,T).\n")
asp_file.write("% A program can executed before all of it's dependencies.\n")
asp_file.write("holds(on(X,Y),t) :- move(X,Y,t).\n")
asp_file.write("holds(on(X,Z),t) :- holds(on(X,Z),t-1), not moved(X,t).\n")
asp_file.write("holds(at(X,Y),t) :- turn(X,Y,t).\n")
asp_file.write("holds(at(X,Z),t) :- holds(at(X,Z),t-1), not turned(X,t).\n")
asp_file.write("valid_on(X, Y) :- thread_cost(X, Z1), machine_threads(Y, Z2), Z1 <= Z2.\n")
asp_file.write(":- os_needed(X, S), turned_at(X, M, t), not os_on(M, S), not -os_needed(X).\n")
asp_file.write(":- holds(on(X,M1),t), holds(on(X,M2),t), M1 != M2, holds(at(X,-done),t).\n")
asp_file.write("% A program can not be duplicated if it has not been executed.\n")
asp_file.write(":- holds(on(X,M1),t), holds(on(X,M2),t), M1 != M2, task(X1), task(X2), not depends_on(X1, X), not depends_on(X2, X), X1 != X2, turned_at(X1, M1, T1), turned_at(X2, M2, T2).\n")
asp_file.write("% A program can not be dupllicated if it is not the dependecy of at least two different later programs which are executed on atleast two diffent machines.\n")
# This prevents the over-duplication of dependencies.
# Given that sending programs is exspensive, limiting this process must be a priority.
asp_file.write("% An unfinished program can not be at to two different locations at the same time.\n")
asp_file.write("#program check(t).\n")
asp_file.write(":- query(t), goal(F), not holds(F,t).\n")
asp_file.write("#show move/3.\n")
asp_file.write("#show turned_at/3.\n")
asp_file.write("#program base.\n")
# Here all the tasks are added to the model
all_tasks= []
for i in range(len(all_jobs)):
job = all_jobs[i][0]
job = job.replace(" ", "")
job = job.replace(".", "_")
job = job.lower()
asp_file.write("task("+job+").\n")
all_tasks.append(job)
asp_file.write("os(ubuntu_DE).\n")
asp_file.write("os(centOS_7_DE).\n")
asp_file.write("os(centOS_7_NE).\n")
asp_file.write("os(debian).\n")
asp_file.write("os(red_hat).\n")
asp_file.write("os(no_os).\n")
# Here the needed toolkits for each task are added
for i in range(len(all_jobs)):
job = all_jobs[i][0]
job = job.replace(" ", "")
job = job.replace(".", "_")
job = job.lower()
for j in range(len(all_jobs[i][3])):
if all_jobs[i][3][j] == "CUDA":
asp_file.write("cuda_needed("+job+").\n")
elif all_jobs[i][3][j] == "psutil":
asp_file.write("psutil_needed("+job+").\n")
elif all_jobs[i][3][j] == "spaCy":
asp_file.write("spacy_needed("+job+").\n")
elif all_jobs[i][3][j] == "clingo":
asp_file.write("clingo_needed("+job+").\n")
# Here, if a toolkit is designated to be installed on a machine then this fact is added to the model.
for i in range(len(all_macs)):
mac = all_macs[i][0]
mac.replace(" ", "")
mac.lower()
for j in range(len(all_macs[i][3])):
if all_macs[i][3][j] == "CUDA":
asp_file.write("cuda_on("+mac+").\n")
elif all_macs[i][3][j] == "psutil":
asp_file.write("psutil_on("+mac+").\n")
elif all_macs[i][3][j] == "spaCy":
asp_file.write("spacy_on("+mac+").\n")
elif all_macs[i][3][j] == "clingo":
asp_file.write("clingo_on("+mac+").\n")
asp_file.write("% If a toolkit is not on on a machine then this rule is ture for that machine.\n")
asp_file.write("cuda_not_on(X) :- location(X), not cuda_on(X).\n")
asp_file.write("spacy_not_on(X) :- location(X), not spacy_on(X).\n")
asp_file.write("psutil_not_on(X) :- location(X), not psutil_on(X).\n")
asp_file.write("clingo_not_on(X) :- location(X), not clingo_on(X).\n")
asp_file.write("% If a task can only be executed on a specific OS then the rule os_needed() represents this in the model.\n")
for i in range(len(all_jobs)):
job = all_jobs[i][0]
job = job.replace(" ", "")
job = job.replace(".", "_")
job = job.lower()
if all_jobs[i][1][1] == "Ubuntu 18.04 [Desktop Edition]":
asp_file.write("os_needed("+job+", ubuntu_DE).\n")
elif all_jobs[i][1][1] == "CentOS 7 [Desktop Edition]":
asp_file.write("os_needed("+job+", centOS_7_DE).\n")
elif all_jobs[i][1][1] == "CentOS 7 [Node/server Edition]":
asp_file.write("os_needed("+job+", centOS_7_NE).\n")
elif all_jobs[i][1][1] == "Unlisted Debian based OS":
asp_file.write("os_needed("+job+", debian).\n")
elif all_jobs[i][1][1] == "Unlisted Red Hat based OS":
asp_file.write("os_needed("+job+", red_hat).\n")
elif all_jobs[i][1][1] == "N/A":
asp_file.write("-os_needed("+job+").\n")
asp_file.write("% Here the OS of each machine in the cluster is represented in the model.\n")
for i in range(len(all_macs)):
mac = all_macs[i][0]
mac.replace(" ", "")
mac.lower()
if all_macs[i][7] == "Ubuntu 18.04 [Desktop Edition]":
asp_file.write("os_on("+mac+", ubuntu_DE).\n")
elif all_macs[i][7] == "CentOS 7 [Desktop Edition]":
asp_file.write("os_on("+mac+", centOS_7_DE).\n")
elif all_macs[i][7] == "CentOS 7 [Node/server Edition]":
asp_file.write("os_on("+mac+", centOS_7_NE).\n")
elif all_macs[i][7] == "Unlisted Debian based OS":
asp_file.write("os_on("+mac+", debian).\n")
elif all_macs[i][7] == "Unlisted Red Hat based OS":
asp_file.write("os_on("+mac+").\n")
asp_file.write("% The thread_cost() rule represents how many threads a given task requires.\n")
# At this time, ECU assumes that the user knows how many threads each task needs.
for i in range(len(all_jobs)):
job = all_jobs[i][0]
job = job.replace(" ", "")
job = job.replace(".", "_")
job = job.lower()
thread = str(all_jobs[i][4])
asp_file.write("thread_cost("+job+", "+thread+").\n")
asp_file.write("% The depends_on(X1, X2) rule represents that X2 must be exectued and on the machine executing X1.\n")
# A program P1 may need to be executed at a different machine than another program P2, even if P2 depends on P1.
depended_on = []
for i in range(len(all_jobs)):
job0 = all_jobs[i][0]
job0 = job0.replace(" ", "")
job0 = job0.replace(".", "_")
job0 = job0.lower()
for j in range(len(all_jobs[i][2])):
job1 = all_jobs[i][2][j]
job1 = job1.replace(" ", "")
job1 = job1.replace(".", "_")
job1 = job1.lower()
depended_on.append(job1)
asp_file.write("depends_on("+job0+", "+job1+").\n")
for k in range(len(all_tasks)):
for l in range(len(depended_on)) :
if all_tasks[k] == depended_on[l]:
all_tasks[k] = False
break
for k in range(len(all_tasks)):
if all_tasks[k] != False:
asp_file.write("nobody_depends_on("+all_tasks[k]+").\n")
asp_file.write("% The machine_threads() rule represents how many cores on any given machine.\n")
# Though a task which has a higher multi-threading demand than the total cores on the machine which said task is being ran on may execute without issue, this is not always the case.
# ECU assumes that every task being executed in a cluster is an exspensive task requiring near full usage of the core on any given machine.
for i in range(len(all_macs)):
mac = all_macs[i][0]
mac.replace(" ", "")
mac.lower()
thread = str(all_macs[i][6])
asp_file.write("machine_threads("+mac+", "+thread+").\n")
asp_file.write("% Initialization of the statuses of all tasks.\n")
for i in range(len(all_jobs)):
job = all_jobs[i][0]
job = job.replace(" ", "")
job = job.replace(".", "_")
job = job.lower()
asp_file.write("init(on("+job+", home)).\n") # All tasks are started at home.
asp_file.write("init(at("+job+", -done)).\n")
asp_file.write("% Declartion of the goals of the system.\n")
for i in range(len(all_jobs)):
job = all_jobs[i][0]
job = job.replace(" ", "")
job = job.replace(".", "_")
job = job.lower()
asp_file.write("goal(at("+job+", done)).\n")
# Comments for all loops are written to asp.lp
asp_file.close()
| 52.635135 | 196 | 0.60706 | import pickle
if __name__ == "__main__":
input_file= open("GUI_functions/Cluster_details.bin", "rb")
all_macs= list(pickle.load(input_file))
input_file.close()
input_file= open("GUI_functions/Tasks_details.bin", "rb")
all_jobs= list(pickle.load(input_file))
input_file.close()
asp_file = open("GUI_functions/asp.lp", 'w')
asp_file.write("#include <incmode>. \n")
asp_file.write("#program base. \n")
asp_file.write("% A dynamically generated program.\n")
asp_file.write("% Made by build_asp.py using the data structures stored in Cluster_details.bin and Tasks_details.bin\n")
asp_file.write("% Define the fluents of the program. \n")
asp_file.write("status(-done).\n")
asp_file.write("status(done).\n")
asp_file.write("location(home).\n")
asp_file.write("% location() describes the individual nodes/machines of a cluster. \n")
asp_file.write("% home is the ECU master directory on one machine in a given cluster. \n")
asp_file.write("% The machine which home is on is assumed to be directly connected to all other machines in the cluster. \n")
for i in range(len(all_macs)):
mac = all_macs[i][0]
mac.replace(" ", "")
mac.lower()
asp_file.write("location("+mac+").\n")
for i in range(len(all_macs)):
mac = all_macs[i][0]
mac.replace(" ", "")
mac.lower()
asp_file.write("connection(home, "+mac+").\n")
# Here home is connected to all the machines in the cluster.
for j in range(len(all_macs[i][2])):
mac1 = all_macs[i][2][j]
mac1.replace(" ", "")
mac1.lower()
asp_file.write("connection("+mac+", "+mac1+").\n")
# Here the connection for each machine is modeled.
# At this time ECU does not assume two way edge connection.
# The graph representing the network of a cluster is thus a directed graph.
# This is a core featur of ECU.
asp_file.write("holds(F,0) :- init(F).\n")
asp_file.write("#program step(t).\n")
asp_file.write("{ move(X,Y,t) : task(X), location(Y)} :- holds(on(X,M),t-1), connection(M, Y).\n")
asp_file.write("0{ turn(X,Y,t)}1 :- status(Y), task(X), holds(on(X,Z),t), valid_on(X, Z).\n")
asp_file.write(":- move(X,Y,t), holds(on(X,Y1),t-1), Y == home.\n")
asp_file.write("% Programs can not be moved back into the home directory.\n")
asp_file.write(":- turn(X,Y,t), holds(at(X,done),t-1).\n")
asp_file.write("% Programs can not be executed if they are already complete.\n")
asp_file.write(":- turn(X,Y,t), holds(on(X,M),t), depends_on(X, X1), not holds(on(X1,M),t).\n")
# Comments detailing limits of move and turn.
asp_file.write("moved(X,t) :- move(X,Y,t).\n")
asp_file.write("% moved() indicated what task X was moved at turn t.\n")
# Comment detailing moved()
# --- Scheduling rules written to the ASP (clingo) model file. ---
# turned/turned_at derive, from the chosen turn() actions, which task ran
# where and when; the *_with_* rules bound parallel thread usage per machine.
asp_file.write("turned(X,t) :- turn(X, Y, t).\n")
asp_file.write("% turn() indicated what task X was executed at what turn t.\n")
# Comment detailing turn()
asp_file.write("turned_at(X, M, t) :- turned(X, t), holds(on(X,M),t).\n")
asp_file.write("% turned_at() indicated what task X was executed at Machine M at what turn t.\n")
# Comment detailing turned_at()
asp_file.write("turned_with_2(M, X, X1, Z, t) :- turned(X,t), holds(on(X,M),t), thread_cost(X, C), turned(X1,t), holds(on(X1,M),t), thread_cost(X1, C1), X != X1, Z = C + C1.\n")
asp_file.write("turned_with_3(M, X, X1, X2, Z, t) :- turned(X,t), holds(on(X,M),t), thread_cost(X, C), turned_with_2(M, X1, X2, C1, t), X != X1, X != X2, Z = C + C1.\n")
asp_file.write("turned_with_4(M, X, X1, X2, X3, Z, t) :- turned(X,t), holds(on(X,M),t), thread_cost(X, C), turned_with_3(M, X1, X2, X3, C1, t), X != X1, X != X2, X != X3, Z = C + C1.\n")
asp_file.write(":- turned_with_2(M, X, X1, Z, t), machine_threads(M, C), Z > C.\n")
asp_file.write(":- turned_with_3(M, X, X1, X2, Z, t), machine_threads(M, C), Z > C.\n")
asp_file.write(":- turned_with_4(M, X, X1, X2, X3, Z, t), machine_threads(M, C), Z > C.\n")
asp_file.write(":- turned(X,t), holds(on(X,M),t), thread_cost(X, C), turned_with_4(M, X1, X2, X3, X4, C1, t), X != X1, X != X2, X != X3, X != X4.\n")
asp_file.write("% These rules allow for up to 4 task to be ran in parrallel on any one machine at a time, \n")
asp_file.write("% if and only if the sum of the thread cost of said tasks does not add up to a number greater than \n")
asp_file.write("% the core count of said machine. \n")
# Comment section detailing the rules which allow for parrallel taks execution on a machine while preventing an overloading of a the machine's multi-threading capabilities.
# A task may only run on a machine that has every toolkit it needs.
asp_file.write(":- turned_at(X, M, t), cuda_not_on(M), cuda_needed(X).\n")
asp_file.write(":- turned_at(X, M, t), spacy_not_on(M), spacy_needed(X).\n")
asp_file.write(":- turned_at(X, M, t), psutil_not_on(M), psutil_needed(X).\n")
asp_file.write(":- turned_at(X, M, t), clingo_not_on(M), clingo_needed(X).\n")
asp_file.write("% This section will prevent a program which requires a given toolkit from being scheduled to run on a machine\n")
asp_file.write("% which does not have said toolkit.\n")
asp_file.write(":- move(X, Z, Y1), turned(X, Y2), Y1 == Y2.\n")
asp_file.write(":- move(X, Z, Y1), move(X, Z, Y2), Y1 != Y2.\n")
asp_file.write(":- move(X, Z, T1), turned(X,T2), T1 > T2, nobody_depends_on(X).\n")
asp_file.write("% A program can not be moved and executed at the same time.\n")
asp_file.write(":- turned(X1, T1), turned(X2, T2), depends_on(X2, X1), T1 >= T2, moved(X2,T).\n")
asp_file.write("% A program can executed before all of it's dependencies.\n")
# Frame axioms: locations and statuses persist unless changed by move()/turn().
asp_file.write("holds(on(X,Y),t) :- move(X,Y,t).\n")
asp_file.write("holds(on(X,Z),t) :- holds(on(X,Z),t-1), not moved(X,t).\n")
asp_file.write("holds(at(X,Y),t) :- turn(X,Y,t).\n")
asp_file.write("holds(at(X,Z),t) :- holds(at(X,Z),t-1), not turned(X,t).\n")
asp_file.write("valid_on(X, Y) :- thread_cost(X, Z1), machine_threads(Y, Z2), Z1 <= Z2.\n")
asp_file.write(":- os_needed(X, S), turned_at(X, M, t), not os_on(M, S), not -os_needed(X).\n")
asp_file.write(":- holds(on(X,M1),t), holds(on(X,M2),t), M1 != M2, holds(at(X,-done),t).\n")
asp_file.write("% A program can not be duplicated if it has not been executed.\n")
asp_file.write(":- holds(on(X,M1),t), holds(on(X,M2),t), M1 != M2, task(X1), task(X2), not depends_on(X1, X), not depends_on(X2, X), X1 != X2, turned_at(X1, M1, T1), turned_at(X2, M2, T2).\n")
asp_file.write("% A program can not be dupllicated if it is not the dependecy of at least two different later programs which are executed on atleast two diffent machines.\n")
# This prevents the over-duplication of dependencies.
# Given that sending programs is exspensive, limiting this process must be a priority.
asp_file.write("% An unfinished program can not be at to two different locations at the same time.\n")
# Incremental-solving scaffolding: check the goal at every step t, expose the
# chosen actions, and switch back to the base program for the facts below.
asp_file.write("#program check(t).\n")
asp_file.write(":- query(t), goal(F), not holds(F,t).\n")
asp_file.write("#show move/3.\n")
asp_file.write("#show turned_at/3.\n")
asp_file.write("#program base.\n")
# Declare every task in the ASP model, remembering the sanitized names so the
# dependency section further down can reuse them.
all_tasks = []
for job_row in all_jobs:
    job = job_row[0].replace(" ", "").replace(".", "_").lower()
    asp_file.write("task(" + job + ").\n")
    all_tasks.append(job)
# Enumerate the operating systems the model knows about.
for os_atom in ("ubuntu_DE", "centOS_7_DE", "centOS_7_NE", "debian", "red_hat", "no_os"):
    asp_file.write("os(" + os_atom + ").\n")
# Record which toolkit(s) each task requires.
toolkit_fact = {
    "CUDA": "cuda_needed",
    "psutil": "psutil_needed",
    "spaCy": "spacy_needed",
    "clingo": "clingo_needed",
}
for job_row in all_jobs:
    job = job_row[0].replace(" ", "").replace(".", "_").lower()
    for toolkit in job_row[3]:
        if toolkit in toolkit_fact:
            asp_file.write(toolkit_fact[toolkit] + "(" + job + ").\n")
# Here, if a toolkit is designated to be installed on a machine then this fact is added to the model.
for i in range(len(all_macs)):
    mac = all_macs[i][0]
    # NOTE(review): str.replace()/str.lower() return new strings; their results
    # are discarded here, so `mac` is written to the model unsanitized. The job
    # loops assign the result back (job = job.replace(...)). If sanitizing the
    # machine name is intended, it must be fixed in *every* machine loop (and
    # wherever the location() facts are emitted) together -- TODO confirm.
    mac.replace(" ", "")
    mac.lower()
    for j in range(len(all_macs[i][3])):
        if all_macs[i][3][j] == "CUDA":
            asp_file.write("cuda_on("+mac+").\n")
        elif all_macs[i][3][j] == "psutil":
            asp_file.write("psutil_on("+mac+").\n")
        elif all_macs[i][3][j] == "spaCy":
            asp_file.write("spacy_on("+mac+").\n")
        elif all_macs[i][3][j] == "clingo":
            asp_file.write("clingo_on("+mac+").\n")
asp_file.write("% If a toolkit is not on on a machine then this rule is ture for that machine.\n")
# Closed-world negation: a machine lacks a toolkit unless a *_on fact says otherwise.
asp_file.write("cuda_not_on(X) :- location(X), not cuda_on(X).\n")
asp_file.write("spacy_not_on(X) :- location(X), not spacy_on(X).\n")
asp_file.write("psutil_not_on(X) :- location(X), not psutil_on(X).\n")
asp_file.write("clingo_not_on(X) :- location(X), not clingo_on(X).\n")
asp_file.write("% If a task can only be executed on a specific OS then the rule os_needed() represents this in the model.\n")
# Map the GUI's OS labels to the atom names declared in the os() facts above.
os_atom_for_label = {
    "Ubuntu 18.04 [Desktop Edition]": "ubuntu_DE",
    "CentOS 7 [Desktop Edition]": "centOS_7_DE",
    "CentOS 7 [Node/server Edition]": "centOS_7_NE",
    "Unlisted Debian based OS": "debian",
    "Unlisted Red Hat based OS": "red_hat",
}
for job_row in all_jobs:
    job = job_row[0].replace(" ", "").replace(".", "_").lower()
    os_label = job_row[1][1]
    if os_label in os_atom_for_label:
        asp_file.write("os_needed(" + job + ", " + os_atom_for_label[os_label] + ").\n")
    elif os_label == "N/A":
        # Classical negation: the task explicitly has no OS requirement.
        asp_file.write("-os_needed(" + job + ").\n")
asp_file.write("% Here the OS of each machine in the cluster is represented in the model.\n")
for i in range(len(all_macs)):
    mac = all_macs[i][0]
    # NOTE(review): the return values of replace()/lower() are discarded, so
    # `mac` is written unsanitized. Left as-is to stay consistent with the
    # other machine loops; if sanitizing is intended, fix them all together.
    mac.replace(" ", "")
    mac.lower()
    if all_macs[i][7] == "Ubuntu 18.04 [Desktop Edition]":
        asp_file.write("os_on("+mac+", ubuntu_DE).\n")
    elif all_macs[i][7] == "CentOS 7 [Desktop Edition]":
        asp_file.write("os_on("+mac+", centOS_7_DE).\n")
    elif all_macs[i][7] == "CentOS 7 [Node/server Edition]":
        asp_file.write("os_on("+mac+", centOS_7_NE).\n")
    elif all_macs[i][7] == "Unlisted Debian based OS":
        asp_file.write("os_on("+mac+", debian).\n")
    elif all_macs[i][7] == "Unlisted Red Hat based OS":
        # Bug fix: this previously emitted the unary fact os_on(M), which can
        # never match the binary os_on(M, S) used by the OS constraint
        # (":- os_needed(X, S), turned_at(X, M, t), not os_on(M, S), ..."),
        # so Red Hat machines could never satisfy red_hat-requiring tasks.
        asp_file.write("os_on("+mac+", red_hat).\n")
asp_file.write("% The thread_cost() rule represents how many threads a given task requires.\n")
# ECU trusts the user-supplied thread requirement of every task.
for job_row in all_jobs:
    task_atom = job_row[0].replace(" ", "").replace(".", "_").lower()
    asp_file.write("thread_cost(" + task_atom + ", " + str(job_row[4]) + ").\n")
asp_file.write("% The depends_on(X1, X2) rule represents that X2 must be exectued and on the machine executing X1.\n")
# A program P1 may need to be executed at a different machine than another program P2, even if P2 depends on P1.
# Collect every task that appears as a dependency; a set gives O(1) lookups
# below (previously an O(n*m) nested scan that also clobbered all_tasks
# entries with False placeholders).
depended_on = set()
for job_row in all_jobs:
    job0 = job_row[0].replace(" ", "").replace(".", "_").lower()
    for dep in job_row[2]:
        job1 = dep.replace(" ", "").replace(".", "_").lower()
        depended_on.add(job1)
        asp_file.write("depends_on(" + job0 + ", " + job1 + ").\n")
# Tasks nobody depends on may be executed/moved without blocking anyone.
for task_atom in all_tasks:
    if task_atom not in depended_on:
        asp_file.write("nobody_depends_on(" + task_atom + ").\n")
asp_file.write("% The machine_threads() rule represents how many cores on any given machine.\n")
# Though a task which has a higher multi-threading demand than the total cores on the machine which said task is being ran on may execute without issue, this is not always the case.
# ECU assumes that every task being executed in a cluster is an exspensive task requiring near full usage of the core on any given machine.
for i in range(len(all_macs)):
    mac = all_macs[i][0]
    # NOTE(review): replace()/lower() results discarded here too -- `mac` is
    # emitted unsanitized, consistent with the other machine loops; fix all
    # machine loops together if sanitization is intended.
    mac.replace(" ", "")
    mac.lower()
    thread = str(all_macs[i][6])
    asp_file.write("machine_threads("+mac+", "+thread+").\n")
asp_file.write("% Initialization of the statuses of all tasks.\n")
for job_row in all_jobs:
    task_atom = job_row[0].replace(" ", "").replace(".", "_").lower()
    # Every task starts at the "home" location, not yet done.
    asp_file.write("init(on(" + task_atom + ", home)).\n")
    asp_file.write("init(at(" + task_atom + ", -done)).\n")
asp_file.write("% Declartion of the goals of the system.\n")
# The solver must find a plan in which every task reaches the done status.
for job_row in all_jobs:
    task_atom = job_row[0].replace(" ", "").replace(".", "_").lower()
    asp_file.write("goal(at(" + task_atom + ", done)).\n")
asp_file.close()
| true | true |
f71a4eddc4f441ac7f58d13143c891e1a2b0e540 | 5,556 | py | Python | datasets/dataloader_infer.py | Nitin-Mane/dense-ulearn-vos | 9e39d359a53a2343522ce5820fdf27223a4ffcb4 | [
"Apache-2.0"
] | 157 | 2021-11-11T13:45:48.000Z | 2022-03-14T03:06:09.000Z | datasets/dataloader_infer.py | Nitin-Mane/dense-ulearn-vos | 9e39d359a53a2343522ce5820fdf27223a4ffcb4 | [
"Apache-2.0"
] | 11 | 2021-11-20T11:53:47.000Z | 2022-03-30T01:51:56.000Z | datasets/dataloader_infer.py | Nitin-Mane/dense-ulearn-vos | 9e39d359a53a2343522ce5820fdf27223a4ffcb4 | [
"Apache-2.0"
] | 16 | 2021-11-12T09:19:45.000Z | 2022-03-16T10:32:39.000Z | """
Copyright (c) 2021 TU Darmstadt
Author: Nikita Araslanov <nikita.araslanov@tu-darmstadt.de>
License: Apache License 2.0
"""
import os
import torch
from PIL import Image
import numpy as np
import torchvision.transforms as tf
from .dataloader_base import DLBase
class DataSeg(DLBase):
def __init__(self, cfg, split, ignore_labels=[], \
root=os.path.expanduser('./data'), renorm=False):
super(DataSeg, self).__init__()
self.cfg = cfg
self.root = root
self.split = split
self.ignore_labels = ignore_labels
self._init_palette(self.cfg.DATASET.NUM_CLASSES)
# train/val/test splits are pre-cut
split_fn = os.path.join(self.root, self.split + ".txt")
assert os.path.isfile(split_fn)
self.sequence_ids = []
self.sequence_names = []
def add_sequence(name):
vlen = len(self.images)
assert vlen >= cfg.DATASET.VIDEO_LEN, \
"Detected video shorter [{}] than training length [{}]".format(vlen, \
cfg.DATASET.VIDEO_LEN)
self.sequence_ids.append(vlen)
self.sequence_names.append(name)
return vlen
self.images = []
self.masks = []
self.flags = []
token = None
with open(split_fn, "r") as lines:
for line in lines:
_flag, _image, _mask = line.strip("\n").split(' ')
# save every frame
#_flag = 1
self.flags.append(int(_flag))
_image = os.path.join(cfg.DATASET.ROOT, _image.lstrip('/'))
assert os.path.isfile(_image), '%s not found' % _image
# each sequence may have a different length
# do some book-keeping e.g. to ensure we have
# sequences long enough for subsequent sampling
_token = _image.split("/")[-2] # parent directory
# sequence ID is in the filename
#_token = os.path.basename(_image).split("_")[0]
if token != _token:
if not token is None:
add_sequence(token)
token = _token
self.images.append(_image)
if _mask is None:
self.masks.append(None)
else:
_mask = os.path.join(cfg.DATASET.ROOT, _mask.lstrip('/'))
#assert os.path.isfile(_mask), '%s not found' % _mask
self.masks.append(_mask)
# update the last sequence
# returns the total amount of frames
add_sequence(token)
print("Loaded {} sequences".format(len(self.sequence_ids)))
# definint data augmentation:
print("Dataloader: {}".format(split), " #", len(self.images))
print("\t {}: no augmentation".format(split))
self.tf = tf.Compose([tf.ToTensor(), tf.Normalize(mean=self.MEAN, std=self.STD)])
self._num_samples = len(self.images)
def __len__(self):
return len(self.sequence_ids)
def _mask2tensor(self, mask, num_classes=6):
h,w = mask.shape
ones = torch.ones(1,h,w)
zeros = torch.zeros(num_classes,h,w)
max_idx = mask.max()
assert max_idx < num_classes, "{} >= {}".format(max_idx, num_classes)
return zeros.scatter(0, mask[None, ...], ones)
def denorm(self, image):
if image.dim() == 3:
assert image.dim() == 3, "Expected image [CxHxW]"
assert image.size(0) == 3, "Expected RGB image [3xHxW]"
for t, m, s in zip(image, self.MEAN, self.STD):
t.mul_(s).add_(m)
elif image.dim() == 4:
# batch mode
assert image.size(1) == 3, "Expected RGB image [3xHxW]"
for t, m, s in zip((0,1,2), self.MEAN, self.STD):
image[:, t, :, :].mul_(s).add_(m)
return image
def __getitem__(self, index):
seq_to = self.sequence_ids[index]
seq_from = 0 if index == 0 else self.sequence_ids[index - 1]
image0 = Image.open(self.images[seq_from])
w,h = image0.size
images, masks, fns, flags = [], [], [], []
tracks = torch.LongTensor(self.cfg.DATASET.NUM_CLASSES).fill_(-1)
masks = torch.LongTensor(self.cfg.DATASET.NUM_CLASSES, h, w).zero_()
known_ids = set()
for t in range(seq_from, seq_to):
t0 = t - seq_from
image = Image.open(self.images[t]).convert('RGB')
fns.append(os.path.basename(self.images[t].replace(".jpg", "")))
flags.append(self.flags[t])
if os.path.isfile(self.masks[t]):
mask = Image.open(self.masks[t])
mask = torch.from_numpy(np.array(mask, np.long, copy=False))
unique_ids = np.unique(mask)
for oid in unique_ids:
if not oid in known_ids:
tracks[oid] = t0
known_ids.add(oid)
masks[oid] = (mask == oid).long()
else:
mask = Image.new('L', image.size)
image = self.tf(image)
images.append(image)
images = torch.stack(images, 0)
seq_name = self.sequence_names[index]
flags = torch.LongTensor(flags)
return images, images, masks, tracks, len(known_ids), fns, flags, seq_name
| 33.071429 | 89 | 0.533837 |
import os
import torch
from PIL import Image
import numpy as np
import torchvision.transforms as tf
from .dataloader_base import DLBase
class DataSeg(DLBase):
def __init__(self, cfg, split, ignore_labels=[], \
root=os.path.expanduser('./data'), renorm=False):
super(DataSeg, self).__init__()
self.cfg = cfg
self.root = root
self.split = split
self.ignore_labels = ignore_labels
self._init_palette(self.cfg.DATASET.NUM_CLASSES)
split_fn = os.path.join(self.root, self.split + ".txt")
assert os.path.isfile(split_fn)
self.sequence_ids = []
self.sequence_names = []
def add_sequence(name):
vlen = len(self.images)
assert vlen >= cfg.DATASET.VIDEO_LEN, \
"Detected video shorter [{}] than training length [{}]".format(vlen, \
cfg.DATASET.VIDEO_LEN)
self.sequence_ids.append(vlen)
self.sequence_names.append(name)
return vlen
self.images = []
self.masks = []
self.flags = []
token = None
with open(split_fn, "r") as lines:
for line in lines:
_flag, _image, _mask = line.strip("\n").split(' ')
self.flags.append(int(_flag))
_image = os.path.join(cfg.DATASET.ROOT, _image.lstrip('/'))
assert os.path.isfile(_image), '%s not found' % _image
_token = _image.split("/")[-2]
if token != _token:
if not token is None:
add_sequence(token)
token = _token
self.images.append(_image)
if _mask is None:
self.masks.append(None)
else:
_mask = os.path.join(cfg.DATASET.ROOT, _mask.lstrip('/'))
self.masks.append(_mask)
add_sequence(token)
print("Loaded {} sequences".format(len(self.sequence_ids)))
print("Dataloader: {}".format(split), " #", len(self.images))
print("\t {}: no augmentation".format(split))
self.tf = tf.Compose([tf.ToTensor(), tf.Normalize(mean=self.MEAN, std=self.STD)])
self._num_samples = len(self.images)
def __len__(self):
return len(self.sequence_ids)
def _mask2tensor(self, mask, num_classes=6):
h,w = mask.shape
ones = torch.ones(1,h,w)
zeros = torch.zeros(num_classes,h,w)
max_idx = mask.max()
assert max_idx < num_classes, "{} >= {}".format(max_idx, num_classes)
return zeros.scatter(0, mask[None, ...], ones)
def denorm(self, image):
if image.dim() == 3:
assert image.dim() == 3, "Expected image [CxHxW]"
assert image.size(0) == 3, "Expected RGB image [3xHxW]"
for t, m, s in zip(image, self.MEAN, self.STD):
t.mul_(s).add_(m)
elif image.dim() == 4:
assert image.size(1) == 3, "Expected RGB image [3xHxW]"
for t, m, s in zip((0,1,2), self.MEAN, self.STD):
image[:, t, :, :].mul_(s).add_(m)
return image
def __getitem__(self, index):
seq_to = self.sequence_ids[index]
seq_from = 0 if index == 0 else self.sequence_ids[index - 1]
image0 = Image.open(self.images[seq_from])
w,h = image0.size
images, masks, fns, flags = [], [], [], []
tracks = torch.LongTensor(self.cfg.DATASET.NUM_CLASSES).fill_(-1)
masks = torch.LongTensor(self.cfg.DATASET.NUM_CLASSES, h, w).zero_()
known_ids = set()
for t in range(seq_from, seq_to):
t0 = t - seq_from
image = Image.open(self.images[t]).convert('RGB')
fns.append(os.path.basename(self.images[t].replace(".jpg", "")))
flags.append(self.flags[t])
if os.path.isfile(self.masks[t]):
mask = Image.open(self.masks[t])
mask = torch.from_numpy(np.array(mask, np.long, copy=False))
unique_ids = np.unique(mask)
for oid in unique_ids:
if not oid in known_ids:
tracks[oid] = t0
known_ids.add(oid)
masks[oid] = (mask == oid).long()
else:
mask = Image.new('L', image.size)
image = self.tf(image)
images.append(image)
images = torch.stack(images, 0)
seq_name = self.sequence_names[index]
flags = torch.LongTensor(flags)
return images, images, masks, tracks, len(known_ids), fns, flags, seq_name
| true | true |
f71a4f157fbcfd39a6a5a1e24d4913bdf4df7d2c | 6,777 | py | Python | etna/analysis/eda_utils.py | Carlosbogo/etna | b6210f0e79ee92aa9ae8ff4fcfb267be9fb7cc94 | [
"Apache-2.0"
] | null | null | null | etna/analysis/eda_utils.py | Carlosbogo/etna | b6210f0e79ee92aa9ae8ff4fcfb267be9fb7cc94 | [
"Apache-2.0"
] | null | null | null | etna/analysis/eda_utils.py | Carlosbogo/etna | b6210f0e79ee92aa9ae8ff4fcfb267be9fb7cc94 | [
"Apache-2.0"
] | null | null | null | import math
import warnings
from itertools import combinations
from typing import TYPE_CHECKING
from typing import Optional
from typing import Sequence
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import statsmodels.api as sm
from matplotlib.ticker import MaxNLocator
from statsmodels.graphics import utils
if TYPE_CHECKING:
from etna.datasets import TSDataset
plot_acf = sm.graphics.tsa.plot_acf
plot_pacf = sm.graphics.tsa.plot_pacf
def cross_corr_plot(ts: "TSDataset", n_segments: int = 10, maxlags: int = 21, segments: Optional[Sequence] = None):
    """
    Cross-correlation plot between multiple timeseries.

    Parameters
    ----------
    ts:
        TSDataset with timeseries data
    n_segments:
        number of random segments to plot
    maxlags:
        number of timeseries shifts for cross-correlation
    segments:
        segments to plot

    Raises
    ------
    ValueError:
        if fewer than two segments are available, so no pair can be formed
    """
    if not segments:
        segments = list(ts.segments)

    # subsample the segments, then build every unordered pair of them
    segments = np.random.choice(segments, size=min(len(segments), n_segments), replace=False)

    segment_pairs = list(combinations(segments, r=2))
    if len(segment_pairs) == 0:
        raise ValueError("There are no pairs to plot! Try set n_segments > 1.")

    # at most two plots per row
    columns_num = min(2, len(segment_pairs))
    rows_num = math.ceil(len(segment_pairs) / columns_num)

    fig, ax = plt.subplots(rows_num, columns_num, figsize=(20, 5 * rows_num), constrained_layout=True, squeeze=False)
    ax = ax.ravel()
    fig.suptitle("Cross-correlation", fontsize=16)
    for i, (segment_1, segment_2) in enumerate(segment_pairs):
        df_segment_1 = ts[:, segment_1, :][segment_1]
        df_segment_2 = ts[:, segment_2, :][segment_2]
        fig, axx = utils.create_mpl_ax(ax[i])
        target_1 = df_segment_1.target
        target_2 = df_segment_2.target
        # xcorr needs float input; warn before silently converting int targets
        if target_1.dtype == int or target_2.dtype == int:
            warnings.warn(
                "At least one target column has integer dtype, "
                "it is converted to float in order to calculate correlation."
            )
            target_1 = target_1.astype(float)
            target_2 = target_2.astype(float)
        lags, level, _, _ = axx.xcorr(x=target_1, y=target_2, maxlags=maxlags)
        # overlay dot markers on the correlation values
        ax[i].plot(lags, level, "o", markersize=5)
        ax[i].set_title(f"{segment_1} vs {segment_2}")
        ax[i].xaxis.set_major_locator(MaxNLocator(integer=True))
    plt.show()
def sample_acf_plot(ts: "TSDataset", n_segments: int = 10, lags: int = 21, segments: Sequence = None):
    """
    Autocorrelation plot for multiple timeseries.

    Parameters
    ----------
    ts:
        TSDataset with timeseries data
    n_segments:
        number of random segments to plot
    lags:
        number of timeseries shifts used for the autocorrelation
    segments:
        segments to plot

    Notes
    -----
    https://en.wikipedia.org/wiki/Autocorrelation
    """
    if not segments:
        segments = sorted(ts.segments)

    k = min(n_segments, len(segments))
    columns_num = min(2, k)
    rows_num = math.ceil(k / columns_num)

    fig, ax = plt.subplots(rows_num, columns_num, figsize=(20, 5 * rows_num), constrained_layout=True, squeeze=False)
    ax = ax.ravel()
    # Bug fix: this function plots the ACF, but the figure was titled
    # "Partial Autocorrelation" (copy-paste from sample_pacf_plot).
    fig.suptitle("Autocorrelation", fontsize=16)
    for i, name in enumerate(sorted(np.random.choice(segments, size=k, replace=False))):
        df_slice = ts[:, name, :][name]
        plot_acf(x=df_slice["target"].values, ax=ax[i], lags=lags)
        ax[i].set_title(name)
    plt.show()
def sample_pacf_plot(ts: "TSDataset", n_segments: int = 10, lags: int = 21, segments: Sequence = None):
    """
    Partial autocorrelation plot for multiple timeseries.

    Parameters
    ----------
    ts:
        TSDataset with timeseries data
    n_segments:
        number of random segments to plot
    lags:
        number of timeseries shifts for cross-correlation
    segments:
        segments to plot

    Notes
    -----
    https://en.wikipedia.org/wiki/Partial_autocorrelation_function
    """
    if not segments:
        segments = sorted(ts.segments)

    n_plots = min(n_segments, len(segments))
    n_cols = min(2, n_plots)
    n_rows = math.ceil(n_plots / n_cols)

    fig, axes = plt.subplots(n_rows, n_cols, figsize=(20, 5 * n_rows), constrained_layout=True, squeeze=False)
    axes = axes.ravel()
    fig.suptitle("Partial Autocorrelation", fontsize=16)

    # pick a random subset of segments and plot them in sorted order
    chosen = sorted(np.random.choice(segments, size=n_plots, replace=False))
    for plot_idx, segment_name in enumerate(chosen):
        segment_df = ts[:, segment_name, :][segment_name]
        plot_pacf(x=segment_df["target"].values, ax=axes[plot_idx], lags=lags)
        axes[plot_idx].set_title(segment_name)
    plt.show()
def distribution_plot(
    ts: "TSDataset",
    n_segments: int = 10,
    segments: Sequence = None,
    shift: int = 30,
    window: int = 30,
    freq: str = "1M",
    n_rows: int = 10,
):
    """Distribution of z-values grouped by segments and time frequency.

    ... math:
        mean_{i} = \\sum_{j=i-\\text{shift}}^{i-\\text{shift}+\\text{window}} \\frac{x_{j}}{\\text{window}}

    Parameters
    ----------
    ts:
        dataset with timeseries data
    n_segments:
        number of random segments to plot
    segments:
        segments to plot
    shift:
        number of timeseries shifts for statistics calc
    window:
        number of points for statistics calc
    freq:
        group for z_{i}
    n_rows:
        maximum number of rows to plot
    """
    df_pd = ts.to_pandas(flatten=True)

    if not segments:
        segments = df_pd.segment.unique()
    segments = np.random.choice(segments, size=min(len(segments), n_segments), replace=False)
    # Bug fix: take an explicit copy -- the .loc column assignments below were
    # applied to a boolean-mask view of df_pd, which triggers pandas'
    # SettingWithCopyWarning and can silently drop the writes under
    # copy-on-write semantics.
    df_full = df_pd[df_pd.segment.isin(segments)].copy()

    # rolling mean/std of the target, lagged by `shift`, per segment
    df_full.loc[:, "mean"] = (
        df_full.groupby("segment").target.shift(shift).transform(lambda s: s.rolling(window).mean())
    )
    df_full.loc[:, "std"] = df_full.groupby("segment").target.shift(shift).transform(lambda s: s.rolling(window).std())
    df_full = df_full.dropna()
    # z-score of each observation against its own lagged rolling statistics
    df_full.loc[:, "z"] = (df_full["target"] - df_full["mean"]) / df_full["std"]

    grouped_data = df_full.groupby([df_full.timestamp.dt.to_period(freq)])
    columns_num = min(2, len(grouped_data))
    rows_num = min(n_rows, math.ceil(len(grouped_data) / columns_num))
    # keep only the most recent rows_num * columns_num periods
    groups = set(list(grouped_data.groups.keys())[-rows_num * columns_num :])

    fig, ax = plt.subplots(rows_num, columns_num, figsize=(20, 7.5 * rows_num), constrained_layout=True, squeeze=False)
    fig.suptitle(f"Z statistic shift: {shift} window: {window}", fontsize=16)
    ax = ax.ravel()
    i = 0
    for period, df_slice in grouped_data:
        if period not in groups:
            continue
        sns.boxplot(data=df_slice.sort_values(by="segment"), y="z", x="segment", ax=ax[i], fliersize=False)
        ax[i].set_title(f"{period}")
        i += 1
| 34.576531 | 119 | 0.649255 | import math
import warnings
from itertools import combinations
from typing import TYPE_CHECKING
from typing import Optional
from typing import Sequence
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import statsmodels.api as sm
from matplotlib.ticker import MaxNLocator
from statsmodels.graphics import utils
if TYPE_CHECKING:
from etna.datasets import TSDataset
plot_acf = sm.graphics.tsa.plot_acf
plot_pacf = sm.graphics.tsa.plot_pacf
def cross_corr_plot(ts: "TSDataset", n_segments: int = 10, maxlags: int = 21, segments: Optional[Sequence] = None):
if not segments:
segments = list(ts.segments)
segments = np.random.choice(segments, size=min(len(segments), n_segments), replace=False)
segment_pairs = list(combinations(segments, r=2))
if len(segment_pairs) == 0:
raise ValueError("There are no pairs to plot! Try set n_segments > 1.")
columns_num = min(2, len(segment_pairs))
rows_num = math.ceil(len(segment_pairs) / columns_num)
fig, ax = plt.subplots(rows_num, columns_num, figsize=(20, 5 * rows_num), constrained_layout=True, squeeze=False)
ax = ax.ravel()
fig.suptitle("Cross-correlation", fontsize=16)
for i, (segment_1, segment_2) in enumerate(segment_pairs):
df_segment_1 = ts[:, segment_1, :][segment_1]
df_segment_2 = ts[:, segment_2, :][segment_2]
fig, axx = utils.create_mpl_ax(ax[i])
target_1 = df_segment_1.target
target_2 = df_segment_2.target
if target_1.dtype == int or target_2.dtype == int:
warnings.warn(
"At least one target column has integer dtype, "
"it is converted to float in order to calculate correlation."
)
target_1 = target_1.astype(float)
target_2 = target_2.astype(float)
lags, level, _, _ = axx.xcorr(x=target_1, y=target_2, maxlags=maxlags)
ax[i].plot(lags, level, "o", markersize=5)
ax[i].set_title(f"{segment_1} vs {segment_2}")
ax[i].xaxis.set_major_locator(MaxNLocator(integer=True))
plt.show()
def sample_acf_plot(ts: "TSDataset", n_segments: int = 10, lags: int = 21, segments: Sequence = None):
if not segments:
segments = sorted(ts.segments)
k = min(n_segments, len(segments))
columns_num = min(2, k)
rows_num = math.ceil(k / columns_num)
fig, ax = plt.subplots(rows_num, columns_num, figsize=(20, 5 * rows_num), constrained_layout=True, squeeze=False)
ax = ax.ravel()
fig.suptitle("Partial Autocorrelation", fontsize=16)
for i, name in enumerate(sorted(np.random.choice(segments, size=k, replace=False))):
df_slice = ts[:, name, :][name]
plot_acf(x=df_slice["target"].values, ax=ax[i], lags=lags)
ax[i].set_title(name)
plt.show()
def sample_pacf_plot(ts: "TSDataset", n_segments: int = 10, lags: int = 21, segments: Sequence = None):
if not segments:
segments = sorted(ts.segments)
k = min(n_segments, len(segments))
columns_num = min(2, k)
rows_num = math.ceil(k / columns_num)
fig, ax = plt.subplots(rows_num, columns_num, figsize=(20, 5 * rows_num), constrained_layout=True, squeeze=False)
ax = ax.ravel()
fig.suptitle("Partial Autocorrelation", fontsize=16)
for i, name in enumerate(sorted(np.random.choice(segments, size=k, replace=False))):
df_slice = ts[:, name, :][name]
plot_pacf(x=df_slice["target"].values, ax=ax[i], lags=lags)
ax[i].set_title(name)
plt.show()
def distribution_plot(
ts: "TSDataset",
n_segments: int = 10,
segments: Sequence = None,
shift: int = 30,
window: int = 30,
freq: str = "1M",
n_rows: int = 10,
):
df_pd = ts.to_pandas(flatten=True)
if not segments:
segments = df_pd.segment.unique()
segments = np.random.choice(segments, size=min(len(segments), n_segments), replace=False)
df_full = df_pd[df_pd.segment.isin(segments)]
df_full.loc[:, "mean"] = (
df_full.groupby("segment").target.shift(shift).transform(lambda s: s.rolling(window).mean())
)
df_full.loc[:, "std"] = df_full.groupby("segment").target.shift(shift).transform(lambda s: s.rolling(window).std())
df_full = df_full.dropna()
df_full.loc[:, "z"] = (df_full["target"] - df_full["mean"]) / df_full["std"]
grouped_data = df_full.groupby([df_full.timestamp.dt.to_period(freq)])
columns_num = min(2, len(grouped_data))
rows_num = min(n_rows, math.ceil(len(grouped_data) / columns_num))
groups = set(list(grouped_data.groups.keys())[-rows_num * columns_num :])
fig, ax = plt.subplots(rows_num, columns_num, figsize=(20, 7.5 * rows_num), constrained_layout=True, squeeze=False)
fig.suptitle(f"Z statistic shift: {shift} window: {window}", fontsize=16)
ax = ax.ravel()
i = 0
for period, df_slice in grouped_data:
if period not in groups:
continue
sns.boxplot(data=df_slice.sort_values(by="segment"), y="z", x="segment", ax=ax[i], fliersize=False)
ax[i].set_title(f"{period}")
i += 1
| true | true |
f71a5114748409f8688b38305fe77035a3f0228a | 2,251 | py | Python | 18_Working with Dates and Times in Python/03_Time Zones and Daylight Saving/05_What time did the bike leave.py | mohd-faizy/DataScience-With-Python | 13ebb10cf9083343056d5b782957241de1d595f9 | [
"MIT"
] | 5 | 2021-02-03T14:36:58.000Z | 2022-01-01T10:29:26.000Z | 18_Working with Dates and Times in Python/03_Time Zones and Daylight Saving/05_What time did the bike leave.py | mohd-faizy/DataScience-With-Python | 13ebb10cf9083343056d5b782957241de1d595f9 | [
"MIT"
] | null | null | null | 18_Working with Dates and Times in Python/03_Time Zones and Daylight Saving/05_What time did the bike leave.py | mohd-faizy/DataScience-With-Python | 13ebb10cf9083343056d5b782957241de1d595f9 | [
"MIT"
] | 3 | 2021-02-08T00:31:16.000Z | 2022-03-17T13:52:32.000Z | '''
05 - What time did the bike leave? (Global edition)
When you need to move a datetime from one timezone into another, use
.astimezone() and tz. Often you will be moving things into UTC, but for
fun let's try moving things from 'America/New_York' into a few different
time zones.
------------------------------------------------------------------
Instructions:
- Set uk to be the timezone for the UK: 'Europe/London'.
- Change local to be in the uk timezone and assign it to notlocal.
------------------------------------------------------------------
'''
# Create the timezone object
uk = tz.gettz('Europe/London')
# Pull out the start of the first trip
local = onebike_datetimes[0]['start']
# What time was it in the UK?
notlocal = local.astimezone(uk)
# Print them out and see the difference
print(local.isoformat())
print(notlocal.isoformat())
'''
<script.py> output:
2017-10-01T15:23:25-04:00
2017-10-01T20:23:25+01:00
'''
'''
------------------------------------------------------------------
- Set ist to be the timezone for India: 'Asia/Kolkata'.
- Change local to be in the ist timezone and assign it to notlocal.
------------------------------------------------------------------
'''
# Create the timezone object
ist = tz.gettz('Asia/Kolkata')
# Pull out the start of the first trip
local = onebike_datetimes[0]['start']
# What time was it in the UK?
notlocal = local.astimezone(ist)
# Print them out and see the difference
print(local.isoformat())
print(notlocal.isoformat())
'''
<script.py> output:
2017-10-01T15:23:25-04:00
2017-10-02T00:53:25+05:30
'''
'''
------------------------------------------------------------------
- Set sm to be the timezone for Samoa: 'Pacific/Apia'.
- Change local to be in the sm timezone and assign it to notlocal.
------------------------------------------------------------------
'''
# Create the timezone object
sm = tz.gettz('Pacific/Apia')
# Pull out the start of the first trip
local = onebike_datetimes[0]['start']
# What time was it in Samoa?
notlocal = local.astimezone(sm)
# Print them out and see the difference
print(local.isoformat())
print(notlocal.isoformat())
'''
<script.py> output:
2017-10-01T15:23:25-04:00
2017-10-02T09:23:25+14:00
'''
| 27.120482 | 73 | 0.581519 |
uk = tz.gettz('Europe/London')
local = onebike_datetimes[0]['start']
notlocal = local.astimezone(uk)
print(local.isoformat())
print(notlocal.isoformat())
ist = tz.gettz('Asia/Kolkata')
local = onebike_datetimes[0]['start']
notlocal = local.astimezone(ist)
print(local.isoformat())
print(notlocal.isoformat())
sm = tz.gettz('Pacific/Apia')
local = onebike_datetimes[0]['start']
notlocal = local.astimezone(sm)
print(local.isoformat())
print(notlocal.isoformat())
| true | true |
f71a5174d2bf23ea7be1f3e9c5de988669aecc72 | 7,805 | py | Python | src/tools/scripts/lofreq2_cluster.py | joshwkearney/lofreq | 8966e95044875ec9068d2ea4d1cf72ed96d92781 | [
"MIT"
] | 74 | 2015-01-02T19:18:01.000Z | 2022-02-25T04:16:18.000Z | src/tools/scripts/lofreq2_cluster.py | joshwkearney/lofreq | 8966e95044875ec9068d2ea4d1cf72ed96d92781 | [
"MIT"
] | 125 | 2015-01-06T07:25:30.000Z | 2022-03-15T12:56:23.000Z | src/tools/scripts/lofreq2_cluster.py | joshwkearney/lofreq | 8966e95044875ec9068d2ea4d1cf72ed96d92781 | [
"MIT"
] | 31 | 2015-01-14T00:41:14.000Z | 2022-02-16T14:45:13.000Z | #!/usr/bin/env python
"""Cluster SNVs based on SNV freqs confidence interval
"""
__author__ = "Andreas Wilm, Niranjan Nagarajan"
__email__ = "wilma@gis.a-star.edu.sg"
__copyright__ = "2013,2014 Genome Institute of Singapore"
__license__ = "The MIT License"
# --- standard library imports
#
import sys
import logging
import os
import argparse
from math import sqrt
from collections import namedtuple
from itertools import groupby
#--- third-party imports
#
# /
#--- project specific imports
#
# James Casbon's pyvcf
import vcf
#global logger
# http://docs.python.org/library/logging.html
LOG = logging.getLogger("")
logging.basicConfig(level=logging.WARN,
format='%(levelname)s [%(asctime)s]: %(message)s')
CI = namedtuple('CI', ['min', 'max'])
# invocation of ipython on exceptions
#import sys, pdb
#from IPython.core import ultratb
#sys.excepthook = ultratb.FormattedTB(mode='Verbose',
# color_scheme='Linux', call_pdb=1)
def compute_ci(coverage, var_count):
    """Return an Agresti-Coull style confidence interval for a variant freq.

    http://en.wikipedia.org/wiki/Binomial_proportion_confidence_interval#Agresti-Coull_Interval
    Uses the adjusted counts n~ = n + 4 and p~ = (X + 2)/n~.  The returned
    interval is p~ +/- 3 * (2*sqrt(p~*(1-p~)/n~)), with the lower bound
    clamped at zero.
    """
    n_adj = float(coverage + 4)
    p_adj = (var_count + 2) / n_adj
    half_width = 3 * (2 * sqrt(p_adj * (1 - p_adj) / n_adj))
    lower = max(0.0, p_adj - half_width)
    upper = p_adj + half_width
    return CI._make([lower, upper])
def fasta_iter(fasta_name):
    """Yield (header, sequence) tuples from a fasta file.

    The header is truncated at the first space, i.e. only the sequence id
    is returned.  Based on Brent Pedersen: https://www.biostars.org/p/710/
    """
    fh = open(fasta_name)
    # groupby() alternates between runs of header lines and runs of
    # sequence lines; ditch the boolean key (x[0]) and keep the groups.
    faiter = (x[1] for x in groupby(fh, lambda line: line[0] == ">"))
    for header in faiter:
        # Drop the leading ">" and anything after the first space.
        # NOTE: use the next() builtin instead of the Python-2-only
        # generator method .next(), so this works on Python 2.6+ and 3.x.
        header = next(header)[1:].strip().split(" ")[0]
        # Join all sequence lines of this record into one string.
        seq = "".join(s.strip() for s in next(faiter))
        yield header, seq
def cmdline_parser():
    """Create and return the command-line argument parser.

    See http://docs.python.org/dev/howto/argparse.html
    """
    parser = argparse.ArgumentParser(description=__doc__)
    # boolean switches
    for flag, dest, help_txt in [
            ("--verbose", "verbose", "be verbose"),
            ("--debug", "debug", "enable debugging")]:
        parser.add_argument(flag, action="store_true", dest=dest,
                            help=help_txt)
    # input / output files
    parser.add_argument("-i", "--variants", dest="var_file",
                        help="variant input file (vcf format)")
    parser.add_argument("-r", "--ref", dest="reffa",
                        help="Reference fasta file (for reconstructing cluster sequence)")
    parser.add_argument("-o", "--out", dest="cluster_file", default="-",
                        help="Cluster output file (- for stdout = default)")
    return parser
def vcf_var_to_str(v):
    """Render a vcf record as 'CHROM POS REF>ALT[,ALT...] AF'."""
    alts = ",".join([str(a) for a in v.ALT])
    return "{0} {1:d} {2}>{3} {4:f}".format(
        v.CHROM, v.POS, v.REF, alts, v.INFO['AF'])
def main():
    """Parse arguments, read SNVs from a vcf file, cluster them by
    overlapping allele-frequency confidence intervals and report the
    clusters (optionally with a reconstructed haplotype per cluster).
    """
    parser = cmdline_parser()
    args = parser.parse_args()
    # FIXME catch unrecognized args (not just (len(args)
    if args.verbose:
        LOG.setLevel(logging.INFO)
    if args.debug:
        LOG.setLevel(logging.DEBUG)
    # --- validate input arguments ('-' means stdin)
    for (in_file, descr) in [(args.var_file, "variant file")]:
        if not in_file:
            parser.error("%s input file argument missing." % descr)
            sys.exit(1)
        if not os.path.exists(in_file) and in_file != "-":
            sys.stderr.write(
                "file '%s' does not exist.\n" % in_file)
            sys.exit(1)
    # --- validate output arguments ('-' means stdout)
    for (out_file, descr) in [(args.cluster_file, "cluster output file")]:
        if not out_file:
            parser.error("%s output file argument missing." % descr)
            sys.exit(1)
        if os.path.exists(out_file) and out_file!="-":
            sys.stderr.write(
                "Cowardly refusing to overwrite existing"
                " output file '%s'.\n" % out_file)
            sys.exit(1)
    if args.cluster_file == '-':
        fh_out = sys.stdout
    else:
        fh_out = open(args.cluster_file, 'w')
    # Load the (single) reference sequence, if one was given; it is only
    # needed for reconstructing per-cluster haplotype sequences below.
    if args.reffa:
        refno = 0
        for refname, refseq in fasta_iter(args.reffa):
            if refno > 0:
                sys.stderr.write("Only supporting one sequence\n")
                sys.exit(1)
            refno += 1
    else:
        refseq = ""
    if args.var_file == '-':
        vcf_fh = sys.stdin
    else:
        vcf_fh = vcf.VCFReader(filename=args.var_file)
    var_list = [v for v in vcf_fh]
    # Non-SNP variants (indels etc.) are silently dropped after a warning.
    if any([not v.is_snp for v in var_list]):
        sys.stderr.write("WARNING: Only supporting SNPs! Automatically removing others\n")
        var_list = [v for v in var_list if v.is_snp]
    LOG.info("Parsed %d SNPs from %s" % (len(var_list), args.var_file))
    # AF (allele freq) and DP (depth) INFO fields are mandatory; the CI is
    # computed from the variant count reconstructed as AF*DP.
    assert all([x.INFO.has_key('AF') and x.INFO.has_key('DP')
                for x in var_list])
    # Sort by descending allele frequency; clustering below is single-pass.
    var_list = sorted(var_list, key=lambda x: x.INFO['AF'], reverse=True)
    ci_list = [compute_ci(v.INFO['DP'], int(v.INFO['AF'] * v.INFO['DP']))
               for v in var_list]
    var_ci_list = list(zip(var_list, ci_list))
    del var_list, ci_list# paranoia
    if len(var_ci_list)==0:
        fh_out.write("No variants <-> no clusters!\n")
        if fh_out != sys.stdout:
            fh_out.close()
        sys.exit(0)
    # Greedy clustering: seed a cluster with the highest-frequency variant,
    # then extend it while the next variant's CI overlaps the cluster's
    # current lower bound; otherwise start a new cluster.
    cluster = dict()
    clu_no = 0
    seed_var, seed_ci = var_ci_list[0]
    #cluster[clu_no,'members'] = ["%s %f" % (seed.repr, seed.freq)]
    cluster[clu_no,'members'] = [seed_var]
    cluster[clu_no,'min'] = seed_ci.min
    cluster[clu_no,'max'] = seed_ci.max
    for var, ci in var_ci_list[1:]:
        LOG.debug("checking %s %f: max_ci %f vvar. clu_min %f" % (
            var, var.INFO['AF'], ci.max, cluster[clu_no,'min']))
        if ci.max > cluster[clu_no,'min']:
            #cluster[clu_no,'members'].append("%s %f" % (var.repr, var.freq))
            cluster[clu_no,'members'].append(var)
        else:
            clu_no += 1
            seed = var
            #cluster[clu_no,'members'] = ["%s %f" % (seed.repr, seed.freq)]
            cluster[clu_no,'members'] = [seed]
            cluster[clu_no,'min'] = ci.min
            cluster[clu_no,'max'] = ci.max
    # Report each cluster; one comment line per cluster plus, if a
    # reference was given, the reconstructed haplotype in fasta format.
    for i in range(clu_no+1):
        fh_out.write("# cluster %d (freq. range: %f - %f): %s\n" % (
            i+1, cluster[i,'min'], cluster[i,'max'],
            ', '.join([vcf_var_to_str(x) for x in cluster[i,'members']])))
        # write sequence as well if we have a reference
        if refseq:
            haplotype = refseq
            for v in sorted(cluster[i,'members'], key = lambda v: v.POS):
                # FIXME random order for multi-allelic psositions
                assert v.CHROM == refname
                assert refseq[v.POS-1] == v.REF# use refseq to not break for multi-allelic positions
                assert len(v.ALT)==1, ("Support for 1 base alt only")
                alt = str(v.ALT[0])
                idx = v.POS-1
                haplotype = haplotype[:idx] + alt + haplotype[idx + 1:]
            fh_out.write(">haplotype-cluster-{}\n{}\n".format(i+1, haplotype))
    if fh_out != sys.stdout:
        fh_out.close()
    print("%d clusters found (written to %s)" % (clu_no+1, fh_out.name))
if __name__ == "__main__":
main()
LOG.info("Successful program exit")
| 30.251938 | 100 | 0.557207 |
__author__ = "Andreas Wilm, Niranjan Nagarajan"
__email__ = "wilma@gis.a-star.edu.sg"
__copyright__ = "2013,2014 Genome Institute of Singapore"
__license__ = "The MIT License"
import sys
import logging
import os
import argparse
from math import sqrt
from collections import namedtuple
from itertools import groupby
import vcf
#global logger
# http://docs.python.org/library/logging.html
LOG = logging.getLogger("")
logging.basicConfig(level=logging.WARN,
format='%(levelname)s [%(asctime)s]: %(message)s')
CI = namedtuple('CI', ['min', 'max'])
# invocation of ipython on exceptions
#import sys, pdb
#from IPython.core import ultratb
#sys.excepthook = ultratb.FormattedTB(mode='Verbose',
# color_scheme='Linux', call_pdb=1)
def compute_ci(coverage, var_count):
n_t = float(coverage + 4)
p_t = (var_count + 2) / n_t
ci = 2 * sqrt(p_t * (1-p_t) / n_t)
min_ci = p_t - 3*ci
if min_ci < 0.0:
min_ci = 0.0
max_ci = p_t + 3*ci
return CI._make([min_ci, max_ci])
def fasta_iter(fasta_name):
fh = open(fasta_name)
# ditch the boolean (x[0]) and just keep the header or sequence since
# we know they alternate.
faiter = (x[1] for x in groupby(fh, lambda line: line[0] == ">"))
for header in faiter:
# drop the ">"
#header = header.next()[1:].strip()
header = header.next()[1:].strip().split(" ")[0]
# join all sequence lines to one.
seq = "".join(s.strip() for s in faiter.next())
yield header, seq
def cmdline_parser():
# http://docs.python.org/dev/howto/argparse.html
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("--verbose",
action="store_true",
dest="verbose",
help="be verbose")
parser.add_argument("--debug",
action="store_true",
dest="debug",
help="enable debugging")
parser.add_argument("-i", "--variants",
dest="var_file",
help="variant input file (vcf format)")
parser.add_argument("-r", "--ref",
dest="reffa",
help="Reference fasta file (for reconstructing cluster sequence)")
parser.add_argument("-o", "--out",
dest="cluster_file",
default="-",
help="Cluster output file (- for stdout = default)")
return parser
def vcf_var_to_str(v):
return "%s %d %s>%s %f" % (
v.CHROM, v.POS, v.REF, ','.join(["%s" % x for x in v.ALT]), v.INFO['AF'])
def main():
parser = cmdline_parser()
args = parser.parse_args()
# FIXME catch unrecognized args (not just (len(args)
if args.verbose:
LOG.setLevel(logging.INFO)
if args.debug:
LOG.setLevel(logging.DEBUG)
for (in_file, descr) in [(args.var_file, "variant file")]:
if not in_file:
parser.error("%s input file argument missing." % descr)
sys.exit(1)
if not os.path.exists(in_file) and in_file != "-":
sys.stderr.write(
"file '%s' does not exist.\n" % in_file)
sys.exit(1)
for (out_file, descr) in [(args.cluster_file, "cluster output file")]:
if not out_file:
parser.error("%s output file argument missing." % descr)
sys.exit(1)
if os.path.exists(out_file) and out_file!="-":
sys.stderr.write(
"Cowardly refusing to overwrite existing"
" output file '%s'.\n" % out_file)
sys.exit(1)
if args.cluster_file == '-':
fh_out = sys.stdout
else:
fh_out = open(args.cluster_file, 'w')
if args.reffa:
refno = 0
for refname, refseq in fasta_iter(args.reffa):
if refno > 0:
sys.stderr.write("Only supporting one sequence\n")
sys.exit(1)
refno += 1
else:
refseq = ""
if args.var_file == '-':
vcf_fh = sys.stdin
else:
vcf_fh = vcf.VCFReader(filename=args.var_file)
var_list = [v for v in vcf_fh]
if any([not v.is_snp for v in var_list]):
sys.stderr.write("WARNING: Only supporting SNPs! Automatically removing others\n")
var_list = [v for v in var_list if v.is_snp]
LOG.info("Parsed %d SNPs from %s" % (len(var_list), args.var_file))
assert all([x.INFO.has_key('AF') and x.INFO.has_key('DP')
for x in var_list])
var_list = sorted(var_list, key=lambda x: x.INFO['AF'], reverse=True)
ci_list = [compute_ci(v.INFO['DP'], int(v.INFO['AF'] * v.INFO['DP']))
for v in var_list]
var_ci_list = list(zip(var_list, ci_list))
del var_list, ci_list# paranoia
if len(var_ci_list)==0:
fh_out.write("No variants <-> no clusters!\n")
if fh_out != sys.stdout:
fh_out.close()
sys.exit(0)
cluster = dict()
clu_no = 0
seed_var, seed_ci = var_ci_list[0]
#cluster[clu_no,'members'] = ["%s %f" % (seed.repr, seed.freq)]
cluster[clu_no,'members'] = [seed_var]
cluster[clu_no,'min'] = seed_ci.min
cluster[clu_no,'max'] = seed_ci.max
for var, ci in var_ci_list[1:]:
LOG.debug("checking %s %f: max_ci %f vvar. clu_min %f" % (
var, var.INFO['AF'], ci.max, cluster[clu_no,'min']))
if ci.max > cluster[clu_no,'min']:
#cluster[clu_no,'members'].append("%s %f" % (var.repr, var.freq))
cluster[clu_no,'members'].append(var)
else:
clu_no += 1
seed = var
#cluster[clu_no,'members'] = ["%s %f" % (seed.repr, seed.freq)]
cluster[clu_no,'members'] = [seed]
cluster[clu_no,'min'] = ci.min
cluster[clu_no,'max'] = ci.max
for i in range(clu_no+1):
fh_out.write("# cluster %d (freq. range: %f - %f): %s\n" % (
i+1, cluster[i,'min'], cluster[i,'max'],
', '.join([vcf_var_to_str(x) for x in cluster[i,'members']])))
# write sequence as well if we have a reference
if refseq:
haplotype = refseq
for v in sorted(cluster[i,'members'], key = lambda v: v.POS):
# FIXME random order for multi-allelic psositions
assert v.CHROM == refname
assert refseq[v.POS-1] == v.REF# use refseq to not break for multi-allelic positions
assert len(v.ALT)==1, ("Support for 1 base alt only")
alt = str(v.ALT[0])
idx = v.POS-1
haplotype = haplotype[:idx] + alt + haplotype[idx + 1:]
fh_out.write(">haplotype-cluster-{}\n{}\n".format(i+1, haplotype))
if fh_out != sys.stdout:
fh_out.close()
print("%d clusters found (written to %s)" % (clu_no+1, fh_out.name))
if __name__ == "__main__":
main()
LOG.info("Successful program exit")
| true | true |
f71a51a2c95b6595d277af331364047551e8377e | 608 | py | Python | problems/number-complement.py | sailikhithk/tech-interview-prep | e833764cf98915d56118bddfa0e01871c58de75e | [
"Apache-2.0"
] | null | null | null | problems/number-complement.py | sailikhithk/tech-interview-prep | e833764cf98915d56118bddfa0e01871c58de75e | [
"Apache-2.0"
] | null | null | null | problems/number-complement.py | sailikhithk/tech-interview-prep | e833764cf98915d56118bddfa0e01871c58de75e | [
"Apache-2.0"
] | null | null | null | """
First, we convert the num to its birary.
```
>>> bin(5)
>>> '0b101'
```
Second, we need to return the base10 of binary's the complement.
Complement is easy `'101' => '010'`.
Turn to base10:
```
'010' => 0*pow(2, 2) + 1*pow(2, 1) + 0*pow(2, 0)
'11011' => 1*pow(2, 4) + 1*pow(2, 3) + 0*pow(2, 2) + 1*pow(2, 1) + 1*pow(2, 0)
```
Basics bit manipulation.
<https://www.youtube.com/watch?v=NLKQEOgBAnw>
"""
class Solution(object):
    def findComplement(self, num):
        """Return the complement of num's binary representation.

        Flipping every bit of an n-bit number is the same as XOR-ing it
        with the all-ones mask (1 << n) - 1, which avoids building and
        scanning the binary string (O(1)-ish instead of O(n) Python loop).
        max(..., 1) keeps num == 0 mapping to 1, matching the original
        string-based implementation (bin(0) -> '0', one flippable bit).
        """
        mask = (1 << max(num.bit_length(), 1)) - 1
        return num ^ mask
| 23.384615 | 78 | 0.555921 | class Solution(object):
def findComplement(self, num):
b = bin(num)[2:]
opt = 0
for i, c in enumerate(reversed(b)):
if c=='0': opt+=pow(2, i)
return opt
| true | true |
f71a52383480c16caea8e9d42551045766340f5e | 251 | py | Python | jacoren/__version__.py | kuszaj/jacoren | 42344982248ed688da8f3d9383ca4ae63f542cf3 | [
"MIT"
] | 1 | 2018-02-27T08:54:40.000Z | 2018-02-27T08:54:40.000Z | jacoren/__version__.py | kuszaj/jacoren | 42344982248ed688da8f3d9383ca4ae63f542cf3 | [
"MIT"
] | null | null | null | jacoren/__version__.py | kuszaj/jacoren | 42344982248ed688da8f3d9383ca4ae63f542cf3 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Package info."""
__version__ = '0.1.0'
__title__ = 'jacoren'
__description__ = ''
__author__ = 'Piotr Kuszaj'
__author_email__ = 'peterkuszaj@gmail.com'
__license__ = 'MIT'
__all__ = ('platform', 'cpu', 'memory', 'disks')
| 20.916667 | 48 | 0.657371 |
__version__ = '0.1.0'
__title__ = 'jacoren'
__description__ = ''
__author__ = 'Piotr Kuszaj'
__author_email__ = 'peterkuszaj@gmail.com'
__license__ = 'MIT'
__all__ = ('platform', 'cpu', 'memory', 'disks')
| true | true |
f71a5241dff474c819eaebc8af456389f5a76087 | 4,386 | py | Python | tests/unit/test_task.py | lekshmimallika-aot/business-schemas | d95b43f1d04e29fd9bab101789c277db54123d9b | [
"Apache-2.0"
] | 2 | 2020-02-05T21:36:27.000Z | 2021-08-28T23:56:52.000Z | tests/unit/test_task.py | lekshmimallika-aot/business-schemas | d95b43f1d04e29fd9bab101789c277db54123d9b | [
"Apache-2.0"
] | 13 | 2020-03-25T17:28:11.000Z | 2022-03-30T20:06:04.000Z | tests/unit/test_task.py | lekshmimallika-aot/business-schemas | d95b43f1d04e29fd9bab101789c277db54123d9b | [
"Apache-2.0"
] | 19 | 2020-01-31T23:11:47.000Z | 2022-03-30T18:08:15.000Z | # Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test Suite to ensure the legal task schema is valid.
This suite should have at least 1 test for filing and todo task items.
"""
from registry_schemas import validate
from registry_schemas.example_data import FILING_HEADER, UNMANAGED
def test_valid_task_todo():
    """Assert that the schema accepts a valid todo task."""
    task = {
        'task': {
            'todo': {
                'business': {
                    'cacheId': 1,
                    'foundingDate': '2007-04-08T00:00:00+00:00',
                    'identifier': 'CP0002098',
                    'lastLedgerTimestamp': '2019-04-15T20:05:49.068272+00:00',
                    'legalName': 'Legal Name - CP0002098'
                },
                'header': {
                    'name': 'annualReport',
                    'ARFilingYear': 2019,
                    'status': 'NEW'
                }
            }
        },
        'order': 2,
        'enabled': False
    }

    is_valid, errors = validate(task, 'task')

    # Attach the schema errors to the assertion so a failure is
    # self-explaining, instead of the old commented-out debug loop and
    # unconditional print().
    assert is_valid, errors
def test_valid_task_filing():
    """Assert that the schema accepts a valid filing task."""
    from copy import deepcopy

    filing = deepcopy(FILING_HEADER)
    filing['filing']['unmanaged'] = UNMANAGED
    new_task = {
        'task': {
            'filing': deepcopy(filing['filing'])
        },
        'order': 1,
        'enabled': True
    }

    is_valid, errors = validate(new_task, 'task')

    assert is_valid
def test_invalid_task_neither():
    """Assert that the schema rejects a task that is neither a todo nor a filing."""
    task = {
        'task': {
            'invalid': {
                'foo': 'abc',
                'bar': '123'
            }
        },
        'order': 2,
        'enabled': False
    }

    is_valid, errors = validate(task, 'task')

    # Surface the validation errors in the assertion message instead of the
    # old commented-out debug loop and unconditional print().
    assert not is_valid, errors
def test_invalid_task_missing_order():
    """Assert that the schema rejects a task missing the 'order' property."""
    task = {
        'task': {
            'todo': {
                'business': {
                    'cacheId': 1,
                    'foundingDate': '2007-04-08T00:00:00+00:00',
                    'identifier': 'CP0002098',
                    'lastLedgerTimestamp': '2019-04-15T20:05:49.068272+00:00',
                    'legalName': 'Legal Name - CP0002098'
                },
                'header': {
                    'name': 'annualReport',
                    'ARFilingYear': 2019,
                    'status': 'NEW'
                }
            }
        },
        'enabled': False
    }

    is_valid, errors = validate(task, 'task')

    # Surface the validation errors in the assertion message instead of the
    # old commented-out debug loop and unconditional print().
    assert not is_valid, errors
def test_invalid_task_missing_enabled():
    """Assert that the schema rejects a task missing the 'enabled' property."""
    task = {
        'task': {
            'todo': {
                'business': {
                    'cacheId': 1,
                    'foundingDate': '2007-04-08T00:00:00+00:00',
                    'identifier': 'CP0002098',
                    'lastLedgerTimestamp': '2019-04-15T20:05:49.068272+00:00',
                    'legalName': 'Legal Name - CP0002098'
                },
                'header': {
                    'name': 'annualReport',
                    'ARFilingYear': 2019,
                    'status': 'NEW'
                }
            }
        },
        'order': 2
    }

    is_valid, errors = validate(task, 'task')

    # Surface the validation errors in the assertion message instead of the
    # old commented-out debug loop and unconditional print().
    assert not is_valid, errors
| 27.4125 | 79 | 0.512312 |
from registry_schemas import validate
from registry_schemas.example_data import FILING_HEADER, UNMANAGED
def test_valid_task_todo():
task = {
'task': {
'todo': {
'business': {
'cacheId': 1,
'foundingDate': '2007-04-08T00:00:00+00:00',
'identifier': 'CP0002098',
'lastLedgerTimestamp': '2019-04-15T20:05:49.068272+00:00',
'legalName': 'Legal Name - CP0002098'
},
'header': {
'name': 'annualReport',
'ARFilingYear': 2019,
'status': 'NEW'
}
}
},
'order': 2,
'enabled': False
}
is_valid, errors = validate(task, 'task')
print(errors)
assert is_valid
def test_valid_task_filing():
import copy
filing = copy.deepcopy(FILING_HEADER)
filing['filing']['unmanaged'] = UNMANAGED
new_task = {
'task': {
'filing': copy.deepcopy(filing['filing'])
},
'order': 1,
'enabled': True
}
is_valid, errors = validate(new_task, 'task')
assert is_valid
def test_invalid_task_neither():
task = {
'task': {
'invalid': {
'foo': 'abc',
'bar': '123'
}
},
'order': 2,
'enabled': False
}
is_valid, errors = validate(task, 'task')
print(errors)
assert not is_valid
def test_invalid_task_missing_order():
task = {
'task': {
'todo': {
'business': {
'cacheId': 1,
'foundingDate': '2007-04-08T00:00:00+00:00',
'identifier': 'CP0002098',
'lastLedgerTimestamp': '2019-04-15T20:05:49.068272+00:00',
'legalName': 'Legal Name - CP0002098'
},
'header': {
'name': 'annualReport',
'ARFilingYear': 2019,
'status': 'NEW'
}
}
},
'enabled': False
}
is_valid, errors = validate(task, 'task')
print(errors)
assert not is_valid
def test_invalid_task_missing_enabled():
task = {
'task': {
'todo': {
'business': {
'cacheId': 1,
'foundingDate': '2007-04-08T00:00:00+00:00',
'identifier': 'CP0002098',
'lastLedgerTimestamp': '2019-04-15T20:05:49.068272+00:00',
'legalName': 'Legal Name - CP0002098'
},
'header': {
'name': 'annualReport',
'ARFilingYear': 2019,
'status': 'NEW'
}
}
},
'order': 2
}
is_valid, errors = validate(task, 'task')
print(errors)
assert not is_valid
| true | true |
f71a524f93d7cd5915ce95bc5b60b531dbf7e8cf | 18,115 | py | Python | scons-local/SCons/Tool/GettextCommon.py | bibleuspro/scons | 625d446ae8996ff1b3d660c44e2827fc832cf12b | [
"MIT"
] | 1 | 2017-02-10T00:26:44.000Z | 2017-02-10T00:26:44.000Z | scons-local/SCons/Tool/GettextCommon.py | bibleuspro/scons | 625d446ae8996ff1b3d660c44e2827fc832cf12b | [
"MIT"
] | null | null | null | scons-local/SCons/Tool/GettextCommon.py | bibleuspro/scons | 625d446ae8996ff1b3d660c44e2827fc832cf12b | [
"MIT"
] | null | null | null | """SCons.Tool.GettextCommon module
Used by several tools of `gettext` toolset.
"""
# Copyright (c) 2001 - 2014 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Tool/GettextCommon.py 2014/07/05 09:42:21 garyo"
import SCons.Warnings
import re
#############################################################################
# Warning hierarchy: one *ToolWarning base per gettext binary, plus a
# *NotFound subclass issued when the corresponding binary cannot be detected.
class XgettextToolWarning(SCons.Warnings.Warning): pass
class XgettextNotFound(XgettextToolWarning): pass
class MsginitToolWarning(SCons.Warnings.Warning): pass
class MsginitNotFound(MsginitToolWarning): pass
class MsgmergeToolWarning(SCons.Warnings.Warning): pass
class MsgmergeNotFound(MsgmergeToolWarning): pass
class MsgfmtToolWarning(SCons.Warnings.Warning): pass
class MsgfmtNotFound(MsgfmtToolWarning): pass
#############################################################################
# Register the classes above so SCons actually emits them (warning classes
# are silent until explicitly enabled).
SCons.Warnings.enableWarningClass(XgettextToolWarning)
SCons.Warnings.enableWarningClass(XgettextNotFound)
SCons.Warnings.enableWarningClass(MsginitToolWarning)
SCons.Warnings.enableWarningClass(MsginitNotFound)
SCons.Warnings.enableWarningClass(MsgmergeToolWarning)
SCons.Warnings.enableWarningClass(MsgmergeNotFound)
SCons.Warnings.enableWarningClass(MsgfmtToolWarning)
SCons.Warnings.enableWarningClass(MsgfmtNotFound)
#############################################################################
#############################################################################
class _POTargetFactory(object):
    """ A factory of `PO` target files.

    Defaults differ from those of `SCons.Node.FS.FS`: produced nodes are
    marked `Precious` and excluded from `Clean` (both required by the
    gettext builders and actions), and are ignored from the default
    target `'.'`.
    """
    def __init__(self, env, nodefault=True, alias=None, precious=True,
                 noclean=True):
        """ Object constructor.

        **Arguments**

            - *env* (`SCons.Environment.Environment`)
            - *nodefault* (`boolean`) - if `True`, produced nodes are ignored
              from the default target `'.'`
            - *alias* (`string`) - if provided, produced nodes are added to
              this alias, and the alias is set `AlwaysBuild`
            - *precious* (`boolean`) - if `True`, produced nodes are marked
              `Precious`
            - *noclean* (`boolean`) - if `True`, produced nodes are excluded
              from `Clean`
        """
        self.env = env
        self.nodefault = nodefault
        self.alias = alias
        self.precious = precious
        self.noclean = noclean

    def _create_node(self, name, factory, directory=None, create=1):
        """ Create a node via *factory* and apply this factory's flags. """
        import SCons.Util
        produced = factory(name, directory, create)
        produced.set_noclean(self.noclean)
        produced.set_precious(self.precious)
        if self.nodefault:
            self.env.Ignore('.', produced)
        if self.alias:
            self.env.AlwaysBuild(self.env.Alias(self.alias, produced))
        return produced

    def Entry(self, name, directory=None, create=1):
        """ Create an `SCons.Node.FS.Entry` """
        return self._create_node(name, self.env.fs.Entry, directory, create)

    def File(self, name, directory=None, create=1):
        """ Create an `SCons.Node.FS.File` """
        return self._create_node(name, self.env.fs.File, directory, create)
#############################################################################
#############################################################################
# Matches a '#' comment up to end-of-line; stripped before parsing LINGUAS.
_re_comment = re.compile(r'(#[^\n\r]+)$', re.M)
# Matches a single language/locale token, e.g. 'en' or 'pt_BR'.
_re_lang = re.compile(r'([a-zA-Z0-9_]+)', re.M)
#############################################################################
def _read_linguas_from_files(env, linguas_files = None):
""" Parse `LINGUAS` file and return list of extracted languages """
import SCons.Util
import SCons.Environment
global _re_comment
global _re_lang
if not SCons.Util.is_List(linguas_files) \
and not SCons.Util.is_String(linguas_files) \
and not isinstance(linguas_files, SCons.Node.FS.Base) \
and linguas_files:
# If, linguas_files==True or such, then read 'LINGUAS' file.
linguas_files = [ 'LINGUAS' ]
if linguas_files is None:
return []
fnodes = env.arg2nodes(linguas_files)
linguas = []
for fnode in fnodes:
contents = _re_comment.sub("", fnode.get_text_contents())
ls = [ l for l in _re_lang.findall(contents) if l ]
linguas.extend(ls)
return linguas
#############################################################################
#############################################################################
from SCons.Builder import BuilderBase
#############################################################################
class _POFileBuilder(BuilderBase):
    """ `PO` file builder.

    This is multi-target single-source builder. In typical situation the source
    is single `POT` file, e.g. `messages.pot`, and there are multiple `PO`
    targets to be updated from this `POT`. We must run
    `SCons.Builder.BuilderBase._execute()` separatelly for each target to track
    dependencies separatelly for each target file.

    **NOTE**: if we call `SCons.Builder.BuilderBase._execute(.., target, ...)`
    with target being list of all targets, all targets would be rebuilt each time
    one of the targets from this list is missing. This would happen, for example,
    when new language `ll` enters `LINGUAS_FILE` (at this moment there is no
    `ll.po` file yet). To avoid this, we override
    `SCons.Builder.BuilerBase._execute()` and call it separatelly for each
    target. Here we also append to the target list the languages read from
    `LINGUAS_FILE`.
    """
    #
    #* The argument for overriding _execute(): We must use environment with
    #  builder overrides applied (see BuilderBase.__init__(). Here it comes for
    #  free.
    #* The argument against using 'emitter': The emitter is called too late
    #  by BuilderBase._execute(). If user calls, for example:
    #
    #    env.POUpdate(LINGUAS_FILE = 'LINGUAS')
    #
    #  the builder throws error, because it is called with target=None,
    #  source=None and is trying to "generate" sources or target list first.
    #  If user calls
    #
    #    env.POUpdate(['foo', 'baz'], LINGUAS_FILE = 'LINGUAS')
    #
    #  the env.BuilderWrapper() calls our builder with target=None,
    #  source=['foo', 'baz']. The BuilderBase._execute() then splits execution
    #  and execute iterativelly (recursion) self._execute(None, source[i]).
    #  After that it calls emitter (which is quite too late). The emitter is
    #  also called in each iteration, what makes things yet worse.
    def __init__(self, env, **kw):
        # Fill in gettext-specific defaults for any builder keyword the
        # caller did not supply explicitly.
        if not 'suffix' in kw:
            kw['suffix'] = '$POSUFFIX'
        if not 'src_suffix' in kw:
            kw['src_suffix'] = '$POTSUFFIX'
        if not 'src_builder' in kw:
            kw['src_builder'] = '_POTUpdateBuilder'
        if not 'single_source' in kw:
            kw['single_source'] = True
        alias = None
        if 'target_alias' in kw:
            # 'target_alias' is consumed here; BuilderBase must not see it.
            alias = kw['target_alias']
            del kw['target_alias']
        if not 'target_factory' in kw:
            # PO targets are precious/noclean by default (see _POTargetFactory).
            kw['target_factory'] = _POTargetFactory(env, alias=alias).File
        BuilderBase.__init__(self, **kw)

    def _execute(self, env, target, source, *args, **kw):
        """ Execute builder's actions.

        Here we append to `target` the languages read from `$LINGUAS_FILE` and
        apply `SCons.Builder.BuilderBase._execute()` separatelly to each target.
        The arguments and return value are same as for
        `SCons.Builder.BuilderBase._execute()`.
        """
        import SCons.Util
        import SCons.Node
        linguas_files = None
        if env.has_key('LINGUAS_FILE') and env['LINGUAS_FILE']:
            linguas_files = env['LINGUAS_FILE']
            # This prevents endless recursion loop (we'll be invoked once for
            # each target appended here, we must not extend the list again).
            env['LINGUAS_FILE'] = None
            linguas = _read_linguas_from_files(env,linguas_files)
            # Merge the LINGUAS languages into whatever target spec we got.
            if SCons.Util.is_List(target):
                target.extend(linguas)
            elif target is not None:
                target = [target] + linguas
            else:
                target = linguas
        if not target:
            # Let the SCons.BuilderBase to handle this patologic situation
            return BuilderBase._execute( self, env, target, source, *args, **kw)
        # The rest is ours
        if not SCons.Util.is_List(target):
            target = [ target ]
        result = []
        # One _execute() call per target, so each PO file tracks its own
        # dependencies (see class docstring for why).
        for tgt in target:
            r = BuilderBase._execute( self, env, [tgt], source, *args, **kw)
            result.extend(r)
        if linguas_files is not None:
            # Restore the value cleared above for the recursion guard.
            env['LINGUAS_FILE'] = linguas_files
        return SCons.Node.NodeList(result)
#############################################################################
import SCons.Environment
#############################################################################
def _translate(env, target=None, source=SCons.Environment._null, *args, **kw):
    """ Function for `Translate()` pseudo-builder """
    # First refresh the POT template from the sources, then update the
    # per-language PO files from that template.
    potfile = env.POTUpdate(None, source, *args, **kw)
    if target is None:
        target = []
    return env.POUpdate(target, potfile, *args, **kw)
#############################################################################
#############################################################################
class RPaths(object):
    """ Callable object, which returns pathnames relative to SCons current
    working directory.

    It seems like `SCons.Node.FS.Base.get_path()` returns absolute paths
    for nodes that are outside of current working directory (`env.fs.getcwd()`).
    Here, we often have `SConscript`, `POT` and `PO` files within `po/`
    directory and source files (e.g. `*.c`) outside of it. When generating `POT`
    template file, references to source files are written to `POT` template, so
    a translator may later quickly jump to appropriate source file and line from
    its `PO` editor (e.g. `poedit`). Relative paths in `PO` file are usually
    interpreted by `PO` editor as paths relative to the place, where `PO` file
    lives. The absolute paths would make resultant `POT` file nonportable, as
    the references would be correct only on the machine, where `POT` file was
    recently re-created. For such reason, we need a function, which always
    returns relative paths. This is the purpose of `RPaths` callable object.

    The `__call__` method returns paths relative to current woking directory, but
    we assume, that *xgettext(1)* is run from the directory, where target file is
    going to be created.

    Note, that this may not work for files distributed over several hosts or
    across different drives on windows. We assume here, that single local
    filesystem holds both source files and target `POT` templates.
    """
    # NOTE: This callable object returns pathnames of dirs/files relative to
    # current working directory. The pathname remains relative also for entries
    # that are outside of current working directory (node, that
    # SCons.Node.FS.File and siblings return absolute path in such case). For
    # simplicity we compute path relative to current working directory, this
    # seems be enough for our purposes (don't need TARGET variable and
    # SCons.Defaults.Variable_Caller stuff).
    def __init__(self, env):
        """ Initialize `RPaths` callable object.

        **Arguments**:

            - *env* - a `SCons.Environment.Environment` object, defines *current
              working dir*.
        """
        self.env = env

    # FIXME: I'm not sure, how it should be implemented (what the *args are in
    # general, what is **kw).
    def __call__(self, nodes, *args, **kw):
        """ Return nodes' paths (strings) relative to current working directory.

        **Arguments**:

            - *nodes* ([`SCons.Node.FS.Base`]) - list of nodes.
            - *args* - currently unused.
            - *kw* - currently unused.

        **Returns**:

            - Tuple of strings, which represent paths relative to current working
              directory (for given environment).
        """
        # os.path.relpath is available only on python >= 2.6. We use our own
        # implementation. It's taken from BareNecessities package:
        # http://jimmyg.org/work/code/barenecessities/index.html
        from posixpath import curdir
        def relpath(path, start=curdir):
            # Docstring moved to the first statement (before the import) so it
            # actually registers as the function's docstring (PEP 257).
            """Return a relative version of a path"""
            import posixpath
            if not path:
                raise ValueError("no path specified")
            start_list = posixpath.abspath(start).split(posixpath.sep)
            path_list = posixpath.abspath(path).split(posixpath.sep)
            # Work out how much of the filepath is shared by start and path.
            i = len(posixpath.commonprefix([start_list, path_list]))
            rel_list = [posixpath.pardir] * (len(start_list)-i) + path_list[i:]
            if not rel_list:
                return posixpath.curdir
            return posixpath.join(*rel_list)
        import os
        import SCons.Node.FS
        rpaths = ()
        cwd = self.env.fs.getcwd().get_abspath()
        for node in nodes:
            rpath = None
            if isinstance(node, SCons.Node.FS.Base):
                rpath = relpath(node.get_abspath(), cwd)
            # FIXME: Other types possible here?
            if rpath is not None:
                rpaths += (rpath,)
        return rpaths
#############################################################################
#############################################################################
def _init_po_files(target, source, env):
""" Action function for `POInit` builder. """
nop = lambda target, source, env : 0
if env.has_key('POAUTOINIT'):
autoinit = env['POAUTOINIT']
else:
autoinit = False
# Well, if everything outside works well, this loop should do single
# iteration. Otherwise we are rebuilding all the targets even, if just
# one has changed (but is this out fault?).
for tgt in target:
if not tgt.exists():
if autoinit:
action = SCons.Action.Action('$MSGINITCOM', '$MSGINITCOMSTR')
else:
msg = 'File ' + repr(str(tgt)) + ' does not exist. ' \
+ 'If you are a translator, you can create it through: \n' \
+ '$MSGINITCOM'
action = SCons.Action.Action(nop, msg)
status = action([tgt], source, env)
if status: return status
return 0
#############################################################################
#############################################################################
def _detect_xgettext(env):
""" Detects *xgettext(1)* binary """
if env.has_key('XGETTEXT'):
return env['XGETTEXT']
xgettext = env.Detect('xgettext');
if xgettext:
return xgettext
raise SCons.Errors.StopError(XgettextNotFound,"Could not detect xgettext")
return None
#############################################################################
def _xgettext_exists(env):
return _detect_xgettext(env)
#############################################################################
#############################################################################
def _detect_msginit(env):
""" Detects *msginit(1)* program. """
if env.has_key('MSGINIT'):
return env['MSGINIT']
msginit = env.Detect('msginit');
if msginit:
return msginit
raise SCons.Errors.StopError(MsginitNotFound, "Could not detect msginit")
return None
#############################################################################
def _msginit_exists(env):
return _detect_msginit(env)
#############################################################################
#############################################################################
def _detect_msgmerge(env):
""" Detects *msgmerge(1)* program. """
if env.has_key('MSGMERGE'):
return env['MSGMERGE']
msgmerge = env.Detect('msgmerge');
if msgmerge:
return msgmerge
raise SCons.Errors.StopError(MsgmergeNotFound, "Could not detect msgmerge")
return None
#############################################################################
def _msgmerge_exists(env):
return _detect_msgmerge(env)
#############################################################################
#############################################################################
def _detect_msgfmt(env):
""" Detects *msgmfmt(1)* program. """
if env.has_key('MSGFMT'):
return env['MSGFMT']
msgfmt = env.Detect('msgfmt');
if msgfmt:
return msgfmt
raise SCons.Errors.StopError(MsgfmtNotFound, "Could not detect msgfmt")
return None
#############################################################################
def _msgfmt_exists(env):
return _detect_msgfmt(env)
#############################################################################
#############################################################################
def tool_list(platform, env):
""" List tools that shall be generated by top-level `gettext` tool """
return [ 'xgettext', 'msginit', 'msgmerge', 'msgfmt' ]
#############################################################################
| 42.030162 | 96 | 0.599558 |
__revision__ = "src/engine/SCons/Tool/GettextCommon.py 2014/07/05 09:42:21 garyo"
import SCons.Warnings
import re
| true | true |
f71a5321b655a69d95438bc4946e72b3c1c4abfa | 5,314 | py | Python | scilab2py/utils.py | blink1073/scilab2py | d487828a7087890ce1e035a7c09c4819ff8276c4 | [
"MIT"
] | 8 | 2015-10-16T23:28:16.000Z | 2020-06-19T18:49:18.000Z | scilab2py/utils.py | blink1073/scilab2py | d487828a7087890ce1e035a7c09c4819ff8276c4 | [
"MIT"
] | 8 | 2015-06-25T20:57:56.000Z | 2020-04-03T22:33:16.000Z | scilab2py/utils.py | blink1073/scilab2py | d487828a7087890ce1e035a7c09c4819ff8276c4 | [
"MIT"
] | 6 | 2015-04-21T12:23:44.000Z | 2021-10-01T00:08:47.000Z | """
.. module:: utils
:synopsis: Miscellaneous helper constructs
.. moduleauthor:: Steven Silvester <steven.silvester@ieee.org>
"""
import os
import inspect
import dis
import tempfile
import sys
from .compat import PY2
def _remove_temp_files(dirname):
"""
Remove the created mat files in the user's temp folder
"""
import os
import glob
for fname in glob.glob(os.path.join(dirname, 'tmp*.mat')):
try:
os.remove(fname)
except OSError: # pragma: no cover
pass
def get_nout():
"""
Return the number of return values the caller is expecting.
Adapted from the ompc project.
Returns
=======
out : int
Number of arguments expected by caller.
"""
frame = inspect.currentframe()
# step into the function that called us
# nout is two frames back
frame = frame.f_back.f_back
bytecode = frame.f_code.co_code
if sys.version_info >= (3, 6):
instruction = bytecode[frame.f_lasti + 2]
else:
instruction = bytecode[frame.f_lasti + 3]
instruction = ord(instruction) if PY2 else instruction
if instruction == dis.opmap['UNPACK_SEQUENCE']:
if sys.version_info >= (3, 6):
howmany = bytecode[frame.f_lasti + 3]
else:
howmany = bytecode[frame.f_lasti + 4]
howmany = ord(howmany) if PY2 else howmany
return howmany
elif instruction in [dis.opmap['POP_TOP'], dis.opmap['PRINT_EXPR']]:
return 0
return 1
def create_file(temp_dir):
"""
Create a MAT file with a random name in the temp directory
Parameters
==========
temp_dir : str, optional
If specified, the file will be created in that directory,
otherwise a default directory is used.
Returns
=======
out : str
Random file name with the desired extension
"""
temp_file = tempfile.NamedTemporaryFile(suffix='.mat', delete=False,
dir=temp_dir)
temp_file.close()
return os.path.abspath(temp_file.name)
class Scilab2PyError(Exception):
""" Called when we can't open Scilab or Scilab throws an error
"""
pass
class Struct(dict):
"""
Scilab style struct, enhanced.
Supports dictionary and attribute style access. Can be pickled,
and supports code completion in a REPL.
Examples
========
>>> from pprint import pprint
>>> from scilab2py import Struct
>>> a = Struct()
>>> a.b = 'spam' # a["b"] == 'spam'
>>> a.c["d"] = 'eggs' # a.c.d == 'eggs'
>>> pprint(a)
{'b': 'spam', 'c': {'d': 'eggs'}}
"""
def __getattr__(self, attr):
"""Access the dictionary keys for unknown attributes."""
try:
return self[attr]
except KeyError:
msg = "'Struct' object has no attribute %s" % attr
raise AttributeError(msg)
def __getitem__(self, attr):
"""
Get a dict value; create a Struct if requesting a Struct member.
Do not create a key if the attribute starts with an underscore.
"""
if attr in self.keys() or attr.startswith('_'):
return dict.__getitem__(self, attr)
frame = inspect.currentframe()
# step into the function that called us
if frame.f_back.f_back and self._is_allowed(frame.f_back.f_back):
dict.__setitem__(self, attr, Struct())
elif self._is_allowed(frame.f_back):
dict.__setitem__(self, attr, Struct())
return dict.__getitem__(self, attr)
def _is_allowed(self, frame):
"""Check for allowed op code in the calling frame"""
allowed = [dis.opmap['STORE_ATTR'], dis.opmap['LOAD_CONST'],
dis.opmap.get('STOP_CODE', 0)]
bytecode = frame.f_code.co_code
instruction = bytecode[frame.f_lasti + 3]
instruction = ord(instruction) if PY2 else instruction
return instruction in allowed
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
@property
def __dict__(self):
"""Allow for code completion in a REPL"""
return self.copy()
def get_log(name=None):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging
if name is None:
name = 'scilab2py'
else:
name = 'scilab2py.' + name
log = logging.getLogger(name)
log.setLevel(logging.WARN)
return log
def _setup_log():
"""Configure root logger.
"""
import logging
import sys
try:
handler = logging.StreamHandler(stream=sys.stdout)
except TypeError: # pragma: no cover
handler = logging.StreamHandler(strm=sys.stdout)
log = get_log()
log.addHandler(handler)
log.setLevel(logging.WARN)
log.propagate = False
_setup_log()
| 26.974619 | 75 | 0.585058 | import os
import inspect
import dis
import tempfile
import sys
from .compat import PY2
def _remove_temp_files(dirname):
import os
import glob
for fname in glob.glob(os.path.join(dirname, 'tmp*.mat')):
try:
os.remove(fname)
except OSError:
pass
def get_nout():
frame = inspect.currentframe()
frame = frame.f_back.f_back
bytecode = frame.f_code.co_code
if sys.version_info >= (3, 6):
instruction = bytecode[frame.f_lasti + 2]
else:
instruction = bytecode[frame.f_lasti + 3]
instruction = ord(instruction) if PY2 else instruction
if instruction == dis.opmap['UNPACK_SEQUENCE']:
if sys.version_info >= (3, 6):
howmany = bytecode[frame.f_lasti + 3]
else:
howmany = bytecode[frame.f_lasti + 4]
howmany = ord(howmany) if PY2 else howmany
return howmany
elif instruction in [dis.opmap['POP_TOP'], dis.opmap['PRINT_EXPR']]:
return 0
return 1
def create_file(temp_dir):
temp_file = tempfile.NamedTemporaryFile(suffix='.mat', delete=False,
dir=temp_dir)
temp_file.close()
return os.path.abspath(temp_file.name)
class Scilab2PyError(Exception):
pass
class Struct(dict):
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
msg = "'Struct' object has no attribute %s" % attr
raise AttributeError(msg)
def __getitem__(self, attr):
if attr in self.keys() or attr.startswith('_'):
return dict.__getitem__(self, attr)
frame = inspect.currentframe()
if frame.f_back.f_back and self._is_allowed(frame.f_back.f_back):
dict.__setitem__(self, attr, Struct())
elif self._is_allowed(frame.f_back):
dict.__setitem__(self, attr, Struct())
return dict.__getitem__(self, attr)
def _is_allowed(self, frame):
allowed = [dis.opmap['STORE_ATTR'], dis.opmap['LOAD_CONST'],
dis.opmap.get('STOP_CODE', 0)]
bytecode = frame.f_code.co_code
instruction = bytecode[frame.f_lasti + 3]
instruction = ord(instruction) if PY2 else instruction
return instruction in allowed
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
@property
def __dict__(self):
return self.copy()
def get_log(name=None):
import logging
if name is None:
name = 'scilab2py'
else:
name = 'scilab2py.' + name
log = logging.getLogger(name)
log.setLevel(logging.WARN)
return log
def _setup_log():
import logging
import sys
try:
handler = logging.StreamHandler(stream=sys.stdout)
except TypeError:
handler = logging.StreamHandler(strm=sys.stdout)
log = get_log()
log.addHandler(handler)
log.setLevel(logging.WARN)
log.propagate = False
_setup_log()
| true | true |
f71a539e1bc739d74244c33e61ec48175b1a0e68 | 182 | py | Python | yatube/yatube/urls.py | Cooke64/hw02_community | 10005d05e0142ec9e68b3578d239b6e3da66c0a3 | [
"BSD-3-Clause"
] | null | null | null | yatube/yatube/urls.py | Cooke64/hw02_community | 10005d05e0142ec9e68b3578d239b6e3da66c0a3 | [
"BSD-3-Clause"
] | null | null | null | yatube/yatube/urls.py | Cooke64/hw02_community | 10005d05e0142ec9e68b3578d239b6e3da66c0a3 | [
"BSD-3-Clause"
] | null | null | null | from django.contrib import admin
from django.urls import include, path
urlpatterns = [
path('', include('posts.urls', namespace='post')),
path('admin/', admin.site.urls),
]
| 22.75 | 54 | 0.686813 | from django.contrib import admin
from django.urls import include, path
urlpatterns = [
path('', include('posts.urls', namespace='post')),
path('admin/', admin.site.urls),
]
| true | true |
f71a53b58b0c817babbdccd697976cfe68604cef | 182 | py | Python | Chapter 4/4-5.py | lzhang1/BeginningPygame | c239925041a6fa361386f65316ef4bea12c3b482 | [
"MIT"
] | 43 | 2015-09-20T02:05:48.000Z | 2022-03-01T22:00:43.000Z | Chapter 4/4-5.py | lzhang1/BeginningPygame | c239925041a6fa361386f65316ef4bea12c3b482 | [
"MIT"
] | null | null | null | Chapter 4/4-5.py | lzhang1/BeginningPygame | c239925041a6fa361386f65316ef4bea12c3b482 | [
"MIT"
] | 40 | 2015-05-19T06:51:13.000Z | 2022-03-27T18:11:16.000Z | def lerp(value1, value2, factor):
return value1+(value2-value1)*factor
print(lerp(100, 200, 0.))
print(lerp(100, 200, 1.))
print(lerp(100, 200, .5))
print(lerp(100, 200, .25))
| 22.75 | 40 | 0.659341 | def lerp(value1, value2, factor):
return value1+(value2-value1)*factor
print(lerp(100, 200, 0.))
print(lerp(100, 200, 1.))
print(lerp(100, 200, .5))
print(lerp(100, 200, .25))
| true | true |
f71a53e8b0bfef59cec65a1838904cf9ebf97f18 | 3,838 | py | Python | paasta_tools/metrics/metrics_lib.py | xcorail/paasta | 3f132c73b45fcf0afc31ddb889205ecd9394d4bb | [
"Apache-2.0"
] | null | null | null | paasta_tools/metrics/metrics_lib.py | xcorail/paasta | 3f132c73b45fcf0afc31ddb889205ecd9394d4bb | [
"Apache-2.0"
] | 4 | 2021-02-08T20:42:08.000Z | 2021-06-02T00:51:04.000Z | paasta_tools/metrics/metrics_lib.py | eric-erki/An-open-distributed-platform-as-a-service | 6769c5601685deb1017910ab8d09109e8e998892 | [
"Apache-2.0"
] | null | null | null | import logging
import time
from abc import ABC
from abc import abstractmethod
from typing import Any
from typing import Callable
from typing import Dict
from typing import Optional
from typing import Type
from typing import Union
from typing_extensions import Protocol
from paasta_tools.utils import load_system_paasta_config
log = logging.getLogger(__name__)
try:
import yelp_meteorite
except ImportError:
yelp_meteorite = None
_metrics_interfaces: Dict[str, Type['BaseMetrics']] = {}
class TimerProtocol(Protocol):
def start(self) -> None:
raise NotImplementedError()
def stop(self) -> None:
raise NotImplementedError()
class GaugeProtocol(Protocol):
def set(self, value: Union[int, float]) -> None:
raise NotImplementedError()
class CounterProtocol(Protocol):
def count(self) -> None:
raise NotImplementedError()
class BaseMetrics(ABC):
def __init__(self, base_name: str) -> None:
self.base_name = base_name
@abstractmethod
def create_timer(self, name: str, **kwargs: Any) -> TimerProtocol:
raise NotImplementedError()
@abstractmethod
def create_gauge(self, name: str, **kwargs: Any) -> GaugeProtocol:
raise NotImplementedError()
@abstractmethod
def create_counter(self, name: str, **kwargs: Any) -> CounterProtocol:
raise NotImplementedError()
def get_metrics_interface(base_name: str) -> BaseMetrics:
metrics_provider = load_system_paasta_config().get_metrics_provider()
return _metrics_interfaces[metrics_provider](base_name)
def register_metrics_interface(name: Optional[str]) -> Callable[[Type[BaseMetrics]], Type[BaseMetrics]]:
def outer(func: Type[BaseMetrics]) -> Type[BaseMetrics]:
_metrics_interfaces[name] = func
return func
return outer
@register_metrics_interface('meteorite')
class MeteoriteMetrics(BaseMetrics):
def __init__(self, base_name: str) -> None:
self.base_name = base_name
if yelp_meteorite is None:
raise ImportError("yelp_meteorite not imported, pleast try another metrics provider")
def create_timer(self, name: str, **kwargs: Any) -> TimerProtocol:
return yelp_meteorite.create_timer(self.base_name + '.' + name, kwargs)
def create_gauge(self, name: str, **kwargs: Any) -> GaugeProtocol:
return yelp_meteorite.create_gauge(self.base_name + '.' + name, kwargs)
def create_counter(self, name: str, **kwargs: Any) -> CounterProtocol:
return yelp_meteorite.create_counter(self.base_name + '.' + name, kwargs)
class Timer(TimerProtocol):
def __init__(self, name: str) -> None:
self.name = name
def start(self) -> None:
log.debug("timer {} start at {}".format(self.name, time.time()))
def stop(self) -> None:
log.debug("timer {} stop at {}".format(self.name, time.time()))
class Gauge(GaugeProtocol):
def __init__(self, name: str) -> None:
self.name = name
def set(self, value: Union[int, float]) -> None:
log.debug(f"gauge {self.name} set to {value}")
class Counter(GaugeProtocol):
def __init__(self, name: str) -> None:
self.name = name
self.counter = 0
def count(self) -> None:
self.counter += 1
log.debug(f"counter {self.name} incremented to {self.counter}")
@register_metrics_interface(None)
class NoMetrics(BaseMetrics):
def __init__(self, base_name: str) -> None:
self.base_name = base_name
def create_timer(self, name: str, **kwargs: Any) -> Timer:
return Timer(self.base_name + '.' + name)
def create_gauge(self, name: str, **kwargs: Any) -> Gauge:
return Gauge(self.base_name + '.' + name)
def create_counter(self, name: str, **kwargs: Any) -> Counter:
return Counter(self.base_name + '.' + name)
| 29.075758 | 104 | 0.683689 | import logging
import time
from abc import ABC
from abc import abstractmethod
from typing import Any
from typing import Callable
from typing import Dict
from typing import Optional
from typing import Type
from typing import Union
from typing_extensions import Protocol
from paasta_tools.utils import load_system_paasta_config
log = logging.getLogger(__name__)
try:
import yelp_meteorite
except ImportError:
yelp_meteorite = None
_metrics_interfaces: Dict[str, Type['BaseMetrics']] = {}
class TimerProtocol(Protocol):
def start(self) -> None:
raise NotImplementedError()
def stop(self) -> None:
raise NotImplementedError()
class GaugeProtocol(Protocol):
def set(self, value: Union[int, float]) -> None:
raise NotImplementedError()
class CounterProtocol(Protocol):
def count(self) -> None:
raise NotImplementedError()
class BaseMetrics(ABC):
def __init__(self, base_name: str) -> None:
self.base_name = base_name
@abstractmethod
def create_timer(self, name: str, **kwargs: Any) -> TimerProtocol:
raise NotImplementedError()
@abstractmethod
def create_gauge(self, name: str, **kwargs: Any) -> GaugeProtocol:
raise NotImplementedError()
@abstractmethod
def create_counter(self, name: str, **kwargs: Any) -> CounterProtocol:
raise NotImplementedError()
def get_metrics_interface(base_name: str) -> BaseMetrics:
metrics_provider = load_system_paasta_config().get_metrics_provider()
return _metrics_interfaces[metrics_provider](base_name)
def register_metrics_interface(name: Optional[str]) -> Callable[[Type[BaseMetrics]], Type[BaseMetrics]]:
def outer(func: Type[BaseMetrics]) -> Type[BaseMetrics]:
_metrics_interfaces[name] = func
return func
return outer
@register_metrics_interface('meteorite')
class MeteoriteMetrics(BaseMetrics):
def __init__(self, base_name: str) -> None:
self.base_name = base_name
if yelp_meteorite is None:
raise ImportError("yelp_meteorite not imported, pleast try another metrics provider")
def create_timer(self, name: str, **kwargs: Any) -> TimerProtocol:
return yelp_meteorite.create_timer(self.base_name + '.' + name, kwargs)
def create_gauge(self, name: str, **kwargs: Any) -> GaugeProtocol:
return yelp_meteorite.create_gauge(self.base_name + '.' + name, kwargs)
def create_counter(self, name: str, **kwargs: Any) -> CounterProtocol:
return yelp_meteorite.create_counter(self.base_name + '.' + name, kwargs)
class Timer(TimerProtocol):
def __init__(self, name: str) -> None:
self.name = name
def start(self) -> None:
log.debug("timer {} start at {}".format(self.name, time.time()))
def stop(self) -> None:
log.debug("timer {} stop at {}".format(self.name, time.time()))
class Gauge(GaugeProtocol):
def __init__(self, name: str) -> None:
self.name = name
def set(self, value: Union[int, float]) -> None:
log.debug(f"gauge {self.name} set to {value}")
class Counter(GaugeProtocol):
def __init__(self, name: str) -> None:
self.name = name
self.counter = 0
def count(self) -> None:
self.counter += 1
log.debug(f"counter {self.name} incremented to {self.counter}")
@register_metrics_interface(None)
class NoMetrics(BaseMetrics):
def __init__(self, base_name: str) -> None:
self.base_name = base_name
def create_timer(self, name: str, **kwargs: Any) -> Timer:
return Timer(self.base_name + '.' + name)
def create_gauge(self, name: str, **kwargs: Any) -> Gauge:
return Gauge(self.base_name + '.' + name)
def create_counter(self, name: str, **kwargs: Any) -> Counter:
return Counter(self.base_name + '.' + name)
| true | true |
f71a540bc5690d18d0e43343992b3cd169988b23 | 3,516 | py | Python | DDQN.py | TimoleonLatinopoulos/MortalKombatOpenAI | 59dc89d1f50dd74690859e5e1fa18701a5246382 | [
"MIT"
] | 1 | 2020-08-12T08:16:06.000Z | 2020-08-12T08:16:06.000Z | DDQN.py | TimoleonLatinopoulos/MortalKombatOpenAI | 59dc89d1f50dd74690859e5e1fa18701a5246382 | [
"MIT"
] | null | null | null | DDQN.py | TimoleonLatinopoulos/MortalKombatOpenAI | 59dc89d1f50dd74690859e5e1fa18701a5246382 | [
"MIT"
] | null | null | null | import tensorflow as tf
from keras.activations import relu
from keras.initializers import VarianceScaling
from keras.layers import Dense, Conv2D, Flatten
from keras.losses import logcosh
class DDQN:
""" Implements a Dueling Dual Deep Q-Network based on the frames of the Retro Environment """
def __init__(self, n_actions, frame_height=63, frame_width=113, stacked_frames=4, learning_rate=0.00001):
self.n_actions = n_actions
self.frame_height = frame_height
self.frame_width = frame_width
self.stacked_frames = stacked_frames
self.learning_rate = learning_rate
self.input = tf.placeholder(shape=[None, self.frame_height, self.frame_width, self.stacked_frames],
dtype=tf.float32)
self.input = self.input / 255
# Convolutional layers
self.conv1 = self.conv_layer(self.input, 32, [8, 8], 4, 'conv1')
self.conv2 = self.conv_layer(self.conv1, 64, [4, 4], 2, 'conv2')
self.conv3 = self.conv_layer(self.conv2, 64, [3, 3], 1, 'conv3')
self.flat = Flatten()(self.conv3)
self.dense1 = self.dense_layer(self.flat, 512, 'dense1', relu)
# Splitting into value and advantage streams
self.v_stream, self.a_stream = tf.split(self.dense1, 2, 1)
self.value = self.dense_layer(self.v_stream, 1, 'value')
self.advantage = self.dense_layer(self.a_stream, self.n_actions, 'advantage')
# Getting Q-values from value and advantage streams
self.q_values = self.value + tf.subtract(self.advantage, tf.reduce_mean(self.advantage, axis=1, keepdims=True))
self.prediction = tf.argmax(self.q_values, 1)
# targetQ according to Bellman equation
self.target_q = tf.placeholder(shape=[None], dtype=tf.float32)
self.action = tf.placeholder(shape=[None], dtype=tf.uint8)
self.action_one_hot = tf.one_hot(self.action, self.n_actions, dtype=tf.float32)
self.Q = tf.reduce_sum(tf.multiply(self.q_values, self.action_one_hot), axis=1)
# Parameter updates
self.error = logcosh(self.target_q, self.Q)
self.loss = tf.reduce_mean(self.error)
self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate)
self.update = self.optimizer.minimize(self.loss)
@staticmethod
def conv_layer(_inputs, _filters, _kernel_size, _strides, _name):
return Conv2D(filters=_filters, kernel_size=_kernel_size, strides=_strides,
kernel_initializer=VarianceScaling(scale=2.0), padding="valid",
activation=relu, use_bias=False, name=_name)(_inputs)
@staticmethod
def dense_layer(_inputs, _units, _name, _activation=None):
return Dense(activation=_activation, units=_units,
kernel_initializer=VarianceScaling(scale=2.0), name=_name)(_inputs)
class TargetNetworkUpdater:
""" Updates the variables and the weights of the target network based on the main network """
def __init__(self, main_vars, target_vars):
self.main_vars = main_vars
self.target_vars = target_vars
def update_target_vars(self):
update_ops = []
for i, var in enumerate(self.main_vars):
copy_op = self.target_vars[i].assign(var.value())
update_ops.append(copy_op)
return update_ops
def update_networks(self, sess):
update_ops = self.update_target_vars()
for copy_op in update_ops:
sess.run(copy_op)
| 43.95 | 119 | 0.674346 | import tensorflow as tf
from keras.activations import relu
from keras.initializers import VarianceScaling
from keras.layers import Dense, Conv2D, Flatten
from keras.losses import logcosh
class DDQN:
def __init__(self, n_actions, frame_height=63, frame_width=113, stacked_frames=4, learning_rate=0.00001):
self.n_actions = n_actions
self.frame_height = frame_height
self.frame_width = frame_width
self.stacked_frames = stacked_frames
self.learning_rate = learning_rate
self.input = tf.placeholder(shape=[None, self.frame_height, self.frame_width, self.stacked_frames],
dtype=tf.float32)
self.input = self.input / 255
self.conv1 = self.conv_layer(self.input, 32, [8, 8], 4, 'conv1')
self.conv2 = self.conv_layer(self.conv1, 64, [4, 4], 2, 'conv2')
self.conv3 = self.conv_layer(self.conv2, 64, [3, 3], 1, 'conv3')
self.flat = Flatten()(self.conv3)
self.dense1 = self.dense_layer(self.flat, 512, 'dense1', relu)
self.v_stream, self.a_stream = tf.split(self.dense1, 2, 1)
self.value = self.dense_layer(self.v_stream, 1, 'value')
self.advantage = self.dense_layer(self.a_stream, self.n_actions, 'advantage')
self.q_values = self.value + tf.subtract(self.advantage, tf.reduce_mean(self.advantage, axis=1, keepdims=True))
self.prediction = tf.argmax(self.q_values, 1)
self.target_q = tf.placeholder(shape=[None], dtype=tf.float32)
self.action = tf.placeholder(shape=[None], dtype=tf.uint8)
self.action_one_hot = tf.one_hot(self.action, self.n_actions, dtype=tf.float32)
self.Q = tf.reduce_sum(tf.multiply(self.q_values, self.action_one_hot), axis=1)
self.error = logcosh(self.target_q, self.Q)
self.loss = tf.reduce_mean(self.error)
self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate)
self.update = self.optimizer.minimize(self.loss)
@staticmethod
def conv_layer(_inputs, _filters, _kernel_size, _strides, _name):
return Conv2D(filters=_filters, kernel_size=_kernel_size, strides=_strides,
kernel_initializer=VarianceScaling(scale=2.0), padding="valid",
activation=relu, use_bias=False, name=_name)(_inputs)
@staticmethod
def dense_layer(_inputs, _units, _name, _activation=None):
return Dense(activation=_activation, units=_units,
kernel_initializer=VarianceScaling(scale=2.0), name=_name)(_inputs)
class TargetNetworkUpdater:
def __init__(self, main_vars, target_vars):
self.main_vars = main_vars
self.target_vars = target_vars
def update_target_vars(self):
update_ops = []
for i, var in enumerate(self.main_vars):
copy_op = self.target_vars[i].assign(var.value())
update_ops.append(copy_op)
return update_ops
def update_networks(self, sess):
update_ops = self.update_target_vars()
for copy_op in update_ops:
sess.run(copy_op)
| true | true |
f71a5820fe472212056e6d6abaa0d96203b1f555 | 939 | py | Python | pglast/enums/pg_class.py | fentik/pglast | c4652b3a6098faf26fa8d3a8fd054f23acd72f9c | [
"PostgreSQL"
] | 1 | 2021-08-20T10:09:59.000Z | 2021-08-20T10:09:59.000Z | pglast/enums/pg_class.py | fentik/pglast | c4652b3a6098faf26fa8d3a8fd054f23acd72f9c | [
"PostgreSQL"
] | null | null | null | pglast/enums/pg_class.py | fentik/pglast | c4652b3a6098faf26fa8d3a8fd054f23acd72f9c | [
"PostgreSQL"
] | null | null | null | # -*- coding: utf-8 -*-
# :Project: pglast -- DO NOT EDIT: automatically extracted from pg_class.h @ 13-2.0.6-0-ga248206
# :Author: Lele Gaifax <lele@metapensiero.it>
# :License: GNU General Public License version 3 or later
# :Copyright: © 2017-2021 Lele Gaifax
#
from enum import Enum, IntEnum, IntFlag, auto
try:
from enum import StrEnum
except ImportError:
# Python < 3.10
class StrEnum(str, Enum):
pass
# #define-ed constants
RELKIND_RELATION = 'r'
RELKIND_INDEX = 'i'
RELKIND_SEQUENCE = 'S'
RELKIND_TOASTVALUE = 't'
RELKIND_VIEW = 'v'
RELKIND_MATVIEW = 'm'
RELKIND_COMPOSITE_TYPE = 'c'
RELKIND_FOREIGN_TABLE = 'f'
RELKIND_PARTITIONED_TABLE = 'p'
RELKIND_PARTITIONED_INDEX = 'I'
RELPERSISTENCE_PERMANENT = 'p'
RELPERSISTENCE_UNLOGGED = 'u'
RELPERSISTENCE_TEMP = 't'
REPLICA_IDENTITY_DEFAULT = 'd'
REPLICA_IDENTITY_NOTHING = 'n'
REPLICA_IDENTITY_FULL = 'f'
REPLICA_IDENTITY_INDEX = 'i'
| 17.388889 | 98 | 0.713525 |
from enum import Enum, IntEnum, IntFlag, auto
try:
from enum import StrEnum
except ImportError:
class StrEnum(str, Enum):
pass
'r'
RELKIND_INDEX = 'i'
RELKIND_SEQUENCE = 'S'
RELKIND_TOASTVALUE = 't'
RELKIND_VIEW = 'v'
RELKIND_MATVIEW = 'm'
RELKIND_COMPOSITE_TYPE = 'c'
RELKIND_FOREIGN_TABLE = 'f'
RELKIND_PARTITIONED_TABLE = 'p'
RELKIND_PARTITIONED_INDEX = 'I'
RELPERSISTENCE_PERMANENT = 'p'
RELPERSISTENCE_UNLOGGED = 'u'
RELPERSISTENCE_TEMP = 't'
REPLICA_IDENTITY_DEFAULT = 'd'
REPLICA_IDENTITY_NOTHING = 'n'
REPLICA_IDENTITY_FULL = 'f'
REPLICA_IDENTITY_INDEX = 'i'
| true | true |
f71a5952c0b0537a3a97b410e481a15d260c9393 | 7,086 | py | Python | d3rlpy/models/torch/encoders.py | meokz/d3rlpy | 40504e2d8b424547558ab82786c523e8f4626a82 | [
"MIT"
] | 2 | 2021-04-21T08:19:29.000Z | 2021-05-17T09:08:06.000Z | d3rlpy/models/torch/encoders.py | meokz/d3rlpy | 40504e2d8b424547558ab82786c523e8f4626a82 | [
"MIT"
] | null | null | null | d3rlpy/models/torch/encoders.py | meokz/d3rlpy | 40504e2d8b424547558ab82786c523e8f4626a82 | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
import torch.nn.functional as F
def _create_activation(activation_type):
if activation_type == 'relu':
return torch.relu
elif activation_type == 'swish':
return lambda x: x * torch.sigmoid(x)
raise ValueError('invalid activation_type.')
def create_encoder(observation_shape,
action_size=None,
use_batch_norm=False,
discrete_action=False,
activation_type='relu',
**kwargs):
activation = _create_activation(activation_type)
if len(observation_shape) == 3:
# pixel input
if action_size is not None:
return PixelEncoderWithAction(observation_shape,
action_size,
use_batch_norm=use_batch_norm,
discrete_action=discrete_action,
activation=activation,
**kwargs)
return PixelEncoder(observation_shape,
use_batch_norm=use_batch_norm,
activation=activation,
**kwargs)
elif len(observation_shape) == 1:
# vector input
if action_size is not None:
return VectorEncoderWithAction(observation_shape,
action_size,
use_batch_norm=use_batch_norm,
discrete_action=discrete_action,
activation=activation,
**kwargs)
return VectorEncoder(observation_shape,
use_batch_norm=use_batch_norm,
activation=activation,
**kwargs)
else:
raise ValueError('observation_shape must be 1d or 3d.')
class PixelEncoder(nn.Module):
def __init__(self,
observation_shape,
filters=None,
feature_size=None,
use_batch_norm=False,
activation=torch.relu):
super().__init__()
# default architecture is based on Nature DQN paper.
if filters is None:
filters = [(32, 8, 4), (64, 4, 2), (64, 3, 1)]
if feature_size is None:
feature_size = 512
self.observation_shape = observation_shape
self.use_batch_norm = use_batch_norm
self.activation = activation
self.feature_size = feature_size
# convolutional layers
in_channels = [observation_shape[0]] + [f[0] for f in filters[:-1]]
self.convs = nn.ModuleList()
self.conv_bns = nn.ModuleList()
for in_channel, f in zip(in_channels, filters):
out_channel, kernel_size, stride = f
conv = nn.Conv2d(in_channel,
out_channel,
kernel_size=kernel_size,
stride=stride)
self.convs.append(conv)
if use_batch_norm:
self.conv_bns.append(nn.BatchNorm2d(out_channel))
# last dense layer
self.fc = nn.Linear(self._get_linear_input_size(), feature_size)
if use_batch_norm:
self.fc_bn = nn.BatchNorm1d(feature_size)
def _get_linear_input_size(self):
x = torch.rand((1, ) + self.observation_shape)
with torch.no_grad():
return self._conv_encode(x).view(1, -1).shape[1]
def _conv_encode(self, x):
h = x
for i in range(len(self.convs)):
h = self.activation(self.convs[i](h))
if self.use_batch_norm:
h = self.conv_bns[i](h)
return h
def forward(self, x):
h = self._conv_encode(x)
h = self.activation(self.fc(h.view(h.shape[0], -1)))
if self.use_batch_norm:
h = self.fc_bn(h)
return h
class PixelEncoderWithAction(PixelEncoder):
def __init__(self,
observation_shape,
action_size,
filters=None,
feature_size=None,
use_batch_norm=False,
discrete_action=False,
activation=torch.relu):
self.action_size = action_size
self.discrete_action = discrete_action
super().__init__(observation_shape, filters, feature_size,
use_batch_norm, activation)
def _get_linear_input_size(self):
size = super()._get_linear_input_size()
return size + self.action_size
def forward(self, x, action):
h = self._conv_encode(x)
if self.discrete_action:
action = F.one_hot(action.view(-1).long(),
num_classes=self.action_size).float()
# cocat feature and action
h = torch.cat([h.view(h.shape[0], -1), action], dim=1)
h = self.activation(self.fc(h))
if self.use_batch_norm:
h = self.fc_bn(h)
return h
class VectorEncoder(nn.Module):
def __init__(self,
observation_shape,
hidden_units=None,
use_batch_norm=False,
activation=torch.relu):
super().__init__()
self.observation_shape = observation_shape
if hidden_units is None:
hidden_units = [256, 256]
self.use_batch_norm = use_batch_norm
self.feature_size = hidden_units[-1]
self.activation = activation
in_units = [observation_shape[0]] + hidden_units[:-1]
self.fcs = nn.ModuleList()
self.bns = nn.ModuleList()
for in_unit, out_unit in zip(in_units, hidden_units):
self.fcs.append(nn.Linear(in_unit, out_unit))
if use_batch_norm:
self.bns.append(nn.BatchNorm1d(out_unit))
def forward(self, x):
h = x
for i in range(len(self.fcs)):
h = self.activation(self.fcs[i](h))
if self.use_batch_norm:
h = self.bns[i](h)
return h
class VectorEncoderWithAction(VectorEncoder):
def __init__(self,
observation_shape,
action_size,
hidden_units=None,
use_batch_norm=False,
discrete_action=False,
activation=torch.relu):
self.action_size = action_size
self.discrete_action = discrete_action
concat_shape = (observation_shape[0] + action_size, )
super().__init__(concat_shape, hidden_units, use_batch_norm,
activation)
self.observation_shape = observation_shape
def forward(self, x, action):
if self.discrete_action:
action = F.one_hot(action.view(-1).long(),
num_classes=self.action_size).float()
x = torch.cat([x, action], dim=1)
return super().forward(x)
| 34.565854 | 75 | 0.540785 | import torch
import torch.nn as nn
import torch.nn.functional as F
def _create_activation(activation_type):
if activation_type == 'relu':
return torch.relu
elif activation_type == 'swish':
return lambda x: x * torch.sigmoid(x)
raise ValueError('invalid activation_type.')
def create_encoder(observation_shape,
action_size=None,
use_batch_norm=False,
discrete_action=False,
activation_type='relu',
**kwargs):
activation = _create_activation(activation_type)
if len(observation_shape) == 3:
if action_size is not None:
return PixelEncoderWithAction(observation_shape,
action_size,
use_batch_norm=use_batch_norm,
discrete_action=discrete_action,
activation=activation,
**kwargs)
return PixelEncoder(observation_shape,
use_batch_norm=use_batch_norm,
activation=activation,
**kwargs)
elif len(observation_shape) == 1:
if action_size is not None:
return VectorEncoderWithAction(observation_shape,
action_size,
use_batch_norm=use_batch_norm,
discrete_action=discrete_action,
activation=activation,
**kwargs)
return VectorEncoder(observation_shape,
use_batch_norm=use_batch_norm,
activation=activation,
**kwargs)
else:
raise ValueError('observation_shape must be 1d or 3d.')
class PixelEncoder(nn.Module):
def __init__(self,
observation_shape,
filters=None,
feature_size=None,
use_batch_norm=False,
activation=torch.relu):
super().__init__()
if filters is None:
filters = [(32, 8, 4), (64, 4, 2), (64, 3, 1)]
if feature_size is None:
feature_size = 512
self.observation_shape = observation_shape
self.use_batch_norm = use_batch_norm
self.activation = activation
self.feature_size = feature_size
in_channels = [observation_shape[0]] + [f[0] for f in filters[:-1]]
self.convs = nn.ModuleList()
self.conv_bns = nn.ModuleList()
for in_channel, f in zip(in_channels, filters):
out_channel, kernel_size, stride = f
conv = nn.Conv2d(in_channel,
out_channel,
kernel_size=kernel_size,
stride=stride)
self.convs.append(conv)
if use_batch_norm:
self.conv_bns.append(nn.BatchNorm2d(out_channel))
self.fc = nn.Linear(self._get_linear_input_size(), feature_size)
if use_batch_norm:
self.fc_bn = nn.BatchNorm1d(feature_size)
def _get_linear_input_size(self):
x = torch.rand((1, ) + self.observation_shape)
with torch.no_grad():
return self._conv_encode(x).view(1, -1).shape[1]
def _conv_encode(self, x):
h = x
for i in range(len(self.convs)):
h = self.activation(self.convs[i](h))
if self.use_batch_norm:
h = self.conv_bns[i](h)
return h
def forward(self, x):
h = self._conv_encode(x)
h = self.activation(self.fc(h.view(h.shape[0], -1)))
if self.use_batch_norm:
h = self.fc_bn(h)
return h
class PixelEncoderWithAction(PixelEncoder):
def __init__(self,
observation_shape,
action_size,
filters=None,
feature_size=None,
use_batch_norm=False,
discrete_action=False,
activation=torch.relu):
self.action_size = action_size
self.discrete_action = discrete_action
super().__init__(observation_shape, filters, feature_size,
use_batch_norm, activation)
def _get_linear_input_size(self):
size = super()._get_linear_input_size()
return size + self.action_size
def forward(self, x, action):
h = self._conv_encode(x)
if self.discrete_action:
action = F.one_hot(action.view(-1).long(),
num_classes=self.action_size).float()
h = torch.cat([h.view(h.shape[0], -1), action], dim=1)
h = self.activation(self.fc(h))
if self.use_batch_norm:
h = self.fc_bn(h)
return h
class VectorEncoder(nn.Module):
def __init__(self,
observation_shape,
hidden_units=None,
use_batch_norm=False,
activation=torch.relu):
super().__init__()
self.observation_shape = observation_shape
if hidden_units is None:
hidden_units = [256, 256]
self.use_batch_norm = use_batch_norm
self.feature_size = hidden_units[-1]
self.activation = activation
in_units = [observation_shape[0]] + hidden_units[:-1]
self.fcs = nn.ModuleList()
self.bns = nn.ModuleList()
for in_unit, out_unit in zip(in_units, hidden_units):
self.fcs.append(nn.Linear(in_unit, out_unit))
if use_batch_norm:
self.bns.append(nn.BatchNorm1d(out_unit))
def forward(self, x):
h = x
for i in range(len(self.fcs)):
h = self.activation(self.fcs[i](h))
if self.use_batch_norm:
h = self.bns[i](h)
return h
class VectorEncoderWithAction(VectorEncoder):
def __init__(self,
observation_shape,
action_size,
hidden_units=None,
use_batch_norm=False,
discrete_action=False,
activation=torch.relu):
self.action_size = action_size
self.discrete_action = discrete_action
concat_shape = (observation_shape[0] + action_size, )
super().__init__(concat_shape, hidden_units, use_batch_norm,
activation)
self.observation_shape = observation_shape
def forward(self, x, action):
if self.discrete_action:
action = F.one_hot(action.view(-1).long(),
num_classes=self.action_size).float()
x = torch.cat([x, action], dim=1)
return super().forward(x)
| true | true |
f71a5d5dd300e03985a3ca77a605a2e70ab1f462 | 121,589 | py | Python | tests/git_cl_test.py | 2youyou2/depot_tools | 8b94108e684872a89f7108f51ba74f01220d64fa | [
"BSD-3-Clause"
] | 7 | 2018-09-26T11:10:40.000Z | 2020-12-19T13:32:12.000Z | tests/git_cl_test.py | 2youyou2/depot_tools | 8b94108e684872a89f7108f51ba74f01220d64fa | [
"BSD-3-Clause"
] | null | null | null | tests/git_cl_test.py | 2youyou2/depot_tools | 8b94108e684872a89f7108f51ba74f01220d64fa | [
"BSD-3-Clause"
] | 4 | 2020-03-27T07:49:45.000Z | 2020-11-17T02:46:42.000Z | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for git_cl.py."""
import contextlib
import datetime
import json
import logging
import os
import StringIO
import sys
import tempfile
import unittest
import urlparse
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from testing_support.auto_stub import TestCase
import metrics
# We have to disable monitoring before importing git_cl.
metrics.DISABLE_METRICS_COLLECTION = True
import gerrit_util
import git_cl
import git_common
import git_footers
import subprocess2
def callError(code=1, cmd='', cwd='', stdout='', stderr=''):
return subprocess2.CalledProcessError(code, cmd, cwd, stdout, stderr)
CERR1 = callError(1)
def MakeNamedTemporaryFileMock(expected_content):
class NamedTemporaryFileMock(object):
def __init__(self, *args, **kwargs):
self.name = '/tmp/named'
self.expected_content = expected_content
def __enter__(self):
return self
def __exit__(self, _type, _value, _tb):
pass
def write(self, content):
if self.expected_content:
assert content == self.expected_content
def close(self):
pass
return NamedTemporaryFileMock
class ChangelistMock(object):
# A class variable so we can access it when we don't have access to the
# instance that's being set.
desc = ""
def __init__(self, **kwargs):
pass
def GetIssue(self):
return 1
def GetDescription(self, force=False):
return ChangelistMock.desc
def UpdateDescription(self, desc, force=False):
ChangelistMock.desc = desc
class PresubmitMock(object):
def __init__(self, *args, **kwargs):
self.reviewers = []
self.more_cc = ['chromium-reviews+test-more-cc@chromium.org']
@staticmethod
def should_continue():
return True
class GitCheckoutMock(object):
def __init__(self, *args, **kwargs):
pass
@staticmethod
def reset():
GitCheckoutMock.conflict = False
def apply_patch(self, p):
if GitCheckoutMock.conflict:
raise Exception('failed')
class WatchlistsMock(object):
def __init__(self, _):
pass
@staticmethod
def GetWatchersForPaths(_):
return ['joe@example.com']
class CodereviewSettingsFileMock(object):
def __init__(self):
pass
# pylint: disable=no-self-use
def read(self):
return ("CODE_REVIEW_SERVER: gerrit.chromium.org\n" +
"GERRIT_HOST: True\n")
class AuthenticatorMock(object):
def __init__(self, *_args):
pass
def has_cached_credentials(self):
return True
def authorize(self, http):
return http
def CookiesAuthenticatorMockFactory(hosts_with_creds=None, same_auth=False):
"""Use to mock Gerrit/Git credentials from ~/.netrc or ~/.gitcookies.
Usage:
>>> self.mock(git_cl.gerrit_util, "CookiesAuthenticator",
CookiesAuthenticatorMockFactory({'host': ('user', _, 'pass')})
OR
>>> self.mock(git_cl.gerrit_util, "CookiesAuthenticator",
CookiesAuthenticatorMockFactory(
same_auth=('user', '', 'pass'))
"""
class CookiesAuthenticatorMock(git_cl.gerrit_util.CookiesAuthenticator):
def __init__(self): # pylint: disable=super-init-not-called
# Intentionally not calling super() because it reads actual cookie files.
pass
@classmethod
def get_gitcookies_path(cls):
return '~/.gitcookies'
@classmethod
def get_netrc_path(cls):
return '~/.netrc'
def _get_auth_for_host(self, host):
if same_auth:
return same_auth
return (hosts_with_creds or {}).get(host)
return CookiesAuthenticatorMock
class MockChangelistWithBranchAndIssue():
def __init__(self, branch, issue):
self.branch = branch
self.issue = issue
def GetBranch(self):
return self.branch
def GetIssue(self):
return self.issue
class SystemExitMock(Exception):
pass
class TestGitClBasic(unittest.TestCase):
def test_get_description(self):
cl = git_cl.Changelist(issue=1, codereview='gerrit',
codereview_host='host')
cl.description = 'x'
cl.has_description = True
cl._codereview_impl.FetchDescription = lambda *a, **kw: 'y'
self.assertEquals(cl.GetDescription(), 'x')
self.assertEquals(cl.GetDescription(force=True), 'y')
self.assertEquals(cl.GetDescription(), 'y')
def test_description_footers(self):
cl = git_cl.Changelist(issue=1, codereview='gerrit',
codereview_host='host')
cl.description = '\n'.join([
'This is some message',
'',
'It has some lines',
'and, also',
'',
'Some: Really',
'Awesome: Footers',
])
cl.has_description = True
cl._codereview_impl.UpdateDescriptionRemote = lambda *a, **kw: 'y'
msg, footers = cl.GetDescriptionFooters()
self.assertEquals(
msg, ['This is some message', '', 'It has some lines', 'and, also'])
self.assertEquals(footers, [('Some', 'Really'), ('Awesome', 'Footers')])
msg.append('wut')
footers.append(('gnarly-dude', 'beans'))
cl.UpdateDescriptionFooters(msg, footers)
self.assertEquals(cl.GetDescription().splitlines(), [
'This is some message',
'',
'It has some lines',
'and, also',
'wut'
'',
'Some: Really',
'Awesome: Footers',
'Gnarly-Dude: beans',
])
def test_get_bug_line_values(self):
f = lambda p, bugs: list(git_cl._get_bug_line_values(p, bugs))
self.assertEqual(f('', ''), [])
self.assertEqual(f('', '123,v8:456'), ['123', 'v8:456'])
self.assertEqual(f('v8', '456'), ['v8:456'])
self.assertEqual(f('v8', 'chromium:123,456'), ['v8:456', 'chromium:123'])
# Not nice, but not worth carying.
self.assertEqual(f('v8', 'chromium:123,456,v8:123'),
['v8:456', 'chromium:123', 'v8:123'])
def _test_git_number(self, parent_msg, dest_ref, child_msg,
parent_hash='parenthash'):
desc = git_cl.ChangeDescription(child_msg)
desc.update_with_git_number_footers(parent_hash, parent_msg, dest_ref)
return desc.description
def assertEqualByLine(self, actual, expected):
self.assertEqual(actual.splitlines(), expected.splitlines())
def test_git_number_bad_parent(self):
with self.assertRaises(ValueError):
self._test_git_number('Parent', 'refs/heads/master', 'Child')
def test_git_number_bad_parent_footer(self):
with self.assertRaises(AssertionError):
self._test_git_number(
'Parent\n'
'\n'
'Cr-Commit-Position: wrong',
'refs/heads/master', 'Child')
def test_git_number_bad_lineage_ignored(self):
actual = self._test_git_number(
'Parent\n'
'\n'
'Cr-Commit-Position: refs/heads/master@{#1}\n'
'Cr-Branched-From: mustBeReal40CharHash-branch@{#pos}',
'refs/heads/master', 'Child')
self.assertEqualByLine(
actual,
'Child\n'
'\n'
'Cr-Commit-Position: refs/heads/master@{#2}\n'
'Cr-Branched-From: mustBeReal40CharHash-branch@{#pos}')
def test_git_number_same_branch(self):
actual = self._test_git_number(
'Parent\n'
'\n'
'Cr-Commit-Position: refs/heads/master@{#12}',
dest_ref='refs/heads/master',
child_msg='Child')
self.assertEqualByLine(
actual,
'Child\n'
'\n'
'Cr-Commit-Position: refs/heads/master@{#13}')
def test_git_number_same_branch_mixed_footers(self):
actual = self._test_git_number(
'Parent\n'
'\n'
'Cr-Commit-Position: refs/heads/master@{#12}',
dest_ref='refs/heads/master',
child_msg='Child\n'
'\n'
'Broken-by: design\n'
'BUG=123')
self.assertEqualByLine(
actual,
'Child\n'
'\n'
'Broken-by: design\n'
'BUG=123\n'
'Cr-Commit-Position: refs/heads/master@{#13}')
def test_git_number_same_branch_with_originals(self):
actual = self._test_git_number(
'Parent\n'
'\n'
'Cr-Commit-Position: refs/heads/master@{#12}',
dest_ref='refs/heads/master',
child_msg='Child\n'
'\n'
'Some users are smart and insert their own footers\n'
'\n'
'Cr-Whatever: value\n'
'Cr-Commit-Position: refs/copy/paste@{#22}')
self.assertEqualByLine(
actual,
'Child\n'
'\n'
'Some users are smart and insert their own footers\n'
'\n'
'Cr-Original-Whatever: value\n'
'Cr-Original-Commit-Position: refs/copy/paste@{#22}\n'
'Cr-Commit-Position: refs/heads/master@{#13}')
def test_git_number_new_branch(self):
actual = self._test_git_number(
'Parent\n'
'\n'
'Cr-Commit-Position: refs/heads/master@{#12}',
dest_ref='refs/heads/branch',
child_msg='Child')
self.assertEqualByLine(
actual,
'Child\n'
'\n'
'Cr-Commit-Position: refs/heads/branch@{#1}\n'
'Cr-Branched-From: parenthash-refs/heads/master@{#12}')
def test_git_number_lineage(self):
actual = self._test_git_number(
'Parent\n'
'\n'
'Cr-Commit-Position: refs/heads/branch@{#1}\n'
'Cr-Branched-From: somehash-refs/heads/master@{#12}',
dest_ref='refs/heads/branch',
child_msg='Child')
self.assertEqualByLine(
actual,
'Child\n'
'\n'
'Cr-Commit-Position: refs/heads/branch@{#2}\n'
'Cr-Branched-From: somehash-refs/heads/master@{#12}')
def test_git_number_moooooooore_lineage(self):
actual = self._test_git_number(
'Parent\n'
'\n'
'Cr-Commit-Position: refs/heads/branch@{#5}\n'
'Cr-Branched-From: somehash-refs/heads/master@{#12}',
dest_ref='refs/heads/mooore',
child_msg='Child')
self.assertEqualByLine(
actual,
'Child\n'
'\n'
'Cr-Commit-Position: refs/heads/mooore@{#1}\n'
'Cr-Branched-From: parenthash-refs/heads/branch@{#5}\n'
'Cr-Branched-From: somehash-refs/heads/master@{#12}')
def test_git_number_ever_moooooooore_lineage(self):
self.maxDiff = 10000 # pylint: disable=attribute-defined-outside-init
actual = self._test_git_number(
'CQ commit on fresh new branch + numbering.\n'
'\n'
'NOTRY=True\n'
'NOPRESUBMIT=True\n'
'BUG=\n'
'\n'
'Review-Url: https://codereview.chromium.org/2577703003\n'
'Cr-Commit-Position: refs/heads/gnumb-test/br@{#1}\n'
'Cr-Branched-From: 0749ff9edc-refs/heads/gnumb-test/cq@{#4}\n'
'Cr-Branched-From: 5c49df2da6-refs/heads/master@{#41618}',
dest_ref='refs/heads/gnumb-test/cl',
child_msg='git cl on fresh new branch + numbering.\n'
'\n'
'Review-Url: https://codereview.chromium.org/2575043003 .\n')
self.assertEqualByLine(
actual,
'git cl on fresh new branch + numbering.\n'
'\n'
'Review-Url: https://codereview.chromium.org/2575043003 .\n'
'Cr-Commit-Position: refs/heads/gnumb-test/cl@{#1}\n'
'Cr-Branched-From: parenthash-refs/heads/gnumb-test/br@{#1}\n'
'Cr-Branched-From: 0749ff9edc-refs/heads/gnumb-test/cq@{#4}\n'
'Cr-Branched-From: 5c49df2da6-refs/heads/master@{#41618}')
def test_git_number_cherry_pick(self):
actual = self._test_git_number(
'Parent\n'
'\n'
'Cr-Commit-Position: refs/heads/branch@{#1}\n'
'Cr-Branched-From: somehash-refs/heads/master@{#12}',
dest_ref='refs/heads/branch',
child_msg='Child, which is cherry-pick from master\n'
'\n'
'Cr-Commit-Position: refs/heads/master@{#100}\n'
'(cherry picked from commit deadbeef12345678deadbeef12345678deadbeef)')
self.assertEqualByLine(
actual,
'Child, which is cherry-pick from master\n'
'\n'
'(cherry picked from commit deadbeef12345678deadbeef12345678deadbeef)\n'
'\n'
'Cr-Original-Commit-Position: refs/heads/master@{#100}\n'
'Cr-Commit-Position: refs/heads/branch@{#2}\n'
'Cr-Branched-From: somehash-refs/heads/master@{#12}')
def test_gerrit_mirror_hack(self):
cr = 'chromium-review.googlesource.com'
url0 = 'https://%s/a/changes/x?a=b' % cr
origMirrors = git_cl.gerrit_util._GERRIT_MIRROR_PREFIXES
try:
git_cl.gerrit_util._GERRIT_MIRROR_PREFIXES = ['us1', 'us2']
url1 = git_cl.gerrit_util._UseGerritMirror(url0, cr)
url2 = git_cl.gerrit_util._UseGerritMirror(url1, cr)
url3 = git_cl.gerrit_util._UseGerritMirror(url2, cr)
self.assertNotEqual(url1, url2)
self.assertEqual(sorted((url1, url2)), [
'https://us1-mirror-chromium-review.googlesource.com/a/changes/x?a=b',
'https://us2-mirror-chromium-review.googlesource.com/a/changes/x?a=b'])
self.assertEqual(url1, url3)
finally:
git_cl.gerrit_util._GERRIT_MIRROR_PREFIXES = origMirrors
def test_valid_accounts(self):
mock_per_account = {
'u1': None, # 404, doesn't exist.
'u2': {
'_account_id': 123124,
'avatars': [],
'email': 'u2@example.com',
'name': 'User Number 2',
'status': 'OOO',
},
'u3': git_cl.gerrit_util.GerritError(500, 'retries didn\'t help :('),
}
def GetAccountDetailsMock(_, account):
# Poor-man's mock library's side_effect.
v = mock_per_account.pop(account)
if isinstance(v, Exception):
raise v
return v
original = git_cl.gerrit_util.GetAccountDetails
try:
git_cl.gerrit_util.GetAccountDetails = GetAccountDetailsMock
actual = git_cl.gerrit_util.ValidAccounts(
'host', ['u1', 'u2', 'u3'], max_threads=1)
finally:
git_cl.gerrit_util.GetAccountDetails = original
self.assertEqual(actual, {
'u2': {
'_account_id': 123124,
'avatars': [],
'email': 'u2@example.com',
'name': 'User Number 2',
'status': 'OOO',
},
})
class TestParseIssueURL(unittest.TestCase):
def _validate(self, parsed, issue=None, patchset=None, hostname=None,
codereview=None, fail=False):
self.assertIsNotNone(parsed)
if fail:
self.assertFalse(parsed.valid)
return
self.assertTrue(parsed.valid)
self.assertEqual(parsed.issue, issue)
self.assertEqual(parsed.patchset, patchset)
self.assertEqual(parsed.hostname, hostname)
self.assertEqual(parsed.codereview, codereview)
def _run_and_validate(self, func, url, *args, **kwargs):
result = func(urlparse.urlparse(url))
if kwargs.pop('fail', False):
self.assertIsNone(result)
return None
self._validate(result, *args, fail=False, **kwargs)
def test_gerrit(self):
def test(url, issue=None, patchset=None, hostname=None, fail=None):
self._test_ParseIssueUrl(
git_cl._GerritChangelistImpl.ParseIssueURL,
url, issue, patchset, hostname, fail)
def test(url, *args, **kwargs):
self._run_and_validate(git_cl._GerritChangelistImpl.ParseIssueURL, url,
*args, codereview='gerrit', **kwargs)
test('http://chrome-review.source.com/c/123',
123, None, 'chrome-review.source.com')
test('https://chrome-review.source.com/c/123/',
123, None, 'chrome-review.source.com')
test('https://chrome-review.source.com/c/123/4',
123, 4, 'chrome-review.source.com')
test('https://chrome-review.source.com/#/c/123/4',
123, 4, 'chrome-review.source.com')
test('https://chrome-review.source.com/c/123/4',
123, 4, 'chrome-review.source.com')
test('https://chrome-review.source.com/123',
123, None, 'chrome-review.source.com')
test('https://chrome-review.source.com/123/4',
123, 4, 'chrome-review.source.com')
test('https://chrome-review.source.com/c/123/1/whatisthis', fail=True)
test('https://chrome-review.source.com/c/abc/', fail=True)
test('ssh://chrome-review.source.com/c/123/1/', fail=True)
def test_ParseIssueNumberArgument(self):
def test(arg, *args, **kwargs):
codereview_hint = kwargs.pop('hint', None)
self._validate(git_cl.ParseIssueNumberArgument(arg, codereview_hint),
*args, **kwargs)
test('123', 123)
test('', fail=True)
test('abc', fail=True)
test('123/1', fail=True)
test('123a', fail=True)
test('ssh://chrome-review.source.com/#/c/123/4/', fail=True)
# Looks like Rietveld and Gerrit, but we should select Gerrit now
# w/ or w/o hint.
test('https://codereview.source.com/123',
123, None, 'codereview.source.com', 'gerrit',
hint='gerrit')
test('https://codereview.source.com/123',
123, None, 'codereview.source.com', 'gerrit')
# Gerrrit.
test('https://chrome-review.source.com/c/123/4',
123, 4, 'chrome-review.source.com', 'gerrit')
test('https://chrome-review.source.com/bad/123/4', fail=True)
class GitCookiesCheckerTest(TestCase):
def setUp(self):
super(GitCookiesCheckerTest, self).setUp()
self.c = git_cl._GitCookiesChecker()
self.c._all_hosts = []
def mock_hosts_creds(self, subhost_identity_pairs):
def ensure_googlesource(h):
if not h.endswith(self.c._GOOGLESOURCE):
assert not h.endswith('.')
return h + '.' + self.c._GOOGLESOURCE
return h
self.c._all_hosts = [(ensure_googlesource(h), i, '.gitcookies')
for h, i in subhost_identity_pairs]
def test_identity_parsing(self):
self.assertEqual(self.c._parse_identity('ldap.google.com'),
('ldap', 'google.com'))
self.assertEqual(self.c._parse_identity('git-ldap.example.com'),
('ldap', 'example.com'))
# Specical case because we know there are no subdomains in chromium.org.
self.assertEqual(self.c._parse_identity('git-note.period.chromium.org'),
('note.period', 'chromium.org'))
# Pathological: ".period." can be either username OR domain, more likely
# domain.
self.assertEqual(self.c._parse_identity('git-note.period.example.com'),
('note', 'period.example.com'))
def test_analysis_nothing(self):
self.c._all_hosts = []
self.assertFalse(self.c.has_generic_host())
self.assertEqual(set(), self.c.get_conflicting_hosts())
self.assertEqual(set(), self.c.get_duplicated_hosts())
self.assertEqual(set(), self.c.get_partially_configured_hosts())
self.assertEqual(set(), self.c.get_hosts_with_wrong_identities())
def test_analysis(self):
self.mock_hosts_creds([
('.googlesource.com', 'git-example.chromium.org'),
('chromium', 'git-example.google.com'),
('chromium-review', 'git-example.google.com'),
('chrome-internal', 'git-example.chromium.org'),
('chrome-internal-review', 'git-example.chromium.org'),
('conflict', 'git-example.google.com'),
('conflict-review', 'git-example.chromium.org'),
('dup', 'git-example.google.com'),
('dup', 'git-example.google.com'),
('dup-review', 'git-example.google.com'),
('partial', 'git-example.google.com'),
('gpartial-review', 'git-example.google.com'),
])
self.assertTrue(self.c.has_generic_host())
self.assertEqual(set(['conflict.googlesource.com']),
self.c.get_conflicting_hosts())
self.assertEqual(set(['dup.googlesource.com']),
self.c.get_duplicated_hosts())
self.assertEqual(set(['partial.googlesource.com',
'gpartial-review.googlesource.com']),
self.c.get_partially_configured_hosts())
self.assertEqual(set(['chromium.googlesource.com',
'chrome-internal.googlesource.com']),
self.c.get_hosts_with_wrong_identities())
def test_report_no_problems(self):
self.test_analysis_nothing()
self.mock(sys, 'stdout', StringIO.StringIO())
self.assertFalse(self.c.find_and_report_problems())
self.assertEqual(sys.stdout.getvalue(), '')
def test_report(self):
self.test_analysis()
self.mock(sys, 'stdout', StringIO.StringIO())
self.mock(git_cl.gerrit_util.CookiesAuthenticator, 'get_gitcookies_path',
classmethod(lambda _: '~/.gitcookies'))
self.assertTrue(self.c.find_and_report_problems())
with open(os.path.join(os.path.dirname(__file__),
'git_cl_creds_check_report.txt')) as f:
expected = f.read()
def by_line(text):
return [l.rstrip() for l in text.rstrip().splitlines()]
self.maxDiff = 10000 # pylint: disable=attribute-defined-outside-init
self.assertEqual(by_line(sys.stdout.getvalue().strip()), by_line(expected))
class TestGitCl(TestCase):
def setUp(self):
super(TestGitCl, self).setUp()
self.calls = []
self._calls_done = []
self.mock(git_cl, 'time_time',
lambda: self._mocked_call('time.time'))
self.mock(git_cl.metrics.collector, 'add_repeated',
lambda *a: self._mocked_call('add_repeated', *a))
self.mock(subprocess2, 'call', self._mocked_call)
self.mock(subprocess2, 'check_call', self._mocked_call)
self.mock(subprocess2, 'check_output', self._mocked_call)
self.mock(subprocess2, 'communicate',
lambda *a, **kw: ([self._mocked_call(*a, **kw), ''], 0))
self.mock(git_cl.gclient_utils, 'CheckCallAndFilter', self._mocked_call)
self.mock(git_common, 'is_dirty_git_tree', lambda x: False)
self.mock(git_common, 'get_or_create_merge_base',
lambda *a: (
self._mocked_call(['get_or_create_merge_base']+list(a))))
self.mock(git_cl, 'BranchExists', lambda _: True)
self.mock(git_cl, 'FindCodereviewSettingsFile', lambda: '')
self.mock(git_cl, 'SaveDescriptionBackup', lambda _:
self._mocked_call('SaveDescriptionBackup'))
self.mock(git_cl, 'ask_for_data', lambda *a, **k: self._mocked_call(
*(['ask_for_data'] + list(a)), **k))
self.mock(git_cl, 'write_json', lambda path, contents:
self._mocked_call('write_json', path, contents))
self.mock(git_cl.presubmit_support, 'DoPresubmitChecks', PresubmitMock)
self.mock(git_cl.checkout, 'GitCheckout', GitCheckoutMock)
GitCheckoutMock.reset()
self.mock(git_cl.watchlists, 'Watchlists', WatchlistsMock)
self.mock(git_cl.auth, 'get_authenticator_for_host', AuthenticatorMock)
self.mock(git_cl.gerrit_util, 'GetChangeDetail',
lambda *args, **kwargs: self._mocked_call(
'GetChangeDetail', *args, **kwargs))
self.mock(git_cl.gerrit_util, 'GetChangeComments',
lambda *args, **kwargs: self._mocked_call(
'GetChangeComments', *args, **kwargs))
self.mock(git_cl.gerrit_util, 'GetChangeRobotComments',
lambda *args, **kwargs: self._mocked_call(
'GetChangeRobotComments', *args, **kwargs))
self.mock(git_cl.gerrit_util, 'AddReviewers',
lambda h, i, reviewers, ccs, notify: self._mocked_call(
'AddReviewers', h, i, reviewers, ccs, notify))
self.mock(git_cl.gerrit_util, 'SetReview',
lambda h, i, msg=None, labels=None, notify=None:
self._mocked_call('SetReview', h, i, msg, labels, notify))
self.mock(git_cl.gerrit_util.LuciContextAuthenticator, 'is_luci',
staticmethod(lambda: False))
self.mock(git_cl.gerrit_util.GceAuthenticator, 'is_gce',
classmethod(lambda _: False))
self.mock(git_cl.gerrit_util, 'ValidAccounts',
lambda host, accounts:
self._mocked_call('ValidAccounts', host, accounts))
self.mock(git_cl, 'DieWithError',
lambda msg, change=None: self._mocked_call(['DieWithError', msg]))
# It's important to reset settings to not have inter-tests interference.
git_cl.settings = None
def tearDown(self):
try:
self.assertEquals([], self.calls)
except AssertionError:
if not self.has_failed():
raise
# Sadly, has_failed() returns True if this OR any other tests before this
# one have failed.
git_cl.logging.error(
'!!!!!! IF YOU SEE THIS, READ BELOW, IT WILL SAVE YOUR TIME !!!!!\n'
'There are un-consumed self.calls after this test has finished.\n'
'If you don\'t know which test this is, run:\n'
' tests/git_cl_tests.py -v\n'
'If you are already running only this test, then **first** fix the '
'problem whose exception is emitted below by unittest runner.\n'
'Else, to be sure what\'s going on, run this test **alone** with \n'
' tests/git_cl_tests.py TestGitCl.<name>\n'
'and follow instructions above.\n' +
'=' * 80)
finally:
super(TestGitCl, self).tearDown()
def _mocked_call(self, *args, **_kwargs):
self.assertTrue(
self.calls,
'@%d Expected: <Missing> Actual: %r' % (len(self._calls_done), args))
top = self.calls.pop(0)
expected_args, result = top
# Also logs otherwise it could get caught in a try/finally and be hard to
# diagnose.
if expected_args != args:
N = 5
prior_calls = '\n '.join(
'@%d: %r' % (len(self._calls_done) - N + i, c[0])
for i, c in enumerate(self._calls_done[-N:]))
following_calls = '\n '.join(
'@%d: %r' % (len(self._calls_done) + i + 1, c[0])
for i, c in enumerate(self.calls[:N]))
extended_msg = (
'A few prior calls:\n %s\n\n'
'This (expected):\n @%d: %r\n'
'This (actual):\n @%d: %r\n\n'
'A few following expected calls:\n %s' %
(prior_calls, len(self._calls_done), expected_args,
len(self._calls_done), args, following_calls))
git_cl.logging.error(extended_msg)
self.fail('@%d\n'
' Expected: %r\n'
' Actual: %r' % (
len(self._calls_done), expected_args, args))
self._calls_done.append(top)
if isinstance(result, Exception):
raise result
return result
def test_ask_for_explicit_yes_true(self):
self.calls = [
(('ask_for_data', 'prompt [Yes/No]: '), 'blah'),
(('ask_for_data', 'Please, type yes or no: '), 'ye'),
]
self.assertTrue(git_cl.ask_for_explicit_yes('prompt'))
def test_LoadCodereviewSettingsFromFile_gerrit(self):
codereview_file = StringIO.StringIO('GERRIT_HOST: true')
self.calls = [
((['git', 'config', '--unset-all', 'rietveld.cc'],), CERR1),
((['git', 'config', '--unset-all', 'rietveld.tree-status-url'],), CERR1),
((['git', 'config', '--unset-all', 'rietveld.viewvc-url'],), CERR1),
((['git', 'config', '--unset-all', 'rietveld.bug-prefix'],), CERR1),
((['git', 'config', '--unset-all', 'rietveld.cpplint-regex'],), CERR1),
((['git', 'config', '--unset-all', 'rietveld.cpplint-ignore-regex'],),
CERR1),
((['git', 'config', '--unset-all', 'rietveld.run-post-upload-hook'],),
CERR1),
((['git', 'config', 'gerrit.host', 'true'],), ''),
]
self.assertIsNone(git_cl.LoadCodereviewSettingsFromFile(codereview_file))
@classmethod
def _is_gerrit_calls(cls, gerrit=False):
return [((['git', 'config', 'rietveld.autoupdate'],), ''),
((['git', 'config', 'gerrit.host'],), 'True' if gerrit else '')]
@classmethod
def _git_post_upload_calls(cls):
return [
((['git', 'rev-parse', 'HEAD'],), 'hash'),
((['git', 'symbolic-ref', 'HEAD'],), 'hash'),
((['git',
'config', 'branch.hash.last-upload-hash', 'hash'],), ''),
((['git', 'config', 'rietveld.run-post-upload-hook'],), ''),
]
@staticmethod
def _git_sanity_checks(diff_base, working_branch, get_remote_branch=True):
fake_ancestor = 'fake_ancestor'
fake_cl = 'fake_cl_for_patch'
return [
((['git',
'rev-parse', '--verify', diff_base],), fake_ancestor),
((['git',
'merge-base', fake_ancestor, 'HEAD'],), fake_ancestor),
((['git',
'rev-list', '^' + fake_ancestor, 'HEAD'],), fake_cl),
# Mock a config miss (error code 1)
((['git',
'config', 'gitcl.remotebranch'],), CERR1),
] + ([
# Call to GetRemoteBranch()
((['git',
'config', 'branch.%s.merge' % working_branch],),
'refs/heads/master'),
((['git',
'config', 'branch.%s.remote' % working_branch],), 'origin'),
] if get_remote_branch else []) + [
((['git', 'rev-list', '^' + fake_ancestor,
'refs/remotes/origin/master'],), ''),
]
@classmethod
def _gerrit_ensure_auth_calls(
    cls, issue=None, skip_auth_check=False, short_hostname='chromium'):
  """Expected calls for the EnsureAuthenticated step of a gerrit upload.

  If |skip_auth_check| is set, gerrit.skip-ensure-authenticated short-circuits
  everything else; otherwise branch/remote config is read to find the host.
  """
  cmd = ['git', 'config', '--bool', 'gerrit.skip-ensure-authenticated']
  if skip_auth_check:
    return [((cmd, ), 'true')]
  calls = [((cmd, ), CERR1)]
  if issue:
    calls.extend([
        ((['git', 'config', 'branch.master.gerritserver'],), CERR1),
    ])
  calls.extend([
      ((['git', 'config', 'branch.master.merge'],), 'refs/heads/master'),
      ((['git', 'config', 'branch.master.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://%s.googlesource.com/my/repo' % short_hostname),
  ])
  return calls
@classmethod
def _gerrit_base_calls(cls, issue=None, fetched_description=None,
                       fetched_status=None, other_cl_owner=None,
                       custom_cl_base=None, short_hostname='chromium'):
  """Returns the mocked calls common to every gerrit upload test.

  Covers codereview detection, merge-base computation, the authentication
  check, and — when |issue| is given — fetching change details from Gerrit.
  """
  calls = cls._is_gerrit_calls(True)
  calls += [
    ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
    ((['git', 'config', 'branch.master.gerritissue'],),
     CERR1 if issue is None else str(issue)),
  ]
  if custom_cl_base:
    ancestor_revision = custom_cl_base
  else:
    # Determine ancestor_revision to be merge base.
    ancestor_revision = 'fake_ancestor_sha'
    calls += [
      ((['git', 'config', 'branch.master.merge'],), 'refs/heads/master'),
      ((['git', 'config', 'branch.master.remote'],), 'origin'),
      ((['get_or_create_merge_base', 'master',
         'refs/remotes/origin/master'],), ancestor_revision),
    ]
  # Calls to verify branch point is ancestor
  calls += cls._gerrit_ensure_auth_calls(
      issue=issue, short_hostname=short_hostname)
  if issue:
    calls += [
      (('GetChangeDetail', '%s-review.googlesource.com' % short_hostname,
        'my%2Frepo~123456',
        ['DETAILED_ACCOUNTS', 'CURRENT_REVISION', 'CURRENT_COMMIT', 'LABELS']
       ),
       {
         'owner': {'email': (other_cl_owner or 'owner@example.com')},
         'change_id': '123456789',
         'current_revision': 'sha1_of_current_revision',
         'revisions': { 'sha1_of_current_revision': {
           'commit': {'message': fetched_description},
         }},
         'status': fetched_status or 'NEW',
       }),
    ]
    if fetched_status == 'ABANDONED':
      # Upload dies on abandoned changes; no further calls are expected.
      calls += [
        (('DieWithError', 'Change https://%s-review.googlesource.com/'
                          '123456 has been abandoned, new uploads are not '
                          'allowed' % short_hostname), SystemExitMock()),
      ]
      return calls
    if other_cl_owner:
      # Uploading someone else's change asks for confirmation first.
      calls += [
        (('ask_for_data', 'Press Enter to upload, or Ctrl+C to abort'), ''),
      ]
  calls += cls._git_sanity_checks(ancestor_revision, 'master',
                                  get_remote_branch=False)
  calls += [
    ((['git', 'rev-parse', '--show-cdup'],), ''),
    ((['git', 'rev-parse', 'HEAD'],), '12345'),
    ((['git', '-c', 'core.quotePath=false', 'diff', '--name-status',
       '--no-renames', '-r', ancestor_revision + '...', '.'],),
     'M\t.gitignore\n'),
    ((['git', 'config', 'branch.master.gerritpatchset'],), CERR1),
  ]
  if not issue:
    calls += [
      ((['git', 'log', '--pretty=format:%s%n%n%b',
         ancestor_revision + '...'],),
       'foo'),
    ]
  calls += [
    ((['git', 'config', 'user.email'],), 'me@example.com'),
    ((['git', 'diff', '--no-ext-diff', '--stat', '-l100000', '-C50'] +
      ([custom_cl_base] if custom_cl_base else
       [ancestor_revision, 'HEAD']),),
     '+dat'),
  ]
  return calls
@classmethod
def _gerrit_upload_calls(cls, description, reviewers, squash,
                         squash_mode='default',
                         expected_upstream_ref='origin/refs/heads/master',
                         title=None, notify=False,
                         post_amend_description=None, issue=None, cc=None,
                         custom_cl_base=None, tbr=None,
                         short_hostname='chromium',
                         labels=None):
  """Returns the expected calls for the upload/push phase of `git cl upload`.

  Mirrors the squash-mode selection, Change-Id amendment, ref construction
  (including %ready/%wip/notify/reviewer/cc/label suffixes), the git push,
  metrics reporting, and post-upload bookkeeping.
  """
  if post_amend_description is None:
    post_amend_description = description
  cc = cc or []
  # Determined in `_gerrit_base_calls`.
  determined_ancestor_revision = custom_cl_base or 'fake_ancestor_sha'
  calls = []
  if squash_mode == 'default':
    calls.extend([
        ((['git', 'config', '--bool', 'gerrit.override-squash-uploads'],), ''),
        ((['git', 'config', '--bool', 'gerrit.squash-uploads'],), ''),
    ])
  elif squash_mode in ('override_squash', 'override_nosquash'):
    calls.extend([
        ((['git', 'config', '--bool', 'gerrit.override-squash-uploads'],),
         'true' if squash_mode == 'override_squash' else 'false'),
    ])
  else:
    assert squash_mode in ('squash', 'nosquash')
  # If issue is given, then description is fetched from Gerrit instead.
  if issue is None:
    calls += [
        ((['git', 'log', '--pretty=format:%s\n\n%b',
           ((custom_cl_base + '..') if custom_cl_base else
            'fake_ancestor_sha..HEAD')],),
         description),
    ]
    if squash:
      title = 'Initial_upload'
  else:
    if not title:
      calls += [
          ((['git', 'show', '-s', '--format=%s', 'HEAD'],), ''),
          (('ask_for_data', 'Title for patchset []: '), 'User input'),
      ]
      title = 'User_input'
  if not git_footers.get_footer_change_id(description) and not squash:
    calls += [
        (('DownloadGerritHook', False), ''),
        # Amending of commit message to get the Change-Id.
        ((['git', 'log', '--pretty=format:%s\n\n%b',
           determined_ancestor_revision + '..HEAD'],),
         description),
        ((['git', 'commit', '--amend', '-m', description],), ''),
        ((['git', 'log', '--pretty=format:%s\n\n%b',
           determined_ancestor_revision + '..HEAD'],),
         post_amend_description)
    ]
  if squash:
    if not issue:
      # Prompting to edit description on first upload.
      calls += [
          ((['git', 'config', 'core.editor'],), ''),
          ((['RunEditor'],), description),
      ]
    ref_to_push = 'abcdef0123456789'
    calls += [
        ((['git', 'config', 'branch.master.merge'],), 'refs/heads/master'),
        ((['git', 'config', 'branch.master.remote'],), 'origin'),
    ]
    if custom_cl_base is None:
      calls += [
          ((['get_or_create_merge_base', 'master',
             'refs/remotes/origin/master'],),
           'origin/master'),
      ]
      parent = 'origin/master'
    else:
      calls += [
          ((['git', 'merge-base', '--is-ancestor', custom_cl_base,
             'refs/remotes/origin/master'],),
           callError(1)),  # Means not ancestor.
          (('ask_for_data',
            'Do you take responsibility for cleaning up potential mess '
            'resulting from proceeding with upload? Press Enter to upload, '
            'or Ctrl+C to abort'), ''),
      ]
      parent = custom_cl_base
    calls += [
        ((['git', 'rev-parse', 'HEAD:'],),  # `HEAD:` means HEAD's tree hash.
         '0123456789abcdef'),
        ((['git', 'commit-tree', '0123456789abcdef', '-p', parent,
           '-F', '/tmp/named'],),
         ref_to_push),
    ]
  else:
    ref_to_push = 'HEAD'
  calls += [
      (('SaveDescriptionBackup',), None),
      ((['git', 'rev-list',
         (custom_cl_base if custom_cl_base else expected_upstream_ref) + '..' +
         ref_to_push],),
       '1hashPerLine\n'),
  ]
  metrics_arguments = []
  if notify:
    ref_suffix = '%ready,notify=ALL'
    metrics_arguments += ['ready', 'notify=ALL']
  else:
    if not issue and squash:
      # First upload of a squashed CL starts as work-in-progress.
      ref_suffix = '%wip'
      metrics_arguments.append('wip')
    else:
      ref_suffix = '%notify=NONE'
      metrics_arguments.append('notify=NONE')
  if title:
    ref_suffix += ',m=' + title
    metrics_arguments.append('m')
  calls += [
      ((['git', 'config', 'rietveld.cc'],), ''),
  ]
  if short_hostname == 'chromium':
    # All reviewers and ccs get into ref_suffix.
    for r in sorted(reviewers):
      ref_suffix += ',r=%s' % r
      metrics_arguments.append('r')
    for c in sorted(['chromium-reviews+test-more-cc@chromium.org',
                     'joe@example.com'] + cc):
      ref_suffix += ',cc=%s' % c
      metrics_arguments.append('cc')
    reviewers, cc = [], []
  else:
    # TODO(crbug/877717): remove this case.
    calls += [
        (('ValidAccounts', '%s-review.googlesource.com' % short_hostname,
          sorted(reviewers) + ['joe@example.com',
                               'chromium-reviews+test-more-cc@chromium.org'] + cc),
         {
           e: {'email': e}
           for e in (reviewers + ['joe@example.com'] + cc)
         })
    ]
    for r in sorted(reviewers):
      if r != 'bad-account-or-email':
        ref_suffix += ',r=%s' % r
        metrics_arguments.append('r')
        reviewers.remove(r)
    for c in sorted(['joe@example.com'] + cc):
      ref_suffix += ',cc=%s' % c
      metrics_arguments.append('cc')
      if c in cc:
        cc.remove(c)
  for k, v in sorted((labels or {}).items()):
    ref_suffix += ',l=%s+%d' % (k, v)
    metrics_arguments.append('l=%s+%d' % (k, v))
  if tbr:
    calls += [
        (('GetCodeReviewTbrScore',
          '%s-review.googlesource.com' % short_hostname,
          'my/repo'),
         2,),
    ]
  calls += [
      (('time.time',), 1000,),
      ((['git', 'push',
         'https://%s.googlesource.com/my/repo' % short_hostname,
         ref_to_push + ':refs/for/refs/heads/master' + ref_suffix],),
       (('remote:\n'
         'remote: Processing changes: (\)\n'
         'remote: Processing changes: (|)\n'
         'remote: Processing changes: (/)\n'
         'remote: Processing changes: (-)\n'
         'remote: Processing changes: new: 1 (/)\n'
         'remote: Processing changes: new: 1, done\n'
         'remote:\n'
         'remote: New Changes:\n'
         'remote: https://%s-review.googlesource.com/#/c/my/repo/+/123456'
         ' XXX\n'
         'remote:\n'
         'To https://%s.googlesource.com/my/repo\n'
         ' * [new branch] hhhh -> refs/for/refs/heads/master\n'
        ) % (short_hostname, short_hostname)),),
      (('time.time',), 2000,),
      (('add_repeated',
        'sub_commands',
        {
          'execution_time': 1000,
          'command': 'git push',
          'exit_code': 0,
          'arguments': sorted(metrics_arguments),
        }),
       None,),
  ]
  if squash:
    calls += [
        ((['git', 'config', 'branch.master.gerritissue', '123456'],),
         ''),
        ((['git', 'config', 'branch.master.gerritserver',
           'https://chromium-review.googlesource.com'],), ''),
        ((['git', 'config', 'branch.master.gerritsquashhash',
           'abcdef0123456789'],), ''),
    ]
  # TODO(crbug/877717): this should never be used.
  if squash and short_hostname != 'chromium':
    calls += [
        (('AddReviewers',
          'chromium-review.googlesource.com', 'my%2Frepo~123456',
          sorted(reviewers),
          cc + ['chromium-reviews+test-more-cc@chromium.org'],
          notify),
         ''),
    ]
  calls += cls._git_post_upload_calls()
  return calls
def _run_gerrit_upload_test(
    self,
    upload_args,
    description,
    reviewers=None,
    squash=True,
    squash_mode=None,
    expected_upstream_ref='origin/refs/heads/master',
    title=None,
    notify=False,
    post_amend_description=None,
    issue=None,
    cc=None,
    fetched_status=None,
    other_cl_owner=None,
    custom_cl_base=None,
    tbr=None,
    short_hostname='chromium',
    labels=None):
  """Generic gerrit upload test framework.

  Installs the mocks shared by all upload flows, derives the squash mode
  from |upload_args| when not given explicitly, assembles the expected
  call sequence from _gerrit_base_calls + _gerrit_upload_calls, and then
  runs `git cl upload` with |upload_args|.
  """
  if squash_mode is None:
    if '--no-squash' in upload_args:
      squash_mode = 'nosquash'
    elif '--squash' in upload_args:
      squash_mode = 'squash'
    else:
      squash_mode = 'default'
  reviewers = reviewers or []
  cc = cc or []
  self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
  self.mock(git_cl.gerrit_util, 'CookiesAuthenticator',
            CookiesAuthenticatorMockFactory(
              same_auth=('git-owner.example.com', '', 'pass')))
  self.mock(git_cl._GerritChangelistImpl, '_GerritCommitMsgHookCheck',
            lambda _, offer_removal: None)
  self.mock(git_cl.gclient_utils, 'RunEditor',
            lambda *_, **__: self._mocked_call(['RunEditor']))
  self.mock(git_cl, 'DownloadGerritHook', lambda force: self._mocked_call(
    'DownloadGerritHook', force))
  self.calls = self._gerrit_base_calls(
      issue=issue,
      fetched_description=description,
      fetched_status=fetched_status,
      other_cl_owner=other_cl_owner,
      custom_cl_base=custom_cl_base,
      short_hostname=short_hostname)
  if fetched_status != 'ABANDONED':
    # Past the base calls only when the upload isn't aborted early.
    self.mock(tempfile, 'NamedTemporaryFile', MakeNamedTemporaryFileMock(
        expected_content=description))
    self.mock(os, 'remove', lambda _: True)
    self.calls += self._gerrit_upload_calls(
        description, reviewers, squash,
        squash_mode=squash_mode,
        expected_upstream_ref=expected_upstream_ref,
        title=title, notify=notify,
        post_amend_description=post_amend_description,
        issue=issue, cc=cc,
        custom_cl_base=custom_cl_base, tbr=tbr,
        short_hostname=short_hostname,
        labels=labels)
  # Uncomment when debugging.
  # print '\n'.join(map(lambda x: '%2i: %s' % x, enumerate(self.calls)))
  git_cl.main(['upload'] + upload_args)
def test_gerrit_upload_without_change_id(self):
  """A missing Change-Id footer is added by amending the commit."""
  desc = 'desc\n\nBUG=\n'
  self._run_gerrit_upload_test(
      ['--no-squash'],
      desc,
      reviewers=[],
      squash=False,
      post_amend_description=desc + '\nChange-Id: Ixxx')
def test_gerrit_upload_without_change_id_override_nosquash(self):
  """With gerrit.override-squash-uploads=false a Change-Id is still added."""
  desc = 'desc\n\nBUG=\n'
  self._run_gerrit_upload_test(
      [],
      desc,
      reviewers=[],
      squash=False,
      squash_mode='override_nosquash',
      post_amend_description=desc + '\nChange-Id: Ixxx')
def test_gerrit_no_reviewer(self):
  """Plain non-squash upload with no reviewers specified."""
  desc = 'desc\n\nBUG=\n\nChange-Id: I123456789\n'
  self._run_gerrit_upload_test(
      [],
      desc,
      reviewers=[],
      squash=False,
      squash_mode='override_nosquash')
def test_gerrit_no_reviewer_non_chromium_host(self):
  """Same as test_gerrit_no_reviewer, but on a non-chromium host."""
  # TODO(crbug/877717): remove this test case.
  desc = 'desc\n\nBUG=\n\nChange-Id: I123456789\n'
  self._run_gerrit_upload_test(
      [],
      desc,
      reviewers=[],
      squash=False,
      squash_mode='override_nosquash',
      short_hostname='other')
def test_gerrit_patchset_title_special_chars(self):
  """Special characters in the -t title are percent-escaped for the ref."""
  self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
  escaped = 'We%27ll_escape_%5E%5F_%5E_special_chars%2E%2E%2E%40%7Bu%7D'
  self._run_gerrit_upload_test(
      ['-f', '-t', 'We\'ll escape ^_ ^ special chars...@{u}'],
      'desc\n\nBUG=\n\nChange-Id: I123456789',
      squash=False,
      squash_mode='override_nosquash',
      title=escaped)
def test_gerrit_reviewers_cmd_line(self):
  """-r adds a reviewer and --send-mail turns on notification."""
  reviewer_list = ['foo@example.com']
  self._run_gerrit_upload_test(
      ['-r', 'foo@example.com', '--send-mail'],
      'desc\n\nBUG=\n\nChange-Id: I123456789',
      reviewer_list,
      squash=False,
      squash_mode='override_nosquash',
      notify=True)
def test_gerrit_reviewer_multiple(self):
  """R=, TBR= and CC= lines in the description all feed into the upload."""
  self.mock(git_cl.gerrit_util, 'GetCodeReviewTbrScore',
            lambda *a: self._mocked_call('GetCodeReviewTbrScore', *a))
  description = ('desc\nTBR=reviewer@example.com\nBUG=\nR=another@example.com\n'
                 'CC=more@example.com,people@example.com\n\n'
                 'Change-Id: 123456789')
  self._run_gerrit_upload_test(
      [],
      description,
      ['reviewer@example.com', 'another@example.com'],
      expected_upstream_ref='origin/master',
      tbr='reviewer@example.com',
      cc=['more@example.com', 'people@example.com'],
      labels={'Code-Review': 2})
def test_gerrit_upload_squash_first_is_default(self):
  """Without explicit flags, uploads squash by default."""
  self._run_gerrit_upload_test(
      [],
      'desc\nBUG=\n\nChange-Id: 123456789',
      reviewers=[],
      expected_upstream_ref='origin/master')
def test_gerrit_upload_squash_first(self):
  """First upload with an explicit --squash."""
  self._run_gerrit_upload_test(
      ['--squash'],
      'desc\nBUG=\n\nChange-Id: 123456789',
      reviewers=[],
      squash=True,
      expected_upstream_ref='origin/master')
def test_gerrit_upload_squash_first_with_labels(self):
  """--cq-dry-run and --enable-auto-submit translate to Gerrit labels."""
  expected_labels = {'Commit-Queue': 1, 'Auto-Submit': 1}
  self._run_gerrit_upload_test(
      ['--squash', '--cq-dry-run', '--enable-auto-submit'],
      'desc\nBUG=\n\nChange-Id: 123456789',
      reviewers=[],
      squash=True,
      expected_upstream_ref='origin/master',
      labels=expected_labels)
def test_gerrit_upload_squash_first_against_rev(self):
  """Uploading against a custom base revision warns about multiple CLs."""
  base = 'custom_cl_base_rev_or_branch'
  self._run_gerrit_upload_test(
      ['--squash', base],
      'desc\nBUG=\n\nChange-Id: 123456789',
      [],
      squash=True,
      custom_cl_base=base,
      expected_upstream_ref='origin/master')
  self.assertIn(
      'If you proceed with upload, more than 1 CL may be created by Gerrit',
      sys.stdout.getvalue())
def test_gerrit_upload_squash_reupload(self):
  """Re-uploading to an existing issue fetches its description from Gerrit."""
  self._run_gerrit_upload_test(
      ['--squash'],
      'desc\nBUG=\n\nChange-Id: 123456789',
      [],
      squash=True,
      issue=123456,
      expected_upstream_ref='origin/master')
def test_gerrit_upload_squash_reupload_to_abandoned(self):
  """Re-uploading to an abandoned change dies with an error."""
  self.mock(git_cl, 'DieWithError',
            lambda msg, change=None: self._mocked_call('DieWithError', msg))
  with self.assertRaises(SystemExitMock):
    self._run_gerrit_upload_test(
        ['--squash'],
        'desc\nBUG=\n\nChange-Id: 123456789',
        [],
        squash=True,
        issue=123456,
        fetched_status='ABANDONED',
        expected_upstream_ref='origin/master')
def test_gerrit_upload_squash_reupload_to_not_owned(self):
  """Uploading to a change owned by someone else warns but proceeds."""
  self.mock(git_cl.gerrit_util, 'GetAccountDetails',
            lambda *_, **__: {'email': 'yet-another@example.com'})
  self._run_gerrit_upload_test(
      ['--squash'],
      'desc\nBUG=\n\nChange-Id: 123456789',
      [],
      squash=True,
      issue=123456,
      other_cl_owner='other@example.com',
      expected_upstream_ref='origin/master')
  self.assertIn(
      'WARNING: Change 123456 is owned by other@example.com, but you '
      'authenticate to Gerrit as yet-another@example.com.\n'
      'Uploading may fail due to lack of permissions',
      git_cl.sys.stdout.getvalue())
def test_upload_branch_deps(self):
  """upload_branch_deps() uploads every branch downstream of the current one.

  The mocked branch graph is:
    test1 -> test2 -> test3 -> test4 -> test5
                   -> test3.1
    test6 -> test0
  Starting at test1, five dependent branches must be uploaded.
  """
  self.mock(git_cl.sys, 'stdout', StringIO.StringIO())

  def mock_run_git(*args, **_kwargs):
    if args[0] == ['for-each-ref',
                   '--format=%(refname:short) %(upstream:short)',
                   'refs/heads']:
      branch_deps = [
          'test2 test1',    # test1 -> test2
          'test3 test2',    # test2 -> test3
          'test3.1 test2',  # test2 -> test3.1
          'test4 test3',    # test3 -> test4
          'test5 test4',    # test4 -> test5
          'test6 test0',    # test0 -> test6
          'test7',          # test7
      ]
      return '\n'.join(branch_deps)
  self.mock(git_cl, 'RunGit', mock_run_git)

  # Mutable counter shared with the CMDupload mock below.
  class RecordCalls:
    times_called = 0
  record_calls = RecordCalls()

  def mock_CMDupload(*args, **_kwargs):
    record_calls.times_called += 1
    return 0
  self.mock(git_cl, 'CMDupload', mock_CMDupload)

  self.calls = [
    (('ask_for_data', 'This command will checkout all dependent branches '
                      'and run "git cl upload". Press Enter to continue, '
                      'or Ctrl+C to abort'), ''),
  ]

  # Minimal Changelist stand-in; the redundant no-op __init__ the original
  # carried has been dropped, and the class made new-style.
  class MockChangelist(object):
    def GetBranch(self):
      return 'test1'
    def GetIssue(self):
      return '123'
    def GetPatchset(self):
      return '1001'
    def IsGerrit(self):
      return False

  ret = git_cl.upload_branch_deps(MockChangelist(), [])
  # CMDupload should have been called 5 times because of 5 dependent branches.
  self.assertEquals(5, record_calls.times_called)
  self.assertEquals(0, ret)
def test_gerrit_change_id(self):
  """GenerateGerritChangeId hashes tree, parent, author/committer idents and
  the message, then prefixes the commit hash with 'I'."""
  self.calls = [
    ((['git', 'write-tree'], ),
     'hashtree'),
    ((['git', 'rev-parse', 'HEAD~0'], ),
     'branch-parent'),
    ((['git', 'var', 'GIT_AUTHOR_IDENT'], ),
     'A B <a@b.org> 1456848326 +0100'),
    ((['git', 'var', 'GIT_COMMITTER_IDENT'], ),
     'C D <c@d.org> 1456858326 +0100'),
    ((['git', 'hash-object', '-t', 'commit', '--stdin'], ),
     'hashchange'),
  ]
  change_id = git_cl.GenerateGerritChangeId('line1\nline2\n')
  self.assertEqual(change_id, 'Ihashchange')
def test_desecription_append_footer(self):
  # NOTE(review): method name has a typo ("desecription"); kept as-is since
  # renaming changes the public test name — worth fixing separately.
  """ChangeDescription.append_footer places footers into the right block."""
  for init_desc, footer_line, expected_desc in [
    # Use unique desc first lines for easy test failure identification.
    ('foo', 'R=one', 'foo\n\nR=one'),
    ('foo\n\nR=one', 'BUG=', 'foo\n\nR=one\nBUG='),
    ('foo\n\nR=one', 'Change-Id: Ixx', 'foo\n\nR=one\n\nChange-Id: Ixx'),
    ('foo\n\nChange-Id: Ixx', 'R=one', 'foo\n\nR=one\n\nChange-Id: Ixx'),
    ('foo\n\nR=one\n\nChange-Id: Ixx', 'TBR=two',
     'foo\n\nR=one\nTBR=two\n\nChange-Id: Ixx'),
    ('foo\n\nR=one\n\nChange-Id: Ixx', 'Foo-Bar: baz',
     'foo\n\nR=one\n\nChange-Id: Ixx\nFoo-Bar: baz'),
    ('foo\n\nChange-Id: Ixx', 'Foo-Bak: baz',
     'foo\n\nChange-Id: Ixx\nFoo-Bak: baz'),
    ('foo', 'Change-Id: Ixx', 'foo\n\nChange-Id: Ixx'),
  ]:
    desc = git_cl.ChangeDescription(init_desc)
    desc.append_footer(footer_line)
    self.assertEqual(desc.description, expected_desc)
def test_update_reviewers(self):
  """ChangeDescription.update_reviewers merges R=/TBR= lines correctly.

  Each data row is (original description, reviewers, tbrs, expected result).
  """
  data = [
    ('foo', [], [],
     'foo'),
    ('foo\nR=xx', [], [],
     'foo\nR=xx'),
    ('foo\nTBR=xx', [], [],
     'foo\nTBR=xx'),
    ('foo', ['a@c'], [],
     'foo\n\nR=a@c'),
    ('foo\nR=xx', ['a@c'], [],
     'foo\n\nR=a@c, xx'),
    ('foo\nTBR=xx', ['a@c'], [],
     'foo\n\nR=a@c\nTBR=xx'),
    ('foo\nTBR=xx\nR=yy', ['a@c'], [],
     'foo\n\nR=a@c, yy\nTBR=xx'),
    ('foo\nBUG=', ['a@c'], [],
     'foo\nBUG=\nR=a@c'),
    ('foo\nR=xx\nTBR=yy\nR=bar', ['a@c'], [],
     'foo\n\nR=a@c, bar, xx\nTBR=yy'),
    ('foo', ['a@c', 'b@c'], [],
     'foo\n\nR=a@c, b@c'),
    ('foo\nBar\n\nR=\nBUG=', ['c@c'], [],
     'foo\nBar\n\nR=c@c\nBUG='),
    ('foo\nBar\n\nR=\nBUG=\nR=', ['c@c'], [],
     'foo\nBar\n\nR=c@c\nBUG='),
    # Same as the line before, but full of whitespaces.
    (
      'foo\nBar\n\n R = \n BUG = \n R = ', ['c@c'], [],
      'foo\nBar\n\nR=c@c\n BUG =',
    ),
    # Whitespaces aren't interpreted as new lines.
    ('foo BUG=allo R=joe ', ['c@c'], [],
     'foo BUG=allo R=joe\n\nR=c@c'),
    # Redundant TBRs get promoted to Rs
    ('foo\n\nR=a@c\nTBR=t@c', ['b@c', 'a@c'], ['a@c', 't@c'],
     'foo\n\nR=a@c, b@c\nTBR=t@c'),
  ]
  expected = [i[-1] for i in data]
  actual = []
  for orig, reviewers, tbrs, _expected in data:
    obj = git_cl.ChangeDescription(orig)
    obj.update_reviewers(reviewers, tbrs)
    actual.append(obj.description)
  self.assertEqual(expected, actual)
def test_get_hash_tags(self):
  """ChangeDescription.get_hash_tags extracts normalized [tag]s and
  Foo:-style prefixes from the first line, including Revert/Reland forms."""
  cases = [
    ('', []),
    ('a', []),
    ('[a]', ['a']),
    ('[aa]', ['aa']),
    ('[a ]', ['a']),
    ('[a- ]', ['a']),
    ('[a- b]', ['a-b']),
    ('[a--b]', ['a-b']),
    ('[a', []),
    ('[a]x', ['a']),
    ('[aa]x', ['aa']),
    ('[a b]', ['a-b']),
    ('[a  b]', ['a-b']),
    ('[a__b]', ['a-b']),
    ('[a] x', ['a']),
    ('[a][b]', ['a', 'b']),
    ('[a] [b]', ['a', 'b']),
    ('[a][b]x', ['a', 'b']),
    ('[a][b] x', ['a', 'b']),
    ('[a]\n[b]', ['a']),
    ('[a\nb]', []),
    ('[a][', ['a']),
    ('Revert "[a] feature"', ['a']),
    ('Reland "[a] feature"', ['a']),
    ('Revert: [a] feature', ['a']),
    ('Reland: [a] feature', ['a']),
    ('Revert "Reland: [a] feature"', ['a']),
    ('Foo: feature', ['foo']),
    ('Foo Bar: feature', ['foo-bar']),
    ('Revert "Foo bar: feature"', ['foo-bar']),
    ('Reland "Foo bar: feature"', ['foo-bar']),
  ]
  for desc, expected in cases:
    change_desc = git_cl.ChangeDescription(desc)
    actual = change_desc.get_hash_tags()
    self.assertEqual(
        actual,
        expected,
        'GetHashTags(%r) == %r, expected %r' % (desc, actual, expected))
  # NOTE(review): the GetTargetRef assertions below look unrelated to hash
  # tags — presumably they belong in a dedicated test method; confirm the
  # original intent before splitting them out.
  self.assertEqual(None, git_cl.GetTargetRef('origin', None, 'master'))
  self.assertEqual(None, git_cl.GetTargetRef(None,
                                             'refs/remotes/origin/master',
                                             'master'))
  # Check default target refs for branches.
  self.assertEqual('refs/heads/master',
                   git_cl.GetTargetRef('origin', 'refs/remotes/origin/master',
                                       None))
  self.assertEqual('refs/heads/master',
                   git_cl.GetTargetRef('origin', 'refs/remotes/origin/lkgr',
                                       None))
  self.assertEqual('refs/heads/master',
                   git_cl.GetTargetRef('origin', 'refs/remotes/origin/lkcr',
                                       None))
  self.assertEqual('refs/branch-heads/123',
                   git_cl.GetTargetRef('origin',
                                       'refs/remotes/branch-heads/123',
                                       None))
  self.assertEqual('refs/diff/test',
                   git_cl.GetTargetRef('origin',
                                       'refs/remotes/origin/refs/diff/test',
                                       None))
  self.assertEqual('refs/heads/chrome/m42',
                   git_cl.GetTargetRef('origin',
                                       'refs/remotes/origin/chrome/m42',
                                       None))
  # Check target refs for user-specified target branch.
  for branch in ('branch-heads/123', 'remotes/branch-heads/123',
                 'refs/remotes/branch-heads/123'):
    self.assertEqual('refs/branch-heads/123',
                     git_cl.GetTargetRef('origin',
                                         'refs/remotes/origin/master',
                                         branch))
  for branch in ('origin/master', 'remotes/origin/master',
                 'refs/remotes/origin/master'):
    self.assertEqual('refs/heads/master',
                     git_cl.GetTargetRef('origin',
                                         'refs/remotes/branch-heads/123',
                                         branch))
  for branch in ('master', 'heads/master', 'refs/heads/master'):
    self.assertEqual('refs/heads/master',
                     git_cl.GetTargetRef('origin',
                                         'refs/remotes/branch-heads/123',
                                         branch))
def test_patch_when_dirty(self):
  """Patching is refused when the local tree has uncommitted changes."""
  self.mock(git_common, 'is_dirty_git_tree', lambda x: True)
  ret = git_cl.main(['patch', '123456'])
  self.assertNotEqual(0, ret)
@staticmethod
def _get_gerrit_codereview_server_calls(branch, value=None,
                                        git_short_host='host',
                                        detect_branch=True,
                                        detect_server=True):
  """Returns calls executed by _GerritChangelistImpl.GetCodereviewServer.

  If value is given, branch.<BRANCH>.gerritserver is already set.
  """
  calls = []
  if detect_branch:
    calls.append(((['git', 'symbolic-ref', 'HEAD'],), branch))
  if detect_server:
    calls.append(((['git', 'config', 'branch.' + branch + '.gerritserver'],),
                  CERR1 if value is None else value))
  if value is None:
    # Server not cached on the branch: derive it from the remote URL.
    calls += [
      ((['git', 'config', 'branch.' + branch + '.merge'],),
       'refs/heads' + branch),
      ((['git', 'config', 'branch.' + branch + '.remote'],),
       'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://%s.googlesource.com/my/repo' % git_short_host),
    ]
  return calls
def _patch_common(self, force_codereview=False,
                  new_branch=False, git_short_host='host',
                  detect_gerrit_server=False,
                  actual_codereview=None,
                  codereview_in_url=False):
  """Sets up the calls shared by the `git cl patch` tests.

  Prepares codereview detection, optional Gerrit-server detection, and a
  GetChangeDetail response carrying patchsets 1 and 7 for change 123456.
  """
  self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
  self.mock(git_cl, 'IsGitVersionAtLeast', lambda *args: True)
  if new_branch:
    self.calls = [((['git', 'new-branch', 'master'],), ''),]
  if codereview_in_url and actual_codereview == 'rietveld':
    self.calls += [
      ((['git', 'rev-parse', '--show-cdup'],), ''),
      ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
    ]
  if not force_codereview and not codereview_in_url:
    # These calls detect codereview to use.
    self.calls += [
      ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
      ((['git', 'config', 'branch.master.gerritissue'],), CERR1),
    ]
  if detect_gerrit_server:
    self.calls += self._get_gerrit_codereview_server_calls(
        'master', git_short_host=git_short_host,
        detect_branch=not new_branch and force_codereview)
    actual_codereview = 'gerrit'
  if actual_codereview == 'gerrit':
    self.calls += [
      (('GetChangeDetail', git_short_host + '-review.googlesource.com',
        'my%2Frepo~123456', ['ALL_REVISIONS', 'CURRENT_COMMIT']),
       {
         'current_revision': '7777777777',
         'revisions': {
           '1111111111': {
             '_number': 1,
             'fetch': {'http': {
               'url': 'https://%s.googlesource.com/my/repo' % git_short_host,
               'ref': 'refs/changes/56/123456/1',
             }},
           },
           '7777777777': {
             '_number': 7,
             'fetch': {'http': {
               'url': 'https://%s.googlesource.com/my/repo' % git_short_host,
               'ref': 'refs/changes/56/123456/7',
             }},
           },
         },
       }),
    ]
def test_patch_gerrit_default(self):
  """`git cl patch <issue>` cherry-picks the current patchset (7) and
  records issue/server/patchset metadata on the branch."""
  self._patch_common(git_short_host='chromium', detect_gerrit_server=True)
  self.calls += [
    ((['git', 'fetch', 'https://chromium.googlesource.com/my/repo',
       'refs/changes/56/123456/7'],), ''),
    ((['git', 'cherry-pick', 'FETCH_HEAD'],), ''),
    ((['git', 'config', 'branch.master.gerritissue', '123456'],),
     ''),
    ((['git', 'config', 'branch.master.gerritserver',
       'https://chromium-review.googlesource.com'],), ''),
    ((['git', 'config', 'branch.master.gerritpatchset', '7'],), ''),
    ((['git', 'rev-parse', 'FETCH_HEAD'],), 'deadbeef'),
    ((['git', 'config', 'branch.master.last-upload-hash', 'deadbeef'],), ''),
    ((['git', 'config', 'branch.master.gerritsquashhash', 'deadbeef'],), ''),
  ]
  self.assertEqual(git_cl.main(['patch', '123456']), 0)
def test_patch_gerrit_new_branch(self):
  """`git cl patch -b <branch>` creates the branch before applying."""
  self._patch_common(
      git_short_host='chromium', detect_gerrit_server=True, new_branch=True)
  self.calls += [
    ((['git', 'fetch', 'https://chromium.googlesource.com/my/repo',
       'refs/changes/56/123456/7'],), ''),
    ((['git', 'cherry-pick', 'FETCH_HEAD'],), ''),
    ((['git', 'config', 'branch.master.gerritissue', '123456'],),
     ''),
    ((['git', 'config', 'branch.master.gerritserver',
       'https://chromium-review.googlesource.com'],), ''),
    ((['git', 'config', 'branch.master.gerritpatchset', '7'],), ''),
    ((['git', 'rev-parse', 'FETCH_HEAD'],), 'deadbeef'),
    ((['git', 'config', 'branch.master.last-upload-hash', 'deadbeef'],), ''),
    ((['git', 'config', 'branch.master.gerritsquashhash', 'deadbeef'],), ''),
  ]
  self.assertEqual(git_cl.main(['patch', '-b', 'master', '123456']), 0)
def test_patch_gerrit_force(self):
  """`git cl patch --force` does a hard reset instead of a cherry-pick."""
  self._patch_common(
      force_codereview=True, git_short_host='host', detect_gerrit_server=True)
  self.calls += [
    ((['git', 'fetch', 'https://host.googlesource.com/my/repo',
       'refs/changes/56/123456/7'],), ''),
    ((['git', 'reset', '--hard', 'FETCH_HEAD'],), ''),
    ((['git', 'config', 'branch.master.gerritissue', '123456'],),
     ''),
    ((['git', 'config', 'branch.master.gerritserver',
       'https://host-review.googlesource.com'],), ''),
    ((['git', 'config', 'branch.master.gerritpatchset', '7'],), ''),
    ((['git', 'rev-parse', 'FETCH_HEAD'],), 'deadbeef'),
    ((['git', 'config', 'branch.master.last-upload-hash', 'deadbeef'],), ''),
    ((['git', 'config', 'branch.master.gerritsquashhash', 'deadbeef'],), ''),
  ]
  self.assertEqual(git_cl.main(['patch', '--gerrit', '123456', '--force']), 0)
def test_patch_gerrit_guess_by_url(self):
  """Host and patchset (1) are inferred from a full review URL."""
  self.calls += self._get_gerrit_codereview_server_calls(
      'master', git_short_host='else', detect_server=False)
  self._patch_common(
      actual_codereview='gerrit', git_short_host='else',
      codereview_in_url=True, detect_gerrit_server=False)
  self.calls += [
    ((['git', 'fetch', 'https://else.googlesource.com/my/repo',
       'refs/changes/56/123456/1'],), ''),
    ((['git', 'cherry-pick', 'FETCH_HEAD'],), ''),
    ((['git', 'config', 'branch.master.gerritissue', '123456'],),
     ''),
    ((['git', 'config', 'branch.master.gerritserver',
       'https://else-review.googlesource.com'],), ''),
    ((['git', 'config', 'branch.master.gerritpatchset', '1'],), ''),
    ((['git', 'rev-parse', 'FETCH_HEAD'],), 'deadbeef'),
    ((['git', 'config', 'branch.master.last-upload-hash', 'deadbeef'],), ''),
    ((['git', 'config', 'branch.master.gerritsquashhash', 'deadbeef'],), ''),
  ]
  self.assertEqual(git_cl.main(
    ['patch', 'https://else-review.googlesource.com/#/c/123456/1']), 0)
def test_patch_gerrit_guess_by_url_with_repo(self):
  """Same as test_patch_gerrit_guess_by_url, with the /c/<repo>/+/ URL form."""
  self.calls += self._get_gerrit_codereview_server_calls(
      'master', git_short_host='else', detect_server=False)
  self._patch_common(
      actual_codereview='gerrit', git_short_host='else',
      codereview_in_url=True, detect_gerrit_server=False)
  self.calls += [
    ((['git', 'fetch', 'https://else.googlesource.com/my/repo',
       'refs/changes/56/123456/1'],), ''),
    ((['git', 'cherry-pick', 'FETCH_HEAD'],), ''),
    ((['git', 'config', 'branch.master.gerritissue', '123456'],),
     ''),
    ((['git', 'config', 'branch.master.gerritserver',
       'https://else-review.googlesource.com'],), ''),
    ((['git', 'config', 'branch.master.gerritpatchset', '1'],), ''),
    ((['git', 'rev-parse', 'FETCH_HEAD'],), 'deadbeef'),
    ((['git', 'config', 'branch.master.last-upload-hash', 'deadbeef'],), ''),
    ((['git', 'config', 'branch.master.gerritsquashhash', 'deadbeef'],), ''),
  ]
  self.assertEqual(git_cl.main(
    ['patch', 'https://else-review.googlesource.com/c/my/repo/+/123456/1']),
    0)
def test_patch_gerrit_conflict(self):
  """A failing cherry-pick makes `git cl patch` die with an error."""
  self._patch_common(detect_gerrit_server=True, git_short_host='chromium')
  self.calls += [
    ((['git', 'fetch', 'https://chromium.googlesource.com/my/repo',
       'refs/changes/56/123456/7'],), ''),
    ((['git', 'cherry-pick', 'FETCH_HEAD'],), CERR1),
    ((['DieWithError', 'Command "git cherry-pick FETCH_HEAD" failed.\n'],),
      SystemExitMock()),
  ]
  with self.assertRaises(SystemExitMock):
    git_cl.main(['patch', '123456'])
def test_patch_gerrit_not_exists(self):
  """`git cl patch` dies cleanly when Gerrit says the change doesn't exist."""
  # Fix: unused kwargs renamed to _kwargs, matching the `_kwargs` convention
  # used elsewhere in this file (e.g. mock_run_git).
  def notExists(_issue, *_, **_kwargs):
    raise git_cl.gerrit_util.GerritError(404, '')
  self.mock(git_cl.gerrit_util, 'GetChangeDetail', notExists)

  self.calls = [
    ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
    ((['git', 'config', 'branch.master.gerritissue'],), CERR1),
    ((['git', 'config', 'branch.master.gerritserver'],), CERR1),
    ((['git', 'config', 'branch.master.merge'],), 'refs/heads/master'),
    ((['git', 'config', 'branch.master.remote'],), 'origin'),
    ((['git', 'config', 'remote.origin.url'],),
     'https://chromium.googlesource.com/my/repo'),
    ((['DieWithError',
       'change 123456 at https://chromium-review.googlesource.com does not '
       'exist or you have no access to it'],), SystemExitMock()),
  ]
  with self.assertRaises(SystemExitMock):
    self.assertEqual(1, git_cl.main(['patch', '123456']))
def _checkout_calls(self):
return [
((['git', 'config', '--local', '--get-regexp',
'branch\\..*\\.gerritissue'], ),
('branch.ger-branch.gerritissue 123456\n'
'branch.gbranch654.gerritissue 654321\n')),
]
def test_checkout_gerrit(self):
  """Tests git cl checkout <issue>."""
  self.calls = self._checkout_calls() + [
      ((['git', 'checkout', 'ger-branch'], ), ''),
  ]
  self.assertEqual(0, git_cl.main(['checkout', '123456']))
def test_checkout_not_found(self):
  """Tests git cl checkout <issue> with an unknown issue number."""
  self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
  self.calls = self._checkout_calls()
  ret = git_cl.main(['checkout', '99999'])
  self.assertEqual(1, ret)
def test_checkout_no_branch_issues(self):
  """Tests git cl checkout <issue> when no branch has an issue attached."""
  self.calls = [
      ((['git', 'config', '--local', '--get-regexp',
         'branch\\..*\\.gerritissue'], ), CERR1),
  ]
  self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
  ret = git_cl.main(['checkout', '99999'])
  self.assertEqual(1, ret)
def _test_gerrit_ensure_authenticated_common(self, auth,
                                             skip_auth_check=False):
  """Returns a gerrit Changelist on master with mocked credentials.

  |auth| maps hosts to credential 3-tuples consumed by
  CookiesAuthenticatorMockFactory (presumably (login, ?, secret) — defined
  outside this chunk; verify against the factory).
  """
  self.mock(git_cl.gerrit_util, 'CookiesAuthenticator',
            CookiesAuthenticatorMockFactory(hosts_with_creds=auth))
  self.mock(git_cl, 'DieWithError',
            lambda msg, change=None: self._mocked_call(['DieWithError', msg]))
  self.calls = self._gerrit_ensure_auth_calls(skip_auth_check=skip_auth_check)
  cl = git_cl.Changelist(codereview='gerrit')
  # Seed branch state directly to skip the usual detection calls.
  cl.branch = 'master'
  cl.branchref = 'refs/heads/master'
  cl.lookedup_issue = True
  return cl
def test_gerrit_ensure_authenticated_missing(self):
  """Missing Gerrit-host credentials make EnsureAuthenticated die."""
  cl = self._test_gerrit_ensure_authenticated_common(auth={
    'chromium.googlesource.com': ('git-is.ok', '', 'but gerrit is missing'),
  })
  self.calls.append(
      ((['DieWithError',
         'Credentials for the following hosts are required:\n'
         '  chromium-review.googlesource.com\n'
         'These are read from ~/.gitcookies (or legacy ~/.netrc)\n'
         'You can (re)generate your credentials by visiting '
         'https://chromium-review.googlesource.com/new-password'],), ''),)
  self.assertIsNone(cl.EnsureAuthenticated(force=False))
def test_gerrit_ensure_authenticated_conflict(self):
  """Differing git vs. Gerrit credentials prompt for confirmation."""
  self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
  cl = self._test_gerrit_ensure_authenticated_common(auth={
    'chromium.googlesource.com':
        ('git-one.example.com', None, 'secret1'),
    'chromium-review.googlesource.com':
        ('git-other.example.com', None, 'secret2'),
  })
  self.calls.append(
      (('ask_for_data', 'If you know what you are doing '
                        'press Enter to continue, or Ctrl+C to abort'), ''))
  self.assertIsNone(cl.EnsureAuthenticated(force=False))
def test_gerrit_ensure_authenticated_ok(self):
cl = self._test_gerrit_ensure_authenticated_common(auth={
'chromium.googlesource.com':
('git-same.example.com', None, 'secret'),
'chromium-review.googlesource.com':
('git-same.example.com', None, 'secret'),
})
self.assertIsNone(cl.EnsureAuthenticated(force=False))
def test_gerrit_ensure_authenticated_skipped(self):
cl = self._test_gerrit_ensure_authenticated_common(
auth={}, skip_auth_check=True)
self.assertIsNone(cl.EnsureAuthenticated(force=False))
def test_gerrit_ensure_authenticated_bearer_token(self):
cl = self._test_gerrit_ensure_authenticated_common(auth={
'chromium.googlesource.com':
('', None, 'secret'),
'chromium-review.googlesource.com':
('', None, 'secret'),
})
self.assertIsNone(cl.EnsureAuthenticated(force=False))
header = gerrit_util.CookiesAuthenticator().get_auth_header(
'chromium.googlesource.com')
self.assertTrue('Bearer' in header)
def _cmd_set_commit_gerrit_common(self, vote, notify=None):
  """Sets up expected calls for `git cl set-commit` issuing the given CQ vote.

  Args:
    vote: Commit-Queue label value expected in the SetReview call
        (0 = clear, 1 = dry run, 2 = full run).
    notify: expected `notify` argument forwarded to gerrit_util.SetReview.
  """
  self.mock(git_cl.gerrit_util, 'SetReview',
            lambda h, i, labels, notify=None:
                self._mocked_call(['SetReview', h, i, labels, notify]))
  # Expected sequence: resolve branch -> issue -> server -> remote, then
  # one SetReview call with the Commit-Queue label.
  self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '123'),
      ((['git', 'config', 'branch.feature.gerritserver'],),
       'https://chromium-review.googlesource.com'),
      ((['git', 'config', 'branch.feature.merge'],), 'refs/heads/master'),
      ((['git', 'config', 'branch.feature.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/infra/infra.git'),
      ((['SetReview', 'chromium-review.googlesource.com',
         'infra%2Finfra~123',
         {'Commit-Queue': vote}, notify],), ''),
  ]

def test_cmd_set_commit_gerrit_clear(self):
  """`set-commit -c` clears the CQ vote (Commit-Queue: 0)."""
  self._cmd_set_commit_gerrit_common(0)
  self.assertEqual(0, git_cl.main(['set-commit', '-c']))

def test_cmd_set_commit_gerrit_dry(self):
  """`set-commit -d` triggers a dry run (Commit-Queue: 1) without notifying."""
  self._cmd_set_commit_gerrit_common(1, notify=False)
  self.assertEqual(0, git_cl.main(['set-commit', '-d']))

def test_cmd_set_commit_gerrit(self):
  """Plain `set-commit` triggers a full CQ run (Commit-Queue: 2)."""
  self._cmd_set_commit_gerrit_common(2)
  self.assertEqual(0, git_cl.main(['set-commit']))
def test_description_display(self):
  """`git cl description -d` prints the current CL description to stdout."""
  out = StringIO.StringIO()
  self.mock(git_cl.sys, 'stdout', out)
  self.mock(git_cl, 'Changelist', ChangelistMock)
  ChangelistMock.desc = 'foo\n'
  self.assertEqual(0, git_cl.main(['description', '-d']))
  self.assertEqual('foo\n', out.getvalue())

def test_StatusFieldOverrideIssueMissingArgs(self):
  """`status --issue` requires both a codereview flag and --field."""
  out = StringIO.StringIO()
  self.mock(git_cl.sys, 'stderr', out)
  # Without --gerrit the parser exits with usage error code 2.
  try:
    self.assertEqual(git_cl.main(['status', '--issue', '1']), 0)
  except SystemExit as ex:
    self.assertEqual(ex.code, 2)
    self.assertRegexpMatches(out.getvalue(), r'--issue must be specified')
  # With --gerrit but without --field it also exits with code 2.
  out = StringIO.StringIO()
  self.mock(git_cl.sys, 'stderr', out)
  try:
    self.assertEqual(git_cl.main(['status', '--issue', '1', '--gerrit']), 0)
  except SystemExit as ex:
    self.assertEqual(ex.code, 2)
    self.assertRegexpMatches(out.getvalue(), r'--field must be specified')

def test_StatusFieldOverrideIssue(self):
  """`status --issue N` overrides the branch issue for the queried field."""
  out = StringIO.StringIO()
  self.mock(git_cl.sys, 'stdout', out)

  def assertIssue(cl_self, *_args):
    # Verify the Changelist was constructed with the overridden issue.
    self.assertEquals(cl_self.issue, 1)
    return 'foobar'

  self.mock(git_cl.Changelist, 'GetDescription', assertIssue)
  self.assertEqual(
      git_cl.main(['status', '--issue', '1', '--gerrit', '--field', 'desc']),
      0)
  self.assertEqual(out.getvalue(), 'foobar\n')

def test_SetCloseOverrideIssue(self):
  """`set-close --issue N` closes the explicitly given issue."""

  def assertIssue(cl_self, *_args):
    # Verify the Changelist was constructed with the overridden issue.
    self.assertEquals(cl_self.issue, 1)
    return 'foobar'

  self.mock(git_cl.Changelist, 'GetDescription', assertIssue)
  self.mock(git_cl.Changelist, 'CloseIssue', lambda *_: None)
  self.assertEqual(
      git_cl.main(['set-close', '--issue', '1', '--gerrit']), 0)
def test_description(self):
  """`description <gerrit url> -d` fetches and prints the commit message."""
  out = StringIO.StringIO()
  self.mock(git_cl.sys, 'stdout', out)
  self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.merge'],), 'feature'),
      ((['git', 'config', 'branch.feature.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/my/repo'),
      # The description comes from the current revision's commit message.
      (('GetChangeDetail', 'chromium-review.googlesource.com',
        'my%2Frepo~123123', ['CURRENT_REVISION', 'CURRENT_COMMIT']),
       {
         'current_revision': 'sha1',
         'revisions': {'sha1': {
           'commit': {'message': 'foobar'},
         }},
       }),
  ]
  self.assertEqual(0, git_cl.main([
      'description',
      'https://chromium-review.googlesource.com/c/my/repo/+/123123',
      '-d']))
  self.assertEqual('foobar\n', out.getvalue())

def test_description_set_raw(self):
  """`description -n <text>` sets the description verbatim."""
  out = StringIO.StringIO()
  self.mock(git_cl.sys, 'stdout', out)
  self.mock(git_cl, 'Changelist', ChangelistMock)
  self.mock(git_cl.sys, 'stdin', StringIO.StringIO('hihi'))
  self.assertEqual(0, git_cl.main(['description', '-n', 'hihi']))
  self.assertEqual('hihi', ChangelistMock.desc)

def test_description_appends_bug_line(self):
  """Editing a description without a Bug: line offers an empty one to fill."""
  current_desc = 'Some.\n\nChange-Id: xxx'

  def RunEditor(desc, _, **kwargs):
    # The editor buffer must contain the standard header plus the current
    # description with a trailing empty 'Bug:' line appended.
    self.assertEquals(
        '# Enter a description of the change.\n'
        '# This will be displayed on the codereview site.\n'
        '# The first line will also be used as the subject of the review.\n'
        '#--------------------This line is 72 characters long'
        '--------------------\n'
        'Some.\n\nChange-Id: xxx\nBug: ',
        desc)
    # Simulate user changing something.
    return 'Some.\n\nChange-Id: xxx\nBug: 123'

  def UpdateDescriptionRemote(_, desc, force=False):
    self.assertEquals(desc, 'Some.\n\nChange-Id: xxx\nBug: 123')

  self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
  self.mock(git_cl.Changelist, 'GetDescription',
            lambda *args: current_desc)
  self.mock(git_cl._GerritChangelistImpl, 'UpdateDescriptionRemote',
            UpdateDescriptionRemote)
  self.mock(git_cl.gclient_utils, 'RunEditor', RunEditor)
  self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '123'),
      ((['git', 'config', 'rietveld.autoupdate'],), CERR1),
      ((['git', 'config', 'rietveld.bug-prefix'],), CERR1),
      ((['git', 'config', 'core.editor'],), 'vi'),
  ]
  self.assertEqual(0, git_cl.main(['description', '--gerrit']))

def test_description_set_stdin(self):
  """`description -n -` reads from stdin, trimming trailing line whitespace."""
  out = StringIO.StringIO()
  self.mock(git_cl.sys, 'stdout', out)
  self.mock(git_cl, 'Changelist', ChangelistMock)
  self.mock(git_cl.sys, 'stdin', StringIO.StringIO('hi \r\n\t there\n\nman'))
  self.assertEqual(0, git_cl.main(['description', '-n', '-']))
  # Trailing ' \r' is stripped from the first line; inner blank line kept.
  self.assertEqual('hi\n\t there\n\nman', ChangelistMock.desc)
def test_archive(self):
  """`archive -f` tags and deletes branches whose CLs are closed."""
  self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
  # Only 'foo' (issue 456) is closed below, so only it gets tagged
  # ('git-cl-archived-456-foo') and deleted.
  self.calls = \
      [((['git', 'for-each-ref', '--format=%(refname)', 'refs/heads'],),
        'refs/heads/master\nrefs/heads/foo\nrefs/heads/bar'),
       ((['git', 'config', 'branch.master.gerritissue'],), '456'),
       ((['git', 'config', 'branch.foo.gerritissue'],), CERR1),
       ((['git', 'config', 'branch.bar.gerritissue'],), '789'),
       ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
       ((['git', 'tag', 'git-cl-archived-456-foo', 'foo'],), ''),
       ((['git', 'branch', '-D', 'foo'],), '')]
  self.mock(git_cl, 'get_cl_statuses',
            lambda branches, fine_grained, max_processes:
                [(MockChangelistWithBranchAndIssue('master', 1), 'open'),
                 (MockChangelistWithBranchAndIssue('foo', 456), 'closed'),
                 (MockChangelistWithBranchAndIssue('bar', 789), 'open')])
  self.assertEqual(0, git_cl.main(['archive', '-f']))

def test_archive_current_branch_fails(self):
  """Archiving refuses (exit 1) when the current branch is the closed one."""
  self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
  self.calls = \
      [((['git', 'for-each-ref', '--format=%(refname)', 'refs/heads'],),
        'refs/heads/master'),
       ((['git', 'config', 'branch.master.gerritissue'],), '1'),
       ((['git', 'symbolic-ref', 'HEAD'],), 'master')]
  self.mock(git_cl, 'get_cl_statuses',
            lambda branches, fine_grained, max_processes:
                [(MockChangelistWithBranchAndIssue('master', 1), 'closed')])
  self.assertEqual(1, git_cl.main(['archive', '-f']))

def test_archive_dry_run(self):
  """`archive --dry-run` performs no tag/delete git operations."""
  self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
  # Note: no 'git tag' or 'git branch -D' calls are expected here.
  self.calls = \
      [((['git', 'for-each-ref', '--format=%(refname)', 'refs/heads'],),
        'refs/heads/master\nrefs/heads/foo\nrefs/heads/bar'),
       ((['git', 'config', 'branch.master.gerritissue'],), '456'),
       ((['git', 'config', 'branch.foo.gerritissue'],), CERR1),
       ((['git', 'config', 'branch.bar.gerritissue'],), '789'),
       ((['git', 'symbolic-ref', 'HEAD'],), 'master'),]
  self.mock(git_cl, 'get_cl_statuses',
            lambda branches, fine_grained, max_processes:
                [(MockChangelistWithBranchAndIssue('master', 1), 'open'),
                 (MockChangelistWithBranchAndIssue('foo', 456), 'closed'),
                 (MockChangelistWithBranchAndIssue('bar', 789), 'open')])
  self.assertEqual(0, git_cl.main(['archive', '-f', '--dry-run']))

def test_archive_no_tags(self):
  """`archive --notags` deletes the closed branch without tagging it first."""
  self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
  # Note: 'git branch -D foo' is expected, but no 'git tag' call.
  self.calls = \
      [((['git', 'for-each-ref', '--format=%(refname)', 'refs/heads'],),
        'refs/heads/master\nrefs/heads/foo\nrefs/heads/bar'),
       ((['git', 'config', 'branch.master.gerritissue'],), '1'),
       ((['git', 'config', 'branch.foo.gerritissue'],), '456'),
       ((['git', 'config', 'branch.bar.gerritissue'],), CERR1),
       ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
       ((['git', 'branch', '-D', 'foo'],), '')]
  self.mock(git_cl, 'get_cl_statuses',
            lambda branches, fine_grained, max_processes:
                [(MockChangelistWithBranchAndIssue('master', 1), 'open'),
                 (MockChangelistWithBranchAndIssue('foo', 456), 'closed'),
                 (MockChangelistWithBranchAndIssue('bar', 789), 'open')])
  self.assertEqual(0, git_cl.main(['archive', '-f', '--notags']))
def test_cmd_issue_erase_existing(self):
  """`git cl issue 0` unsets all issue-related branch config entries."""
  out = StringIO.StringIO()
  self.mock(git_cl.sys, 'stdout', out)
  self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '123'),
      # Let this command raise exception (retcode=1) - it should be ignored.
      ((['git', 'config', '--unset', 'branch.feature.last-upload-hash'],),
       CERR1),
      ((['git', 'config', '--unset', 'branch.feature.gerritissue'],), ''),
      ((['git', 'config', '--unset', 'branch.feature.gerritpatchset'],), ''),
      ((['git', 'config', '--unset', 'branch.feature.gerritserver'],), ''),
      ((['git', 'config', '--unset', 'branch.feature.gerritsquashhash'],),
       ''),
      # Commit message has no Change-Id, so no amend is needed.
      ((['git', 'log', '-1', '--format=%B'],), 'This is a description'),
  ]
  self.assertEqual(0, git_cl.main(['issue', '0']))

def test_cmd_issue_erase_existing_with_change_id(self):
  """`git cl issue 0` also amends HEAD to strip its Change-Id footer."""
  out = StringIO.StringIO()
  self.mock(git_cl.sys, 'stdout', out)
  self.mock(git_cl.Changelist, 'GetDescription',
            lambda _: 'This is a description\n\nChange-Id: Ideadbeef')
  self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '123'),
      # Let this command raise exception (retcode=1) - it should be ignored.
      ((['git', 'config', '--unset', 'branch.feature.last-upload-hash'],),
       CERR1),
      ((['git', 'config', '--unset', 'branch.feature.gerritissue'],), ''),
      ((['git', 'config', '--unset', 'branch.feature.gerritpatchset'],), ''),
      ((['git', 'config', '--unset', 'branch.feature.gerritserver'],), ''),
      ((['git', 'config', '--unset', 'branch.feature.gerritsquashhash'],),
       ''),
      ((['git', 'log', '-1', '--format=%B'],),
       'This is a description\n\nChange-Id: Ideadbeef'),
      # The Change-Id footer is removed via commit --amend.
      ((['git', 'commit', '--amend', '-m', 'This is a description\n'],), ''),
  ]
  self.assertEqual(0, git_cl.main(['issue', '0']))

def test_cmd_issue_json(self):
  """`git cl issue --json FILE` writes issue number and URL as JSON."""
  out = StringIO.StringIO()
  self.mock(git_cl.sys, 'stdout', out)
  self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '123'),
      ((['git', 'config', 'branch.feature.gerritserver'],),
       'https://chromium-review.googlesource.com'),
      (('write_json', 'output.json',
        {'issue': 123,
         'issue_url': 'https://chromium-review.googlesource.com/123'}),
       ''),
  ]
  self.assertEqual(0, git_cl.main(['issue', '--json', 'output.json']))
def test_git_cl_try_default_cq_dry_run_gerrit(self):
  """`git cl try` with no bots given schedules a CQ dry run on Gerrit."""
  self.mock(git_cl.Changelist, 'GetChange',
            lambda _, *a: (
              self._mocked_call(['GetChange']+list(a))))
  self.mock(git_cl.presubmit_support, 'DoGetTryMasters',
            lambda *_, **__: (
              self._mocked_call(['DoGetTryMasters'])))
  self.mock(git_cl._GerritChangelistImpl, 'SetCQState',
            lambda _, s: self._mocked_call(['SetCQState', s]))
  self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '123456'),
      ((['git', 'config', 'branch.feature.gerritserver'],),
       'https://chromium-review.googlesource.com'),
      ((['git', 'config', 'branch.feature.merge'],), 'feature'),
      ((['git', 'config', 'branch.feature.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/depot_tools'),
      # Change detail: patchset 7 ('beeeeeef') is the latest revision.
      (('GetChangeDetail', 'chromium-review.googlesource.com',
        'depot_tools~123456',
        ['DETAILED_ACCOUNTS', 'ALL_REVISIONS', 'CURRENT_COMMIT']), {
        'project': 'depot_tools',
        'status': 'OPEN',
        'owner': {'email': 'owner@e.mail'},
        'revisions': {
          'deadbeaf': {
            '_number': 6,
          },
          'beeeeeef': {
            '_number': 7,
            'fetch': {'http': {
              'url': 'https://chromium.googlesource.com/depot_tools',
              'ref': 'refs/changes/56/123456/7'
            }},
          },
        },
      }),
      ((['git', 'config', 'branch.feature.merge'],), 'feature'),
      ((['git', 'config', 'branch.feature.remote'],), 'origin'),
      ((['get_or_create_merge_base', 'feature', 'feature'],),
       'fake_ancestor_sha'),
      ((['GetChange', 'fake_ancestor_sha', None], ),
       git_cl.presubmit_support.GitChange(
         '', '', '', '', '', '', '', '')),
      ((['git', 'rev-parse', '--show-cdup'],), '../'),
      # No masters configured -> fall back to a CQ dry run.
      ((['DoGetTryMasters'], ), None),
      ((['SetCQState', git_cl._CQState.DRY_RUN], ), None),
  ]
  out = StringIO.StringIO()
  self.mock(git_cl.sys, 'stdout', out)
  self.assertEqual(0, git_cl.main(['try']))
  self.assertEqual(
      out.getvalue(),
      'Scheduling CQ dry run on: '
      'https://chromium-review.googlesource.com/123456\n')
def test_git_cl_try_buildbucket_with_properties_gerrit(self):
  """`git cl try -b BOT -p k=v` builds the correct buildbucket PUT request."""
  self.mock(git_cl.Changelist, 'GetMostRecentPatchset', lambda _: 7)
  # Fix the client_operation_id so the request body is deterministic.
  self.mock(git_cl.uuid, 'uuid4', lambda: 'uuid4')
  self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '123456'),
      ((['git', 'config', 'branch.feature.gerritserver'],),
       'https://chromium-review.googlesource.com'),
      ((['git', 'config', 'branch.feature.merge'],), 'feature'),
      ((['git', 'config', 'branch.feature.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/depot_tools'),
      (('GetChangeDetail', 'chromium-review.googlesource.com',
        'depot_tools~123456',
        ['DETAILED_ACCOUNTS', 'ALL_REVISIONS', 'CURRENT_COMMIT']), {
        'project': 'depot_tools',
        'status': 'OPEN',
        'owner': {'email': 'owner@e.mail'},
        'revisions': {
          'deadbeaf': {
            '_number': 6,
          },
          'beeeeeef': {
            '_number': 7,
            'fetch': {'http': {
              'url': 'https://chromium.googlesource.com/depot_tools',
              'ref': 'refs/changes/56/123456/7'
            }},
          },
        },
      }),
  ]

  def _buildbucket_retry(*_, **kw):
    # Inspect the JSON request body instead of issuing a real HTTP call.
    body = json.loads(kw['body'])
    self.assertEqual(len(body['builds']), 1)
    build = body['builds'][0]
    params = json.loads(build.pop('parameters_json'))
    # -p flags end up in 'properties'; JSON values are parsed, not strings.
    self.assertEqual(params, {
        u'builder_name': u'win',
        u'changes': [{u'author': {u'email': u'owner@e.mail'},
                      u'revision': None}],
        u'properties': {
            u'category': u'git_cl_try',
            u'key': u'val',
            u'json': [{u'a': 1}, None],
            u'patch_gerrit_url':
                u'https://chromium-review.googlesource.com',
            u'patch_issue': 123456,
            u'patch_project': u'depot_tools',
            u'patch_ref': u'refs/changes/56/123456/7',
            u'patch_repository_url':
                u'https://chromium.googlesource.com/depot_tools',
            u'patch_set': 7,
            u'patch_storage': u'gerrit',
        }
    })
    self.assertEqual(build, {
        u'bucket': u'luci.chromium.try',
        u'client_operation_id': u'uuid4',
        u'tags': [
            u'builder:win',
            u'buildset:patch/gerrit/chromium-review.googlesource.com/123456/7',
            u'user_agent:git_cl_try',
        ],
    })

  self.mock(git_cl, '_buildbucket_retry', _buildbucket_retry)
  self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
  self.assertEqual(0, git_cl.main([
      'try', '-B', 'luci.chromium.try', '-b', 'win',
      '-p', 'key=val', '-p', 'json=[{"a":1}, null]']))
  self.assertRegexpMatches(
      git_cl.sys.stdout.getvalue(),
      'Tried jobs on:\nBucket: luci.chromium.try')
def test_git_cl_try_bots_on_multiple_masters(self):
  """Bots resolved to different buckets produce one build per bucket."""
  self.mock(git_cl.Changelist, 'GetMostRecentPatchset', lambda _: 7)
  self.mock(git_cl.Changelist, 'GetChange',
            lambda _, *a: (
              self._mocked_call(['GetChange']+list(a))))
  self.mock(git_cl.presubmit_support, 'DoGetTryMasters',
            lambda *_, **__: (
              self._mocked_call(['DoGetTryMasters'])))
  self.mock(git_cl._GerritChangelistImpl, 'SetCQState',
            lambda _, s: self._mocked_call(['SetCQState', s]))
  self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '123456'),
      ((['git', 'config', 'branch.feature.gerritserver'],),
       'https://chromium-review.googlesource.com'),
      ((['git', 'config', 'branch.feature.merge'],), 'feature'),
      ((['git', 'config', 'branch.feature.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/depot_tools'),
      (('GetChangeDetail', 'chromium-review.googlesource.com',
        'depot_tools~123456',
        ['DETAILED_ACCOUNTS', 'ALL_REVISIONS', 'CURRENT_COMMIT']), {
        'project': 'depot_tools',
        'status': 'OPEN',
        'owner': {'email': 'owner@e.mail'},
        'revisions': {
          'deadbeaf': {
            '_number': 6,
          },
          'beeeeeef': {
            '_number': 7,
            'fetch': {'http': {
              'url': 'https://chromium.googlesource.com/depot_tools',
              'ref': 'refs/changes/56/123456/7'
            }},
          },
        },
      }),
  ]

  def _buildbucket_retry(*_, **kw):
    # Each builder must be scheduled in the bucket the map below assigns.
    body = json.loads(kw['body'])
    self.assertEqual(len(body['builds']), 2)

    self.assertEqual(body['builds'][0]['bucket'], 'bucket1')
    params = json.loads(body['builds'][0]['parameters_json'])
    self.assertEqual(params['builder_name'], 'builder1')

    self.assertEqual(body['builds'][1]['bucket'], 'bucket2')
    params = json.loads(body['builds'][1]['parameters_json'])
    self.assertEqual(params['builder_name'], 'builder2')

  self.mock(git_cl, '_buildbucket_retry', _buildbucket_retry)

  # The builder->bucket map is fetched over HTTP; mock the response.
  self.mock(git_cl.urllib2, 'urlopen', lambda _: StringIO.StringIO(
    json.dumps({
      'builder1': {'bucket': 'bucket1'},
      'builder2': {'bucket': 'bucket2'},
    })))

  self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
  self.assertEqual(
      0, git_cl.main(['try', '-b', 'builder1', '-b', 'builder2']))
  self.assertEqual(
      git_cl.sys.stdout.getvalue(),
      'Tried jobs on:\n'
      'Bucket: bucket1\n'
      ' builder1: []\n'
      'Bucket: bucket2\n'
      ' builder2: []\n'
      'To see results here, run:        git cl try-results\n'
      'To see results in browser, run:  git cl web\n')
def _common_GerritCommitMsgHookCheck(self):
  """Shared mocks for the _GerritCommitMsgHookCheck tests.

  Mocks filesystem access through the mocked-call recorder and returns a
  gerrit Changelist; callers append their own 'exists'/'FileRead' calls.
  """
  self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
  self.mock(git_cl.os.path, 'abspath',
            lambda path: self._mocked_call(['abspath', path]))
  self.mock(git_cl.os.path, 'exists',
            lambda path: self._mocked_call(['exists', path]))
  self.mock(git_cl.gclient_utils, 'FileRead',
            lambda path: self._mocked_call(['FileRead', path]))
  self.mock(git_cl.gclient_utils, 'rm_file_or_tree',
            lambda path: self._mocked_call(['rm_file_or_tree', path]))
  self.calls = [
      ((['git', 'rev-parse', '--show-cdup'],), '../'),
      ((['abspath', '../'],), '/abs/git_repo_root'),
  ]
  return git_cl.Changelist(codereview='gerrit', issue=123)

def test_GerritCommitMsgHookCheck_custom_hook(self):
  """A user-written commit-msg hook is left alone (no removal offered)."""
  cl = self._common_GerritCommitMsgHookCheck()
  self.calls += [
      ((['exists', '/abs/git_repo_root/.git/hooks/commit-msg'],), True),
      ((['FileRead', '/abs/git_repo_root/.git/hooks/commit-msg'],),
       '#!/bin/sh\necho "custom hook"')
  ]
  cl._codereview_impl._GerritCommitMsgHookCheck(offer_removal=True)

def test_GerritCommitMsgHookCheck_not_exists(self):
  """No commit-msg hook present: nothing to do."""
  cl = self._common_GerritCommitMsgHookCheck()
  self.calls += [
      ((['exists', '/abs/git_repo_root/.git/hooks/commit-msg'],), False),
  ]
  cl._codereview_impl._GerritCommitMsgHookCheck(offer_removal=True)

def test_GerritCommitMsgHookCheck(self):
  """The Gerrit-installed hook is detected and removed after confirmation."""
  cl = self._common_GerritCommitMsgHookCheck()
  self.calls += [
      ((['exists', '/abs/git_repo_root/.git/hooks/commit-msg'],), True),
      # Hook content matching Gerrit's add_ChangeId marker triggers the offer.
      ((['FileRead', '/abs/git_repo_root/.git/hooks/commit-msg'],),
       '...\n# From Gerrit Code Review\n...\nadd_ChangeId()\n'),
      (('ask_for_data', 'Do you want to remove it now? [Yes/No]: '), 'Yes'),
      ((['rm_file_or_tree', '/abs/git_repo_root/.git/hooks/commit-msg'],),
       ''),
  ]
  cl._codereview_impl._GerritCommitMsgHookCheck(offer_removal=True)
def test_GerritCmdLand(self):
  """`git cl land` on Gerrit submits the issue and reports the landed commit."""
  self.calls += [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritsquashhash'],),
       'deadbeaf'),
      ((['git', 'diff', 'deadbeaf'],), ''),  # No diff.
      ((['git', 'config', 'branch.feature.gerritserver'],),
       'chromium-review.googlesource.com'),
  ]
  cl = git_cl.Changelist(issue=123, codereview='gerrit')
  # Stub out the Gerrit REST round-trips directly on the implementation.
  cl._codereview_impl._GetChangeDetail = lambda _: {
    'labels': {},
    'current_revision': 'deadbeaf',
  }
  cl._codereview_impl._GetChangeCommit = lambda: {
    'commit': 'deadbeef',
    'web_links': [{'name': 'gitiles',
                   'url': 'https://git.googlesource.com/test/+/deadbeef'}],
  }
  cl._codereview_impl.SubmitIssue = lambda wait_for_merge: None
  out = StringIO.StringIO()
  self.mock(sys, 'stdout', out)
  self.assertEqual(0, cl.CMDLand(force=True,
                                 bypass_hooks=True,
                                 verbose=True,
                                 parallel=False))
  self.assertRegexpMatches(out.getvalue(), 'Issue.*123 has been submitted')
  self.assertRegexpMatches(out.getvalue(), 'Landed as: .*deadbeef')
# Canned buildbucket API responses keyed by build id, shared by the
# try-results tests below: '9000' is a STARTED build, '8000' a COMPLETED
# build that FAILED. Timestamps order '8000' before '9000'.
BUILDBUCKET_BUILDS_MAP = {
    '9000': {
        'id': '9000',
        'bucket': 'master.x.y',
        'created_by': 'user:someone@chromium.org',
        'created_ts': '147200002222000',
        'experimental': False,
        'parameters_json': json.dumps({
            'builder_name': 'my-bot',
            'properties': {'category': 'cq'},
        }),
        'status': 'STARTED',
        'tags': [
            'build_address:x.y/my-bot/2',
            'builder:my-bot',
            'experimental:false',
            'user_agent:cq',
        ],
        'url': 'http://build.cr.org/p/x.y/builders/my-bot/builds/2',
    },
    '8000': {
        'id': '8000',
        'bucket': 'master.x.y',
        'created_by': 'user:someone@chromium.org',
        'created_ts': '147200001111000',
        'experimental': False,
        'failure_reason': 'BUILD_FAILURE',
        'parameters_json': json.dumps({
            'builder_name': 'my-bot',
            'properties': {'category': 'cq'},
        }),
        'result_details_json': json.dumps({
            'properties': {'buildnumber': 1},
        }),
        'result': 'FAILURE',
        'status': 'COMPLETED',
        'tags': [
            'build_address:x.y/my-bot/1',
            'builder:my-bot',
            'experimental:false',
            'user_agent:cq',
        ],
        'url': 'http://build.cr.org/p/x.y/builders/my-bot/builds/1',
    },
}
def test_write_try_results_json(self):
  """write_try_results_json flattens builds into a sorted, stable JSON list.

  The output is ordered by created_ts ('8000' before '9000') and uses None
  for fields absent from a build (e.g. 'result' on a STARTED build).
  """
  expected_output = [
      {
          'bucket': 'master.x.y',
          'buildbucket_id': '8000',
          'builder_name': 'my-bot',
          'created_ts': '147200001111000',
          'experimental': False,
          'failure_reason': 'BUILD_FAILURE',
          'result': 'FAILURE',
          'status': 'COMPLETED',
          'tags': [
              'build_address:x.y/my-bot/1',
              'builder:my-bot',
              'experimental:false',
              'user_agent:cq',
          ],
          'url': 'http://build.cr.org/p/x.y/builders/my-bot/builds/1',
      },
      {
          'bucket': 'master.x.y',
          'buildbucket_id': '9000',
          'builder_name': 'my-bot',
          'created_ts': '147200002222000',
          'experimental': False,
          'failure_reason': None,
          'result': None,
          'status': 'STARTED',
          'tags': [
              'build_address:x.y/my-bot/2',
              'builder:my-bot',
              'experimental:false',
              'user_agent:cq',
          ],
          'url': 'http://build.cr.org/p/x.y/builders/my-bot/builds/2',
      },
  ]
  self.calls = [(('write_json', 'output.json', expected_output), '')]
  git_cl.write_try_results_json('output.json', self.BUILDBUCKET_BUILDS_MAP)
def _setup_fetch_try_jobs(self, most_recent_patchset=20001):
  """Common mocks for `git cl try-results`: auth and buildbucket fetch."""
  out = StringIO.StringIO()
  self.mock(sys, 'stdout', out)
  self.mock(git_cl.Changelist, 'GetMostRecentPatchset',
            lambda *args: most_recent_patchset)
  self.mock(git_cl.auth, 'get_authenticator_for_host', lambda host, _cfg:
            self._mocked_call(['get_authenticator_for_host', host]))
  self.mock(git_cl, '_buildbucket_retry', lambda *_, **__:
            self._mocked_call(['_buildbucket_retry']))

def _setup_fetch_try_jobs_gerrit(self, *request_results):
  """Gerrit-flavored try-results setup.

  Args:
    *request_results: successive responses for each _buildbucket_retry call.
  """
  self._setup_fetch_try_jobs(most_recent_patchset=13)
  self.calls += [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '1'),
      # TODO(tandrii): Uncomment the below if we decide to support checking
      # patchsets for Gerrit.
      # Simulate that Gerrit has more patchsets than local.
      # ((['git', 'config', 'branch.feature.gerritpatchset'],), '12'),
      ((['git', 'config', 'branch.feature.gerritserver'],),
       'https://x-review.googlesource.com'),
      ((['get_authenticator_for_host', 'x-review.googlesource.com'],),
       AuthenticatorMock()),
  ] + [((['_buildbucket_retry'],), r) for r in request_results]

def test_fetch_try_jobs_none_gerrit(self):
  """An empty buildbucket response prints 'No try jobs'."""
  self._setup_fetch_try_jobs_gerrit({})
  self.assertEqual(0, git_cl.main(['try-results']))
  # TODO(tandrii): Uncomment the below if we decide to support checking
  # patchsets for Gerrit.
  # self.assertRegexpMatches(
  #     sys.stdout.getvalue(),
  #     r'Warning: Codereview server has newer patchsets \(13\)')
  self.assertRegexpMatches(sys.stdout.getvalue(), 'No try jobs')

def test_fetch_try_jobs_some_gerrit(self):
  """Builds are grouped by status (Failures/Started) with a total count."""
  self._setup_fetch_try_jobs_gerrit({
      'builds': self.BUILDBUCKET_BUILDS_MAP.values(),
  })
  # TODO(tandrii): Uncomment the below if we decide to support checking
  # patchsets for Gerrit.
  # self.calls.remove(
  #     ((['git', 'config', 'branch.feature.gerritpatchset'],), '12'))
  self.assertEqual(0, git_cl.main(['try-results', '--patchset', '5']))

  # ... and doesn't result in warning.
  self.assertNotRegexpMatches(sys.stdout.getvalue(), 'Warning')
  self.assertRegexpMatches(sys.stdout.getvalue(), '^Failures:')
  self.assertRegexpMatches(sys.stdout.getvalue(), 'Started:')
  self.assertRegexpMatches(sys.stdout.getvalue(), '2 try jobs')
def _mock_gerrit_changes_for_detail_cache(self):
  """Pins the Gerrit host so detail-cache keys are deterministic."""
  self.mock(git_cl._GerritChangelistImpl, '_GetGerritHost', lambda _: 'host')

def test_gerrit_change_detail_cache_simple(self):
  """_GetChangeDetail caches per change; no_cache=True refetches."""
  self._mock_gerrit_changes_for_detail_cache()
  # Exactly three fetches are expected: one per cache miss below.
  self.calls = [
      (('GetChangeDetail', 'host', 'my%2Frepo~1', []), 'a'),
      (('GetChangeDetail', 'host', 'ab%2Frepo~2', []), 'b'),
      (('GetChangeDetail', 'host', 'ab%2Frepo~2', []), 'b2'),
  ]
  cl1 = git_cl.Changelist(issue=1, codereview='gerrit')
  cl1._cached_remote_url = (
      True, 'https://chromium.googlesource.com/a/my/repo.git/')
  cl2 = git_cl.Changelist(issue=2, codereview='gerrit')
  cl2._cached_remote_url = (
      True, 'https://chromium.googlesource.com/ab/repo')
  self.assertEqual(cl1._GetChangeDetail(), 'a')  # Miss.
  self.assertEqual(cl1._GetChangeDetail(), 'a')
  self.assertEqual(cl2._GetChangeDetail(), 'b')  # Miss.
  self.assertEqual(cl2._GetChangeDetail(no_cache=True), 'b2')  # Miss.
  self.assertEqual(cl1._GetChangeDetail(), 'a')
  self.assertEqual(cl2._GetChangeDetail(), 'b2')

def test_gerrit_change_detail_cache_options(self):
  """A cached result satisfies any request for a subset of its options."""
  self._mock_gerrit_changes_for_detail_cache()
  self.calls = [
      (('GetChangeDetail', 'host', 'repo~1', ['C', 'A', 'B']), 'cab'),
      (('GetChangeDetail', 'host', 'repo~1', ['A', 'D']), 'ad'),
      (('GetChangeDetail', 'host', 'repo~1', ['A']), 'a'),  # no_cache=True
      (('GetChangeDetail', 'host', 'repo~1', ['B']), 'b'),  # no longer in cache.
  ]
  cl = git_cl.Changelist(issue=1, codereview='gerrit')
  cl._cached_remote_url = (True, 'https://chromium.googlesource.com/repo/')

  self.assertEqual(cl._GetChangeDetail(options=['C', 'A', 'B']), 'cab')
  # Option order and subsets all hit the ['C', 'A', 'B'] cache entry.
  self.assertEqual(cl._GetChangeDetail(options=['A', 'B', 'C']), 'cab')
  self.assertEqual(cl._GetChangeDetail(options=['B', 'A']), 'cab')
  self.assertEqual(cl._GetChangeDetail(options=['C']), 'cab')
  self.assertEqual(cl._GetChangeDetail(options=['A']), 'cab')
  self.assertEqual(cl._GetChangeDetail(), 'cab')

  # ['A', 'D'] is not a subset of any cached entry -> fetch.
  self.assertEqual(cl._GetChangeDetail(options=['A', 'D']), 'ad')
  self.assertEqual(cl._GetChangeDetail(options=['A']), 'cab')
  self.assertEqual(cl._GetChangeDetail(options=['D']), 'ad')
  self.assertEqual(cl._GetChangeDetail(), 'cab')

  # Finally, no_cache should invalidate all caches for given change.
  self.assertEqual(cl._GetChangeDetail(options=['A'], no_cache=True), 'a')
  self.assertEqual(cl._GetChangeDetail(options=['B']), 'b')
def test_gerrit_description_caching(self):
  """GetDescription caches the fetched message; force=True refetches."""

  def gen_detail(rev, desc):
    # Minimal GetChangeDetail payload carrying one commit message.
    return {
      'current_revision': rev,
      'revisions': {rev: {'commit': {'message': desc}}}
    }

  self.calls = [
      (('GetChangeDetail', 'host', 'my%2Frepo~1',
        ['CURRENT_REVISION', 'CURRENT_COMMIT']),
       gen_detail('rev1', 'desc1')),
      (('GetChangeDetail', 'host', 'my%2Frepo~1',
        ['CURRENT_REVISION', 'CURRENT_COMMIT']),
       gen_detail('rev2', 'desc2')),
  ]

  self._mock_gerrit_changes_for_detail_cache()
  cl = git_cl.Changelist(issue=1, codereview='gerrit')
  cl._cached_remote_url = (
      True, 'https://chromium.googlesource.com/a/my/repo.git/')
  self.assertEqual(cl.GetDescription(), 'desc1')
  self.assertEqual(cl.GetDescription(), 'desc1')  # cache hit.
  self.assertEqual(cl.GetDescription(force=True), 'desc2')
def test_print_current_creds(self):
  """print_current_creds lists hosts/users aligned, with/without .netrc."""

  class CookiesAuthenticatorMock(object):
    # Fake credential store: two .gitcookies hosts plus a .netrc exposing
    # one googlesource host (github.com must be filtered out).
    def __init__(self):
      self.gitcookies = {
          'host.googlesource.com': ('user', 'pass'),
          'host-review.googlesource.com': ('user', 'pass'),
      }
      self.netrc = self
      self.netrc.hosts = {
          'github.com': ('user2', None, 'pass2'),
          'host2.googlesource.com': ('user3', None, 'pass'),
      }

  self.mock(git_cl.gerrit_util, 'CookiesAuthenticator',
            CookiesAuthenticatorMock)
  self.mock(sys, 'stdout', StringIO.StringIO())
  git_cl._GitCookiesChecker().print_current_creds(include_netrc=True)
  self.assertEqual(list(sys.stdout.getvalue().splitlines()), [
        ' Host\t User\t Which file',
        '============================\t=====\t===========',
        'host-review.googlesource.com\t user\t.gitcookies',
        ' host.googlesource.com\t user\t.gitcookies',
        ' host2.googlesource.com\tuser3\t .netrc',
  ])
  # NOTE(review): resets the mocked StringIO via its internal 'buf'
  # attribute — relies on Python 2 StringIO internals; verify it actually
  # clears prior output (buflist is not reset here).
  sys.stdout.buf = ''
  git_cl._GitCookiesChecker().print_current_creds(include_netrc=False)
  self.assertEqual(list(sys.stdout.getvalue().splitlines()), [
        ' Host\tUser\t Which file',
        '============================\t====\t===========',
        'host-review.googlesource.com\tuser\t.gitcookies',
        ' host.googlesource.com\tuser\t.gitcookies',
  ])
def _common_creds_check_mocks(self):
  """Mocks os.path.exists for ~/.netrc and ~/.gitcookies checks only."""

  def exists_mock(path):
    dirname = os.path.dirname(path)
    if dirname == os.path.expanduser('~'):
      dirname = '~'
    base = os.path.basename(path)
    if base in ('.netrc', '.gitcookies'):
      return self._mocked_call('os.path.exists', '%s/%s' % (dirname, base))
    # git cl also checks for existence other files not relevant to this test.
    return None

  self.mock(os.path, 'exists', exists_mock)
  self.mock(sys, 'stdout', StringIO.StringIO())

def test_creds_check_gitcookies_not_configured(self):
  """creds-check offers to migrate from .netrc to a fresh .gitcookies."""
  self._common_creds_check_mocks()
  self.mock(git_cl._GitCookiesChecker, 'get_hosts_with_creds',
            lambda _, include_netrc=False: [])
  self.calls = [
      ((['git', 'config', '--path', 'http.cookiefile'],), CERR1),
      ((['git', 'config', '--global', 'http.cookiefile'],), CERR1),
      (('os.path.exists', '~/.netrc'), True),
      (('ask_for_data', 'Press Enter to setup .gitcookies, '
        'or Ctrl+C to abort'), ''),
      # After confirmation, git is configured to use ~/.gitcookies.
      ((['git', 'config', '--global', 'http.cookiefile',
         os.path.expanduser('~/.gitcookies')], ), ''),
  ]
  self.assertEqual(0, git_cl.main(['creds-check']))
  self.assertRegexpMatches(
      sys.stdout.getvalue(),
      '^You seem to be using outdated .netrc for git credentials:')
  self.assertRegexpMatches(
      sys.stdout.getvalue(),
      '\nConfigured git to use .gitcookies from')

def test_creds_check_gitcookies_configured_custom_broken(self):
  """A configured-but-missing custom cookiefile is reset to the default."""
  self._common_creds_check_mocks()
  self.mock(git_cl._GitCookiesChecker, 'get_hosts_with_creds',
            lambda _, include_netrc=False: [])
  self.calls = [
      ((['git', 'config', '--path', 'http.cookiefile'],), CERR1),
      ((['git', 'config', '--global', 'http.cookiefile'],),
       '/custom/.gitcookies'),
      # The custom path does not exist -> offer to reconfigure.
      (('os.path.exists', '/custom/.gitcookies'), False),
      (('ask_for_data', 'Reconfigure git to use default .gitcookies? '
        'Press Enter to reconfigure, or Ctrl+C to abort'), ''),
      ((['git', 'config', '--global', 'http.cookiefile',
         os.path.expanduser('~/.gitcookies')], ), ''),
  ]
  self.assertEqual(0, git_cl.main(['creds-check']))
  self.assertRegexpMatches(
      sys.stdout.getvalue(),
      'WARNING: You have configured custom path to .gitcookies: ')
  self.assertRegexpMatches(
      sys.stdout.getvalue(),
      'However, your configured .gitcookies file is missing.')
def test_git_cl_comment_add_gerrit(self):
  """`comment --gerrit -i N -a msg` posts the message via SetReview."""
  self.mock(git_cl.gerrit_util, 'SetReview',
            lambda host, change, msg, ready:
                self._mocked_call('SetReview', host, change, msg, ready))
  # Detached HEAD (symbolic-ref fails twice): the server/project are
  # derived from the origin remote instead of branch config.
  self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), CERR1),
      ((['git', 'symbolic-ref', 'HEAD'],), CERR1),
      ((['git', 'config', 'rietveld.upstream-branch'],), CERR1),
      ((['git', 'branch', '-r'],), 'origin/HEAD -> origin/master\n'
                                   'origin/master'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/infra/infra'),
      (('SetReview', 'chromium-review.googlesource.com', 'infra%2Finfra~10',
        'msg', None),
       None),
  ]
  self.assertEqual(0, git_cl.main(['comment', '--gerrit', '-i', '10',
                                   '-a', 'msg']))
  def test_git_cl_comments_fetch_gerrit(self):
    """Fetches change messages + inline comments and formats their summary.

    The same expectations are replayed twice (note the `] * 2 +` below):
    once for the direct GetCommentsSummary() call, once for the
    `git cl comments -j output.json` invocation at the end.
    """
    self.mock(sys, 'stdout', StringIO.StringIO())
    self.calls = [
      ((['git', 'config', 'branch.foo.gerritserver'],), ''),
      ((['git', 'config', 'branch.foo.merge'],), ''),
      ((['git', 'config', 'rietveld.upstream-branch'],), CERR1),
      ((['git', 'branch', '-r'],), 'origin/HEAD -> origin/master\n'
                                   'origin/master'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/infra/infra'),
      (('GetChangeDetail', 'chromium-review.googlesource.com',
        'infra%2Finfra~1',
        ['MESSAGES', 'DETAILED_ACCOUNTS', 'CURRENT_REVISION',
         'CURRENT_COMMIT']), {
        'owner': {'email': 'owner@example.com'},
        'current_revision': 'ba5eba11',
        'revisions': {
          'deadbeaf': {
            '_number': 1,
          },
          'ba5eba11': {
            '_number': 2,
          },
        },
        'messages': [
          {
            u'_revision_number': 1,
            u'author': {
              u'_account_id': 1111084,
              u'email': u'commit-bot@chromium.org',
              u'name': u'Commit Bot'
            },
            u'date': u'2017-03-15 20:08:45.000000000',
            u'id': u'f5a6c25ecbd3b3b54a43ae418ed97eff046dc50b',
            u'message': u'Patch Set 1:\n\nDry run: CQ is trying the patch...',
            u'tag': u'autogenerated:cq:dry-run'
          },
          {
            u'_revision_number': 2,
            u'author': {
              u'_account_id': 11151243,
              u'email': u'owner@example.com',
              u'name': u'owner'
            },
            u'date': u'2017-03-16 20:00:41.000000000',
            u'id': u'f5a6c25ecbd3b3b54a43ae418ed97eff046d1234',
            u'message': u'PTAL',
          },
          {
            u'_revision_number': 2,
            u'author': {
              u'_account_id': 148512 ,
              u'email': u'reviewer@example.com',
              u'name': u'reviewer'
            },
            u'date': u'2017-03-17 05:19:37.500000000',
            u'id': u'f5a6c25ecbd3b3b54a43ae418ed97eff046d4568',
            u'message': u'Patch Set 2: Code-Review+1',
          },
        ]
      }),
      (('GetChangeComments', 'chromium-review.googlesource.com',
        'infra%2Finfra~1'), {
        '/COMMIT_MSG': [
          {
            'author': {'email': u'reviewer@example.com'},
            'updated': u'2017-03-17 05:19:37.500000000',
            'patch_set': 2,
            'side': 'REVISION',
            'message': 'Please include a bug link',
          },
        ],
        'codereview.settings': [
          {
            'author': {'email': u'owner@example.com'},
            'updated': u'2017-03-16 20:00:41.000000000',
            'patch_set': 2,
            'side': 'PARENT',
            'line': 42,
            'message': 'I removed this because it is bad',
          },
        ]
      }),
      (('GetChangeRobotComments', 'chromium-review.googlesource.com',
        'infra%2Finfra~1'), {}),
      ((['git', 'config', 'branch.foo.gerritpatchset', '2'],), ''),
    ] * 2 + [
      # Second pass additionally writes the JSON summary requested via -j.
      (('write_json', 'output.json', [
        {
          u'date': u'2017-03-16 20:00:41.000000',
          u'message': (
              u'PTAL\n' +
              u'\n' +
              u'codereview.settings\n' +
              u'  Base, Line 42: https://chromium-review.googlesource.com/' +
              u'c/1/2/codereview.settings#b42\n' +
              u'  I removed this because it is bad\n'),
          u'autogenerated': False,
          u'approval': False,
          u'disapproval': False,
          u'sender': u'owner@example.com'
        }, {
          u'date': u'2017-03-17 05:19:37.500000',
          u'message': (
              u'Patch Set 2: Code-Review+1\n' +
              u'\n' +
              u'/COMMIT_MSG\n' +
              u'  PS2, File comment: https://chromium-review.googlesource' +
              u'.com/c/1/2//COMMIT_MSG#\n' +
              u'  Please include a bug link\n'),
          u'autogenerated': False,
          u'approval': False,
          u'disapproval': False,
          u'sender': u'reviewer@example.com'
        }
      ]),'')
    ]
    expected_comments_summary = [
      git_cl._CommentSummary(
        message=(
            u'PTAL\n' +
            u'\n' +
            u'codereview.settings\n' +
            u'  Base, Line 42: https://chromium-review.googlesource.com/' +
            u'c/1/2/codereview.settings#b42\n' +
            u'  I removed this because it is bad\n'),
        date=datetime.datetime(2017, 3, 16, 20, 0, 41, 0),
        autogenerated=False,
        disapproval=False, approval=False, sender=u'owner@example.com'),
      git_cl._CommentSummary(
        message=(
            u'Patch Set 2: Code-Review+1\n' +
            u'\n' +
            u'/COMMIT_MSG\n' +
            u'  PS2, File comment: https://chromium-review.googlesource.com/' +
            u'c/1/2//COMMIT_MSG#\n' +
            u'  Please include a bug link\n'),
        date=datetime.datetime(2017, 3, 17, 5, 19, 37, 500000),
        autogenerated=False,
        disapproval=False, approval=False, sender=u'reviewer@example.com'),
    ]
    cl = git_cl.Changelist(
        codereview='gerrit', issue=1, branchref='refs/heads/foo')
    self.assertEqual(cl.GetCommentsSummary(), expected_comments_summary)
    self.mock(git_cl.Changelist, 'GetBranch', lambda _: 'foo')
    self.assertEqual(
        0, git_cl.main(['comments', '-i', '1', '-j', 'output.json']))
  def test_git_cl_comments_robot_comments(self):
    """Robot comments are fetched too, but only for the latest patchset.

    git cl comments also fetches robot comments (which are considered a type
    of autogenerated comment), and unlike other types of comments, only robot
    comments from the latest patchset are shown.
    """
    self.mock(sys, 'stdout', StringIO.StringIO())
    self.calls = [
      ((['git', 'config', 'branch.foo.gerritserver'],), ''),
      ((['git', 'config', 'branch.foo.merge'],), ''),
      ((['git', 'config', 'rietveld.upstream-branch'],), CERR1),
      ((['git', 'branch', '-r'],), 'origin/HEAD -> origin/master\n'
                                   'origin/master'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/infra/infra'),
      (('GetChangeDetail', 'chromium-review.googlesource.com',
        'infra%2Finfra~1',
        ['MESSAGES', 'DETAILED_ACCOUNTS', 'CURRENT_REVISION',
         'CURRENT_COMMIT']), {
        'owner': {'email': 'owner@example.com'},
        'current_revision': 'ba5eba11',
        'revisions': {
          'deadbeaf': {
            '_number': 1,
          },
          'ba5eba11': {
            '_number': 2,
          },
        },
        'messages': [
          {
            u'_revision_number': 1,
            u'author': {
              u'_account_id': 1111084,
              u'email': u'commit-bot@chromium.org',
              u'name': u'Commit Bot'
            },
            u'date': u'2017-03-15 20:08:45.000000000',
            u'id': u'f5a6c25ecbd3b3b54a43ae418ed97eff046dc50b',
            u'message': u'Patch Set 1:\n\nDry run: CQ is trying the patch...',
            u'tag': u'autogenerated:cq:dry-run'
          },
          {
            u'_revision_number': 1,
            u'author': {
              u'_account_id': 123,
              u'email': u'tricium@serviceaccount.com',
              u'name': u'Tricium'
            },
            u'date': u'2017-03-16 20:00:41.000000000',
            u'id': u'f5a6c25ecbd3b3b54a43ae418ed97eff046d1234',
            u'message': u'(1 comment)',
            u'tag': u'autogenerated:tricium',
          },
          {
            u'_revision_number': 1,
            u'author': {
              u'_account_id': 123,
              u'email': u'tricium@serviceaccount.com',
              u'name': u'Tricium'
            },
            u'date': u'2017-03-16 20:00:41.000000000',
            u'id': u'f5a6c25ecbd3b3b54a43ae418ed97eff046d1234',
            u'message': u'(1 comment)',
            u'tag': u'autogenerated:tricium',
          },
          {
            u'_revision_number': 2,
            u'author': {
              u'_account_id': 123 ,
              u'email': u'tricium@serviceaccount.com',
              u'name': u'reviewer'
            },
            u'date': u'2017-03-17 05:30:37.000000000',
            u'tag': u'autogenerated:tricium',
            u'id': u'f5a6c25ecbd3b3b54a43ae418ed97eff046d4568',
            u'message': u'(1 comment)',
          },
        ]
      }),
      (('GetChangeComments', 'chromium-review.googlesource.com',
        'infra%2Finfra~1'), {}),
      (('GetChangeRobotComments', 'chromium-review.googlesource.com',
        'infra%2Finfra~1'), {
        'codereview.settings': [
          {
            u'author': {u'email': u'tricium@serviceaccount.com'},
            u'updated': u'2017-03-17 05:30:37.000000000',
            u'robot_run_id': u'5565031076855808',
            u'robot_id': u'Linter/Category',
            u'tag': u'autogenerated:tricium',
            u'patch_set': 2,
            u'side': u'REVISION',
            u'message': u'Linter warning message text',
            u'line': 32,
          },
        ],
      }),
      ((['git', 'config', 'branch.foo.gerritpatchset', '2'],), ''),
    ]
    # Only the patchset-2 robot comment survives; patchset-1 ones are dropped.
    expected_comments_summary = [
      git_cl._CommentSummary(date=datetime.datetime(2017, 3, 17, 5, 30, 37),
        message=(
            u'(1 comment)\n\ncodereview.settings\n'
            u'  PS2, Line 32: https://chromium-review.googlesource.com/'
            u'c/1/2/codereview.settings#32\n'
            u'  Linter warning message text\n'),
        sender=u'tricium@serviceaccount.com',
        autogenerated=True, approval=False, disapproval=False)
    ]
    cl = git_cl.Changelist(
        codereview='gerrit', issue=1, branchref='refs/heads/foo')
    self.assertEqual(cl.GetCommentsSummary(), expected_comments_summary)
  def test_get_remote_url_with_mirror(self):
    """remote.origin.url pointing at a local cache dir resolves to real URL."""
    original_os_path_isdir = os.path.isdir
    def selective_os_path_isdir_mock(path):
      # Only intercept the cache path; everything else uses the real isdir.
      if path == '/cache/this-dir-exists':
        return self._mocked_call('os.path.isdir', path)
      return original_os_path_isdir(path)
    self.mock(os.path, 'isdir', selective_os_path_isdir_mock)

    url = 'https://chromium.googlesource.com/my/repo'
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
      ((['git', 'config', 'branch.master.merge'],), 'master'),
      ((['git', 'config', 'branch.master.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       '/cache/this-dir-exists'),
      (('os.path.isdir', '/cache/this-dir-exists'),
       True),
      # Runs in /cache/this-dir-exists.
      ((['git', 'config', 'remote.origin.url'],),
       url),
    ]
    cl = git_cl.Changelist(codereview='gerrit', issue=1)
    self.assertEqual(cl.GetRemoteUrl(), url)
    self.assertEqual(cl.GetRemoteUrl(), url)  # Must be cached.
  def test_get_remote_url_non_existing_mirror(self):
    """A cache-dir remote URL that doesn't exist yields None + logged error."""
    original_os_path_isdir = os.path.isdir
    def selective_os_path_isdir_mock(path):
      # Only intercept the cache path; everything else uses the real isdir.
      if path == '/cache/this-dir-doesnt-exist':
        return self._mocked_call('os.path.isdir', path)
      return original_os_path_isdir(path)
    self.mock(os.path, 'isdir', selective_os_path_isdir_mock)
    self.mock(logging, 'error',
              lambda fmt, *a: self._mocked_call('logging.error', fmt % a))

    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
      ((['git', 'config', 'branch.master.merge'],), 'master'),
      ((['git', 'config', 'branch.master.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       '/cache/this-dir-doesnt-exist'),
      (('os.path.isdir', '/cache/this-dir-doesnt-exist'),
       False),
      (('logging.error',
        'Remote "origin" for branch "/cache/this-dir-doesnt-exist" points to'
        ' "master", but it doesn\'t exist.'), None),
    ]
    cl = git_cl.Changelist(codereview='gerrit', issue=1)
    self.assertIsNone(cl.GetRemoteUrl())
  def test_get_remote_url_misconfigured_mirror(self):
    """A cache dir that exists but has no origin URL yields None + error."""
    original_os_path_isdir = os.path.isdir
    def selective_os_path_isdir_mock(path):
      # Only intercept the cache path; everything else uses the real isdir.
      if path == '/cache/this-dir-exists':
        return self._mocked_call('os.path.isdir', path)
      return original_os_path_isdir(path)
    self.mock(os.path, 'isdir', selective_os_path_isdir_mock)
    self.mock(logging, 'error',
              lambda *a: self._mocked_call('logging.error', *a))

    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
      ((['git', 'config', 'branch.master.merge'],), 'master'),
      ((['git', 'config', 'branch.master.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       '/cache/this-dir-exists'),
      (('os.path.isdir', '/cache/this-dir-exists'), True),
      # Runs in /cache/this-dir-exists.
      ((['git', 'config', 'remote.origin.url'],), ''),
      (('logging.error',
        'Remote "%(remote)s" for branch "%(branch)s" points to '
        '"%(cache_path)s", but it is misconfigured.\n'
        '"%(cache_path)s" must be a git repo and must have a remote named '
        '"%(remote)s" pointing to the git host.', {
          'remote': 'origin',
          'cache_path': '/cache/this-dir-exists',
          'branch': 'master'}
      ), None),
    ]
    cl = git_cl.Changelist(codereview='gerrit', issue=1)
    self.assertIsNone(cl.GetRemoteUrl())
  def test_gerrit_change_identifier_with_project(self):
    """When remote URL is resolvable, the identifier embeds project~issue."""
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
      ((['git', 'config', 'branch.master.merge'],), 'master'),
      ((['git', 'config', 'branch.master.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/a/my/repo.git/'),
    ]
    cl = git_cl.Changelist(codereview='gerrit', issue=123456)
    # '/a/' prefix and '.git/' suffix are stripped; '/' is %-encoded.
    self.assertEqual(cl._GerritChangeIdentifier(), 'my%2Frepo~123456')
  def test_gerrit_change_identifier_without_project(self):
    """Without a resolvable remote URL, fall back to the bare issue number."""
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
      ((['git', 'config', 'branch.master.merge'],), 'master'),
      ((['git', 'config', 'branch.master.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],), CERR1),
    ]
    cl = git_cl.Changelist(codereview='gerrit', issue=123456)
    self.assertEqual(cl._GerritChangeIdentifier(), '123456')
if __name__ == '__main__':
  # -v anywhere on the command line turns on debug logging for the run.
  verbosity = logging.DEBUG if '-v' in sys.argv else logging.ERROR
  logging.basicConfig(level=verbosity)
  unittest.main()
| 38.759643 | 80 | 0.577042 |
import contextlib
import datetime
import json
import logging
import os
import StringIO
import sys
import tempfile
import unittest
import urlparse
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from testing_support.auto_stub import TestCase
import metrics
metrics.DISABLE_METRICS_COLLECTION = True
import gerrit_util
import git_cl
import git_common
import git_footers
import subprocess2
def callError(code=1, cmd='', cwd='', stdout='', stderr=''):
  """Shorthand for constructing a subprocess2.CalledProcessError."""
  return subprocess2.CalledProcessError(code, cmd, cwd, stdout, stderr)


# Most common mocked subprocess failure: exit code 1 with empty output.
CERR1 = callError(1)
def MakeNamedTemporaryFileMock(expected_content):
  """Returns a NamedTemporaryFile stand-in that validates written content.

  Instances always report '/tmp/named' as their path; when expected_content
  is truthy, any write() must supply exactly that content.
  """
  class NamedTemporaryFileMock(object):
    def __init__(self, *args, **kwargs):
      self.name = '/tmp/named'
      self.expected_content = expected_content

    def __enter__(self):
      return self

    def __exit__(self, _type, _value, _tb):
      pass

    def write(self, content):
      if self.expected_content:
        assert content == self.expected_content

    def close(self):
      pass

  return NamedTemporaryFileMock
class ChangelistMock(object):
  """Minimal Changelist stand-in.

  The description is stored on the class (not the instance) so tests can
  inspect it without a reference to the instance the code under test made.
  """
  desc = ""

  def __init__(self, **kwargs):
    pass

  def GetIssue(self):
    return 1

  def GetDescription(self, force=False):
    return ChangelistMock.desc

  def UpdateDescription(self, desc, force=False):
    ChangelistMock.desc = desc
class PresubmitMock(object):
  """Presubmit result stub: no reviewers, one canned CC, always passes."""

  def __init__(self, *args, **kwargs):
    self.reviewers = []
    self.more_cc = ['chromium-reviews+test-more-cc@chromium.org']

  @staticmethod
  def should_continue():
    return True
class GitCheckoutMock(object):
  """Checkout stub; apply_patch fails iff the class-level conflict flag is set."""

  @staticmethod
  def reset():
    # Clear the simulated merge conflict before each test.
    GitCheckoutMock.conflict = False

  def __init__(self, *args, **kwargs):
    pass

  def apply_patch(self, p):
    if GitCheckoutMock.conflict:
      raise Exception('failed')
class WatchlistsMock(object):
  """Watchlists stub: every path set maps to a single canned watcher."""

  def __init__(self, _):
    pass

  @staticmethod
  def GetWatchersForPaths(_):
    return ['joe@example.com']
class CodereviewSettingsFileMock(object):
  """File-like stub yielding a minimal Gerrit codereview.settings payload."""

  def __init__(self):
    pass

  def read(self):
    # The payload must stay byte-identical to what the tests parse.
    return "CODE_REVIEW_SERVER: gerrit.chromium.org\nGERRIT_HOST: True\n"
class AuthenticatorMock(object):
  """Auth stub: credentials always cached; authorize() is a passthrough."""

  def __init__(self, *_args):
    pass

  def has_cached_credentials(self):
    return True

  def authorize(self, http):
    return http
def CookiesAuthenticatorMockFactory(hosts_with_creds=None, same_auth=False):
  """Builds a CookiesAuthenticator replacement class.

  Args:
    hosts_with_creds: optional dict mapping host -> auth value returned by
        _get_auth_for_host for that host.
    same_auth: if truthy, this one value is returned for every host and
        hosts_with_creds is ignored.
  """
  class CookiesAuthenticatorMock(git_cl.gerrit_util.CookiesAuthenticator):
    def __init__(self):  # pylint: disable=super-init-not-called
      pass

    @classmethod
    def get_gitcookies_path(cls):
      return '~/.gitcookies'

    @classmethod
    def get_netrc_path(cls):
      return '~/.netrc'

    def _get_auth_for_host(self, host):
      if same_auth:
        return same_auth
      return (hosts_with_creds or {}).get(host)

  return CookiesAuthenticatorMock
class MockChangelistWithBranchAndIssue():
  """Trivial Changelist stub carrying just a branch name and an issue id."""

  def __init__(self, branch, issue):
    self.branch = branch
    self.issue = issue

  def GetBranch(self):
    return self.branch

  def GetIssue(self):
    return self.issue
class SystemExitMock(Exception):
  """Raised by mocks in place of SystemExit so tests can catch it normally."""
class TestGitClBasic(unittest.TestCase):
  """Tests for git_cl pieces that need no subprocess-call mocking."""

  def test_get_description(self):
    # GetDescription() caches; force=True refetches and updates the cache.
    cl = git_cl.Changelist(issue=1, codereview='gerrit',
                           codereview_host='host')
    cl.description = 'x'
    cl.has_description = True
    cl._codereview_impl.FetchDescription = lambda *a, **kw: 'y'
    self.assertEquals(cl.GetDescription(), 'x')
    self.assertEquals(cl.GetDescription(force=True), 'y')
    self.assertEquals(cl.GetDescription(), 'y')

  def test_description_footers(self):
    """Round-trips a description through Get/UpdateDescriptionFooters."""
    cl = git_cl.Changelist(issue=1, codereview='gerrit',
                           codereview_host='host')
    cl.description = '\n'.join([
      'This is some message',
      '',
      'It has some lines',
      'and, also',
      '',
      'Some: Really',
      'Awesome: Footers',
    ])
    cl.has_description = True
    cl._codereview_impl.UpdateDescriptionRemote = lambda *a, **kw: 'y'
    msg, footers = cl.GetDescriptionFooters()
    self.assertEquals(
      msg, ['This is some message', '', 'It has some lines', 'and, also'])
    self.assertEquals(footers, [('Some', 'Really'), ('Awesome', 'Footers')])
    msg.append('wut')
    footers.append(('gnarly-dude', 'beans'))
    cl.UpdateDescriptionFooters(msg, footers)
    self.assertEquals(cl.GetDescription().splitlines(), [
      'This is some message',
      '',
      'It has some lines',
      'and, also',
      'wut'
      # NOTE(review): no comma after 'wut' above, so 'wut' and '' implicitly
      # concatenate into a single 'wut' entry. Looks accidental -- confirm
      # whether a blank separator line was intended here.
      '',
      'Some: Really',
      'Awesome: Footers',
      'Gnarly-Dude: beans',
    ])

  def test_get_bug_line_values(self):
    # _get_bug_line_values(default_project, bugs_csv) -> prefixed bug refs.
    f = lambda p, bugs: list(git_cl._get_bug_line_values(p, bugs))
    self.assertEqual(f('', ''), [])
    self.assertEqual(f('', '123,v8:456'), ['123', 'v8:456'])
    self.assertEqual(f('v8', '456'), ['v8:456'])
    self.assertEqual(f('v8', 'chromium:123,456'), ['v8:456', 'chromium:123'])
    # Not nice, but not worth carying.
    self.assertEqual(f('v8', 'chromium:123,456,v8:123'),
                     ['v8:456', 'chromium:123', 'v8:123'])

  def _test_git_number(self, parent_msg, dest_ref, child_msg,
                       parent_hash='parenthash'):
    """Applies Cr-Commit-Position footers to child_msg; returns the result."""
    desc = git_cl.ChangeDescription(child_msg)
    desc.update_with_git_number_footers(parent_hash, parent_msg, dest_ref)
    return desc.description

  def assertEqualByLine(self, actual, expected):
    # Line-wise comparison gives much more readable diffs on failure.
    self.assertEqual(actual.splitlines(), expected.splitlines())

  def test_git_number_bad_parent(self):
    # Parent without any Cr-Commit-Position footer is a hard error.
    with self.assertRaises(ValueError):
      self._test_git_number('Parent', 'refs/heads/master', 'Child')

  def test_git_number_bad_parent_footer(self):
    # Parent with a malformed Cr-Commit-Position footer is a hard error.
    with self.assertRaises(AssertionError):
      self._test_git_number(
          'Parent\n'
          '\n'
          'Cr-Commit-Position: wrong',
          'refs/heads/master', 'Child')

  def test_git_number_bad_lineage_ignored(self):
    # Malformed Cr-Branched-From footers are carried through untouched.
    actual = self._test_git_number(
        'Parent\n'
        '\n'
        'Cr-Commit-Position: refs/heads/master@{#1}\n'
        'Cr-Branched-From: mustBeReal40CharHash-branch@{#pos}',
        'refs/heads/master', 'Child')
    self.assertEqualByLine(
        actual,
        'Child\n'
        '\n'
        'Cr-Commit-Position: refs/heads/master@{#2}\n'
        'Cr-Branched-From: mustBeReal40CharHash-branch@{#pos}')

  def test_git_number_same_branch(self):
    actual = self._test_git_number(
        'Parent\n'
        '\n'
        'Cr-Commit-Position: refs/heads/master@{#12}',
        dest_ref='refs/heads/master',
        child_msg='Child')
    self.assertEqualByLine(
        actual,
        'Child\n'
        '\n'
        'Cr-Commit-Position: refs/heads/master@{#13}')

  def test_git_number_same_branch_mixed_footers(self):
    # Non-position footers in the child (Broken-by, BUG=) are preserved.
    actual = self._test_git_number(
        'Parent\n'
        '\n'
        'Cr-Commit-Position: refs/heads/master@{#12}',
        dest_ref='refs/heads/master',
        child_msg='Child\n'
                  '\n'
                  'Broken-by: design\n'
                  'BUG=123')
    self.assertEqualByLine(
        actual,
        'Child\n'
        '\n'
        'Broken-by: design\n'
        'BUG=123\n'
        'Cr-Commit-Position: refs/heads/master@{#13}')

  def test_git_number_same_branch_with_originals(self):
    # Pre-existing Cr-* footers in the child get the Cr-Original- prefix.
    actual = self._test_git_number(
        'Parent\n'
        '\n'
        'Cr-Commit-Position: refs/heads/master@{#12}',
        dest_ref='refs/heads/master',
        child_msg='Child\n'
                  '\n'
                  'Some users are smart and insert their own footers\n'
                  '\n'
                  'Cr-Whatever: value\n'
                  'Cr-Commit-Position: refs/copy/paste@{#22}')
    self.assertEqualByLine(
        actual,
        'Child\n'
        '\n'
        'Some users are smart and insert their own footers\n'
        '\n'
        'Cr-Original-Whatever: value\n'
        'Cr-Original-Commit-Position: refs/copy/paste@{#22}\n'
        'Cr-Commit-Position: refs/heads/master@{#13}')

  def test_git_number_new_branch(self):
    # Landing on a new branch restarts numbering and records the branch point.
    actual = self._test_git_number(
        'Parent\n'
        '\n'
        'Cr-Commit-Position: refs/heads/master@{#12}',
        dest_ref='refs/heads/branch',
        child_msg='Child')
    self.assertEqualByLine(
        actual,
        'Child\n'
        '\n'
        'Cr-Commit-Position: refs/heads/branch@{#1}\n'
        'Cr-Branched-From: parenthash-refs/heads/master@{#12}')

  def test_git_number_lineage(self):
    # Existing Cr-Branched-From lineage is carried forward unchanged.
    actual = self._test_git_number(
        'Parent\n'
        '\n'
        'Cr-Commit-Position: refs/heads/branch@{#1}\n'
        'Cr-Branched-From: somehash-refs/heads/master@{#12}',
        dest_ref='refs/heads/branch',
        child_msg='Child')
    self.assertEqualByLine(
        actual,
        'Child\n'
        '\n'
        'Cr-Commit-Position: refs/heads/branch@{#2}\n'
        'Cr-Branched-From: somehash-refs/heads/master@{#12}')

  def test_git_number_moooooooore_lineage(self):
    # Branching off a branch prepends a new Cr-Branched-From entry.
    actual = self._test_git_number(
        'Parent\n'
        '\n'
        'Cr-Commit-Position: refs/heads/branch@{#5}\n'
        'Cr-Branched-From: somehash-refs/heads/master@{#12}',
        dest_ref='refs/heads/mooore',
        child_msg='Child')
    self.assertEqualByLine(
        actual,
        'Child\n'
        '\n'
        'Cr-Commit-Position: refs/heads/mooore@{#1}\n'
        'Cr-Branched-From: parenthash-refs/heads/branch@{#5}\n'
        'Cr-Branched-From: somehash-refs/heads/master@{#12}')

  def test_git_number_ever_moooooooore_lineage(self):
    # Deep lineage with pre-existing Review-Url footers stays intact.
    self.maxDiff = 10000  # pylint: disable=attribute-defined-outside-init
    actual = self._test_git_number(
        'CQ commit on fresh new branch + numbering.\n'
        '\n'
        'NOTRY=True\n'
        'NOPRESUBMIT=True\n'
        'BUG=\n'
        '\n'
        'Review-Url: https://codereview.chromium.org/2577703003\n'
        'Cr-Commit-Position: refs/heads/gnumb-test/br@{#1}\n'
        'Cr-Branched-From: 0749ff9edc-refs/heads/gnumb-test/cq@{#4}\n'
        'Cr-Branched-From: 5c49df2da6-refs/heads/master@{#41618}',
        dest_ref='refs/heads/gnumb-test/cl',
        child_msg='git cl on fresh new branch + numbering.\n'
                  '\n'
                  'Review-Url: https://codereview.chromium.org/2575043003 .\n')
    self.assertEqualByLine(
        actual,
        'git cl on fresh new branch + numbering.\n'
        '\n'
        'Review-Url: https://codereview.chromium.org/2575043003 .\n'
        'Cr-Commit-Position: refs/heads/gnumb-test/cl@{#1}\n'
        'Cr-Branched-From: parenthash-refs/heads/gnumb-test/br@{#1}\n'
        'Cr-Branched-From: 0749ff9edc-refs/heads/gnumb-test/cq@{#4}\n'
        'Cr-Branched-From: 5c49df2da6-refs/heads/master@{#41618}')

  def test_git_number_cherry_pick(self):
    # Cherry-pick annotation is kept; old position becomes Cr-Original-*.
    actual = self._test_git_number(
        'Parent\n'
        '\n'
        'Cr-Commit-Position: refs/heads/branch@{#1}\n'
        'Cr-Branched-From: somehash-refs/heads/master@{#12}',
        dest_ref='refs/heads/branch',
        child_msg='Child, which is cherry-pick from master\n'
                  '\n'
                  'Cr-Commit-Position: refs/heads/master@{#100}\n'
                  '(cherry picked from commit deadbeef12345678deadbeef12345678deadbeef)')
    self.assertEqualByLine(
        actual,
        'Child, which is cherry-pick from master\n'
        '\n'
        '(cherry picked from commit deadbeef12345678deadbeef12345678deadbeef)\n'
        '\n'
        'Cr-Original-Commit-Position: refs/heads/master@{#100}\n'
        'Cr-Commit-Position: refs/heads/branch@{#2}\n'
        'Cr-Branched-From: somehash-refs/heads/master@{#12}')

  def test_gerrit_mirror_hack(self):
    """_UseGerritMirror cycles deterministically through mirror prefixes."""
    cr = 'chromium-review.googlesource.com'
    url0 = 'https://%s/a/changes/x?a=b' % cr
    origMirrors = git_cl.gerrit_util._GERRIT_MIRROR_PREFIXES
    try:
      git_cl.gerrit_util._GERRIT_MIRROR_PREFIXES = ['us1', 'us2']
      url1 = git_cl.gerrit_util._UseGerritMirror(url0, cr)
      url2 = git_cl.gerrit_util._UseGerritMirror(url1, cr)
      url3 = git_cl.gerrit_util._UseGerritMirror(url2, cr)
      self.assertNotEqual(url1, url2)
      self.assertEqual(sorted((url1, url2)), [
        'https://us1-mirror-chromium-review.googlesource.com/a/changes/x?a=b',
        'https://us2-mirror-chromium-review.googlesource.com/a/changes/x?a=b'])
      # With two mirrors, the third call must wrap around to the first.
      self.assertEqual(url1, url3)
    finally:
      git_cl.gerrit_util._GERRIT_MIRROR_PREFIXES = origMirrors

  def test_valid_accounts(self):
    """ValidAccounts keeps only accounts that resolve without error."""
    mock_per_account = {
      'u1': None,  # 404, doesn't exist.
      'u2': {
        '_account_id': 123124,
        'avatars': [],
        'email': 'u2@example.com',
        'name': 'User Number 2',
        'status': 'OOO',
      },
      'u3': git_cl.gerrit_util.GerritError(500, 'retries didn\'t help :('),
    }
    def GetAccountDetailsMock(_, account):
      # pop() doubles as a check that each account is looked up exactly once.
      v = mock_per_account.pop(account)
      if isinstance(v, Exception):
        raise v
      return v

    original = git_cl.gerrit_util.GetAccountDetails
    try:
      git_cl.gerrit_util.GetAccountDetails = GetAccountDetailsMock
      actual = git_cl.gerrit_util.ValidAccounts(
          'host', ['u1', 'u2', 'u3'], max_threads=1)
    finally:
      git_cl.gerrit_util.GetAccountDetails = original
    # Only u2 survives: u1 didn't resolve, u3 raised.
    self.assertEqual(actual, {
      'u2': {
        '_account_id': 123124,
        'avatars': [],
        'email': 'u2@example.com',
        'name': 'User Number 2',
        'status': 'OOO',
      },
    })
class TestParseIssueURL(unittest.TestCase):
  """Tests for parsing issue URLs / numbers into ParsedIssueNumberArgument."""

  def _validate(self, parsed, issue=None, patchset=None, hostname=None,
                codereview=None, fail=False):
    """Asserts |parsed| matches the expected fields (or is invalid if fail)."""
    self.assertIsNotNone(parsed)
    if fail:
      self.assertFalse(parsed.valid)
      return
    self.assertTrue(parsed.valid)
    self.assertEqual(parsed.issue, issue)
    self.assertEqual(parsed.patchset, patchset)
    self.assertEqual(parsed.hostname, hostname)
    self.assertEqual(parsed.codereview, codereview)

  def _run_and_validate(self, func, url, *args, **kwargs):
    """Parses |url| with |func| and checks the result via _validate.

    fail=True in kwargs means the parser is expected to return None.
    """
    result = func(urlparse.urlparse(url))
    if kwargs.pop('fail', False):
      self.assertIsNone(result)
      return None
    self._validate(result, *args, fail=False, **kwargs)

  def test_gerrit(self):
    # Removed a dead, immediately-shadowed earlier `def test` here that
    # referenced a nonexistent self._test_ParseIssueUrl helper.
    def test(url, *args, **kwargs):
      self._run_and_validate(git_cl._GerritChangelistImpl.ParseIssueURL, url,
                             *args, codereview='gerrit', **kwargs)

    test('http://chrome-review.source.com/c/123',
         123, None, 'chrome-review.source.com')
    test('https://chrome-review.source.com/c/123/',
         123, None, 'chrome-review.source.com')
    test('https://chrome-review.source.com/c/123/4',
         123, 4, 'chrome-review.source.com')
    test('https://chrome-review.source.com/#/c/123/4',
         123, 4, 'chrome-review.source.com')
    test('https://chrome-review.source.com/c/123/4',
         123, 4, 'chrome-review.source.com')
    test('https://chrome-review.source.com/123',
         123, None, 'chrome-review.source.com')
    test('https://chrome-review.source.com/123/4',
         123, 4, 'chrome-review.source.com')

    test('https://chrome-review.source.com/c/123/1/whatisthis', fail=True)
    test('https://chrome-review.source.com/c/abc/', fail=True)
    test('ssh://chrome-review.source.com/c/123/1/', fail=True)

  def test_ParseIssueNumberArgument(self):
    def test(arg, *args, **kwargs):
      codereview_hint = kwargs.pop('hint', None)
      self._validate(git_cl.ParseIssueNumberArgument(arg, codereview_hint),
                     *args, **kwargs)

    test('123', 123)
    test('', fail=True)
    test('abc', fail=True)
    test('123/1', fail=True)
    test('123a', fail=True)
    test('ssh://chrome-review.source.com/#/c/123/4/', fail=True)

    test('https://codereview.source.com/123',
         123, None, 'codereview.source.com', 'gerrit',
         hint='gerrit')
    test('https://codereview.source.com/123',
         123, None, 'codereview.source.com', 'gerrit')
    test('https://chrome-review.source.com/c/123/4',
         123, 4, 'chrome-review.source.com', 'gerrit')
    test('https://chrome-review.source.com/bad/123/4', fail=True)
class GitCookiesCheckerTest(TestCase):
  """Tests for the .gitcookies sanity checker (_GitCookiesChecker)."""

  def setUp(self):
    super(GitCookiesCheckerTest, self).setUp()
    self.c = git_cl._GitCookiesChecker()
    self.c._all_hosts = []

  def mock_hosts_creds(self, subhost_identity_pairs):
    """Populates checker state from (subhost, identity) pairs.

    Bare subhosts are expanded to full *.googlesource.com host names.
    """
    def ensure_googlesource(h):
      if not h.endswith(self.c._GOOGLESOURCE):
        assert not h.endswith('.')
        return h + '.' + self.c._GOOGLESOURCE
      return h
    self.c._all_hosts = [(ensure_googlesource(h), i, '.gitcookies')
                         for h, i in subhost_identity_pairs]

  def test_identity_parsing(self):
    # _parse_identity splits "git-<user>.<domain>" into (user, domain).
    self.assertEqual(self.c._parse_identity('ldap.google.com'),
                     ('ldap', 'google.com'))
    self.assertEqual(self.c._parse_identity('git-ldap.example.com'),
                     ('ldap', 'example.com'))
    # Specical case because we know there are no subdomains in chromium.org.
    self.assertEqual(self.c._parse_identity('git-note.period.chromium.org'),
                     ('note.period', 'chromium.org'))
    # Pathological: ".period." can be a part of domain or username :(
    self.assertEqual(self.c._parse_identity('git-note.period.example.com'),
                     ('note', 'period.example.com'))

  def test_analysis_nothing(self):
    # With no configured hosts, every problem-detector must come up empty.
    self.c._all_hosts = []
    self.assertFalse(self.c.has_generic_host())
    self.assertEqual(set(), self.c.get_conflicting_hosts())
    self.assertEqual(set(), self.c.get_duplicated_hosts())
    self.assertEqual(set(), self.c.get_partially_configured_hosts())
    self.assertEqual(set(), self.c.get_hosts_with_wrong_identities())

  def test_analysis(self):
    """Each category of .gitcookies misconfiguration is detected."""
    self.mock_hosts_creds([
      ('.googlesource.com',      'git-example.chromium.org'),
      ('chromium',               'git-example.google.com'),
      ('chromium-review',        'git-example.google.com'),
      ('chrome-internal',        'git-example.chromium.org'),
      ('chrome-internal-review', 'git-example.chromium.org'),
      ('conflict',               'git-example.google.com'),
      ('conflict-review',        'git-example.chromium.org'),
      ('dup',                    'git-example.google.com'),
      ('dup',                    'git-example.google.com'),
      ('dup-review',             'git-example.google.com'),
      ('partial',                'git-example.google.com'),
      ('gpartial-review',        'git-example.google.com'),
    ])
    self.assertTrue(self.c.has_generic_host())
    self.assertEqual(set(['conflict.googlesource.com']),
                     self.c.get_conflicting_hosts())
    self.assertEqual(set(['dup.googlesource.com']),
                     self.c.get_duplicated_hosts())
    self.assertEqual(set(['partial.googlesource.com',
                          'gpartial-review.googlesource.com']),
                     self.c.get_partially_configured_hosts())
    self.assertEqual(set(['chromium.googlesource.com',
                          'chrome-internal.googlesource.com']),
                     self.c.get_hosts_with_wrong_identities())

  def test_report_no_problems(self):
    # No configured hosts -> no report, nothing printed.
    self.test_analysis_nothing()
    self.mock(sys, 'stdout', StringIO.StringIO())
    self.assertFalse(self.c.find_and_report_problems())
    self.assertEqual(sys.stdout.getvalue(), '')

  def test_report(self):
    """The full problem report matches the golden file, line by line."""
    self.test_analysis()
    self.mock(sys, 'stdout', StringIO.StringIO())
    self.mock(git_cl.gerrit_util.CookiesAuthenticator, 'get_gitcookies_path',
              classmethod(lambda _: '~/.gitcookies'))
    self.assertTrue(self.c.find_and_report_problems())
    with open(os.path.join(os.path.dirname(__file__),
                           'git_cl_creds_check_report.txt')) as f:
      expected = f.read()
    def by_line(text):
      # Compare stripped lines so trailing whitespace differences don't fail.
      return [l.rstrip() for l in text.rstrip().splitlines()]
    self.maxDiff = 10000  # pylint: disable=attribute-defined-outside-init
    self.assertEqual(by_line(sys.stdout.getvalue().strip()), by_line(expected))
class TestGitCl(TestCase):
  def setUp(self):
    """Installs mocks for every external effect git_cl can have.

    All subprocess and gerrit_util entry points are routed through
    self._mocked_call, which replays the (expected_args, result) pairs a
    test pushes into self.calls.
    """
    super(TestGitCl, self).setUp()
    self.calls = []          # FIFO of (expected_args, canned_result) pairs.
    self._calls_done = []    # Already-consumed entries, kept for diagnostics.
    self.mock(git_cl, 'time_time',
              lambda: self._mocked_call('time.time'))
    self.mock(git_cl.metrics.collector, 'add_repeated',
              lambda *a: self._mocked_call('add_repeated', *a))
    # All subprocess entry points funnel into the expectation queue.
    self.mock(subprocess2, 'call', self._mocked_call)
    self.mock(subprocess2, 'check_call', self._mocked_call)
    self.mock(subprocess2, 'check_output', self._mocked_call)
    self.mock(subprocess2, 'communicate',
              lambda *a, **kw: ([self._mocked_call(*a, **kw), ''], 0))
    self.mock(git_cl.gclient_utils, 'CheckCallAndFilter', self._mocked_call)
    self.mock(git_common, 'is_dirty_git_tree', lambda x: False)
    self.mock(git_common, 'get_or_create_merge_base',
              lambda *a: (
                  self._mocked_call(['get_or_create_merge_base']+list(a))))
    self.mock(git_cl, 'BranchExists', lambda _: True)
    self.mock(git_cl, 'FindCodereviewSettingsFile', lambda: '')
    self.mock(git_cl, 'SaveDescriptionBackup', lambda _:
              self._mocked_call('SaveDescriptionBackup'))
    self.mock(git_cl, 'ask_for_data', lambda *a, **k: self._mocked_call(
        *(['ask_for_data'] + list(a)), **k))
    self.mock(git_cl, 'write_json', lambda path, contents:
              self._mocked_call('write_json', path, contents))
    self.mock(git_cl.presubmit_support, 'DoPresubmitChecks', PresubmitMock)
    self.mock(git_cl.checkout, 'GitCheckout', GitCheckoutMock)
    GitCheckoutMock.reset()
    self.mock(git_cl.watchlists, 'Watchlists', WatchlistsMock)
    self.mock(git_cl.auth, 'get_authenticator_for_host', AuthenticatorMock)
    # Gerrit REST helpers are mocked individually so tests can assert args.
    self.mock(git_cl.gerrit_util, 'GetChangeDetail',
              lambda *args, **kwargs: self._mocked_call(
                  'GetChangeDetail', *args, **kwargs))
    self.mock(git_cl.gerrit_util, 'GetChangeComments',
              lambda *args, **kwargs: self._mocked_call(
                  'GetChangeComments', *args, **kwargs))
    self.mock(git_cl.gerrit_util, 'GetChangeRobotComments',
              lambda *args, **kwargs: self._mocked_call(
                  'GetChangeRobotComments', *args, **kwargs))
    self.mock(git_cl.gerrit_util, 'AddReviewers',
              lambda h, i, reviewers, ccs, notify: self._mocked_call(
                  'AddReviewers', h, i, reviewers, ccs, notify))
    self.mock(git_cl.gerrit_util, 'SetReview',
              lambda h, i, msg=None, labels=None, notify=None:
                  self._mocked_call('SetReview', h, i, msg, labels, notify))
    self.mock(git_cl.gerrit_util.LuciContextAuthenticator, 'is_luci',
              staticmethod(lambda: False))
    self.mock(git_cl.gerrit_util.GceAuthenticator, 'is_gce',
              classmethod(lambda _: False))
    self.mock(git_cl.gerrit_util, 'ValidAccounts',
              lambda host, accounts:
                  self._mocked_call('ValidAccounts', host, accounts))
    self.mock(git_cl, 'DieWithError',
              lambda msg, change=None: self._mocked_call(['DieWithError', msg]))
    # It's important to reset settings to not have inter-tests interference.
    git_cl.settings = None
  def tearDown(self):
    """Fails the test if it didn't consume every expectation in self.calls."""
    try:
      self.assertEquals([], self.calls)
    except AssertionError:
      if not self.has_failed():
        raise
      # Sadly, has_failed() returns True if this OR any other tests before this
      # one have failed.
      git_cl.logging.error(
          '!!!!!!  IF YOU SEE THIS, READ BELOW, IT WILL SAVE YOUR TIME  !!!!!\n'
          'There are un-consumed self.calls after this test has finished.\n'
          'If you don\'t know which test this is, run:\n'
          '   tests/git_cl_tests.py -v\n'
          'If you are already running only this test, then **first** fix the '
          'problem whose exception is emitted below by unittest runner.\n'
          'Else, to be sure what\'s going on, run this test **alone** with \n'
          '   tests/git_cl_tests.py TestGitCl.<name>\n'
          'and follow instructions above.\n' +
          '=' * 80)
    finally:
      super(TestGitCl, self).tearDown()
  def _mocked_call(self, *args, **_kwargs):
    """Pops the next (expected_args, result) pair and replays it.

    Fails (with context: recent and upcoming expectations) when the actual
    args differ from the expected ones. If the canned result is an Exception
    instance, it is raised instead of returned.
    """
    self.assertTrue(
        self.calls,
        '@%d  Expected: <Missing>   Actual: %r' % (len(self._calls_done), args))
    top = self.calls.pop(0)
    expected_args, result = top

    # Also logs otherwise it could get caught in a try/finally and be hard to
    # diagnose.
    if expected_args != args:
      N = 5
      prior_calls = '\n  '.join(
          '@%d: %r' % (len(self._calls_done) - N + i, c[0])
          for i, c in enumerate(self._calls_done[-N:]))
      following_calls = '\n  '.join(
          '@%d: %r' % (len(self._calls_done) + i + 1, c[0])
          for i, c in enumerate(self.calls[:N]))
      extended_msg = (
          'A few prior calls:\n  %s\n\n'
          'This (expected):\n  @%d: %r\n'
          'This (actual):\n  @%d: %r\n\n'
          'A few following expected calls:\n  %s' %
          (prior_calls, len(self._calls_done), expected_args,
           len(self._calls_done), args, following_calls))
      git_cl.logging.error(extended_msg)
      self.fail('@%d\n'
                '  Expected: %r\n'
                '  Actual:   %r' % (
                    len(self._calls_done), expected_args, args))

    self._calls_done.append(top)
    if isinstance(result, Exception):
      raise result
    return result
  def test_ask_for_explicit_yes_true(self):
    """ask_for_explicit_yes re-prompts on ambiguous input and accepts 'ye'."""
    self.calls = [
        (('ask_for_data', 'prompt [Yes/No]: '), 'blah'),
        (('ask_for_data', 'Please, type yes or no: '), 'ye'),
    ]
    self.assertTrue(git_cl.ask_for_explicit_yes('prompt'))
  def test_LoadCodereviewSettingsFromFile_gerrit(self):
    """GERRIT_HOST in the settings file clears Rietveld keys and sets gerrit.host."""
    codereview_file = StringIO.StringIO('GERRIT_HOST: true')
    # CERR1 on the --unset-all calls simulates the keys not being set.
    self.calls = [
        ((['git', 'config', '--unset-all', 'rietveld.cc'],), CERR1),
        ((['git', 'config', '--unset-all', 'rietveld.tree-status-url'],), CERR1),
        ((['git', 'config', '--unset-all', 'rietveld.viewvc-url'],), CERR1),
        ((['git', 'config', '--unset-all', 'rietveld.bug-prefix'],), CERR1),
        ((['git', 'config', '--unset-all', 'rietveld.cpplint-regex'],), CERR1),
        ((['git', 'config', '--unset-all', 'rietveld.cpplint-ignore-regex'],),
          CERR1),
        ((['git', 'config', '--unset-all', 'rietveld.run-post-upload-hook'],),
          CERR1),
        ((['git', 'config', 'gerrit.host', 'true'],), ''),
    ]
    self.assertIsNone(git_cl.LoadCodereviewSettingsFromFile(codereview_file))
@classmethod
def _is_gerrit_calls(cls, gerrit=False):
return [((['git', 'config', 'rietveld.autoupdate'],), ''),
((['git', 'config', 'gerrit.host'],), 'True' if gerrit else '')]
@classmethod
def _git_post_upload_calls(cls):
return [
((['git', 'rev-parse', 'HEAD'],), 'hash'),
((['git', 'symbolic-ref', 'HEAD'],), 'hash'),
((['git',
'config', 'branch.hash.last-upload-hash', 'hash'],), ''),
((['git', 'config', 'rietveld.run-post-upload-hook'],), ''),
]
  @staticmethod
  def _git_sanity_checks(diff_base, working_branch, get_remote_branch=True):
    """Expected git calls made by the pre-upload sanity checks.

    The middle slice (branch.<name>.merge / .remote lookups) is only emitted
    when get_remote_branch is True, mirroring the code path under test.
    """
    fake_ancestor = 'fake_ancestor'
    fake_cl = 'fake_cl_for_patch'
    return [
      ((['git',
         'rev-parse', '--verify', diff_base],), fake_ancestor),
      ((['git',
         'merge-base', fake_ancestor, 'HEAD'],), fake_ancestor),
      ((['git',
         'rev-list', '^' + fake_ancestor, 'HEAD'],), fake_cl),
      # Mock a config miss (error code 1)
      ((['git',
         'config', 'gitcl.remotebranch'],), CERR1),
    ] + ([
      # Call to GetRemoteBranch()
      ((['git',
         'config', 'branch.%s.merge' % working_branch],),
       'refs/heads/master'),
      ((['git',
         'config', 'branch.%s.remote' % working_branch],), 'origin'),
    ] if get_remote_branch else []) + [
      ((['git', 'rev-list', '^' + fake_ancestor,
         'refs/remotes/origin/master'],), ''),
    ]
  @classmethod
  def _gerrit_ensure_auth_calls(
      cls, issue=None, skip_auth_check=False, short_hostname='chromium'):
    """Expected git calls for Changelist.EnsureAuthenticated().

    When skip_auth_check is True, only the opt-out config probe happens;
    otherwise the remote URL is resolved to find the Gerrit host.
    """
    cmd = ['git', 'config', '--bool', 'gerrit.skip-ensure-authenticated']
    if skip_auth_check:
      return [((cmd, ), 'true')]
    calls = [((cmd, ), CERR1)]
    if issue:
      # With an issue attached, the per-branch gerritserver is consulted first.
      calls.extend([
          ((['git', 'config', 'branch.master.gerritserver'],), CERR1),
      ])
    calls.extend([
        ((['git', 'config', 'branch.master.merge'],), 'refs/heads/master'),
        ((['git', 'config', 'branch.master.remote'],), 'origin'),
        ((['git', 'config', 'remote.origin.url'],),
         'https://%s.googlesource.com/my/repo' % short_hostname),
    ])
    return calls
  @classmethod
  def _gerrit_base_calls(cls, issue=None, fetched_description=None,
                         fetched_status=None, other_cl_owner=None,
                         custom_cl_base=None, short_hostname='chromium'):
    """Expected calls common to every `git cl upload` test, up to the diff.

    Returns early (without the sanity-check and diff calls) when the fetched
    change status is ABANDONED, because upload dies at that point.
    """
    calls = cls._is_gerrit_calls(True)
    calls += [
      ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
      ((['git', 'config', 'branch.master.gerritissue'],),
        CERR1 if issue is None else str(issue)),
    ]
    if custom_cl_base:
      ancestor_revision = custom_cl_base
    else:
      # Determine ancestor_revision to be merge base.
      ancestor_revision = 'fake_ancestor_sha'
      calls += [
        ((['git', 'config', 'branch.master.merge'],), 'refs/heads/master'),
        ((['git', 'config', 'branch.master.remote'],), 'origin'),
        ((['get_or_create_merge_base', 'master',
           'refs/remotes/origin/master'],), ancestor_revision),
      ]
    # Calls to verify branch point is ancestor
    calls += cls._gerrit_ensure_auth_calls(
        issue=issue, short_hostname=short_hostname)
    if issue:
      calls += [
        (('GetChangeDetail', '%s-review.googlesource.com' % short_hostname,
          'my%2Frepo~123456',
          ['DETAILED_ACCOUNTS', 'CURRENT_REVISION', 'CURRENT_COMMIT', 'LABELS']
         ),
         {
           'owner': {'email': (other_cl_owner or 'owner@example.com')},
           'change_id': '123456789',
           'current_revision': 'sha1_of_current_revision',
           'revisions': { 'sha1_of_current_revision': {
             'commit': {'message': fetched_description},
           }},
           'status': fetched_status or 'NEW',
         }),
      ]
      if fetched_status == 'ABANDONED':
        calls += [
          (('DieWithError', 'Change https://%s-review.googlesource.com/'
                            '123456 has been abandoned, new uploads are not '
                            'allowed' % short_hostname), SystemExitMock()),
        ]
        return calls
      if other_cl_owner:
        # Uploading to someone else's CL asks for explicit confirmation.
        calls += [
          (('ask_for_data', 'Press Enter to upload, or Ctrl+C to abort'), ''),
        ]
    calls += cls._git_sanity_checks(ancestor_revision, 'master',
                                    get_remote_branch=False)
    calls += [
      ((['git', 'rev-parse', '--show-cdup'],), ''),
      ((['git', 'rev-parse', 'HEAD'],), '12345'),
      ((['git', '-c', 'core.quotePath=false', 'diff', '--name-status',
         '--no-renames', '-r', ancestor_revision + '...', '.'],),
       'M\t.gitignore\n'),
      ((['git', 'config', 'branch.master.gerritpatchset'],), CERR1),
    ]
    if not issue:
      calls += [
        ((['git', 'log', '--pretty=format:%s%n%n%b',
           ancestor_revision + '...'],),
         'foo'),
      ]
    calls += [
      ((['git', 'config', 'user.email'],), 'me@example.com'),
      ((['git', 'diff', '--no-ext-diff', '--stat', '-l100000', '-C50'] +
         ([custom_cl_base] if custom_cl_base else
          [ancestor_revision, 'HEAD']),),
       '+dat'),
    ]
    return calls
  @classmethod
  def _gerrit_upload_calls(cls, description, reviewers, squash,
                           squash_mode='default',
                           expected_upstream_ref='origin/refs/heads/master',
                           title=None, notify=False,
                           post_amend_description=None, issue=None, cc=None,
                           custom_cl_base=None, tbr=None,
                           short_hostname='chromium',
                           labels=None):
    """Expected calls for the push-to-Gerrit half of `git cl upload`.

    Mirrors the production code path: squash-mode resolution, optional
    commit-message amendment for a missing Change-Id, building the
    refs/for/... push refspec (notify / title / reviewers / cc / labels
    suffixes), the `git push` itself, metrics recording, and the post-upload
    branch config writes.
    """
    if post_amend_description is None:
      post_amend_description = description
    cc = cc or []
    # Determined in `_gerrit_base_calls`.
    determined_ancestor_revision = custom_cl_base or 'fake_ancestor_sha'

    calls = []

    if squash_mode == 'default':
      calls.extend([
          ((['git', 'config', '--bool', 'gerrit.override-squash-uploads'],), ''),
          ((['git', 'config', '--bool', 'gerrit.squash-uploads'],), ''),
      ])
    elif squash_mode in ('override_squash', 'override_nosquash'):
      calls.extend([
          ((['git', 'config', '--bool', 'gerrit.override-squash-uploads'],),
           'true' if squash_mode == 'override_squash' else 'false'),
      ])
    else:
      assert squash_mode in ('squash', 'nosquash')

    # If issue is given, then description is fetched from Gerrit instead.
    if issue is None:
      calls += [
        ((['git', 'log', '--pretty=format:%s\n\n%b',
           ((custom_cl_base + '..') if custom_cl_base else
            'fake_ancestor_sha..HEAD')],),
         description),
      ]
      if squash:
        title = 'Initial_upload'
    else:
      if not title:
        calls += [
          ((['git', 'show', '-s', '--format=%s', 'HEAD'],), ''),
          (('ask_for_data', 'Title for patchset []: '), 'User input'),
        ]
        title = 'User_input'
    if not git_footers.get_footer_change_id(description) and not squash:
      calls += [
        (('DownloadGerritHook', False), ''),
        # Amending of commit message to get the Change-Id.
        ((['git', 'log', '--pretty=format:%s\n\n%b',
           determined_ancestor_revision + '..HEAD'],),
         description),
        ((['git', 'commit', '--amend', '-m', description],), ''),
        ((['git', 'log', '--pretty=format:%s\n\n%b',
           determined_ancestor_revision + '..HEAD'],),
         post_amend_description)
      ]
    if squash:
      if not issue:
        # Prompting to edit description on first upload.
        calls += [
          ((['git', 'config', 'core.editor'],), ''),
          ((['RunEditor'],), description),
        ]
      ref_to_push = 'abcdef0123456789'
      calls += [
        ((['git', 'config', 'branch.master.merge'],), 'refs/heads/master'),
        ((['git', 'config', 'branch.master.remote'],), 'origin'),
      ]

      if custom_cl_base is None:
        calls += [
          ((['get_or_create_merge_base', 'master',
             'refs/remotes/origin/master'],),
           'origin/master'),
        ]
        parent = 'origin/master'
      else:
        # Uploading against an arbitrary base asks for user confirmation.
        calls += [
          ((['git', 'merge-base', '--is-ancestor', custom_cl_base,
             'refs/remotes/origin/master'],),
           callError(1)),   # Means not ancestor.
          (('ask_for_data',
            'Do you take responsibility for cleaning up potential mess '
            'resulting from proceeding with upload? Press Enter to upload, '
            'or Ctrl+C to abort'), ''),
        ]
        parent = custom_cl_base

      calls += [
        ((['git', 'rev-parse', 'HEAD:'],),  # `HEAD:` means HEAD's tree hash.
         '0123456789abcdef'),
        ((['git', 'commit-tree', '0123456789abcdef', '-p', parent,
           '-F', '/tmp/named'],),
         ref_to_push),
      ]
    else:
      ref_to_push = 'HEAD'

    calls += [
      (('SaveDescriptionBackup',), None),
      ((['git', 'rev-list',
         (custom_cl_base if custom_cl_base else expected_upstream_ref) + '..' +
         ref_to_push],),
      '1hashPerLine\n'),
    ]

    metrics_arguments = []

    # The push refspec suffix (%...) encodes notify/wip/title options.
    if notify:
      ref_suffix = '%ready,notify=ALL'
      metrics_arguments += ['ready', 'notify=ALL']
    else:
      if not issue and squash:
        ref_suffix = '%wip'
        metrics_arguments.append('wip')
      else:
        ref_suffix = '%notify=NONE'
        metrics_arguments.append('notify=NONE')

    if title:
      ref_suffix += ',m=' + title
      metrics_arguments.append('m')

    calls += [
      ((['git', 'config', 'rietveld.cc'],), ''),
    ]
    if short_hostname == 'chromium':
      # All reviewers and ccs are pushed via the refspec on the default host.
      for r in sorted(reviewers):
        ref_suffix  += ',r=%s' % r
        metrics_arguments.append('r')
      for c in sorted(['chromium-reviews+test-more-cc@chromium.org',
                       'joe@example.com'] + cc):
        ref_suffix += ',cc=%s' % c
        metrics_arguments.append('cc')
      reviewers, cc = [], []
    else:
      # Other hosts validate accounts first; invalid ones are added later via
      # the AddReviewers API instead of the refspec.
      calls += [
          (('ValidAccounts', '%s-review.googlesource.com' % short_hostname,
            sorted(reviewers) + ['joe@example.com',
            'chromium-reviews+test-more-cc@chromium.org'] + cc),
           {
             e: {'email': e}
             for e in (reviewers + ['joe@example.com'] + cc)
           })
      ]
      for r in sorted(reviewers):
        if r != 'bad-account-or-email':
          ref_suffix  += ',r=%s' % r
          metrics_arguments.append('r')
          reviewers.remove(r)
      for c in sorted(['joe@example.com'] + cc):
        ref_suffix += ',cc=%s' % c
        metrics_arguments.append('cc')
        if c in cc:
          cc.remove(c)

    for k, v in sorted((labels or {}).items()):
      ref_suffix += ',l=%s+%d' % (k, v)
      metrics_arguments.append('l=%s+%d' % (k, v))

    if tbr:
      calls += [
        (('GetCodeReviewTbrScore',
          '%s-review.googlesource.com' % short_hostname,
          'my/repo'),
         2,),
      ]

    calls += [
      (('time.time',), 1000,),
      ((['git', 'push',
         'https://%s.googlesource.com/my/repo' % short_hostname,
         ref_to_push + ':refs/for/refs/heads/master' + ref_suffix],),
       (('remote:\n'
         'remote: Processing changes: (\)\n'
         'remote: Processing changes: (|)\n'
         'remote: Processing changes: (/)\n'
         'remote: Processing changes: (-)\n'
         'remote: Processing changes: new: 1 (/)\n'
         'remote: Processing changes: new: 1, done\n'
         'remote:\n'
         'remote: New Changes:\n'
         'remote:   https://%s-review.googlesource.com/#/c/my/repo/+/123456'
             ' XXX\n'
         'remote:\n'
         'To https://%s.googlesource.com/my/repo\n'
         ' * [new branch]      hhhh -> refs/for/refs/heads/master\n'
         ) % (short_hostname, short_hostname)),),
      (('time.time',), 2000,),
      (('add_repeated',
        'sub_commands',
        {
          'execution_time': 1000,
          'command': 'git push',
          'exit_code': 0,
          'arguments': sorted(metrics_arguments),
        }),
        None,),
    ]

    if squash:
      calls += [
          ((['git', 'config', 'branch.master.gerritissue', '123456'],),
           ''),
          ((['git', 'config', 'branch.master.gerritserver',
             'https://chromium-review.googlesource.com'],), ''),
          ((['git', 'config', 'branch.master.gerritsquashhash',
             'abcdef0123456789'],), ''),
      ]
    if squash and short_hostname != 'chromium':
      # Add reviewers and ccs via the API on non-default hosts.
      calls += [
          (('AddReviewers',
            'chromium-review.googlesource.com', 'my%2Frepo~123456',
            sorted(reviewers),
            cc + ['chromium-reviews+test-more-cc@chromium.org'],
            notify),
           ''),
      ]
    calls += cls._git_post_upload_calls()
    return calls
  def _run_gerrit_upload_test(
      self,
      upload_args,
      description,
      reviewers=None,
      squash=True,
      squash_mode=None,
      expected_upstream_ref='origin/refs/heads/master',
      title=None,
      notify=False,
      post_amend_description=None,
      issue=None,
      cc=None,
      fetched_status=None,
      other_cl_owner=None,
      custom_cl_base=None,
      tbr=None,
      short_hostname='chromium',
      labels=None):
    """Drives `git cl upload <upload_args>` against the scripted call sequence.

    Stubs out stdout, Gerrit auth, the commit-msg hook check, the editor and
    the Gerrit hook download, then installs the expected calls built by
    _gerrit_base_calls() and _gerrit_upload_calls().
    """
    if squash_mode is None:
      # Infer the squash mode from the command-line flags under test.
      if '--no-squash' in upload_args:
        squash_mode = 'nosquash'
      elif '--squash' in upload_args:
        squash_mode = 'squash'
      else:
        squash_mode = 'default'
    reviewers = reviewers or []
    cc = cc or []
    self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
    self.mock(git_cl.gerrit_util, 'CookiesAuthenticator',
              CookiesAuthenticatorMockFactory(
                same_auth=('git-owner.example.com', '', 'pass')))
    self.mock(git_cl._GerritChangelistImpl, '_GerritCommitMsgHookCheck',
              lambda _, offer_removal: None)
    self.mock(git_cl.gclient_utils, 'RunEditor',
              lambda *_, **__: self._mocked_call(['RunEditor']))
    self.mock(git_cl, 'DownloadGerritHook', lambda force: self._mocked_call(
      'DownloadGerritHook', force))
    self.calls = self._gerrit_base_calls(
        issue=issue,
        fetched_description=description,
        fetched_status=fetched_status,
        other_cl_owner=other_cl_owner,
        custom_cl_base=custom_cl_base,
        short_hostname=short_hostname)
    if fetched_status != 'ABANDONED':
      # ABANDONED dies before any upload work; otherwise script the push.
      self.mock(tempfile, 'NamedTemporaryFile', MakeNamedTemporaryFileMock(
          expected_content=description))
      self.mock(os, 'remove', lambda _: True)
      self.calls += self._gerrit_upload_calls(
          description, reviewers, squash,
          squash_mode=squash_mode,
          expected_upstream_ref=expected_upstream_ref,
          title=title, notify=notify,
          post_amend_description=post_amend_description,
          issue=issue, cc=cc,
          custom_cl_base=custom_cl_base, tbr=tbr,
          short_hostname=short_hostname,
          labels=labels)
    git_cl.main(['upload'] + upload_args)
def test_gerrit_upload_without_change_id(self):
self._run_gerrit_upload_test(
['--no-squash'],
'desc\n\nBUG=\n',
[],
squash=False,
post_amend_description='desc\n\nBUG=\n\nChange-Id: Ixxx')
def test_gerrit_upload_without_change_id_override_nosquash(self):
self._run_gerrit_upload_test(
[],
'desc\n\nBUG=\n',
[],
squash=False,
squash_mode='override_nosquash',
post_amend_description='desc\n\nBUG=\n\nChange-Id: Ixxx')
def test_gerrit_no_reviewer(self):
self._run_gerrit_upload_test(
[],
'desc\n\nBUG=\n\nChange-Id: I123456789\n',
[],
squash=False,
squash_mode='override_nosquash')
def test_gerrit_no_reviewer_non_chromium_host(self):
self._run_gerrit_upload_test(
[],
'desc\n\nBUG=\n\nChange-Id: I123456789\n',
[],
squash=False,
squash_mode='override_nosquash',
short_hostname='other')
def test_gerrit_patchset_title_special_chars(self):
self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
self._run_gerrit_upload_test(
['-f', '-t', 'We\'ll escape ^_ ^ special chars...@{u}'],
'desc\n\nBUG=\n\nChange-Id: I123456789',
squash=False,
squash_mode='override_nosquash',
title='We%27ll_escape_%5E%5F_%5E_special_chars%2E%2E%2E%40%7Bu%7D')
def test_gerrit_reviewers_cmd_line(self):
self._run_gerrit_upload_test(
['-r', 'foo@example.com', '--send-mail'],
'desc\n\nBUG=\n\nChange-Id: I123456789',
['foo@example.com'],
squash=False,
squash_mode='override_nosquash',
notify=True)
def test_gerrit_reviewer_multiple(self):
self.mock(git_cl.gerrit_util, 'GetCodeReviewTbrScore',
lambda *a: self._mocked_call('GetCodeReviewTbrScore', *a))
self._run_gerrit_upload_test(
[],
'desc\nTBR=reviewer@example.com\nBUG=\nR=another@example.com\n'
'CC=more@example.com,people@example.com\n\n'
'Change-Id: 123456789',
['reviewer@example.com', 'another@example.com'],
expected_upstream_ref='origin/master',
cc=['more@example.com', 'people@example.com'],
tbr='reviewer@example.com',
labels={'Code-Review': 2})
def test_gerrit_upload_squash_first_is_default(self):
self._run_gerrit_upload_test(
[],
'desc\nBUG=\n\nChange-Id: 123456789',
[],
expected_upstream_ref='origin/master')
def test_gerrit_upload_squash_first(self):
self._run_gerrit_upload_test(
['--squash'],
'desc\nBUG=\n\nChange-Id: 123456789',
[],
squash=True,
expected_upstream_ref='origin/master')
def test_gerrit_upload_squash_first_with_labels(self):
self._run_gerrit_upload_test(
['--squash', '--cq-dry-run', '--enable-auto-submit'],
'desc\nBUG=\n\nChange-Id: 123456789',
[],
squash=True,
expected_upstream_ref='origin/master',
labels={'Commit-Queue': 1, 'Auto-Submit': 1})
def test_gerrit_upload_squash_first_against_rev(self):
custom_cl_base = 'custom_cl_base_rev_or_branch'
self._run_gerrit_upload_test(
['--squash', custom_cl_base],
'desc\nBUG=\n\nChange-Id: 123456789',
[],
squash=True,
expected_upstream_ref='origin/master',
custom_cl_base=custom_cl_base)
self.assertIn(
'If you proceed with upload, more than 1 CL may be created by Gerrit',
sys.stdout.getvalue())
def test_gerrit_upload_squash_reupload(self):
description = 'desc\nBUG=\n\nChange-Id: 123456789'
self._run_gerrit_upload_test(
['--squash'],
description,
[],
squash=True,
expected_upstream_ref='origin/master',
issue=123456)
def test_gerrit_upload_squash_reupload_to_abandoned(self):
self.mock(git_cl, 'DieWithError',
lambda msg, change=None: self._mocked_call('DieWithError', msg))
description = 'desc\nBUG=\n\nChange-Id: 123456789'
with self.assertRaises(SystemExitMock):
self._run_gerrit_upload_test(
['--squash'],
description,
[],
squash=True,
expected_upstream_ref='origin/master',
issue=123456,
fetched_status='ABANDONED')
def test_gerrit_upload_squash_reupload_to_not_owned(self):
self.mock(git_cl.gerrit_util, 'GetAccountDetails',
lambda *_, **__: {'email': 'yet-another@example.com'})
description = 'desc\nBUG=\n\nChange-Id: 123456789'
self._run_gerrit_upload_test(
['--squash'],
description,
[],
squash=True,
expected_upstream_ref='origin/master',
issue=123456,
other_cl_owner='other@example.com')
self.assertIn(
'WARNING: Change 123456 is owned by other@example.com, but you '
'authenticate to Gerrit as yet-another@example.com.\n'
'Uploading may fail due to lack of permissions',
git_cl.sys.stdout.getvalue())
  def test_upload_branch_deps(self):
    """upload_branch_deps re-uploads every branch downstream of the current one."""
    self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
    def mock_run_git(*args, **_kwargs):
      # Only the for-each-ref listing is interesting; everything else no-ops.
      if args[0] == ['for-each-ref',
                       '--format=%(refname:short) %(upstream:short)',
                       'refs/heads']:
        # Create a local branch dependency tree that looks like this:
        # test1 -> test2 -> test3   -> test4 -> test5
        #                -> test3.1
        # test6 -> test0
        branch_deps = [
            'test2 test1',    # test1 -> test2
            'test3 test2',    # test2 -> test3
            'test3.1 test2',  # test2 -> test3.1
            'test4 test3',    # test3 -> test4
            'test5 test4',    # test4 -> test5
            'test6 test0',    # test0 -> test6
            'test7',          # test7
        ]
        return '\n'.join(branch_deps)
    self.mock(git_cl, 'RunGit', mock_run_git)

    # Counts how many branches get re-uploaded.
    class RecordCalls:
      times_called = 0
    record_calls = RecordCalls()
    def mock_CMDupload(*args, **_kwargs):
      record_calls.times_called += 1
      return 0
    self.mock(git_cl, 'CMDupload', mock_CMDupload)

    self.calls = [
        (('ask_for_data', 'This command will checkout all dependent branches '
                          'and run "git cl upload". Press Enter to continue, '
                          'or Ctrl+C to abort'), ''),
    ]

    class MockChangelist():
      def __init__(self):
        pass
      def GetBranch(self):
        return 'test1'
      def GetIssue(self):
        return '123'
      def GetPatchset(self):
        return '1001'
      def IsGerrit(self):
        return False

    ret = git_cl.upload_branch_deps(MockChangelist(), [])
    # CMDupload should have been called 5 times because of 5 dependent branches.
    self.assertEquals(5, record_calls.times_called)
    self.assertEquals(0, ret)
  def test_gerrit_change_id(self):
    """GenerateGerritChangeId hashes tree/parent/idents plus the message."""
    self.calls = [
        ((['git', 'write-tree'], ),
          'hashtree'),
        ((['git', 'rev-parse', 'HEAD~0'], ),
          'branch-parent'),
        ((['git', 'var', 'GIT_AUTHOR_IDENT'], ),
          'A B <a@b.org> 1456848326 +0100'),
        ((['git', 'var', 'GIT_COMMITTER_IDENT'], ),
          'C D <c@d.org> 1456858326 +0100'),
        ((['git', 'hash-object', '-t', 'commit', '--stdin'], ),
          'hashchange'),
    ]
    change_id = git_cl.GenerateGerritChangeId('line1\nline2\n')
    # The Change-Id is the commit-object hash prefixed with 'I'.
    self.assertEqual(change_id, 'Ihashchange')
def test_desecription_append_footer(self):
for init_desc, footer_line, expected_desc in [
# Use unique desc first lines for easy test failure identification.
('foo', 'R=one', 'foo\n\nR=one'),
('foo\n\nR=one', 'BUG=', 'foo\n\nR=one\nBUG='),
('foo\n\nR=one', 'Change-Id: Ixx', 'foo\n\nR=one\n\nChange-Id: Ixx'),
('foo\n\nChange-Id: Ixx', 'R=one', 'foo\n\nR=one\n\nChange-Id: Ixx'),
('foo\n\nR=one\n\nChange-Id: Ixx', 'TBR=two',
'foo\n\nR=one\nTBR=two\n\nChange-Id: Ixx'),
('foo\n\nR=one\n\nChange-Id: Ixx', 'Foo-Bar: baz',
'foo\n\nR=one\n\nChange-Id: Ixx\nFoo-Bar: baz'),
('foo\n\nChange-Id: Ixx', 'Foo-Bak: baz',
'foo\n\nChange-Id: Ixx\nFoo-Bak: baz'),
('foo', 'Change-Id: Ixx', 'foo\n\nChange-Id: Ixx'),
]:
desc = git_cl.ChangeDescription(init_desc)
desc.append_footer(footer_line)
self.assertEqual(desc.description, expected_desc)
  def test_update_reviewers(self):
    """update_reviewers merges R=/TBR= lines; each tuple is
    (original, reviewers, tbrs, expected)."""
    data = [
      ('foo', [], [],
       'foo'),
      ('foo\nR=xx', [], [],
       'foo\nR=xx'),
      ('foo\nTBR=xx', [], [],
       'foo\nTBR=xx'),
      ('foo', ['a@c'], [],
       'foo\n\nR=a@c'),
      ('foo\nR=xx', ['a@c'], [],
       'foo\n\nR=a@c, xx'),
      ('foo\nTBR=xx', ['a@c'], [],
       'foo\n\nR=a@c\nTBR=xx'),
      ('foo\nTBR=xx\nR=yy', ['a@c'], [],
       'foo\n\nR=a@c, yy\nTBR=xx'),
      ('foo\nBUG=', ['a@c'], [],
       'foo\nBUG=\nR=a@c'),
      ('foo\nR=xx\nTBR=yy\nR=bar', ['a@c'], [],
       'foo\n\nR=a@c, bar, xx\nTBR=yy'),
      ('foo', ['a@c', 'b@c'], [],
       'foo\n\nR=a@c, b@c'),
      ('foo\nBar\n\nR=\nBUG=', ['c@c'], [],
       'foo\nBar\n\nR=c@c\nBUG='),
      ('foo\nBar\n\nR=\nBUG=\nR=', ['c@c'], [],
       'foo\nBar\n\nR=c@c\nBUG='),
      # Same as the line before, but full of whitespaces.
      (
        'foo\nBar\n\n R = \n BUG = \n R = ', ['c@c'], [],
        'foo\nBar\n\nR=c@c\n BUG =',
      ),
      # Whitespaces aren't interpreted as new lines.
      ('foo BUG=allo R=joe ', ['c@c'], [],
       'foo BUG=allo R=joe\n\nR=c@c'),
      ('foo\n\nR=a@c\nTBR=t@c', ['b@c', 'a@c'], ['a@c', 't@c'],
       'foo\n\nR=a@c, b@c\nTBR=t@c'),
    ]
    expected = [i[-1] for i in data]
    actual = []
    for orig, reviewers, tbrs, _expected in data:
      obj = git_cl.ChangeDescription(orig)
      obj.update_reviewers(reviewers, tbrs)
      actual.append(obj.description)
    # Compare all at once so a failure shows every mismatching case.
    self.assertEqual(expected, actual)
  def test_get_hash_tags(self):
    """get_hash_tags extracts normalized [bracket] and Prefix: tags."""
    cases = [
      ('', []),
      ('a', []),
      ('[a]', ['a']),
      ('[aa]', ['aa']),
      ('[a ]', ['a']),
      ('[a- ]', ['a']),
      ('[a- b]', ['a-b']),
      ('[a--b]', ['a-b']),
      ('[a', []),
      ('[a]x', ['a']),
      ('[aa]x', ['aa']),
      ('[a b]', ['a-b']),
      ('[a  b]', ['a-b']),
      ('[a__b]', ['a-b']),
      ('[a] x', ['a']),
      ('[a][b]', ['a', 'b']),
      ('[a] [b]', ['a', 'b']),
      ('[a][b]x', ['a', 'b']),
      ('[a][b] x', ['a', 'b']),
      ('[a]\n[b]', ['a']),
      ('[a\nb]', []),
      ('[a][', ['a']),
      ('Revert "[a] feature"', ['a']),
      ('Reland "[a] feature"', ['a']),
      ('Revert: [a] feature', ['a']),
      ('Reland: [a] feature', ['a']),
      ('Revert "Reland: [a] feature"', ['a']),
      ('Foo: feature', ['foo']),
      ('Foo Bar: feature', ['foo-bar']),
      ('Revert "Foo bar: feature"', ['foo-bar']),
      ('Reland "Foo bar: feature"', ['foo-bar']),
    ]
    for desc, expected in cases:
      change_desc = git_cl.ChangeDescription(desc)
      actual = change_desc.get_hash_tags()
      self.assertEqual(
          actual,
          expected,
          'GetHashTags(%r) == %r, expected %r' % (desc, actual, expected))

    # NOTE(review): the GetTargetRef assertions below look like they belong in
    # a separate test_get_target_ref method whose `def` line may have been
    # lost — confirm against upstream before restructuring.
    self.assertEqual(None, git_cl.GetTargetRef('origin', None, 'master'))
    self.assertEqual(None, git_cl.GetTargetRef(None,
                                               'refs/remotes/origin/master',
                                               'master'))

    # Check default target refs for branches.
    self.assertEqual('refs/heads/master',
                     git_cl.GetTargetRef('origin', 'refs/remotes/origin/master',
                                         None))
    self.assertEqual('refs/heads/master',
                     git_cl.GetTargetRef('origin', 'refs/remotes/origin/lkgr',
                                         None))
    self.assertEqual('refs/heads/master',
                     git_cl.GetTargetRef('origin', 'refs/remotes/origin/lkcr',
                                         None))
    self.assertEqual('refs/branch-heads/123',
                     git_cl.GetTargetRef('origin',
                                         'refs/remotes/branch-heads/123',
                                         None))
    self.assertEqual('refs/diff/test',
                     git_cl.GetTargetRef('origin',
                                         'refs/remotes/origin/refs/diff/test',
                                         None))
    self.assertEqual('refs/heads/chrome/m42',
                     git_cl.GetTargetRef('origin',
                                         'refs/remotes/origin/chrome/m42',
                                         None))

    # Check target refs for user-specified target branch.
    for branch in ('branch-heads/123', 'remotes/branch-heads/123',
                   'refs/remotes/branch-heads/123'):
      self.assertEqual('refs/branch-heads/123',
                       git_cl.GetTargetRef('origin',
                                           'refs/remotes/origin/master',
                                           branch))
    for branch in ('origin/master', 'remotes/origin/master',
                   'refs/remotes/origin/master'):
      self.assertEqual('refs/heads/master',
                       git_cl.GetTargetRef('origin',
                                           'refs/remotes/branch-heads/123',
                                           branch))
    for branch in ('master', 'heads/master', 'refs/heads/master'):
      self.assertEqual('refs/heads/master',
                       git_cl.GetTargetRef('origin',
                                           'refs/remotes/branch-heads/123',
                                           branch))
def test_patch_when_dirty(self):
self.mock(git_common, 'is_dirty_git_tree', lambda x: True)
self.assertNotEqual(git_cl.main(['patch', '123456']), 0)
  @staticmethod
  def _get_gerrit_codereview_server_calls(branch, value=None,
                                          git_short_host='host',
                                          detect_branch=True,
                                          detect_server=True):
    """Expected git calls for resolving the Gerrit server of a branch.

    When the per-branch gerritserver config is unset (value is None), the
    server is derived from the remote URL instead.
    """
    calls = []
    if detect_branch:
      calls.append(((['git', 'symbolic-ref', 'HEAD'],), branch))
    if detect_server:
      calls.append(((['git', 'config', 'branch.' + branch + '.gerritserver'],),
                    CERR1 if value is None else value))
    if value is None:
      calls += [
          ((['git', 'config', 'branch.' + branch + '.merge'],),
           'refs/heads' + branch),
          ((['git', 'config', 'branch.' + branch + '.remote'],),
           'origin'),
          ((['git', 'config', 'remote.origin.url'],),
           'https://%s.googlesource.com/my/repo' % git_short_host),
      ]
    return calls
  def _patch_common(self, force_codereview=False,
                    new_branch=False, git_short_host='host',
                    detect_gerrit_server=False,
                    actual_codereview=None,
                    codereview_in_url=False):
    """Installs the expected calls shared by all `git cl patch` tests.

    Covers optional branch creation, codereview detection, Gerrit server
    resolution, and the GetChangeDetail response with two patchsets (1 and 7).
    """
    self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
    self.mock(git_cl, 'IsGitVersionAtLeast', lambda *args: True)

    if new_branch:
      self.calls = [((['git', 'new-branch', 'master'],), ''),]

    if codereview_in_url and actual_codereview == 'rietveld':
      self.calls += [
        ((['git', 'rev-parse', '--show-cdup'],), ''),
        ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
      ]

    if not force_codereview and not codereview_in_url:
      # These calls detect codereview to use.
      self.calls += [
        ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
        ((['git', 'config', 'branch.master.gerritissue'],), CERR1),
      ]

    if detect_gerrit_server:
      self.calls += self._get_gerrit_codereview_server_calls(
          'master', git_short_host=git_short_host,
          detect_branch=not new_branch and force_codereview)
      actual_codereview = 'gerrit'

    if actual_codereview == 'gerrit':
      self.calls += [
        (('GetChangeDetail', git_short_host + '-review.googlesource.com',
          'my%2Frepo~123456', ['ALL_REVISIONS', 'CURRENT_COMMIT']),
         {
           'current_revision': '7777777777',
           'revisions': {
             '1111111111': {
               '_number': 1,
               'fetch': {'http': {
                 'url': 'https://%s.googlesource.com/my/repo' % git_short_host,
                 'ref': 'refs/changes/56/123456/1',
               }},
             },
             '7777777777': {
               '_number': 7,
               'fetch': {'http': {
                 'url': 'https://%s.googlesource.com/my/repo' % git_short_host,
                 'ref': 'refs/changes/56/123456/7',
               }},
             },
           },
         }),
      ]
  def test_patch_gerrit_default(self):
    """Plain `git cl patch <issue>` cherry-picks the current patchset."""
    self._patch_common(git_short_host='chromium', detect_gerrit_server=True)
    self.calls += [
      ((['git', 'fetch', 'https://chromium.googlesource.com/my/repo',
         'refs/changes/56/123456/7'],), ''),
      ((['git', 'cherry-pick', 'FETCH_HEAD'],), ''),
      ((['git', 'config', 'branch.master.gerritissue', '123456'],),
       ''),
      ((['git', 'config', 'branch.master.gerritserver',
         'https://chromium-review.googlesource.com'],), ''),
      ((['git', 'config', 'branch.master.gerritpatchset', '7'],), ''),
      ((['git', 'rev-parse', 'FETCH_HEAD'],), 'deadbeef'),
      ((['git', 'config', 'branch.master.last-upload-hash', 'deadbeef'],), ''),
      ((['git', 'config', 'branch.master.gerritsquashhash', 'deadbeef'],), ''),
    ]
    self.assertEqual(git_cl.main(['patch', '123456']), 0)
  def test_patch_gerrit_new_branch(self):
    """`git cl patch -b <branch>` creates the branch before patching."""
    self._patch_common(
        git_short_host='chromium', detect_gerrit_server=True, new_branch=True)
    self.calls += [
      ((['git', 'fetch', 'https://chromium.googlesource.com/my/repo',
         'refs/changes/56/123456/7'],), ''),
      ((['git', 'cherry-pick', 'FETCH_HEAD'],), ''),
      ((['git', 'config', 'branch.master.gerritissue', '123456'],),
       ''),
      ((['git', 'config', 'branch.master.gerritserver',
         'https://chromium-review.googlesource.com'],), ''),
      ((['git', 'config', 'branch.master.gerritpatchset', '7'],), ''),
      ((['git', 'rev-parse', 'FETCH_HEAD'],), 'deadbeef'),
      ((['git', 'config', 'branch.master.last-upload-hash', 'deadbeef'],), ''),
      ((['git', 'config', 'branch.master.gerritsquashhash', 'deadbeef'],), ''),
    ]
    self.assertEqual(git_cl.main(['patch', '-b', 'master', '123456']), 0)
  def test_patch_gerrit_force(self):
    """`--force` hard-resets to the fetched patchset instead of cherry-picking."""
    self._patch_common(
        force_codereview=True, git_short_host='host', detect_gerrit_server=True)
    self.calls += [
      ((['git', 'fetch', 'https://host.googlesource.com/my/repo',
         'refs/changes/56/123456/7'],), ''),
      ((['git', 'reset', '--hard', 'FETCH_HEAD'],), ''),
      ((['git', 'config', 'branch.master.gerritissue', '123456'],),
       ''),
      ((['git', 'config', 'branch.master.gerritserver',
         'https://host-review.googlesource.com'],), ''),
      ((['git', 'config', 'branch.master.gerritpatchset', '7'],), ''),
      ((['git', 'rev-parse', 'FETCH_HEAD'],), 'deadbeef'),
      ((['git', 'config', 'branch.master.last-upload-hash', 'deadbeef'],), ''),
      ((['git', 'config', 'branch.master.gerritsquashhash', 'deadbeef'],), ''),
    ]
    self.assertEqual(git_cl.main(['patch', '--gerrit', '123456', '--force']), 0)
  def test_patch_gerrit_guess_by_url(self):
    """Passing a #/c/<issue>/<ps> review URL selects host and patchset 1."""
    self.calls += self._get_gerrit_codereview_server_calls(
        'master', git_short_host='else', detect_server=False)
    self._patch_common(
        actual_codereview='gerrit', git_short_host='else',
        codereview_in_url=True, detect_gerrit_server=False)
    self.calls += [
      ((['git', 'fetch', 'https://else.googlesource.com/my/repo',
         'refs/changes/56/123456/1'],), ''),
      ((['git', 'cherry-pick', 'FETCH_HEAD'],), ''),
      ((['git', 'config', 'branch.master.gerritissue', '123456'],),
       ''),
      ((['git', 'config', 'branch.master.gerritserver',
         'https://else-review.googlesource.com'],), ''),
      ((['git', 'config', 'branch.master.gerritpatchset', '1'],), ''),
      ((['git', 'rev-parse', 'FETCH_HEAD'],), 'deadbeef'),
      ((['git', 'config', 'branch.master.last-upload-hash', 'deadbeef'],), ''),
      ((['git', 'config', 'branch.master.gerritsquashhash', 'deadbeef'],), ''),
    ]
    self.assertEqual(git_cl.main(
      ['patch', 'https://else-review.googlesource.com/#/c/123456/1']), 0)
  def test_patch_gerrit_guess_by_url_with_repo(self):
    """The /c/<repo>/+/<issue>/<ps> URL form is parsed the same way."""
    self.calls += self._get_gerrit_codereview_server_calls(
        'master', git_short_host='else', detect_server=False)
    self._patch_common(
        actual_codereview='gerrit', git_short_host='else',
        codereview_in_url=True, detect_gerrit_server=False)
    self.calls += [
      ((['git', 'fetch', 'https://else.googlesource.com/my/repo',
         'refs/changes/56/123456/1'],), ''),
      ((['git', 'cherry-pick', 'FETCH_HEAD'],), ''),
      ((['git', 'config', 'branch.master.gerritissue', '123456'],),
       ''),
      ((['git', 'config', 'branch.master.gerritserver',
         'https://else-review.googlesource.com'],), ''),
      ((['git', 'config', 'branch.master.gerritpatchset', '1'],), ''),
      ((['git', 'rev-parse', 'FETCH_HEAD'],), 'deadbeef'),
      ((['git', 'config', 'branch.master.last-upload-hash', 'deadbeef'],), ''),
      ((['git', 'config', 'branch.master.gerritsquashhash', 'deadbeef'],), ''),
    ]
    self.assertEqual(git_cl.main(
      ['patch', 'https://else-review.googlesource.com/c/my/repo/+/123456/1']),
      0)
  def test_patch_gerrit_conflict(self):
    """A failing cherry-pick makes `git cl patch` die with an error."""
    self._patch_common(detect_gerrit_server=True, git_short_host='chromium')
    self.calls += [
      ((['git', 'fetch', 'https://chromium.googlesource.com/my/repo',
         'refs/changes/56/123456/7'],), ''),
      ((['git', 'cherry-pick', 'FETCH_HEAD'],), CERR1),
      ((['DieWithError', 'Command "git cherry-pick FETCH_HEAD" failed.\n'],),
        SystemExitMock()),
    ]
    with self.assertRaises(SystemExitMock):
      git_cl.main(['patch', '123456'])
def test_patch_gerrit_not_exists(self):
def notExists(_issue, *_, **kwargs):
raise git_cl.gerrit_util.GerritError(404, '')
self.mock(git_cl.gerrit_util, 'GetChangeDetail', notExists)
self.calls = [
((['git', 'symbolic-ref', 'HEAD'],), 'master'),
((['git', 'config', 'branch.master.gerritissue'],), CERR1),
((['git', 'config', 'branch.master.gerritserver'],), CERR1),
((['git', 'config', 'branch.master.merge'],), 'refs/heads/master'),
((['git', 'config', 'branch.master.remote'],), 'origin'),
((['git', 'config', 'remote.origin.url'],),
'https://chromium.googlesource.com/my/repo'),
((['DieWithError',
'change 123456 at https://chromium-review.googlesource.com does not '
'exist or you have no access to it'],), SystemExitMock()),
]
with self.assertRaises(SystemExitMock):
self.assertEqual(1, git_cl.main(['patch', '123456']))
def _checkout_calls(self):
return [
((['git', 'config', '--local', '--get-regexp',
'branch\\..*\\.gerritissue'], ),
('branch.ger-branch.gerritissue 123456\n'
'branch.gbranch654.gerritissue 654321\n')),
]
def test_checkout_gerrit(self):
self.calls = self._checkout_calls()
self.calls += [((['git', 'checkout', 'ger-branch'], ), '')]
self.assertEqual(0, git_cl.main(['checkout', '123456']))
def test_checkout_not_found(self):
self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
self.calls = self._checkout_calls()
self.assertEqual(1, git_cl.main(['checkout', '99999']))
def test_checkout_no_branch_issues(self):
self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
self.calls = [
((['git', 'config', '--local', '--get-regexp',
'branch\\..*\\.gerritissue'], ), CERR1),
]
self.assertEqual(1, git_cl.main(['checkout', '99999']))
  def _test_gerrit_ensure_authenticated_common(self, auth,
                                               skip_auth_check=False):
    """Shared setup for the EnsureAuthenticated tests.

    Installs a cookie authenticator backed by the given host->credentials
    mapping and a DieWithError that records its message, then returns a
    gerrit Changelist positioned on 'master'.
    """
    self.mock(git_cl.gerrit_util, 'CookiesAuthenticator',
              CookiesAuthenticatorMockFactory(hosts_with_creds=auth))
    self.mock(git_cl, 'DieWithError',
              lambda msg, change=None: self._mocked_call(['DieWithError', msg]))
    self.calls = self._gerrit_ensure_auth_calls(skip_auth_check=skip_auth_check)
    cl = git_cl.Changelist(codereview='gerrit')
    cl.branch = 'master'
    cl.branchref = 'refs/heads/master'
    cl.lookedup_issue = True
    return cl
  def test_gerrit_ensure_authenticated_missing(self):
    """Missing credentials for the review host must be a fatal error."""
    cl = self._test_gerrit_ensure_authenticated_common(auth={
      'chromium.googlesource.com': ('git-is.ok', '', 'but gerrit is missing'),
    })
    self.calls.append(
        ((['DieWithError',
           'Credentials for the following hosts are required:\n'
           '  chromium-review.googlesource.com\n'
           'These are read from ~/.gitcookies (or legacy ~/.netrc)\n'
           'You can (re)generate your credentials by visiting '
           'https://chromium-review.googlesource.com/new-password'],), ''),)
    self.assertIsNone(cl.EnsureAuthenticated(force=False))
  def test_gerrit_ensure_authenticated_conflict(self):
    """Different git vs. gerrit credentials only prompt the user to confirm."""
    self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
    cl = self._test_gerrit_ensure_authenticated_common(auth={
      'chromium.googlesource.com':
          ('git-one.example.com', None, 'secret1'),
      'chromium-review.googlesource.com':
          ('git-other.example.com', None, 'secret2'),
    })
    self.calls.append(
        (('ask_for_data', 'If you know what you are doing '
                          'press Enter to continue, or Ctrl+C to abort'), ''))
    self.assertIsNone(cl.EnsureAuthenticated(force=False))
  def test_gerrit_ensure_authenticated_ok(self):
    """Matching credentials on git and review hosts pass silently."""
    cl = self._test_gerrit_ensure_authenticated_common(auth={
      'chromium.googlesource.com':
          ('git-same.example.com', None, 'secret'),
      'chromium-review.googlesource.com':
          ('git-same.example.com', None, 'secret'),
    })
    self.assertIsNone(cl.EnsureAuthenticated(force=False))
  def test_gerrit_ensure_authenticated_skipped(self):
    """No credential check is performed when the auth check is skipped."""
    cl = self._test_gerrit_ensure_authenticated_common(
        auth={}, skip_auth_check=True)
    self.assertIsNone(cl.EnsureAuthenticated(force=False))
  def test_gerrit_ensure_authenticated_bearer_token(self):
    """Empty login plus a secret yields a Bearer authorization header."""
    cl = self._test_gerrit_ensure_authenticated_common(auth={
      'chromium.googlesource.com':
          ('', None, 'secret'),
      'chromium-review.googlesource.com':
          ('', None, 'secret'),
    })
    self.assertIsNone(cl.EnsureAuthenticated(force=False))
    header = gerrit_util.CookiesAuthenticator().get_auth_header(
        'chromium.googlesource.com')
    self.assertTrue('Bearer' in header)
  def _cmd_set_commit_gerrit_common(self, vote, notify=None):
    """Sets up mocks for `git cl set-commit` expecting a Commit-Queue vote.

    `vote` is the CQ label value SetReview must receive; `notify` is the
    expected notify argument (None unless overridden).
    """
    self.mock(git_cl.gerrit_util, 'SetReview',
              lambda h, i, labels, notify=None:
                self._mocked_call(['SetReview', h, i, labels, notify]))
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '123'),
      ((['git', 'config', 'branch.feature.gerritserver'],),
       'https://chromium-review.googlesource.com'),
      ((['git', 'config', 'branch.feature.merge'],), 'refs/heads/master'),
      ((['git', 'config', 'branch.feature.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/infra/infra.git'),
      ((['SetReview', 'chromium-review.googlesource.com',
         'infra%2Finfra~123',
         {'Commit-Queue': vote}, notify],), ''),
    ]
  def test_cmd_set_commit_gerrit_clear(self):
    """`git cl set-commit -c` clears the Commit-Queue label (vote 0)."""
    self._cmd_set_commit_gerrit_common(0)
    self.assertEqual(0, git_cl.main(['set-commit', '-c']))
  def test_cmd_set_commit_gerrit_dry(self):
    """`git cl set-commit -d` votes CQ+1 (dry run) without notifying."""
    self._cmd_set_commit_gerrit_common(1, notify=False)
    self.assertEqual(0, git_cl.main(['set-commit', '-d']))
  def test_cmd_set_commit_gerrit(self):
    """`git cl set-commit` votes CQ+2 (submit to the commit queue)."""
    self._cmd_set_commit_gerrit_common(2)
    self.assertEqual(0, git_cl.main(['set-commit']))
def test_description_display(self):
out = StringIO.StringIO()
self.mock(git_cl.sys, 'stdout', out)
self.mock(git_cl, 'Changelist', ChangelistMock)
ChangelistMock.desc = 'foo\n'
self.assertEqual(0, git_cl.main(['description', '-d']))
self.assertEqual('foo\n', out.getvalue())
  def test_StatusFieldOverrideIssueMissingArgs(self):
    """`git cl status --issue` without --gerrit/--field exits with code 2.

    NOTE(review): the try/except pattern has no self.fail() fallback, so if
    SystemExit were not raised the except body would be skipped silently;
    the stderr regex checks only run when argparse actually exits.
    """
    out = StringIO.StringIO()
    self.mock(git_cl.sys, 'stderr', out)
    try:
      self.assertEqual(git_cl.main(['status', '--issue', '1']), 0)
    except SystemExit as ex:
      self.assertEqual(ex.code, 2)
      self.assertRegexpMatches(out.getvalue(), r'--issue must be specified')
    out = StringIO.StringIO()
    self.mock(git_cl.sys, 'stderr', out)
    try:
      self.assertEqual(git_cl.main(['status', '--issue', '1', '--gerrit']), 0)
    except SystemExit as ex:
      self.assertEqual(ex.code, 2)
      self.assertRegexpMatches(out.getvalue(), r'--field must be specified')
def test_StatusFieldOverrideIssue(self):
out = StringIO.StringIO()
self.mock(git_cl.sys, 'stdout', out)
def assertIssue(cl_self, *_args):
self.assertEquals(cl_self.issue, 1)
return 'foobar'
self.mock(git_cl.Changelist, 'GetDescription', assertIssue)
self.assertEqual(
git_cl.main(['status', '--issue', '1', '--gerrit', '--field', 'desc']),
0)
self.assertEqual(out.getvalue(), 'foobar\n')
def test_SetCloseOverrideIssue(self):
def assertIssue(cl_self, *_args):
self.assertEquals(cl_self.issue, 1)
return 'foobar'
self.mock(git_cl.Changelist, 'GetDescription', assertIssue)
self.mock(git_cl.Changelist, 'CloseIssue', lambda *_: None)
self.assertEqual(
git_cl.main(['set-close', '--issue', '1', '--gerrit']), 0)
  def test_description(self):
    """`git cl description <gerrit url> -d` fetches and prints the message."""
    out = StringIO.StringIO()
    self.mock(git_cl.sys, 'stdout', out)
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.merge'],), 'feature'),
      ((['git', 'config', 'branch.feature.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/my/repo'),
      (('GetChangeDetail', 'chromium-review.googlesource.com',
        'my%2Frepo~123123', ['CURRENT_REVISION', 'CURRENT_COMMIT']),
       {
         'current_revision': 'sha1',
         'revisions': {'sha1': {
           'commit': {'message': 'foobar'},
         }},
       }),
    ]
    self.assertEqual(0, git_cl.main([
        'description',
        'https://chromium-review.googlesource.com/c/my/repo/+/123123',
        '-d']))
    self.assertEqual('foobar\n', out.getvalue())
  def test_description_set_raw(self):
    """`git cl description -n <text>` stores the given text verbatim."""
    out = StringIO.StringIO()
    self.mock(git_cl.sys, 'stdout', out)
    self.mock(git_cl, 'Changelist', ChangelistMock)
    self.mock(git_cl.sys, 'stdin', StringIO.StringIO('hihi'))
    self.assertEqual(0, git_cl.main(['description', '-n', 'hihi']))
    self.assertEqual('hihi', ChangelistMock.desc)
def test_description_appends_bug_line(self):
current_desc = 'Some.\n\nChange-Id: xxx'
def RunEditor(desc, _, **kwargs):
self.assertEquals(
'# Enter a description of the change.\n'
'# This will be displayed on the codereview site.\n'
'# The first line will also be used as the subject of the review.\n'
'#--------------------This line is 72 characters long'
'--------------------\n'
'Some.\n\nChange-Id: xxx\nBug: ',
desc)
return 'Some.\n\nChange-Id: xxx\nBug: 123'
def UpdateDescriptionRemote(_, desc, force=False):
self.assertEquals(desc, 'Some.\n\nChange-Id: xxx\nBug: 123')
self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
self.mock(git_cl.Changelist, 'GetDescription',
lambda *args: current_desc)
self.mock(git_cl._GerritChangelistImpl, 'UpdateDescriptionRemote',
UpdateDescriptionRemote)
self.mock(git_cl.gclient_utils, 'RunEditor', RunEditor)
self.calls = [
((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
((['git', 'config', 'branch.feature.gerritissue'],), '123'),
((['git', 'config', 'rietveld.autoupdate'],), CERR1),
((['git', 'config', 'rietveld.bug-prefix'],), CERR1),
((['git', 'config', 'core.editor'],), 'vi'),
]
self.assertEqual(0, git_cl.main(['description', '--gerrit']))
  def test_description_set_stdin(self):
    """`git cl description -n -` reads stdin; CRs and trailing blanks go."""
    out = StringIO.StringIO()
    self.mock(git_cl.sys, 'stdout', out)
    self.mock(git_cl, 'Changelist', ChangelistMock)
    self.mock(git_cl.sys, 'stdin', StringIO.StringIO('hi \r\n\t there\n\nman'))
    self.assertEqual(0, git_cl.main(['description', '-n', '-']))
    self.assertEqual('hi\n\t there\n\nman', ChangelistMock.desc)
  def test_archive(self):
    """`git cl archive -f` tags and deletes branches whose CLs are closed."""
    self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
    # Branch 'foo' (closed CL 456) must be tagged and deleted; 'master' and
    # 'bar' stay because their CLs are open.
    self.calls = \
      [((['git', 'for-each-ref', '--format=%(refname)', 'refs/heads'],),
        'refs/heads/master\nrefs/heads/foo\nrefs/heads/bar'),
       ((['git', 'config', 'branch.master.gerritissue'],), '456'),
       ((['git', 'config', 'branch.foo.gerritissue'],), CERR1),
       ((['git', 'config', 'branch.bar.gerritissue'],), '789'),
       ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
       ((['git', 'tag', 'git-cl-archived-456-foo', 'foo'],), ''),
       ((['git', 'branch', '-D', 'foo'],), '')]
    self.mock(git_cl, 'get_cl_statuses',
              lambda branches, fine_grained, max_processes:
                [(MockChangelistWithBranchAndIssue('master', 1), 'open'),
                 (MockChangelistWithBranchAndIssue('foo', 456), 'closed'),
                 (MockChangelistWithBranchAndIssue('bar', 789), 'open')])
    self.assertEqual(0, git_cl.main(['archive', '-f']))
  def test_archive_current_branch_fails(self):
    """`git cl archive` returns 1 rather than archive the current branch."""
    self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
    self.calls = \
      [((['git', 'for-each-ref', '--format=%(refname)', 'refs/heads'],),
        'refs/heads/master'),
       ((['git', 'config', 'branch.master.gerritissue'],), '1'),
       ((['git', 'symbolic-ref', 'HEAD'],), 'master')]
    self.mock(git_cl, 'get_cl_statuses',
              lambda branches, fine_grained, max_processes:
                [(MockChangelistWithBranchAndIssue('master', 1), 'closed')])
    self.assertEqual(1, git_cl.main(['archive', '-f']))
  def test_archive_dry_run(self):
    """`git cl archive --dry-run` scans branches but issues no tag/delete."""
    self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
    # Note: no 'git tag' or 'git branch -D' calls are expected here.
    self.calls = \
      [((['git', 'for-each-ref', '--format=%(refname)', 'refs/heads'],),
        'refs/heads/master\nrefs/heads/foo\nrefs/heads/bar'),
       ((['git', 'config', 'branch.master.gerritissue'],), '456'),
       ((['git', 'config', 'branch.foo.gerritissue'],), CERR1),
       ((['git', 'config', 'branch.bar.gerritissue'],), '789'),
       ((['git', 'symbolic-ref', 'HEAD'],), 'master'),]
    self.mock(git_cl, 'get_cl_statuses',
              lambda branches, fine_grained, max_processes:
                [(MockChangelistWithBranchAndIssue('master', 1), 'open'),
                 (MockChangelistWithBranchAndIssue('foo', 456), 'closed'),
                 (MockChangelistWithBranchAndIssue('bar', 789), 'open')])
    self.assertEqual(0, git_cl.main(['archive', '-f', '--dry-run']))
  def test_archive_no_tags(self):
    """`git cl archive --notags` deletes closed branches without tagging."""
    self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
    # 'foo' is deleted directly; no 'git tag' call is expected.
    self.calls = \
      [((['git', 'for-each-ref', '--format=%(refname)', 'refs/heads'],),
        'refs/heads/master\nrefs/heads/foo\nrefs/heads/bar'),
       ((['git', 'config', 'branch.master.gerritissue'],), '1'),
       ((['git', 'config', 'branch.foo.gerritissue'],), '456'),
       ((['git', 'config', 'branch.bar.gerritissue'],), CERR1),
       ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
       ((['git', 'branch', '-D', 'foo'],), '')]
    self.mock(git_cl, 'get_cl_statuses',
              lambda branches, fine_grained, max_processes:
                [(MockChangelistWithBranchAndIssue('master', 1), 'open'),
                 (MockChangelistWithBranchAndIssue('foo', 456), 'closed'),
                 (MockChangelistWithBranchAndIssue('bar', 789), 'open')])
    self.assertEqual(0, git_cl.main(['archive', '-f', '--notags']))
  def test_cmd_issue_erase_existing(self):
    """`git cl issue 0` unsets every branch-to-Gerrit config entry."""
    out = StringIO.StringIO()
    self.mock(git_cl.sys, 'stdout', out)
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '123'),
      # Let this command raise -- it is a bonus feature for the unset to work.
      ((['git', 'config', '--unset', 'branch.feature.last-upload-hash'],),
       CERR1),
      ((['git', 'config', '--unset', 'branch.feature.gerritissue'],), ''),
      ((['git', 'config', '--unset', 'branch.feature.gerritpatchset'],), ''),
      ((['git', 'config', '--unset', 'branch.feature.gerritserver'],), ''),
      ((['git', 'config', '--unset', 'branch.feature.gerritsquashhash'],),
       ''),
      ((['git', 'log', '-1', '--format=%B'],), 'This is a description'),
    ]
    self.assertEqual(0, git_cl.main(['issue', '0']))
  def test_cmd_issue_erase_existing_with_change_id(self):
    """`git cl issue 0` also amends HEAD to drop the Change-Id footer."""
    out = StringIO.StringIO()
    self.mock(git_cl.sys, 'stdout', out)
    self.mock(git_cl.Changelist, 'GetDescription',
              lambda _: 'This is a description\n\nChange-Id: Ideadbeef')
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '123'),
      ((['git', 'config', '--unset', 'branch.feature.last-upload-hash'],),
       CERR1),
      ((['git', 'config', '--unset', 'branch.feature.gerritissue'],), ''),
      ((['git', 'config', '--unset', 'branch.feature.gerritpatchset'],), ''),
      ((['git', 'config', '--unset', 'branch.feature.gerritserver'],), ''),
      ((['git', 'config', '--unset', 'branch.feature.gerritsquashhash'],),
       ''),
      ((['git', 'log', '-1', '--format=%B'],),
       'This is a description\n\nChange-Id: Ideadbeef'),
      # The commit message is rewritten without the Change-Id line.
      ((['git', 'commit', '--amend', '-m', 'This is a description\n'],), ''),
    ]
    self.assertEqual(0, git_cl.main(['issue', '0']))
  def test_cmd_issue_json(self):
    """`git cl issue --json <file>` writes the issue number and URL as JSON."""
    out = StringIO.StringIO()
    self.mock(git_cl.sys, 'stdout', out)
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '123'),
      ((['git', 'config', 'branch.feature.gerritserver'],),
       'https://chromium-review.googlesource.com'),
      (('write_json', 'output.json',
        {'issue': 123,
         'issue_url': 'https://chromium-review.googlesource.com/123'}),
       ''),
    ]
    self.assertEqual(0, git_cl.main(['issue', '--json', 'output.json']))
  def test_git_cl_try_default_cq_dry_run_gerrit(self):
    """`git cl try` with no bots falls back to scheduling a CQ dry run."""
    self.mock(git_cl.Changelist, 'GetChange',
              lambda _, *a: (
                self._mocked_call(['GetChange']+list(a))))
    self.mock(git_cl.presubmit_support, 'DoGetTryMasters',
              lambda *_, **__: (
                self._mocked_call(['DoGetTryMasters'])))
    self.mock(git_cl._GerritChangelistImpl, 'SetCQState',
              lambda _, s: self._mocked_call(['SetCQState', s]))
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '123456'),
      ((['git', 'config', 'branch.feature.gerritserver'],),
       'https://chromium-review.googlesource.com'),
      ((['git', 'config', 'branch.feature.merge'],), 'feature'),
      ((['git', 'config', 'branch.feature.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/depot_tools'),
      (('GetChangeDetail', 'chromium-review.googlesource.com',
        'depot_tools~123456',
        ['DETAILED_ACCOUNTS', 'ALL_REVISIONS', 'CURRENT_COMMIT']), {
        'project': 'depot_tools',
        'status': 'OPEN',
        'owner': {'email': 'owner@e.mail'},
        'revisions': {
          'deadbeaf': {
            '_number': 6,
          },
          'beeeeeef': {
            '_number': 7,
            'fetch': {'http': {
              'url': 'https://chromium.googlesource.com/depot_tools',
              'ref': 'refs/changes/56/123456/7'
            }},
          },
        },
      }),
      ((['git', 'config', 'branch.feature.merge'],), 'feature'),
      ((['git', 'config', 'branch.feature.remote'],), 'origin'),
      ((['get_or_create_merge_base', 'feature', 'feature'],),
       'fake_ancestor_sha'),
      ((['GetChange', 'fake_ancestor_sha', None], ),
       git_cl.presubmit_support.GitChange(
           '', '', '', '', '', '', '', '')),
      ((['git', 'rev-parse', '--show-cdup'],), '../'),
      # No masters found -> git cl falls back to a CQ dry run.
      ((['DoGetTryMasters'], ), None),
      ((['SetCQState', git_cl._CQState.DRY_RUN], ), None),
    ]
    out = StringIO.StringIO()
    self.mock(git_cl.sys, 'stdout', out)
    self.assertEqual(0, git_cl.main(['try']))
    self.assertEqual(
        out.getvalue(),
        'Scheduling CQ dry run on: '
        'https://chromium-review.googlesource.com/123456\n')
  def test_git_cl_try_buildbucket_with_properties_gerrit(self):
    """`git cl try -B/-b/-p` sends one buildbucket PUT with Gerrit props."""
    self.mock(git_cl.Changelist, 'GetMostRecentPatchset', lambda _: 7)
    self.mock(git_cl.uuid, 'uuid4', lambda: 'uuid4')
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '123456'),
      ((['git', 'config', 'branch.feature.gerritserver'],),
       'https://chromium-review.googlesource.com'),
      ((['git', 'config', 'branch.feature.merge'],), 'feature'),
      ((['git', 'config', 'branch.feature.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/depot_tools'),
      (('GetChangeDetail', 'chromium-review.googlesource.com',
        'depot_tools~123456',
        ['DETAILED_ACCOUNTS', 'ALL_REVISIONS', 'CURRENT_COMMIT']), {
        'project': 'depot_tools',
        'status': 'OPEN',
        'owner': {'email': 'owner@e.mail'},
        'revisions': {
          'deadbeaf': {
            '_number': 6,
          },
          'beeeeeef': {
            '_number': 7,
            'fetch': {'http': {
              'url': 'https://chromium.googlesource.com/depot_tools',
              'ref': 'refs/changes/56/123456/7'
            }},
          },
        },
      }),
    ]
    def _buildbucket_retry(*_, **kw):
      # Inspect the PUT body instead of performing any network call.
      body = json.loads(kw['body'])
      self.assertEqual(len(body['builds']), 1)
      build = body['builds'][0]
      params = json.loads(build.pop('parameters_json'))
      self.assertEqual(params, {
        u'builder_name': u'win',
        u'changes': [{u'author': {u'email': u'owner@e.mail'},
                      u'revision': None}],
        u'properties': {
          u'category': u'git_cl_try',
          u'key': u'val',
          u'json': [{u'a': 1}, None],
          u'patch_gerrit_url':
              u'https://chromium-review.googlesource.com',
          u'patch_issue': 123456,
          u'patch_project': u'depot_tools',
          u'patch_ref': u'refs/changes/56/123456/7',
          u'patch_repository_url':
              u'https://chromium.googlesource.com/depot_tools',
          u'patch_set': 7,
          u'patch_storage': u'gerrit',
        }
      })
      self.assertEqual(build, {
        u'bucket': u'luci.chromium.try',
        u'client_operation_id': u'uuid4',
        u'tags': [
          u'builder:win',
          u'buildset:patch/gerrit/chromium-review.googlesource.com/123456/7',
          u'user_agent:git_cl_try',
        ],
      })
    self.mock(git_cl, '_buildbucket_retry', _buildbucket_retry)
    self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
    self.assertEqual(0, git_cl.main([
        'try', '-B', 'luci.chromium.try', '-b', 'win',
        '-p', 'key=val', '-p', 'json=[{"a":1}, null]']))
    self.assertRegexpMatches(
        git_cl.sys.stdout.getvalue(),
        'Tried jobs on:\nBucket: luci.chromium.try')
  def test_git_cl_try_bots_on_multiple_masters(self):
    """`git cl try -b a -b b` groups requested bots into their buckets."""
    self.mock(git_cl.Changelist, 'GetMostRecentPatchset', lambda _: 7)
    self.mock(git_cl.Changelist, 'GetChange',
              lambda _, *a: (
                self._mocked_call(['GetChange']+list(a))))
    self.mock(git_cl.presubmit_support, 'DoGetTryMasters',
              lambda *_, **__: (
                self._mocked_call(['DoGetTryMasters'])))
    self.mock(git_cl._GerritChangelistImpl, 'SetCQState',
              lambda _, s: self._mocked_call(['SetCQState', s]))
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '123456'),
      ((['git', 'config', 'branch.feature.gerritserver'],),
       'https://chromium-review.googlesource.com'),
      ((['git', 'config', 'branch.feature.merge'],), 'feature'),
      ((['git', 'config', 'branch.feature.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/depot_tools'),
      (('GetChangeDetail', 'chromium-review.googlesource.com',
        'depot_tools~123456',
        ['DETAILED_ACCOUNTS', 'ALL_REVISIONS', 'CURRENT_COMMIT']), {
        'project': 'depot_tools',
        'status': 'OPEN',
        'owner': {'email': 'owner@e.mail'},
        'revisions': {
          'deadbeaf': {
            '_number': 6,
          },
          'beeeeeef': {
            '_number': 7,
            'fetch': {'http': {
              'url': 'https://chromium.googlesource.com/depot_tools',
              'ref': 'refs/changes/56/123456/7'
            }},
          },
        },
      }),
    ]
    def _buildbucket_retry(*_, **kw):
      # One request with two builds, each routed to its own bucket.
      body = json.loads(kw['body'])
      self.assertEqual(len(body['builds']), 2)
      self.assertEqual(body['builds'][0]['bucket'], 'bucket1')
      params = json.loads(body['builds'][0]['parameters_json'])
      self.assertEqual(params['builder_name'], 'builder1')
      self.assertEqual(body['builds'][1]['bucket'], 'bucket2')
      params = json.loads(body['builds'][1]['parameters_json'])
      self.assertEqual(params['builder_name'], 'builder2')
    self.mock(git_cl, '_buildbucket_retry', _buildbucket_retry)
    # The builder->bucket map is fetched over HTTP; serve it from a string.
    self.mock(git_cl.urllib2, 'urlopen', lambda _: StringIO.StringIO(
      json.dumps({
        'builder1': {'bucket': 'bucket1'},
        'builder2': {'bucket': 'bucket2'},
      })))
    self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
    self.assertEqual(
        0, git_cl.main(['try', '-b', 'builder1', '-b', 'builder2']))
    self.assertEqual(
        git_cl.sys.stdout.getvalue(),
        'Tried jobs on:\n'
        'Bucket: bucket1\n'
        '  builder1: []\n'
        'Bucket: bucket2\n'
        '  builder2: []\n'
        'To see results here, run:        git cl try-results\n'
        'To see results in browser, run:  git cl web\n')
  def _common_GerritCommitMsgHookCheck(self):
    """Mocks filesystem access for the commit-msg hook checks.

    Returns a gerrit Changelist whose repo root resolves to
    /abs/git_repo_root.
    """
    self.mock(git_cl.sys, 'stdout', StringIO.StringIO())
    self.mock(git_cl.os.path, 'abspath',
              lambda path: self._mocked_call(['abspath', path]))
    self.mock(git_cl.os.path, 'exists',
              lambda path: self._mocked_call(['exists', path]))
    self.mock(git_cl.gclient_utils, 'FileRead',
              lambda path: self._mocked_call(['FileRead', path]))
    self.mock(git_cl.gclient_utils, 'rm_file_or_tree',
              lambda path: self._mocked_call(['rm_file_or_tree', path]))
    self.calls = [
      ((['git', 'rev-parse', '--show-cdup'],), '../'),
      ((['abspath', '../'],), '/abs/git_repo_root'),
    ]
    return git_cl.Changelist(codereview='gerrit', issue=123)
  def test_GerritCommitMsgHookCheck_custom_hook(self):
    """A user-customized commit-msg hook is left untouched (no removal)."""
    cl = self._common_GerritCommitMsgHookCheck()
    self.calls += [
      ((['exists', '/abs/git_repo_root/.git/hooks/commit-msg'],), True),
      ((['FileRead', '/abs/git_repo_root/.git/hooks/commit-msg'],),
       '#!/bin/sh\necho "custom hook"')
    ]
    cl._codereview_impl._GerritCommitMsgHookCheck(offer_removal=True)
  def test_GerritCommitMsgHookCheck_not_exists(self):
    """No commit-msg hook file present: nothing to read or remove."""
    cl = self._common_GerritCommitMsgHookCheck()
    self.calls += [
      ((['exists', '/abs/git_repo_root/.git/hooks/commit-msg'],), False),
    ]
    cl._codereview_impl._GerritCommitMsgHookCheck(offer_removal=True)
  def test_GerritCommitMsgHookCheck(self):
    """The stock Gerrit commit-msg hook is offered for removal and deleted."""
    cl = self._common_GerritCommitMsgHookCheck()
    self.calls += [
      ((['exists', '/abs/git_repo_root/.git/hooks/commit-msg'],), True),
      # The '# From Gerrit Code Review' marker identifies the stock hook.
      ((['FileRead', '/abs/git_repo_root/.git/hooks/commit-msg'],),
       '...\n# From Gerrit Code Review\n...\nadd_ChangeId()\n'),
      (('ask_for_data', 'Do you want to remove it now? [Yes/No]: '), 'Yes'),
      ((['rm_file_or_tree', '/abs/git_repo_root/.git/hooks/commit-msg'],),
       ''),
    ]
    cl._codereview_impl._GerritCommitMsgHookCheck(offer_removal=True)
  def test_GerritCmdLand(self):
    """`git cl land` submits the squashed commit and prints the landed sha."""
    self.calls += [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritsquashhash'],),
       'deadbeaf'),
      ((['git', 'diff', 'deadbeaf'],), ''),
      ((['git', 'config', 'branch.feature.gerritserver'],),
       'chromium-review.googlesource.com'),
    ]
    cl = git_cl.Changelist(issue=123, codereview='gerrit')
    # Stub out the Gerrit round-trips directly on the instance.
    cl._codereview_impl._GetChangeDetail = lambda _: {
      'labels': {},
      'current_revision': 'deadbeaf',
    }
    cl._codereview_impl._GetChangeCommit = lambda: {
      'commit': 'deadbeef',
      'web_links': [{'name': 'gitiles',
                     'url': 'https://git.googlesource.com/test/+/deadbeef'}],
    }
    cl._codereview_impl.SubmitIssue = lambda wait_for_merge: None
    out = StringIO.StringIO()
    self.mock(sys, 'stdout', out)
    self.assertEqual(0, cl.CMDLand(force=True,
                                   bypass_hooks=True,
                                   verbose=True,
                                   parallel=False))
    self.assertRegexpMatches(out.getvalue(), 'Issue.*123 has been submitted')
    self.assertRegexpMatches(out.getvalue(), 'Landed as: .*deadbeef')
BUILDBUCKET_BUILDS_MAP = {
'9000': {
'id': '9000',
'bucket': 'master.x.y',
'created_by': 'user:someone@chromium.org',
'created_ts': '147200002222000',
'experimental': False,
'parameters_json': json.dumps({
'builder_name': 'my-bot',
'properties': {'category': 'cq'},
}),
'status': 'STARTED',
'tags': [
'build_address:x.y/my-bot/2',
'builder:my-bot',
'experimental:false',
'user_agent:cq',
],
'url': 'http://build.cr.org/p/x.y/builders/my-bot/builds/2',
},
'8000': {
'id': '8000',
'bucket': 'master.x.y',
'created_by': 'user:someone@chromium.org',
'created_ts': '147200001111000',
'experimental': False,
'failure_reason': 'BUILD_FAILURE',
'parameters_json': json.dumps({
'builder_name': 'my-bot',
'properties': {'category': 'cq'},
}),
'result_details_json': json.dumps({
'properties': {'buildnumber': 1},
}),
'result': 'FAILURE',
'status': 'COMPLETED',
'tags': [
'build_address:x.y/my-bot/1',
'builder:my-bot',
'experimental:false',
'user_agent:cq',
],
'url': 'http://build.cr.org/p/x.y/builders/my-bot/builds/1',
},
}
  def test_write_try_results_json(self):
    """write_try_results_json flattens builds; output listed in id order."""
    expected_output = [
      {
        'bucket': 'master.x.y',
        'buildbucket_id': '8000',
        'builder_name': 'my-bot',
        'created_ts': '147200001111000',
        'experimental': False,
        'failure_reason': 'BUILD_FAILURE',
        'result': 'FAILURE',
        'status': 'COMPLETED',
        'tags': [
          'build_address:x.y/my-bot/1',
          'builder:my-bot',
          'experimental:false',
          'user_agent:cq',
        ],
        'url': 'http://build.cr.org/p/x.y/builders/my-bot/builds/1',
      },
      {
        'bucket': 'master.x.y',
        'buildbucket_id': '9000',
        'builder_name': 'my-bot',
        'created_ts': '147200002222000',
        'experimental': False,
        # Fields absent from the input build are emitted as None.
        'failure_reason': None,
        'result': None,
        'status': 'STARTED',
        'tags': [
          'build_address:x.y/my-bot/2',
          'builder:my-bot',
          'experimental:false',
          'user_agent:cq',
        ],
        'url': 'http://build.cr.org/p/x.y/builders/my-bot/builds/2',
      },
    ]
    self.calls = [(('write_json', 'output.json', expected_output), '')]
    git_cl.write_try_results_json('output.json', self.BUILDBUCKET_BUILDS_MAP)
  def _setup_fetch_try_jobs(self, most_recent_patchset=20001):
    """Common mocks for the `git cl try-results` tests."""
    out = StringIO.StringIO()
    self.mock(sys, 'stdout', out)
    self.mock(git_cl.Changelist, 'GetMostRecentPatchset',
              lambda *args: most_recent_patchset)
    self.mock(git_cl.auth, 'get_authenticator_for_host', lambda host, _cfg:
              self._mocked_call(['get_authenticator_for_host', host]))
    self.mock(git_cl, '_buildbucket_retry', lambda *_, **__:
              self._mocked_call(['_buildbucket_retry']))
  def _setup_fetch_try_jobs_gerrit(self, *request_results):
    """Mocks a Gerrit CL on 'feature' and queues buildbucket responses.

    Each element of request_results is returned by one successive
    _buildbucket_retry call.
    """
    self._setup_fetch_try_jobs(most_recent_patchset=13)
    self.calls += [
      ((['git', 'symbolic-ref', 'HEAD'],), 'feature'),
      ((['git', 'config', 'branch.feature.gerritissue'],), '1'),
      ((['git', 'config', 'branch.feature.gerritserver'],),
       'https://x-review.googlesource.com'),
      ((['get_authenticator_for_host', 'x-review.googlesource.com'],),
       AuthenticatorMock()),
    ] + [((['_buildbucket_retry'],), r) for r in request_results]
  def test_fetch_try_jobs_none_gerrit(self):
    """`git cl try-results` reports when the CL has no try jobs."""
    self._setup_fetch_try_jobs_gerrit({})
    self.assertEqual(0, git_cl.main(['try-results']))
    self.assertRegexpMatches(sys.stdout.getvalue(), 'No try jobs')
  def test_fetch_try_jobs_some_gerrit(self):
    """`git cl try-results` groups builds by outcome and prints a count."""
    self._setup_fetch_try_jobs_gerrit({
      'builds': self.BUILDBUCKET_BUILDS_MAP.values(),
    })
    # With an explicit --patchset no 'Warning' about patchsets is printed.
    self.assertEqual(0, git_cl.main(['try-results', '--patchset', '5']))
    self.assertNotRegexpMatches(sys.stdout.getvalue(), 'Warning')
    self.assertRegexpMatches(sys.stdout.getvalue(), '^Failures:')
    self.assertRegexpMatches(sys.stdout.getvalue(), 'Started:')
    self.assertRegexpMatches(sys.stdout.getvalue(), '2 try jobs')
  def _mock_gerrit_changes_for_detail_cache(self):
    """Pins the Gerrit host to 'host' for the detail-cache tests."""
    self.mock(git_cl._GerritChangelistImpl, '_GetGerritHost', lambda _: 'host')
  def test_gerrit_change_detail_cache_simple(self):
    """_GetChangeDetail caches per change; no_cache forces a re-fetch."""
    self._mock_gerrit_changes_for_detail_cache()
    # Only three fetches are expected; every other lookup must hit the cache.
    self.calls = [
      (('GetChangeDetail', 'host', 'my%2Frepo~1', []), 'a'),
      (('GetChangeDetail', 'host', 'ab%2Frepo~2', []), 'b'),
      (('GetChangeDetail', 'host', 'ab%2Frepo~2', []), 'b2'),
    ]
    cl1 = git_cl.Changelist(issue=1, codereview='gerrit')
    cl1._cached_remote_url = (
        True, 'https://chromium.googlesource.com/a/my/repo.git/')
    cl2 = git_cl.Changelist(issue=2, codereview='gerrit')
    cl2._cached_remote_url = (
        True, 'https://chromium.googlesource.com/ab/repo')
    self.assertEqual(cl1._GetChangeDetail(), 'a')  # Miss.
    self.assertEqual(cl1._GetChangeDetail(), 'a')
    self.assertEqual(cl2._GetChangeDetail(), 'b')  # Miss.
    self.assertEqual(cl2._GetChangeDetail(no_cache=True), 'b2')  # Miss.
    self.assertEqual(cl1._GetChangeDetail(), 'a')
    self.assertEqual(cl2._GetChangeDetail(), 'b2')
  def test_gerrit_change_detail_cache_options(self):
    """A cached fetch with a superset of options satisfies subset requests."""
    self._mock_gerrit_changes_for_detail_cache()
    self.calls = [
      (('GetChangeDetail', 'host', 'repo~1', ['C', 'A', 'B']), 'cab'),
      (('GetChangeDetail', 'host', 'repo~1', ['A', 'D']), 'ad'),
      (('GetChangeDetail', 'host', 'repo~1', ['A']), 'a'),  # no_cache=True
                                                            # no longer in cache.
      (('GetChangeDetail', 'host', 'repo~1', ['B']), 'b'),
    ]
    cl = git_cl.Changelist(issue=1, codereview='gerrit')
    cl._cached_remote_url = (True, 'https://chromium.googlesource.com/repo/')
    # ['C','A','B'] is fetched once; any permutation or subset reuses it.
    self.assertEqual(cl._GetChangeDetail(options=['C', 'A', 'B']), 'cab')
    self.assertEqual(cl._GetChangeDetail(options=['A', 'B', 'C']), 'cab')
    self.assertEqual(cl._GetChangeDetail(options=['B', 'A']), 'cab')
    self.assertEqual(cl._GetChangeDetail(options=['C']), 'cab')
    self.assertEqual(cl._GetChangeDetail(options=['A']), 'cab')
    self.assertEqual(cl._GetChangeDetail(), 'cab')
    # ['A','D'] is not a subset of the cached options, so it is fetched.
    self.assertEqual(cl._GetChangeDetail(options=['A', 'D']), 'ad')
    self.assertEqual(cl._GetChangeDetail(options=['A']), 'cab')
    self.assertEqual(cl._GetChangeDetail(options=['D']), 'ad')
    self.assertEqual(cl._GetChangeDetail(), 'cab')
    # Finally, no_cache should invalidate all caches for given change.
    self.assertEqual(cl._GetChangeDetail(options=['A'], no_cache=True), 'a')
    self.assertEqual(cl._GetChangeDetail(options=['B']), 'b')
  def test_gerrit_description_caching(self):
    """GetDescription is cached until force=True re-fetches from Gerrit."""
    def gen_detail(rev, desc):
      # Minimal change-detail payload carrying just the commit message.
      return {
        'current_revision': rev,
        'revisions': {rev: {'commit': {'message': desc}}}
      }
    self.calls = [
      (('GetChangeDetail', 'host', 'my%2Frepo~1',
        ['CURRENT_REVISION', 'CURRENT_COMMIT']),
       gen_detail('rev1', 'desc1')),
      (('GetChangeDetail', 'host', 'my%2Frepo~1',
        ['CURRENT_REVISION', 'CURRENT_COMMIT']),
       gen_detail('rev2', 'desc2')),
    ]
    self._mock_gerrit_changes_for_detail_cache()
    cl = git_cl.Changelist(issue=1, codereview='gerrit')
    cl._cached_remote_url = (
        True, 'https://chromium.googlesource.com/a/my/repo.git/')
    self.assertEqual(cl.GetDescription(), 'desc1')
    self.assertEqual(cl.GetDescription(), 'desc1')  # cache hit.
    self.assertEqual(cl.GetDescription(force=True), 'desc2')
  def test_print_current_creds(self):
    """print_current_creds renders an aligned host/user/file table."""
    class CookiesAuthenticatorMock(object):
      def __init__(self):
        # Entries as read from ~/.gitcookies; github.com from .netrc is not
        # a googlesource host and must be filtered out of the table.
        self.gitcookies = {
          'host.googlesource.com': ('user', 'pass'),
          'host-review.googlesource.com': ('user', 'pass'),
        }
        self.netrc = self
        self.netrc.hosts = {
          'github.com': ('user2', None, 'pass2'),
          'host2.googlesource.com': ('user3', None, 'pass'),
        }
    self.mock(git_cl.gerrit_util, 'CookiesAuthenticator',
              CookiesAuthenticatorMock)
    self.mock(sys, 'stdout', StringIO.StringIO())
    git_cl._GitCookiesChecker().print_current_creds(include_netrc=True)
    self.assertEqual(list(sys.stdout.getvalue().splitlines()), [
        '                        Host\t User\t Which file',
        '============================\t=====\t===========',
        'host-review.googlesource.com\t user\t.gitcookies',
        '       host.googlesource.com\t user\t.gitcookies',
        '      host2.googlesource.com\tuser3\t    .netrc',
    ])
    # NOTE(review): resets the mocked Py2 StringIO via its internal .buf
    # attribute; relies on the prior getvalue() having collapsed buflist.
    sys.stdout.buf = ''
    git_cl._GitCookiesChecker().print_current_creds(include_netrc=False)
    self.assertEqual(list(sys.stdout.getvalue().splitlines()), [
        '                        Host\tUser\t Which file',
        '============================\t====\t===========',
        'host-review.googlesource.com\tuser\t.gitcookies',
        '       host.googlesource.com\tuser\t.gitcookies',
    ])
  def _common_creds_check_mocks(self):
    """Mocks os.path.exists so ~/.netrc and ~/.gitcookies lookups are fake."""
    def exists_mock(path):
      dirname = os.path.dirname(path)
      if dirname == os.path.expanduser('~'):
        dirname = '~'
      base = os.path.basename(path)
      if base in ('.netrc', '.gitcookies'):
        return self._mocked_call('os.path.exists', '%s/%s' % (dirname, base))
      # git cl also checks for existence of other files not relevant to this
      # test.
      return None
    self.mock(os.path, 'exists', exists_mock)
    self.mock(sys, 'stdout', StringIO.StringIO())
  def test_creds_check_gitcookies_not_configured(self):
    """creds-check warns about legacy .netrc and configures .gitcookies."""
    self._common_creds_check_mocks()
    self.mock(git_cl._GitCookiesChecker, 'get_hosts_with_creds',
              lambda _, include_netrc=False: [])
    self.calls = [
      ((['git', 'config', '--path', 'http.cookiefile'],), CERR1),
      ((['git', 'config', '--global', 'http.cookiefile'],), CERR1),
      (('os.path.exists', '~/.netrc'), True),
      (('ask_for_data', 'Press Enter to setup .gitcookies, '
                        'or Ctrl+C to abort'), ''),
      ((['git', 'config', '--global', 'http.cookiefile',
         os.path.expanduser('~/.gitcookies')], ), ''),
    ]
    self.assertEqual(0, git_cl.main(['creds-check']))
    self.assertRegexpMatches(
        sys.stdout.getvalue(),
        '^You seem to be using outdated .netrc for git credentials:')
    self.assertRegexpMatches(
        sys.stdout.getvalue(),
        '\nConfigured git to use .gitcookies from')
  def test_creds_check_gitcookies_configured_custom_broken(self):
    """creds-check offers to fix a configured-but-missing .gitcookies path."""
    self._common_creds_check_mocks()
    self.mock(git_cl._GitCookiesChecker, 'get_hosts_with_creds',
              lambda _, include_netrc=False: [])
    self.calls = [
      ((['git', 'config', '--path', 'http.cookiefile'],), CERR1),
      ((['git', 'config', '--global', 'http.cookiefile'],),
       '/custom/.gitcookies'),
      # The configured custom cookie file does not exist on disk.
      (('os.path.exists', '/custom/.gitcookies'), False),
      (('ask_for_data', 'Reconfigure git to use default .gitcookies? '
                        'Press Enter to reconfigure, or Ctrl+C to abort'), ''),
      ((['git', 'config', '--global', 'http.cookiefile',
         os.path.expanduser('~/.gitcookies')], ), ''),
    ]
    self.assertEqual(0, git_cl.main(['creds-check']))
    self.assertRegexpMatches(
        sys.stdout.getvalue(),
        'WARNING: You have configured custom path to .gitcookies: ')
    self.assertRegexpMatches(
        sys.stdout.getvalue(),
        'However, your configured .gitcookies file is missing.')
  def test_git_cl_comment_add_gerrit(self):
    """`git cl comment --gerrit -a msg` posts the message via SetReview."""
    self.mock(git_cl.gerrit_util, 'SetReview',
              lambda host, change, msg, ready:
              self._mocked_call('SetReview', host, change, msg, ready))
    # HEAD is detached (both symbolic-ref lookups fail), so the Gerrit
    # project is derived from the origin remote URL instead of a branch.
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), CERR1),
      ((['git', 'symbolic-ref', 'HEAD'],), CERR1),
      ((['git', 'config', 'rietveld.upstream-branch'],), CERR1),
      ((['git', 'branch', '-r'],), 'origin/HEAD -> origin/master\n'
                                   'origin/master'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/infra/infra'),
      (('SetReview', 'chromium-review.googlesource.com', 'infra%2Finfra~10',
        'msg', None),
       None),
    ]
    self.assertEqual(0, git_cl.main(['comment', '--gerrit', '-i', '10',
                                     '-a', 'msg']))
  def test_git_cl_comments_fetch_gerrit(self):
    """Fetches Gerrit messages/comments and checks the rendered summary.

    Exercises both Changelist.GetCommentsSummary() directly and the
    `git cl comments -j` JSON output; the mocked call sequence is therefore
    consumed twice (hence the `] * 2 + [` below).

    NOTE(review): several u'...' literals in this block are truncated
    (missing their closing quote). A Gerrit URL line-anchor suffix starting
    with '#' (e.g. '#b42') was stripped by comment-removal preprocessing of
    this dump — restore from upstream depot_tools git_cl_test.py.
    """
    self.mock(sys, 'stdout', StringIO.StringIO())
    self.calls = [
      ((['git', 'config', 'branch.foo.gerritserver'],), ''),
      ((['git', 'config', 'branch.foo.merge'],), ''),
      ((['git', 'config', 'rietveld.upstream-branch'],), CERR1),
      ((['git', 'branch', '-r'],), 'origin/HEAD -> origin/master\n'
                                   'origin/master'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/infra/infra'),
      (('GetChangeDetail', 'chromium-review.googlesource.com',
        'infra%2Finfra~1',
        ['MESSAGES', 'DETAILED_ACCOUNTS', 'CURRENT_REVISION',
         'CURRENT_COMMIT']), {
        'owner': {'email': 'owner@example.com'},
        'current_revision': 'ba5eba11',
        'revisions': {
          'deadbeaf': {
            '_number': 1,
          },
          'ba5eba11': {
            '_number': 2,
          },
        },
        'messages': [
          {
             u'_revision_number': 1,
             u'author': {
               u'_account_id': 1111084,
               u'email': u'commit-bot@chromium.org',
               u'name': u'Commit Bot'
             },
             u'date': u'2017-03-15 20:08:45.000000000',
             u'id': u'f5a6c25ecbd3b3b54a43ae418ed97eff046dc50b',
             u'message': u'Patch Set 1:\n\nDry run: CQ is trying the patch...',
             u'tag': u'autogenerated:cq:dry-run'
          },
          {
             u'_revision_number': 2,
             u'author': {
               u'_account_id': 11151243,
               u'email': u'owner@example.com',
               u'name': u'owner'
             },
             u'date': u'2017-03-16 20:00:41.000000000',
             u'id': u'f5a6c25ecbd3b3b54a43ae418ed97eff046d1234',
             u'message': u'PTAL',
          },
          {
             u'_revision_number': 2,
             u'author': {
               u'_account_id': 148512 ,
               u'email': u'reviewer@example.com',
               u'name': u'reviewer'
             },
             u'date': u'2017-03-17 05:19:37.500000000',
             u'id': u'f5a6c25ecbd3b3b54a43ae418ed97eff046d4568',
             u'message': u'Patch Set 2: Code-Review+1',
          },
        ]
      }),
      (('GetChangeComments', 'chromium-review.googlesource.com',
        'infra%2Finfra~1'), {
        '/COMMIT_MSG': [
          {
            'author': {'email': u'reviewer@example.com'},
            'updated': u'2017-03-17 05:19:37.500000000',
            'patch_set': 2,
            'side': 'REVISION',
            'message': 'Please include a bug link',
          },
        ],
        'codereview.settings': [
          {
            'author': {'email': u'owner@example.com'},
            'updated': u'2017-03-16 20:00:41.000000000',
            'patch_set': 2,
            'side': 'PARENT',
            'line': 42,
            'message': 'I removed this because it is bad',
          },
        ]
      }),
      (('GetChangeRobotComments', 'chromium-review.googlesource.com',
        'infra%2Finfra~1'), {}),
      ((['git', 'config', 'branch.foo.gerritpatchset', '2'],), ''),
    ] * 2 + [
      (('write_json', 'output.json', [
        {
          u'date': u'2017-03-16 20:00:41.000000',
          u'message': (
              u'PTAL\n' +
              u'\n' +
              u'codereview.settings\n' +
              u' Base, Line 42: https://chromium-review.googlesource.com/' +
              # NOTE(review): truncated literal — "#<anchor>\n' +" lost.
              u'c/1/2/codereview.settings
              u' I removed this because it is bad\n'),
          u'autogenerated': False,
          u'approval': False,
          u'disapproval': False,
          u'sender': u'owner@example.com'
        }, {
          u'date': u'2017-03-17 05:19:37.500000',
          u'message': (
              u'Patch Set 2: Code-Review+1\n' +
              u'\n' +
              u'/COMMIT_MSG\n' +
              u' PS2, File comment: https://chromium-review.googlesource' +
              # NOTE(review): truncated literal — "#<anchor>\n' +" lost.
              u'.com/c/1/2//COMMIT_MSG
              u' Please include a bug link\n'),
          u'autogenerated': False,
          u'approval': False,
          u'disapproval': False,
          u'sender': u'reviewer@example.com'
        }
      ]),'')
    ]
    expected_comments_summary = [
      git_cl._CommentSummary(
        message=(
            u'PTAL\n' +
            u'\n' +
            u'codereview.settings\n' +
            u' Base, Line 42: https://chromium-review.googlesource.com/' +
            # NOTE(review): truncated literal — "#<anchor>\n' +" lost.
            u'c/1/2/codereview.settings
            u' I removed this because it is bad\n'),
        date=datetime.datetime(2017, 3, 16, 20, 0, 41, 0),
        autogenerated=False,
        disapproval=False, approval=False, sender=u'owner@example.com'),
      git_cl._CommentSummary(
        message=(
            u'Patch Set 2: Code-Review+1\n' +
            u'\n' +
            u'/COMMIT_MSG\n' +
            u' PS2, File comment: https://chromium-review.googlesource.com/' +
            # NOTE(review): truncated literal — "#<anchor>\n' +" lost.
            u'c/1/2//COMMIT_MSG
            u' Please include a bug link\n'),
        date=datetime.datetime(2017, 3, 17, 5, 19, 37, 500000),
        autogenerated=False,
        disapproval=False, approval=False, sender=u'reviewer@example.com'),
    ]
    cl = git_cl.Changelist(
        codereview='gerrit', issue=1, branchref='refs/heads/foo')
    self.assertEqual(cl.GetCommentsSummary(), expected_comments_summary)
    self.mock(git_cl.Changelist, 'GetBranch', lambda _: 'foo')
    self.assertEqual(
        0, git_cl.main(['comments', '-i', '1', '-j', 'output.json']))
  def test_git_cl_comments_robot_comments(self):
    """Robot (Tricium) comments are merged into the comments summary.

    NOTE(review): one u'...' literal below is truncated (missing closing
    quote) — a Gerrit '#<line>' URL anchor was stripped by comment-removal
    preprocessing of this dump; restore from upstream depot_tools.
    """
    # git cl comments also fetches robot comments (which are considered a type
    # of autogenerated comment), and unlike other types of comments, only robot
    # comments from the latest patchset are shown.
    self.mock(sys, 'stdout', StringIO.StringIO())
    self.calls = [
      ((['git', 'config', 'branch.foo.gerritserver'],), ''),
      ((['git', 'config', 'branch.foo.merge'],), ''),
      ((['git', 'config', 'rietveld.upstream-branch'],), CERR1),
      ((['git', 'branch', '-r'],), 'origin/HEAD -> origin/master\n'
                                   'origin/master'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/infra/infra'),
      (('GetChangeDetail', 'chromium-review.googlesource.com',
        'infra%2Finfra~1',
        ['MESSAGES', 'DETAILED_ACCOUNTS', 'CURRENT_REVISION',
         'CURRENT_COMMIT']), {
        'owner': {'email': 'owner@example.com'},
        'current_revision': 'ba5eba11',
        'revisions': {
          'deadbeaf': {
            '_number': 1,
          },
          'ba5eba11': {
            '_number': 2,
          },
        },
        'messages': [
          {
             u'_revision_number': 1,
             u'author': {
               u'_account_id': 1111084,
               u'email': u'commit-bot@chromium.org',
               u'name': u'Commit Bot'
             },
             u'date': u'2017-03-15 20:08:45.000000000',
             u'id': u'f5a6c25ecbd3b3b54a43ae418ed97eff046dc50b',
             u'message': u'Patch Set 1:\n\nDry run: CQ is trying the patch...',
             u'tag': u'autogenerated:cq:dry-run'
          },
          {
             u'_revision_number': 1,
             u'author': {
               u'_account_id': 123,
               u'email': u'tricium@serviceaccount.com',
               u'name': u'Tricium'
             },
             u'date': u'2017-03-16 20:00:41.000000000',
             u'id': u'f5a6c25ecbd3b3b54a43ae418ed97eff046d1234',
             u'message': u'(1 comment)',
             u'tag': u'autogenerated:tricium',
          },
          {
             u'_revision_number': 1,
             u'author': {
               u'_account_id': 123,
               u'email': u'tricium@serviceaccount.com',
               u'name': u'Tricium'
             },
             u'date': u'2017-03-16 20:00:41.000000000',
             u'id': u'f5a6c25ecbd3b3b54a43ae418ed97eff046d1234',
             u'message': u'(1 comment)',
             u'tag': u'autogenerated:tricium',
          },
          {
             u'_revision_number': 2,
             u'author': {
               u'_account_id': 123 ,
               u'email': u'tricium@serviceaccount.com',
               u'name': u'reviewer'
             },
             u'date': u'2017-03-17 05:30:37.000000000',
             u'tag': u'autogenerated:tricium',
             u'id': u'f5a6c25ecbd3b3b54a43ae418ed97eff046d4568',
             u'message': u'(1 comment)',
          },
        ]
      }),
      (('GetChangeComments', 'chromium-review.googlesource.com',
        'infra%2Finfra~1'), {}),
      (('GetChangeRobotComments', 'chromium-review.googlesource.com',
        'infra%2Finfra~1'), {
        'codereview.settings': [
          {
            u'author': {u'email': u'tricium@serviceaccount.com'},
            u'updated': u'2017-03-17 05:30:37.000000000',
            u'robot_run_id': u'5565031076855808',
            u'robot_id': u'Linter/Category',
            u'tag': u'autogenerated:tricium',
            u'patch_set': 2,
            u'side': u'REVISION',
            u'message': u'Linter warning message text',
            u'line': 32,
          },
        ],
      }),
      ((['git', 'config', 'branch.foo.gerritpatchset', '2'],), ''),
    ]
    expected_comments_summary = [
      git_cl._CommentSummary(date=datetime.datetime(2017, 3, 17, 5, 30, 37),
        message=(
           u'(1 comment)\n\ncodereview.settings\n'
           u' PS2, Line 32: https://chromium-review.googlesource.com/'
           # NOTE(review): truncated literal — "#<line>\n'" suffix lost.
           u'c/1/2/codereview.settings
           u' Linter warning message text\n'),
        sender=u'tricium@serviceaccount.com',
        autogenerated=True, approval=False, disapproval=False)
    ]
    cl = git_cl.Changelist(
        codereview='gerrit', issue=1, branchref='refs/heads/foo')
    self.assertEqual(cl.GetCommentsSummary(), expected_comments_summary)
  def test_get_remote_url_with_mirror(self):
    """GetRemoteUrl resolves a local git-cache mirror to its upstream URL."""
    original_os_path_isdir = os.path.isdir
    def selective_os_path_isdir_mock(path):
      # Only intercept the cache path; defer every other query to the real
      # os.path.isdir so unrelated filesystem checks keep working.
      if path == '/cache/this-dir-exists':
        return self._mocked_call('os.path.isdir', path)
      return original_os_path_isdir(path)
    self.mock(os.path, 'isdir', selective_os_path_isdir_mock)
    url = 'https://chromium.googlesource.com/my/repo'
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
      ((['git', 'config', 'branch.master.merge'],), 'master'),
      ((['git', 'config', 'branch.master.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       '/cache/this-dir-exists'),
      (('os.path.isdir', '/cache/this-dir-exists'),
       True),
      # Runs in /cache/this-dir-exists.
      ((['git', 'config', 'remote.origin.url'],),
       url),
    ]
    cl = git_cl.Changelist(codereview='gerrit', issue=1)
    self.assertEqual(cl.GetRemoteUrl(), url)
    self.assertEqual(cl.GetRemoteUrl(), url)  # Must be cached.
  def test_get_remote_url_non_existing_mirror(self):
    """A cache-dir remote missing on disk yields None and logs an error."""
    original_os_path_isdir = os.path.isdir
    def selective_os_path_isdir_mock(path):
      # Only intercept the cache path; everything else hits the real isdir.
      if path == '/cache/this-dir-doesnt-exist':
        return self._mocked_call('os.path.isdir', path)
      return original_os_path_isdir(path)
    self.mock(os.path, 'isdir', selective_os_path_isdir_mock)
    self.mock(logging, 'error',
              lambda fmt, *a: self._mocked_call('logging.error', fmt % a))
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
      ((['git', 'config', 'branch.master.merge'],), 'master'),
      ((['git', 'config', 'branch.master.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       '/cache/this-dir-doesnt-exist'),
      (('os.path.isdir', '/cache/this-dir-doesnt-exist'),
       False),
      (('logging.error',
        'Remote "origin" for branch "/cache/this-dir-doesnt-exist" points to'
        ' "master", but it doesn\'t exist.'), None),
    ]
    cl = git_cl.Changelist(codereview='gerrit', issue=1)
    self.assertIsNone(cl.GetRemoteUrl())
  def test_get_remote_url_misconfigured_mirror(self):
    """A cache dir that exists but has no origin URL yields None + error."""
    original_os_path_isdir = os.path.isdir
    def selective_os_path_isdir_mock(path):
      # Only intercept the cache path; everything else hits the real isdir.
      if path == '/cache/this-dir-exists':
        return self._mocked_call('os.path.isdir', path)
      return original_os_path_isdir(path)
    self.mock(os.path, 'isdir', selective_os_path_isdir_mock)
    self.mock(logging, 'error',
              lambda *a: self._mocked_call('logging.error', *a))
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
      ((['git', 'config', 'branch.master.merge'],), 'master'),
      ((['git', 'config', 'branch.master.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       '/cache/this-dir-exists'),
      (('os.path.isdir', '/cache/this-dir-exists'), True),
      # The mirror exists but its own remote.origin.url is empty.
      ((['git', 'config', 'remote.origin.url'],), ''),
      (('logging.error',
        'Remote "%(remote)s" for branch "%(branch)s" points to '
        '"%(cache_path)s", but it is misconfigured.\n'
        '"%(cache_path)s" must be a git repo and must have a remote named '
        '"%(remote)s" pointing to the git host.', {
          'remote': 'origin',
          'cache_path': '/cache/this-dir-exists',
          'branch': 'master'}
        ), None),
    ]
    cl = git_cl.Changelist(codereview='gerrit', issue=1)
    self.assertIsNone(cl.GetRemoteUrl())
  def test_gerrit_change_identifier_with_project(self):
    """_GerritChangeIdentifier embeds the URL-escaped project when known."""
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
      ((['git', 'config', 'branch.master.merge'],), 'master'),
      ((['git', 'config', 'branch.master.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],),
       'https://chromium.googlesource.com/a/my/repo.git/'),
    ]
    cl = git_cl.Changelist(codereview='gerrit', issue=123456)
    # '/a/' prefix and '.git/' suffix are stripped; '/' escaped as %2F.
    self.assertEqual(cl._GerritChangeIdentifier(), 'my%2Frepo~123456')
  def test_gerrit_change_identifier_without_project(self):
    """Falls back to the bare issue number when the remote URL is unknown."""
    self.calls = [
      ((['git', 'symbolic-ref', 'HEAD'],), 'master'),
      ((['git', 'config', 'branch.master.merge'],), 'master'),
      ((['git', 'config', 'branch.master.remote'],), 'origin'),
      ((['git', 'config', 'remote.origin.url'],), CERR1),
    ]
    cl = git_cl.Changelist(codereview='gerrit', issue=123456)
    self.assertEqual(cl._GerritChangeIdentifier(), '123456')
if __name__ == '__main__':
  # Verbose (DEBUG) logging when the test runner is invoked with -v,
  # otherwise keep the output quiet.
  if '-v' in sys.argv:
    log_level = logging.DEBUG
  else:
    log_level = logging.ERROR
  logging.basicConfig(level=log_level)
  unittest.main()
| true | true |
f71a5da87d38b68176352916c419991c5e418c42 | 15,214 | py | Python | treebuilder/partptr/train.py | NLP-Discourse-SoochowU/TDDiscourseParser | 2f9c7cef85c564c47b368ee4935caf1fad7c598d | [
"Apache-2.0"
] | 9 | 2020-11-24T01:16:01.000Z | 2022-01-26T09:37:00.000Z | treebuilder/partptr/train.py | NLP-Discourse-SoochowU/TDDiscourseParser | 2f9c7cef85c564c47b368ee4935caf1fad7c598d | [
"Apache-2.0"
] | 2 | 2020-11-29T17:49:49.000Z | 2021-05-20T02:53:25.000Z | treebuilder/partptr/train.py | NLP-Discourse-SoochowU/TDDiscourseParser | 2f9c7cef85c564c47b368ee4935caf1fad7c598d | [
"Apache-2.0"
] | 1 | 2022-01-26T11:00:33.000Z | 2022-01-26T11:00:33.000Z | # coding: UTF-8
import argparse
import logging
import random
import torch
import copy
import numpy as np
from dataset import CDTB
from collections import Counter
from itertools import chain
from structure.vocab import Vocab, Label
from structure.nodes import node_type_filter, EDU, Relation, Sentence, TEXT
from treebuilder.partptr.model import PartitionPtr
from treebuilder.partptr.parser import PartitionPtrParser
import torch.optim as optim
from util.eval import parse_eval, gen_parse_report
from tensorboardX import SummaryWriter
def build_vocab(dataset):
    """Build vocabularies and label sets from a parsed corpus.

    Walks every paragraph of every split in *dataset*, counting word and POS
    frequencies on EDU leaves and nuclearity/relation-type frequencies on
    Relation nodes.

    Returns:
        (word_vocab, pos_vocab, nuc_label, rel_label)
    """
    word_freq, pos_freq = Counter(), Counter()
    nuc_freq, rel_freq = Counter(), Counter()
    for paragraph in chain(*dataset):
        for node in paragraph.iterfind(filter=node_type_filter([EDU, Relation])):
            if isinstance(node, EDU):
                word_freq.update(node.words)
                pos_freq.update(node.tags)
            elif isinstance(node, Relation):
                nuc_freq[node.nuclear] += 1
                rel_freq[node.ftype] += 1
    return (Vocab("word", word_freq),
            Vocab("part of speech", pos_freq),
            Label("nuclear", nuc_freq),
            Label("relation", rel_freq))
def gen_decoder_data(root, edu2ids):
    """Flatten a discourse (sub)tree into EDUs and split decisions.

    Given a tree node and an EDU->index map, returns (edus, splits) where
    splits is a list of (start, split, end, nuclear, relation) tuples over
    EDU indices, emitted in the top-down left-to-right order used to
    supervise the decoder. Index layout (splits sit between EDUs):

        splits  s0  s1  s2  s3  s4  s5  s6
        edus    s/  e0  e1  e2  e3  e4  e5  /s
    """
    if isinstance(root, EDU):
        return [root], []
    if isinstance(root, Sentence):
        edus, splits = [], []
        for child in root:
            sub_edus, sub_splits = gen_decoder_data(child, edu2ids)
            edus.extend(sub_edus)
            splits.extend(sub_splits)
        return edus, splits
    if isinstance(root, Relation):
        children = [gen_decoder_data(child, edu2ids) for child in root]
        if len(children) < 2:
            raise ValueError("relation node should have at least 2 children")
        # the right boundary of every split at this level is the end of the
        # relation's last child
        end = edu2ids[children[-1][0][-1]] + 1
        edus, splits = [], []
        for idx, (sub_edus, sub_splits) in enumerate(children):
            if idx < len(children) - 1:
                start = edu2ids[sub_edus[0]]
                cut = edu2ids[sub_edus[-1]] + 1
                splits.append((start, cut, end, root.nuclear, root.ftype))
            edus.extend(sub_edus)
            splits.extend(sub_splits)
        return edus, splits
    # unknown node type: contributes nothing
    return [], []
def numericalize(dataset, word_vocab, pos_vocab, nuc_label, rel_label):
    """Convert parsed paragraphs into integer-id training instances.

    Paragraphs without a root relation are skipped. Each instance is a
    tuple (encoder_inputs, decoder_inputs, pred_splits, pred_nucs,
    pred_rels) derived from gen_decoder_data().
    """
    instances = []
    for paragraph in filter(lambda d: d.root_relation(), chain(*dataset)):
        edus = list(paragraph.edus())
        # per-EDU (word ids, POS ids) pairs
        encoder_inputs = [([word_vocab[w] for w in edu.words],
                           [pos_vocab[t] for t in edu.tags])
                          for edu in edus]
        edu2ids = {edu: i for i, edu in enumerate(edus)}
        _, splits = gen_decoder_data(paragraph.root_relation(), edu2ids)
        decoder_inputs = [(start, end) for start, _, end, _, _ in splits]
        pred_splits = [cut for _, cut, _, _, _ in splits]
        pred_nucs = [nuc_label[nuc] for _, _, _, nuc, _ in splits]
        pred_rels = [rel_label[rel] for _, _, _, _, rel in splits]
        instances.append((encoder_inputs, decoder_inputs, pred_splits, pred_nucs, pred_rels))
    return instances
def gen_batch_iter(instances, batch_size, use_gpu=False):
    """Yield shuffled, padded mini-batches of numericalized instances.

    Args:
        instances: list produced by numericalize().
        batch_size: maximum number of instances per batch.
        use_gpu: move all tensors to CUDA when True.

    Yields:
        ((e_input_words, e_input_poses, e_masks), (d_inputs, d_masks),
         (d_outputs, d_output_nucs, d_output_rels)) where encoder tensors are
        [batch, max_edus, max_words] word/POS ids plus a byte mask of real
        tokens, d_inputs holds the (start, end) span per decoder step,
        d_masks marks the legal split positions strictly inside each span,
        and the three output tensors hold the gold split/nuclearity/relation
        per step.
    """
    # Shuffle by permuting indices rather than calling np.random.permutation
    # on the (ragged) instance list itself: NumPy >= 1.24 refuses to build an
    # object array from ragged sequences, and the np.long alias used by the
    # original dtype=np.long allocations was removed there as well.
    order = np.random.permutation(len(instances))
    num_instances = len(instances)
    offset = 0
    while offset < num_instances:
        batch = [instances[i] for i in order[offset: min(num_instances, offset + batch_size)]]
        # find out max seqlen of edus and words of edus
        num_batch = len(batch)
        max_edu_seqlen = 0
        max_word_seqlen = 0
        for encoder_inputs, decoder_inputs, pred_splits, pred_nucs, pred_rels in batch:
            max_edu_seqlen = max(max_edu_seqlen, len(encoder_inputs))
            for edu_word_ids, edu_pos_ids in encoder_inputs:
                max_word_seqlen = max(max_word_seqlen, len(edu_word_ids))
        # batch to numpy (np.int64 replaces the removed np.long alias)
        e_input_words = np.zeros([num_batch, max_edu_seqlen, max_word_seqlen], dtype=np.int64)
        e_input_poses = np.zeros([num_batch, max_edu_seqlen, max_word_seqlen], dtype=np.int64)
        e_masks = np.zeros([num_batch, max_edu_seqlen, max_word_seqlen], dtype=np.uint8)
        d_inputs = np.zeros([num_batch, max_edu_seqlen-1, 2], dtype=np.int64)
        d_outputs = np.zeros([num_batch, max_edu_seqlen-1], dtype=np.int64)
        d_output_nucs = np.zeros([num_batch, max_edu_seqlen-1], dtype=np.int64)
        d_output_rels = np.zeros([num_batch, max_edu_seqlen - 1], dtype=np.int64)
        d_masks = np.zeros([num_batch, max_edu_seqlen-1, max_edu_seqlen+1], dtype=np.uint8)
        for batchi, (encoder_inputs, decoder_inputs, pred_splits, pred_nucs, pred_rels) in enumerate(batch):
            for edui, (edu_word_ids, edu_pos_ids) in enumerate(encoder_inputs):
                word_seqlen = len(edu_word_ids)
                e_input_words[batchi][edui][:word_seqlen] = edu_word_ids
                e_input_poses[batchi][edui][:word_seqlen] = edu_pos_ids
                e_masks[batchi][edui][:word_seqlen] = 1
            for di, decoder_input in enumerate(decoder_inputs):
                d_inputs[batchi][di] = decoder_input
                # only positions strictly inside (start, end) are candidates
                d_masks[batchi][di][decoder_input[0]+1: decoder_input[1]] = 1
            d_outputs[batchi][:len(pred_splits)] = pred_splits
            d_output_nucs[batchi][:len(pred_nucs)] = pred_nucs
            d_output_rels[batchi][:len(pred_rels)] = pred_rels
        # numpy to torch
        e_input_words = torch.from_numpy(e_input_words).long()
        e_input_poses = torch.from_numpy(e_input_poses).long()
        e_masks = torch.from_numpy(e_masks).byte()
        d_inputs = torch.from_numpy(d_inputs).long()
        d_outputs = torch.from_numpy(d_outputs).long()
        d_output_nucs = torch.from_numpy(d_output_nucs).long()
        d_output_rels = torch.from_numpy(d_output_rels).long()
        d_masks = torch.from_numpy(d_masks).byte()
        if use_gpu:
            e_input_words = e_input_words.cuda()
            e_input_poses = e_input_poses.cuda()
            e_masks = e_masks.cuda()
            d_inputs = d_inputs.cuda()
            d_outputs = d_outputs.cuda()
            d_output_nucs = d_output_nucs.cuda()
            d_output_rels = d_output_rels.cuda()
            d_masks = d_masks.cuda()
        yield (e_input_words, e_input_poses, e_masks), (d_inputs, d_masks), (d_outputs, d_output_nucs, d_output_rels)
        offset = offset + batch_size
def parse_and_eval(dataset, model):
    """Parse every annotated paragraph of *dataset* with *model* and score it.

    Returns (number of gold paragraphs, parse_eval scores).
    """
    model.eval()
    parser = PartitionPtrParser(model)
    golds = [p for p in chain(*dataset) if p.root_relation()]
    # Strip each gold tree down to a bare EDU sequence so the parser cannot
    # see the reference structure, while keeping tokenization and POS tags.
    strips = []
    for paragraph in golds:
        bare = []
        for edu in paragraph.edus():
            clone = EDU([TEXT(edu.text)])
            setattr(clone, "words", edu.words)
            setattr(clone, "tags", edu.tags)
            bare.append(clone)
        strips.append(bare)
    parses = [parser.parse(bare) for bare in strips]
    return len(golds), parse_eval(parses, golds)
def model_score(scores):
    """Collapse evaluation scores into one scalar by summing each F1 (index 2)."""
    f1_values = [metric[2] for metric in scores]
    return sum(f1_values)
def main(args):
    """Train the partition-pointer tree builder on CDTB and evaluate it.

    Seeds all RNGs, builds vocabularies from the training split, trains with
    Adam while logging to TensorBoard, keeps a deep copy of the model with
    the best validation score (sum of F1s via model_score), reports on the
    test split, and finally saves the best model to args.model_save.
    """
    # set seed for reproducibility
    random.seed(args.seed)
    torch.manual_seed(args.seed)
    np.random.seed(args.seed)
    # load dataset
    cdtb = CDTB(args.data, "TRAIN", "VALIDATE", "TEST", ctb_dir=args.ctb_dir, preprocess=True, cache_dir=args.cache_dir)
    # build vocabulary
    word_vocab, pos_vocab, nuc_label, rel_label = build_vocab(cdtb.train)
    trainset = numericalize(cdtb.train, word_vocab, pos_vocab, nuc_label, rel_label)
    logging.info("num of instances trainset: %d" % len(trainset))
    logging.info("args: %s" % str(args))
    # build model
    model = PartitionPtr(hidden_size=args.hidden_size, dropout=args.dropout,
                         word_vocab=word_vocab, pos_vocab=pos_vocab, nuc_label=nuc_label, rel_label=rel_label,
                         pretrained=args.pretrained, w2v_size=args.w2v_size, w2v_freeze=args.w2v_freeze,
                         pos_size=args.pos_size,
                         split_mlp_size=args.split_mlp_size, nuc_mlp_size=args.nuc_mlp_size,
                         rel_mlp_size=args.rel_mlp_size,
                         use_gpu=args.use_gpu)
    if args.use_gpu:
        model.cuda()
    logging.info("model:\n%s" % str(model))
    # train and evaluate
    # running loss sums since the last log step (reset after each report)
    niter = 0
    log_splits_loss = 0.
    log_nucs_loss = 0.
    log_rels_loss = 0.
    log_loss = 0.
    optimizer = optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.l2)
    writer = SummaryWriter(args.log_dir)
    logging.info("hint: run 'tensorboard --logdir %s' to observe training status" % args.log_dir)
    best_model = None
    best_model_score = 0.
    for nepoch in range(1, args.epoch + 1):
        batch_iter = gen_batch_iter(trainset, args.batch_size, args.use_gpu)
        for nbatch, (e_inputs, d_inputs, grounds) in enumerate(batch_iter, start=1):
            niter += 1
            model.train()
            optimizer.zero_grad()
            splits_loss, nucs_loss, rels_loss = model.loss(e_inputs, d_inputs, grounds)
            # weighted multi-task objective: split point + nuclearity + relation
            loss = args.a_split_loss * splits_loss + args.a_nuclear_loss * nucs_loss + args.a_relation_loss * rels_loss
            loss.backward()
            optimizer.step()
            log_splits_loss += splits_loss.item()
            log_nucs_loss += nucs_loss.item()
            log_rels_loss += rels_loss.item()
            log_loss += loss.item()
            if niter % args.log_every == 0:
                logging.info("[iter %-6d]epoch: %-3d, batch %-5d,"
                             "train splits loss:%.5f, nuclear loss %.5f, relation loss %.5f, loss %.5f" %
                             (niter, nepoch, nbatch, log_splits_loss, log_nucs_loss, log_rels_loss, log_loss))
                writer.add_scalar("train/split_loss", log_splits_loss, niter)
                writer.add_scalar("train/nuclear_loss", log_nucs_loss, niter)
                writer.add_scalar("train/relation_loss", log_rels_loss, niter)
                writer.add_scalar("train/loss", log_loss, niter)
                log_splits_loss = 0.
                log_nucs_loss = 0.
                log_rels_loss = 0.
                log_loss = 0.
            if niter % args.validate_every == 0:
                num_instances, validate_scores = parse_and_eval(cdtb.validate, model)
                logging.info("validation on %d instances" % num_instances)
                logging.info(gen_parse_report(*validate_scores))
                writer.add_scalar("validate/span_f1", validate_scores[0][2], niter)
                writer.add_scalar("validate/nuclear_f1", validate_scores[1][2], niter)
                writer.add_scalar("validate/coarse_relation_f1", validate_scores[2][2], niter)
                writer.add_scalar("validate/fine_relation_f1", validate_scores[3][2], niter)
                new_model_score = model_score(validate_scores)
                if new_model_score > best_model_score:
                    # test on testset with new best model
                    best_model_score = new_model_score
                    best_model = copy.deepcopy(model)
                    logging.info("test on new best model")
                    num_instances, test_scores = parse_and_eval(cdtb.test, best_model)
                    logging.info("test on %d instances" % num_instances)
                    logging.info(gen_parse_report(*test_scores))
                    writer.add_scalar("test/span_f1", test_scores[0][2], niter)
                    writer.add_scalar("test/nuclear_f1", test_scores[1][2], niter)
                    writer.add_scalar("test/coarse_relation_f1", test_scores[2][2], niter)
                    writer.add_scalar("test/fine_relation_f1", test_scores[3][2], niter)
    if best_model:
        # evaluation and save best model
        logging.info("final test result")
        num_instances, test_scores = parse_and_eval(cdtb.test, best_model)
        logging.info("test on %d instances" % num_instances)
        logging.info(gen_parse_report(*test_scores))
        logging.info("save best model to %s" % args.model_save)
        with open(args.model_save, "wb+") as model_fd:
            torch.save(best_model, model_fd)
    writer.close()
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    arg_parser = argparse.ArgumentParser()
    # dataset parameters
    arg_parser.add_argument("--data", default="data/CDTB")
    arg_parser.add_argument("--ctb_dir", default="data/CTB")
    arg_parser.add_argument("--cache_dir", default="data/cache")
    # model parameters
    arg_parser.add_argument("-hidden_size", default=512, type=int)
    arg_parser.add_argument("-dropout", default=0.33, type=float)
    # w2v_group = arg_parser.add_mutually_exclusive_group(required=True)
    arg_parser.add_argument("-pretrained", default="data/pretrained/sgns.renmin.word")
    arg_parser.add_argument("-w2v_size", type=int)
    arg_parser.add_argument("-pos_size", default=30, type=int)
    arg_parser.add_argument("-split_mlp_size", default=64, type=int)
    arg_parser.add_argument("-nuc_mlp_size", default=32, type=int)
    arg_parser.add_argument("-rel_mlp_size", default=128, type=int)
    # NOTE(review): embeddings are frozen by default; passing --w2v_freeze
    # is a no-op since set_defaults already makes it True.
    arg_parser.add_argument("--w2v_freeze", dest="w2v_freeze", action="store_true")
    arg_parser.set_defaults(w2v_freeze=True)
    # train parameters
    arg_parser.add_argument("-epoch", default=20, type=int)
    arg_parser.add_argument("-batch_size", default=64, type=int)
    arg_parser.add_argument("-lr", default=0.001, type=float)
    arg_parser.add_argument("-l2", default=0.0, type=float)
    arg_parser.add_argument("-log_every", default=10, type=int)
    arg_parser.add_argument("-validate_every", default=10, type=int)
    # weights of the three decoder losses in the combined objective
    arg_parser.add_argument("-a_split_loss", default=0.3, type=float)
    arg_parser.add_argument("-a_nuclear_loss", default=1.0, type=float)
    arg_parser.add_argument("-a_relation_loss", default=1.0, type=float)
    arg_parser.add_argument("-log_dir", default="data/log")
    arg_parser.add_argument("-model_save", default="data/models/treebuilder.partptr.model")
    arg_parser.add_argument("--seed", default=21, type=int)
    # NOTE(review): GPU is on by default as well; --use_gpu is likewise a
    # no-op given set_defaults(use_gpu=True).
    arg_parser.add_argument("--use_gpu", dest="use_gpu", action="store_true")
    arg_parser.set_defaults(use_gpu=True)
    main(arg_parser.parse_args())
| 46.95679 | 121 | 0.63481 |
import argparse
import logging
import random
import torch
import copy
import numpy as np
from dataset import CDTB
from collections import Counter
from itertools import chain
from structure.vocab import Vocab, Label
from structure.nodes import node_type_filter, EDU, Relation, Sentence, TEXT
from treebuilder.partptr.model import PartitionPtr
from treebuilder.partptr.parser import PartitionPtrParser
import torch.optim as optim
from util.eval import parse_eval, gen_parse_report
from tensorboardX import SummaryWriter
def build_vocab(dataset):
    """Count word/POS frequencies on EDU leaves and nuclearity/relation
    frequencies on Relation nodes; return (word_vocab, pos_vocab,
    nuc_label, rel_label).

    NOTE(review): comment-stripped duplicate of the build_vocab defined
    earlier in this dump (dataset 'content_no_comment' column).
    """
    word_freq = Counter()
    pos_freq = Counter()
    nuc_freq = Counter()
    rel_freq = Counter()
    for paragraph in chain(*dataset):
        for node in paragraph.iterfind(filter=node_type_filter([EDU, Relation])):
            if isinstance(node, EDU):
                word_freq.update(node.words)
                pos_freq.update(node.tags)
            elif isinstance(node, Relation):
                nuc_freq[node.nuclear] += 1
                rel_freq[node.ftype] += 1
    word_vocab = Vocab("word", word_freq)
    pos_vocab = Vocab("part of speech", pos_freq)
    nuc_label = Label("nuclear", nuc_freq)
    rel_label = Label("relation", rel_freq)
    return word_vocab, pos_vocab, nuc_label, rel_label
def gen_decoder_data(root, edu2ids):
    """Flatten a discourse (sub)tree into (edus, splits) where splits is a
    list of (start, split, end, nuclear, relation) tuples over EDU indices.

    NOTE(review): comment-stripped duplicate of the gen_decoder_data defined
    earlier in this dump (dataset 'content_no_comment' column).
    """
    splits = []
    child_edus = []
    if isinstance(root, EDU):
        child_edus.append(root)
    elif isinstance(root, Sentence):
        for child in root:
            _child_edus, _splits = gen_decoder_data(child, edu2ids)
            child_edus.extend(_child_edus)
            splits.extend(_splits)
    elif isinstance(root, Relation):
        children = [gen_decoder_data(child, edu2ids) for child in root]
        if len(children) < 2:
            raise ValueError("relation node should have at least 2 children")
        while children:
            left_child_edus, left_child_splits = children.pop(0)
            if children:
                # end is always the last child's right boundary
                last_child_edus, _ = children[-1]
                start = edu2ids[left_child_edus[0]]
                split = edu2ids[left_child_edus[-1]] + 1
                end = edu2ids[last_child_edus[-1]] + 1
                nuc = root.nuclear
                rel = root.ftype
                splits.append((start, split, end, nuc, rel))
            child_edus.extend(left_child_edus)
            splits.extend(left_child_splits)
    return child_edus, splits
def numericalize(dataset, word_vocab, pos_vocab, nuc_label, rel_label):
    """Convert parsed paragraphs into integer-id training instances.

    NOTE(review): comment-stripped duplicate of the numericalize defined
    earlier in this dump (dataset 'content_no_comment' column).
    """
    instances = []
    for paragraph in filter(lambda d: d.root_relation(), chain(*dataset)):
        encoder_inputs = []
        decoder_inputs = []
        pred_splits = []
        pred_nucs = []
        pred_rels = []
        edus = list(paragraph.edus())
        for edu in edus:
            edu_word_ids = [word_vocab[word] for word in edu.words]
            edu_pos_ids = [pos_vocab[pos] for pos in edu.tags]
            encoder_inputs.append((edu_word_ids, edu_pos_ids))
        edu2ids = {edu: i for i, edu in enumerate(edus)}
        _, splits = gen_decoder_data(paragraph.root_relation(), edu2ids)
        for start, split, end, nuc, rel in splits:
            decoder_inputs.append((start, end))
            pred_splits.append(split)
            pred_nucs.append(nuc_label[nuc])
            pred_rels.append(rel_label[rel])
        instances.append((encoder_inputs, decoder_inputs, pred_splits, pred_nucs, pred_rels))
    return instances
def gen_batch_iter(instances, batch_size, use_gpu=False):
    """Yield shuffled, padded mini-batches of numericalized instances.

    NOTE(review): comment-stripped duplicate of the gen_batch_iter defined
    earlier in this dump; carries the same NumPy-compatibility fix.
    """
    # Permute indices instead of the ragged instance list: NumPy >= 1.24
    # rejects ragged object-array creation and removed the np.long alias.
    order = np.random.permutation(len(instances))
    num_instances = len(instances)
    offset = 0
    while offset < num_instances:
        batch = [instances[i] for i in order[offset: min(num_instances, offset + batch_size)]]
        num_batch = len(batch)
        max_edu_seqlen = 0
        max_word_seqlen = 0
        for encoder_inputs, decoder_inputs, pred_splits, pred_nucs, pred_rels in batch:
            max_edu_seqlen = max(max_edu_seqlen, len(encoder_inputs))
            for edu_word_ids, edu_pos_ids in encoder_inputs:
                max_word_seqlen = max(max_word_seqlen, len(edu_word_ids))
        e_input_words = np.zeros([num_batch, max_edu_seqlen, max_word_seqlen], dtype=np.int64)
        e_input_poses = np.zeros([num_batch, max_edu_seqlen, max_word_seqlen], dtype=np.int64)
        e_masks = np.zeros([num_batch, max_edu_seqlen, max_word_seqlen], dtype=np.uint8)
        d_inputs = np.zeros([num_batch, max_edu_seqlen-1, 2], dtype=np.int64)
        d_outputs = np.zeros([num_batch, max_edu_seqlen-1], dtype=np.int64)
        d_output_nucs = np.zeros([num_batch, max_edu_seqlen-1], dtype=np.int64)
        d_output_rels = np.zeros([num_batch, max_edu_seqlen - 1], dtype=np.int64)
        d_masks = np.zeros([num_batch, max_edu_seqlen-1, max_edu_seqlen+1], dtype=np.uint8)
        for batchi, (encoder_inputs, decoder_inputs, pred_splits, pred_nucs, pred_rels) in enumerate(batch):
            for edui, (edu_word_ids, edu_pos_ids) in enumerate(encoder_inputs):
                word_seqlen = len(edu_word_ids)
                e_input_words[batchi][edui][:word_seqlen] = edu_word_ids
                e_input_poses[batchi][edui][:word_seqlen] = edu_pos_ids
                e_masks[batchi][edui][:word_seqlen] = 1
            for di, decoder_input in enumerate(decoder_inputs):
                d_inputs[batchi][di] = decoder_input
                d_masks[batchi][di][decoder_input[0]+1: decoder_input[1]] = 1
            d_outputs[batchi][:len(pred_splits)] = pred_splits
            d_output_nucs[batchi][:len(pred_nucs)] = pred_nucs
            d_output_rels[batchi][:len(pred_rels)] = pred_rels
        e_input_words = torch.from_numpy(e_input_words).long()
        e_input_poses = torch.from_numpy(e_input_poses).long()
        e_masks = torch.from_numpy(e_masks).byte()
        d_inputs = torch.from_numpy(d_inputs).long()
        d_outputs = torch.from_numpy(d_outputs).long()
        d_output_nucs = torch.from_numpy(d_output_nucs).long()
        d_output_rels = torch.from_numpy(d_output_rels).long()
        d_masks = torch.from_numpy(d_masks).byte()
        if use_gpu:
            e_input_words = e_input_words.cuda()
            e_input_poses = e_input_poses.cuda()
            e_masks = e_masks.cuda()
            d_inputs = d_inputs.cuda()
            d_outputs = d_outputs.cuda()
            d_output_nucs = d_output_nucs.cuda()
            d_output_rels = d_output_rels.cuda()
            d_masks = d_masks.cuda()
        yield (e_input_words, e_input_poses, e_masks), (d_inputs, d_masks), (d_outputs, d_output_nucs, d_output_rels)
        offset = offset + batch_size
def parse_and_eval(dataset, model):
    """Parse all annotated paragraphs of *dataset* with *model*; return
    (number of gold paragraphs, parse_eval scores).

    NOTE(review): comment-stripped duplicate of the parse_and_eval defined
    earlier in this dump (dataset 'content_no_comment' column).
    """
    model.eval()
    parser = PartitionPtrParser(model)
    golds = list(filter(lambda d: d.root_relation(), chain(*dataset)))
    num_instances = len(golds)
    # strip gold trees down to bare EDU sequences before parsing
    strips = []
    for paragraph in golds:
        edus = []
        for edu in paragraph.edus():
            edu_copy = EDU([TEXT(edu.text)])
            setattr(edu_copy, "words", edu.words)
            setattr(edu_copy, "tags", edu.tags)
            edus.append(edu_copy)
        strips.append(edus)
    parses = []
    for edus in strips:
        parse = parser.parse(edus)
        parses.append(parse)
    return num_instances, parse_eval(parses, golds)
def model_score(scores):
    """Model-selection scalar: the sum of the F1 entries (index 2)."""
    total = 0
    for metric in scores:
        total += metric[2]
    return total
def main(args):
    """Train the PartitionPtr discourse parser on the CDTB corpus.

    Seeds all RNG sources, builds vocabularies from the training split,
    runs the optimization loop with periodic logging and validation,
    keeps the best model by validation score, and finally evaluates and
    saves that best model.
    """
    # Seed every RNG source in use (python, torch, numpy) for reproducibility.
    random.seed(args.seed)
    torch.manual_seed(args.seed)
    np.random.seed(args.seed)
    # Load the CDTB corpus (CTB is needed for preprocessing) and numericalize
    # the training split with vocabularies built from it.
    cdtb = CDTB(args.data, "TRAIN", "VALIDATE", "TEST", ctb_dir=args.ctb_dir, preprocess=True, cache_dir=args.cache_dir)
    word_vocab, pos_vocab, nuc_label, rel_label = build_vocab(cdtb.train)
    trainset = numericalize(cdtb.train, word_vocab, pos_vocab, nuc_label, rel_label)
    logging.info("num of instances trainset: %d" % len(trainset))
    logging.info("args: %s" % str(args))
    model = PartitionPtr(hidden_size=args.hidden_size, dropout=args.dropout,
                         word_vocab=word_vocab, pos_vocab=pos_vocab, nuc_label=nuc_label, rel_label=rel_label,
                         pretrained=args.pretrained, w2v_size=args.w2v_size, w2v_freeze=args.w2v_freeze,
                         pos_size=args.pos_size,
                         split_mlp_size=args.split_mlp_size, nuc_mlp_size=args.nuc_mlp_size,
                         rel_mlp_size=args.rel_mlp_size,
                         use_gpu=args.use_gpu)
    if args.use_gpu:
        model.cuda()
    logging.info("model:\n%s" % str(model))
    # Running loss accumulators: reset every time they are logged, so the
    # reported numbers are sums over the last `log_every` batches.
    niter = 0
    log_splits_loss = 0.
    log_nucs_loss = 0.
    log_rels_loss = 0.
    log_loss = 0.
    optimizer = optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.l2)
    writer = SummaryWriter(args.log_dir)
    logging.info("hint: run 'tensorboard --logdir %s' to observe training status" % args.log_dir)
    best_model = None
    best_model_score = 0.
    for nepoch in range(1, args.epoch + 1):
        batch_iter = gen_batch_iter(trainset, args.batch_size, args.use_gpu)
        for nbatch, (e_inputs, d_inputs, grounds) in enumerate(batch_iter, start=1):
            niter += 1
            model.train()
            optimizer.zero_grad()
            # Total loss is a weighted combination of the three sub-task
            # losses (split point, nuclearity, relation label).
            splits_loss, nucs_loss, rels_loss = model.loss(e_inputs, d_inputs, grounds)
            loss = args.a_split_loss * splits_loss + args.a_nuclear_loss * nucs_loss + args.a_relation_loss * rels_loss
            loss.backward()
            optimizer.step()
            log_splits_loss += splits_loss.item()
            log_nucs_loss += nucs_loss.item()
            log_rels_loss += rels_loss.item()
            log_loss += loss.item()
            if niter % args.log_every == 0:
                # NOTE(review): the two concatenated format strings lack a
                # separating space, so the message reads "...batch N,train...".
                logging.info("[iter %-6d]epoch: %-3d, batch %-5d,"
                             "train splits loss:%.5f, nuclear loss %.5f, relation loss %.5f, loss %.5f" %
                             (niter, nepoch, nbatch, log_splits_loss, log_nucs_loss, log_rels_loss, log_loss))
                writer.add_scalar("train/split_loss", log_splits_loss, niter)
                writer.add_scalar("train/nuclear_loss", log_nucs_loss, niter)
                writer.add_scalar("train/relation_loss", log_rels_loss, niter)
                writer.add_scalar("train/loss", log_loss, niter)
                # Reset the accumulators for the next logging window.
                log_splits_loss = 0.
                log_nucs_loss = 0.
                log_rels_loss = 0.
                log_loss = 0.
            if niter % args.validate_every == 0:
                # Periodic validation: track the best model seen so far and
                # evaluate it on the test split whenever it improves.
                num_instances, validate_scores = parse_and_eval(cdtb.validate, model)
                logging.info("validation on %d instances" % num_instances)
                logging.info(gen_parse_report(*validate_scores))
                writer.add_scalar("validate/span_f1", validate_scores[0][2], niter)
                writer.add_scalar("validate/nuclear_f1", validate_scores[1][2], niter)
                writer.add_scalar("validate/coarse_relation_f1", validate_scores[2][2], niter)
                writer.add_scalar("validate/fine_relation_f1", validate_scores[3][2], niter)
                new_model_score = model_score(validate_scores)
                if new_model_score > best_model_score:
                    best_model_score = new_model_score
                    # Deep-copy so later training steps do not mutate the
                    # snapshot we may eventually save.
                    best_model = copy.deepcopy(model)
                    logging.info("test on new best model")
                    num_instances, test_scores = parse_and_eval(cdtb.test, best_model)
                    logging.info("test on %d instances" % num_instances)
                    logging.info(gen_parse_report(*test_scores))
                    writer.add_scalar("test/span_f1", test_scores[0][2], niter)
                    writer.add_scalar("test/nuclear_f1", test_scores[1][2], niter)
                    writer.add_scalar("test/coarse_relation_f1", test_scores[2][2], niter)
                    writer.add_scalar("test/fine_relation_f1", test_scores[3][2], niter)
    if best_model:
        # Final evaluation of the best validation model, then persist it.
        logging.info("final test result")
        num_instances, test_scores = parse_and_eval(cdtb.test, best_model)
        logging.info("test on %d instances" % num_instances)
        logging.info(gen_parse_report(*test_scores))
        logging.info("save best model to %s" % args.model_save)
        with open(args.model_save, "wb+") as model_fd:
            torch.save(best_model, model_fd)
    writer.close()
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    arg_parser = argparse.ArgumentParser()
    # corpus locations
    arg_parser.add_argument("--data", default="data/CDTB")
    arg_parser.add_argument("--ctb_dir", default="data/CTB")
    arg_parser.add_argument("--cache_dir", default="data/cache")
    # model hyper-parameters
    arg_parser.add_argument("-hidden_size", default=512, type=int)
    arg_parser.add_argument("-dropout", default=0.33, type=float)
    arg_parser.add_argument("-pretrained", default="data/pretrained/sgns.renmin.word")
    arg_parser.add_argument("-w2v_size", type=int)
    arg_parser.add_argument("-pos_size", default=30, type=int)
    arg_parser.add_argument("-split_mlp_size", default=64, type=int)
    arg_parser.add_argument("-nuc_mlp_size", default=32, type=int)
    arg_parser.add_argument("-rel_mlp_size", default=128, type=int)
    # NOTE(review): the flag is store_true but the default is already True,
    # so --w2v_freeze cannot actually be disabled from the command line.
    arg_parser.add_argument("--w2v_freeze", dest="w2v_freeze", action="store_true")
    arg_parser.set_defaults(w2v_freeze=True)
    # optimization settings
    arg_parser.add_argument("-epoch", default=20, type=int)
    arg_parser.add_argument("-batch_size", default=64, type=int)
    arg_parser.add_argument("-lr", default=0.001, type=float)
    arg_parser.add_argument("-l2", default=0.0, type=float)
    # logging / validation cadence, in training iterations
    arg_parser.add_argument("-log_every", default=10, type=int)
    arg_parser.add_argument("-validate_every", default=10, type=int)
    # weights of the three sub-task losses in the combined objective
    arg_parser.add_argument("-a_split_loss", default=0.3, type=float)
    arg_parser.add_argument("-a_nuclear_loss", default=1.0, type=float)
    arg_parser.add_argument("-a_relation_loss", default=1.0, type=float)
    # output locations
    arg_parser.add_argument("-log_dir", default="data/log")
    arg_parser.add_argument("-model_save", default="data/models/treebuilder.partptr.model")
    arg_parser.add_argument("--seed", default=21, type=int)
    # NOTE(review): same store_true/default-True pattern as --w2v_freeze.
    arg_parser.add_argument("--use_gpu", dest="use_gpu", action="store_true")
    arg_parser.set_defaults(use_gpu=True)
    main(arg_parser.parse_args())
| true | true |
f71a5e69e97dfd4fa78fe7475a89e51f71597592 | 2,911 | py | Python | migrations/env.py | kvshravan/sample-platform | f3cf050d21df9d8e4b3746a5a32d273d839c4898 | [
"0BSD"
] | null | null | null | migrations/env.py | kvshravan/sample-platform | f3cf050d21df9d8e4b3746a5a32d273d839c4898 | [
"0BSD"
] | null | null | null | migrations/env.py | kvshravan/sample-platform | f3cf050d21df9d8e4b3746a5a32d273d839c4898 | [
"0BSD"
] | null | null | null | from __future__ import with_statement
import logging
from logging.config import fileConfig
from alembic import context
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from flask import current_app
from sqlalchemy import engine_from_config, pool
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
config.set_main_option(
'sqlalchemy.url', current_app.config.get( # type: ignore
'SQLALCHEMY_DATABASE_URI').replace('%', '%%'))
target_metadata = current_app.extensions['migrate'].db.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the context with just a database URL rather than an
    Engine, so no DBAPI needs to be available; calls to
    context.execute() emit the generated SQL to the script output
    instead of a live connection.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    Creates an Engine and associates a live connection with the Alembic
    migration context before running the migrations.
    """
    def process_revision_directives(context, revision, directives):
        # Suppress the auto-generated migration when the model metadata and
        # the database schema are already in sync.
        # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
        if not getattr(config.cmd_opts, 'autogenerate', False):
            return
        if directives[0].upgrade_ops.is_empty():
            directives[:] = []
            logger.info('No changes in schema detected.')

    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool,
    )
    with engine.connect() as connection:
        extra_args = current_app.extensions['migrate'].configure_args
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            process_revision_directives=process_revision_directives,
            **extra_args
        )
        with context.begin_transaction():
            context.run_migrations()
# Dispatch on how Alembic was invoked: "offline" emits SQL to the script
# output, "online" runs the migrations against a live database connection.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
| 30.642105 | 77 | 0.710752 | from __future__ import with_statement
import logging
from logging.config import fileConfig
from alembic import context
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from flask import current_app
from sqlalchemy import engine_from_config, pool
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
config.set_main_option(
'sqlalchemy.url', current_app.config.get( # type: ignore
'SQLALCHEMY_DATABASE_URI').replace('%', '%%'))
target_metadata = current_app.extensions['migrate'].db.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
# this callback is used to prevent an auto-migration from being generated
# when there are no changes to the schema
# reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
def process_revision_directives(context, revision, directives):
if getattr(config.cmd_opts, 'autogenerate', False):
script = directives[0]
if script.upgrade_ops.is_empty():
directives[:] = []
logger.info('No changes in schema detected.')
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata,
process_revision_directives=process_revision_directives,
**current_app.extensions['migrate'].configure_args
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
| true | true |
f71a5f10643eea16f3e9e3317d0eb53ee89dcc29 | 4,484 | py | Python | setup.py | btcdrak/mitmproxy | cacee3871c6a9f0be7127f3c790e09a1daaf8490 | [
"MIT"
] | 1 | 2018-03-31T17:16:07.000Z | 2018-03-31T17:16:07.000Z | setup.py | btcdrak/mitmproxy | cacee3871c6a9f0be7127f3c790e09a1daaf8490 | [
"MIT"
] | null | null | null | setup.py | btcdrak/mitmproxy | cacee3871c6a9f0be7127f3c790e09a1daaf8490 | [
"MIT"
] | 4 | 2018-04-18T13:17:01.000Z | 2021-02-21T17:08:33.000Z | from setuptools import setup, find_packages
from codecs import open
import os
from netlib import version
# Based on https://github.com/pypa/sampleproject/blob/master/setup.py
# and https://python-packaging-user-guide.readthedocs.org/
# Read the long description from the README next to this file.
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()
setup(
    name="mitmproxy",
    version=version.VERSION,
    description="An interactive, SSL-capable, man-in-the-middle HTTP proxy for penetration testers and software developers.",
    long_description=long_description,
    url="http://mitmproxy.org",
    author="Aldo Cortesi",
    author_email="aldo@corte.si",
    license="MIT",
    # Trove classifiers: supported OSes, Python versions and topics.
    classifiers=[
        "License :: OSI Approved :: MIT License",
        "Development Status :: 5 - Production/Stable",
        "Environment :: Console",
        "Environment :: Console :: Curses",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: POSIX",
        "Operating System :: Microsoft :: Windows",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Topic :: Security",
        "Topic :: Internet",
        "Topic :: Internet :: WWW/HTTP",
        "Topic :: Internet :: Proxy Servers",
        "Topic :: Software Development :: Testing"
    ],
    # Ship the three sibling packages from this repository.
    packages=find_packages(include=[
        "mitmproxy", "mitmproxy.*",
        "pathod", "pathod.*",
        "netlib", "netlib.*"
    ]),
    include_package_data=True,
    # Command-line entry points installed alongside the package.
    entry_points={
        'console_scripts': [
            "mitmproxy = mitmproxy.main:mitmproxy",
            "mitmdump = mitmproxy.main:mitmdump",
            "mitmweb = mitmproxy.main:mitmweb",
            "pathod = pathod.pathod_cmdline:go_pathod",
            "pathoc = pathod.pathoc_cmdline:go_pathoc"
        ]
    },
    # https://packaging.python.org/en/latest/requirements/#install-requires
    # It is not considered best practice to use install_requires to pin dependencies to specific versions.
    install_requires=[
        "backports.ssl_match_hostname>=3.5.0.1, <3.6",
        "blinker>=1.4, <1.5",
        "click>=6.2, <7.0",
        "certifi>=2015.11.20.1",  # no semver here - this should always be on the last release!
        "configargparse>=0.10, <0.11",
        "construct>=2.5.2, <2.6",
        "cryptography>=1.3, <1.5",
        "cssutils>=1.0.1, <1.1",
        "Flask>=0.10.1, <0.12",
        "h2>=2.4.0, <3",
        "html2text>=2016.1.8, <=2016.5.29",
        "hyperframe>=4.0.1, <5",
        "jsbeautifier>=1.6.3, <1.7",
        "lxml>=3.5.0, <=3.6.0",  # no wheels for 3.6.1 yet.
        "Pillow>=3.2, <3.4",
        "passlib>=1.6.5, <1.7",
        "pyasn1>=0.1.9, <0.2",
        "pyOpenSSL>=16.0, <17.0",
        "pyparsing>=2.1.3, <2.2",
        "pyperclip>=1.5.22, <1.6",
        "requests>=2.9.1, <2.12",
        "six>=1.10, <1.11",
        "tornado>=4.3, <4.5",
        "urwid>=1.3.1, <1.4",
        "watchdog>=0.8.3, <0.9",
        "brotlipy>=0.3.0, <0.5",
    ],
    # Optional dependency groups, including platform/version conditionals.
    extras_require={
        ':sys_platform == "win32"': [
            "pydivert>=0.0.7, <0.1",
        ],
        ':sys_platform != "win32"': [
        ],
        # Do not use a range operator here: https://bitbucket.org/pypa/setuptools/issues/380
        # Ubuntu Trusty and other still ship with setuptools < 17.1
        ':python_version == "2.7"': [
            "enum34>=1.0.4, <2",
            "ipaddress>=1.0.15, <1.1",
            "typing==3.5.2.2",
        ],
        'dev': [
            "tox>=2.3, <3",
            "mock>=2.0, <2.1",
            "pytest>=2.8.7, <3",
            "pytest-cov>=2.2.1, <3",
            "pytest-timeout>=1.0.0, <2",
            "pytest-xdist>=1.14, <2",
            "sphinx>=1.3.5, <1.5",
            "sphinx-autobuild>=0.5.2, <0.7",
            "sphinxcontrib-documentedlist>=0.4.0, <0.5",
            "sphinx_rtd_theme>=0.1.9, <0.2",
        ],
        'contentviews': [
            # TODO: Find Python 3 replacements
            # "protobuf>=2.6.1, <2.7",
            # "pyamf>=0.8.0, <0.9",
        ],
        'examples': [
            "beautifulsoup4>=4.4.1, <4.6",
            "pytz>=2015.07.0, <=2016.6.1",
        ]
    }
)
| 35.587302 | 125 | 0.533898 | from setuptools import setup, find_packages
from codecs import open
import os
from netlib import version
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name="mitmproxy",
version=version.VERSION,
description="An interactive, SSL-capable, man-in-the-middle HTTP proxy for penetration testers and software developers.",
long_description=long_description,
url="http://mitmproxy.org",
author="Aldo Cortesi",
author_email="aldo@corte.si",
license="MIT",
classifiers=[
"License :: OSI Approved :: MIT License",
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Environment :: Console :: Curses",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Security",
"Topic :: Internet",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: Proxy Servers",
"Topic :: Software Development :: Testing"
],
packages=find_packages(include=[
"mitmproxy", "mitmproxy.*",
"pathod", "pathod.*",
"netlib", "netlib.*"
]),
include_package_data=True,
entry_points={
'console_scripts': [
"mitmproxy = mitmproxy.main:mitmproxy",
"mitmdump = mitmproxy.main:mitmdump",
"mitmweb = mitmproxy.main:mitmweb",
"pathod = pathod.pathod_cmdline:go_pathod",
"pathoc = pathod.pathoc_cmdline:go_pathoc"
]
},
_requires=[
"backports.ssl_match_hostname>=3.5.0.1, <3.6",
"blinker>=1.4, <1.5",
"click>=6.2, <7.0",
"certifi>=2015.11.20.1",
"configargparse>=0.10, <0.11",
"construct>=2.5.2, <2.6",
"cryptography>=1.3, <1.5",
"cssutils>=1.0.1, <1.1",
"Flask>=0.10.1, <0.12",
"h2>=2.4.0, <3",
"html2text>=2016.1.8, <=2016.5.29",
"hyperframe>=4.0.1, <5",
"jsbeautifier>=1.6.3, <1.7",
"lxml>=3.5.0, <=3.6.0",
"Pillow>=3.2, <3.4",
"passlib>=1.6.5, <1.7",
"pyasn1>=0.1.9, <0.2",
"pyOpenSSL>=16.0, <17.0",
"pyparsing>=2.1.3, <2.2",
"pyperclip>=1.5.22, <1.6",
"requests>=2.9.1, <2.12",
"six>=1.10, <1.11",
"tornado>=4.3, <4.5",
"urwid>=1.3.1, <1.4",
"watchdog>=0.8.3, <0.9",
"brotlipy>=0.3.0, <0.5",
],
extras_require={
':sys_platform == "win32"': [
"pydivert>=0.0.7, <0.1",
],
':sys_platform != "win32"': [
],
':python_version == "2.7"': [
"enum34>=1.0.4, <2",
"ipaddress>=1.0.15, <1.1",
"typing==3.5.2.2",
],
'dev': [
"tox>=2.3, <3",
"mock>=2.0, <2.1",
"pytest>=2.8.7, <3",
"pytest-cov>=2.2.1, <3",
"pytest-timeout>=1.0.0, <2",
"pytest-xdist>=1.14, <2",
"sphinx>=1.3.5, <1.5",
"sphinx-autobuild>=0.5.2, <0.7",
"sphinxcontrib-documentedlist>=0.4.0, <0.5",
"sphinx_rtd_theme>=0.1.9, <0.2",
],
'contentviews': [
],
'examples': [
"beautifulsoup4>=4.4.1, <4.6",
"pytz>=2015.07.0, <=2016.6.1",
]
}
)
| true | true |
f71a5f3662e8e2e441c743a6c1f62a562f34d623 | 2,570 | py | Python | homeassistant/components/fibaro/binary_sensor.py | VirtualL/home-assistant | 301829d02be8d865ab46c8901ac046d060849320 | [
"Apache-2.0"
] | 2 | 2017-10-26T19:43:55.000Z | 2017-12-30T23:29:00.000Z | homeassistant/components/fibaro/binary_sensor.py | VirtualL/home-assistant | 301829d02be8d865ab46c8901ac046d060849320 | [
"Apache-2.0"
] | 3 | 2021-09-08T03:34:57.000Z | 2022-03-12T00:59:48.000Z | homeassistant/components/fibaro/binary_sensor.py | VirtualL/home-assistant | 301829d02be8d865ab46c8901ac046d060849320 | [
"Apache-2.0"
] | 1 | 2019-06-19T07:43:11.000Z | 2019-06-19T07:43:11.000Z | """Support for Fibaro binary sensors."""
import logging
from homeassistant.components.binary_sensor import (
ENTITY_ID_FORMAT, BinarySensorDevice)
from homeassistant.const import CONF_DEVICE_CLASS, CONF_ICON
from . import FIBARO_DEVICES, FibaroDevice
DEPENDENCIES = ['fibaro']
_LOGGER = logging.getLogger(__name__)
SENSOR_TYPES = {
'com.fibaro.floodSensor': ['Flood', 'mdi:water', 'flood'],
'com.fibaro.motionSensor': ['Motion', 'mdi:run', 'motion'],
'com.fibaro.doorSensor': ['Door', 'mdi:window-open', 'door'],
'com.fibaro.windowSensor': ['Window', 'mdi:window-open', 'window'],
'com.fibaro.smokeSensor': ['Smoke', 'mdi:smoking', 'smoke'],
'com.fibaro.FGMS001': ['Motion', 'mdi:run', 'motion'],
'com.fibaro.heatDetector': ['Heat', 'mdi:fire', 'heat'],
}
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Perform the setup for Fibaro controller devices."""
    if discovery_info is None:
        return
    # Wrap every discovered Fibaro binary-sensor device in an HA entity.
    discovered = hass.data[FIBARO_DEVICES]['binary_sensor']
    entities = [FibaroBinarySensor(device) for device in discovered]
    add_entities(entities, True)
class FibaroBinarySensor(FibaroDevice, BinarySensorDevice):
    """Representation of a Fibaro Binary Sensor."""

    def __init__(self, fibaro_device):
        """Initialize the binary_sensor."""
        self._state = None
        super().__init__(fibaro_device)
        self.entity_id = ENTITY_ID_FORMAT.format(self.ha_id)
        # Resolve sensor metadata from the concrete device type first,
        # then the base type; unknown types get no icon/device class.
        if fibaro_device.type in SENSOR_TYPES:
            sensor_type = fibaro_device.type
        elif fibaro_device.baseType in SENSOR_TYPES:
            sensor_type = fibaro_device.baseType
        else:
            sensor_type = None
        if sensor_type:
            _, icon, device_class = SENSOR_TYPES[sensor_type]
        else:
            icon, device_class = None, None
        # Per-device configuration overrides the type-derived defaults.
        devconf = fibaro_device.device_config
        self._device_class = devconf.get(CONF_DEVICE_CLASS, device_class)
        self._icon = devconf.get(CONF_ICON, icon)

    @property
    def icon(self):
        """Icon to use in the frontend, if any."""
        return self._icon

    @property
    def device_class(self):
        """Return the device class of the sensor."""
        return self._device_class

    @property
    def is_on(self):
        """Return true if sensor is on."""
        return self._state

    def update(self):
        """Get the latest data and update the state."""
        self._state = self.current_binary_state
| 32.948718 | 73 | 0.643191 | import logging
from homeassistant.components.binary_sensor import (
ENTITY_ID_FORMAT, BinarySensorDevice)
from homeassistant.const import CONF_DEVICE_CLASS, CONF_ICON
from . import FIBARO_DEVICES, FibaroDevice
DEPENDENCIES = ['fibaro']
_LOGGER = logging.getLogger(__name__)
SENSOR_TYPES = {
'com.fibaro.floodSensor': ['Flood', 'mdi:water', 'flood'],
'com.fibaro.motionSensor': ['Motion', 'mdi:run', 'motion'],
'com.fibaro.doorSensor': ['Door', 'mdi:window-open', 'door'],
'com.fibaro.windowSensor': ['Window', 'mdi:window-open', 'window'],
'com.fibaro.smokeSensor': ['Smoke', 'mdi:smoking', 'smoke'],
'com.fibaro.FGMS001': ['Motion', 'mdi:run', 'motion'],
'com.fibaro.heatDetector': ['Heat', 'mdi:fire', 'heat'],
}
def setup_platform(hass, config, add_entities, discovery_info=None):
if discovery_info is None:
return
add_entities(
[FibaroBinarySensor(device)
for device in hass.data[FIBARO_DEVICES]['binary_sensor']], True)
class FibaroBinarySensor(FibaroDevice, BinarySensorDevice):
def __init__(self, fibaro_device):
self._state = None
super().__init__(fibaro_device)
self.entity_id = ENTITY_ID_FORMAT.format(self.ha_id)
stype = None
devconf = fibaro_device.device_config
if fibaro_device.type in SENSOR_TYPES:
stype = fibaro_device.type
elif fibaro_device.baseType in SENSOR_TYPES:
stype = fibaro_device.baseType
if stype:
self._device_class = SENSOR_TYPES[stype][2]
self._icon = SENSOR_TYPES[stype][1]
else:
self._device_class = None
self._icon = None
self._device_class = devconf.get(CONF_DEVICE_CLASS,
self._device_class)
self._icon = devconf.get(CONF_ICON, self._icon)
@property
def icon(self):
return self._icon
@property
def device_class(self):
return self._device_class
@property
def is_on(self):
return self._state
def update(self):
self._state = self.current_binary_state
| true | true |
f71a5f6bde441477b83381af68fd302a858044d3 | 338 | py | Python | fixture_packages/no_mp/setup.py | DuncanBetts/morepath | acad10489b051df9c512f6735a9338854745a599 | [
"BSD-3-Clause"
] | null | null | null | fixture_packages/no_mp/setup.py | DuncanBetts/morepath | acad10489b051df9c512f6735a9338854745a599 | [
"BSD-3-Clause"
] | null | null | null | fixture_packages/no_mp/setup.py | DuncanBetts/morepath | acad10489b051df9c512f6735a9338854745a599 | [
"BSD-3-Clause"
] | null | null | null | import os
from setuptools import setup, find_packages
# Minimal setuptools metadata for the "no_mp" test fixture package;
# it intentionally declares no install dependencies.
setup(name='no_mp',
      version = '0.1.dev0',
      description="No Mp Test Fixture",
      author="Martijn Faassen",
      author_email="faassen@startifact.com",
      license="BSD",
      packages=find_packages(),
      zip_safe=False,
      install_requires=[
      ]
      )
| 22.533333 | 44 | 0.62426 | import os
from setuptools import setup, find_packages
setup(name='no_mp',
version = '0.1.dev0',
description="No Mp Test Fixture",
author="Martijn Faassen",
author_email="faassen@startifact.com",
license="BSD",
packages=find_packages(),
zip_safe=False,
install_requires=[
]
)
| true | true |
f71a60c2e83e89f0d85d50940ea141974ce4e00d | 5,431 | py | Python | homeassistant/components/geo_rss_events/sensor.py | VirtualL/home-assistant | 301829d02be8d865ab46c8901ac046d060849320 | [
"Apache-2.0"
] | null | null | null | homeassistant/components/geo_rss_events/sensor.py | VirtualL/home-assistant | 301829d02be8d865ab46c8901ac046d060849320 | [
"Apache-2.0"
] | 3 | 2021-09-08T03:34:57.000Z | 2022-03-12T00:59:48.000Z | homeassistant/components/geo_rss_events/sensor.py | VirtualL/home-assistant | 301829d02be8d865ab46c8901ac046d060849320 | [
"Apache-2.0"
] | null | null | null | """
Generic GeoRSS events service.
Retrieves current events (typically incidents or alerts) in GeoRSS format, and
shows information on events filtered by distance to the HA instance's location
and grouped by category.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.geo_rss_events/
"""
import logging
from datetime import timedelta
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_UNIT_OF_MEASUREMENT, CONF_NAME,
CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_URL)
from homeassistant.helpers.entity import Entity
REQUIREMENTS = ['georss_generic_client==0.2']
_LOGGER = logging.getLogger(__name__)
ATTR_CATEGORY = 'category'
ATTR_DISTANCE = 'distance'
ATTR_TITLE = 'title'
CONF_CATEGORIES = 'categories'
DEFAULT_ICON = 'mdi:alert'
DEFAULT_NAME = "Event Service"
DEFAULT_RADIUS_IN_KM = 20.0
DEFAULT_UNIT_OF_MEASUREMENT = 'Events'
DOMAIN = 'geo_rss_events'
SCAN_INTERVAL = timedelta(minutes=5)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_URL): cv.string,
vol.Optional(CONF_LATITUDE): cv.latitude,
vol.Optional(CONF_LONGITUDE): cv.longitude,
vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS_IN_KM): vol.Coerce(float),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_CATEGORIES, default=[]):
vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_UNIT_OF_MEASUREMENT,
default=DEFAULT_UNIT_OF_MEASUREMENT): cv.string,
})
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the GeoRSS component."""
    latitude = config.get(CONF_LATITUDE, hass.config.latitude)
    longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
    url = config.get(CONF_URL)
    radius_in_km = config.get(CONF_RADIUS)
    name = config.get(CONF_NAME)
    categories = config.get(CONF_CATEGORIES)
    unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT)
    _LOGGER.debug("latitude=%s, longitude=%s, url=%s, radius=%s",
                  latitude, longitude, url, radius_in_km)
    # One sensor per configured category; a single catch-all sensor
    # (category=None) when no categories were configured.
    selected_categories = categories if categories else [None]
    devices = [
        GeoRssServiceSensor((latitude, longitude), url, radius_in_km,
                            category, name, unit_of_measurement)
        for category in selected_categories
    ]
    add_entities(devices, True)
class GeoRssServiceSensor(Entity):
    """Representation of a Sensor."""
    def __init__(self, coordinates, url, radius, category, service_name,
                 unit_of_measurement):
        """Initialize the sensor.

        coordinates is the (latitude, longitude) home location used for
        distance filtering; a category of None means "match any category".
        """
        self._category = category
        self._service_name = service_name
        self._state = None
        self._state_attributes = None
        self._unit_of_measurement = unit_of_measurement
        # Imported lazily so the third-party dependency is only needed at
        # runtime, after REQUIREMENTS have been installed.
        from georss_client.generic_feed import GenericFeed
        self._feed = GenericFeed(coordinates, url, filter_radius=radius,
                                 filter_categories=None if not category
                                 else [category])
    @property
    def name(self):
        """Return the name of the sensor."""
        return '{} {}'.format(self._service_name,
                              'Any' if self._category is None
                              else self._category)
    @property
    def state(self):
        """Return the state of the sensor (number of matching events)."""
        return self._state
    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return self._unit_of_measurement
    @property
    def icon(self):
        """Return the default icon to use in the frontend."""
        return DEFAULT_ICON
    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        return self._state_attributes
    def update(self):
        """Update this sensor from the GeoRSS service."""
        import georss_client
        status, feed_entries = self._feed.update()
        if status == georss_client.UPDATE_OK:
            _LOGGER.debug("Adding events to sensor %s: %s", self.entity_id,
                          feed_entries)
            # State is the number of events inside the configured radius.
            self._state = len(feed_entries)
            # Compute the attributes from the filtered events: one entry
            # per event title, mapped to its rounded distance from home.
            matrix = {}
            for entry in feed_entries:
                matrix[entry.title] = '{:.0f}km'.format(
                    entry.distance_to_home)
            self._state_attributes = matrix
        elif status == georss_client.UPDATE_OK_NO_DATA:
            _LOGGER.debug("Update successful, but no data received from %s",
                          self._feed)
            # Don't change the state or state attributes.
        else:
            _LOGGER.warning("Update not successful, no data received from %s",
                            self._feed)
            # If no events were found due to an error then just set state to
            # zero.
            self._state = 0
            self._state_attributes = {}
| 35.730263 | 79 | 0.645369 | import logging
from datetime import timedelta
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_UNIT_OF_MEASUREMENT, CONF_NAME,
CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_URL)
from homeassistant.helpers.entity import Entity
REQUIREMENTS = ['georss_generic_client==0.2']
_LOGGER = logging.getLogger(__name__)
ATTR_CATEGORY = 'category'
ATTR_DISTANCE = 'distance'
ATTR_TITLE = 'title'
CONF_CATEGORIES = 'categories'
DEFAULT_ICON = 'mdi:alert'
DEFAULT_NAME = "Event Service"
DEFAULT_RADIUS_IN_KM = 20.0
DEFAULT_UNIT_OF_MEASUREMENT = 'Events'
DOMAIN = 'geo_rss_events'
SCAN_INTERVAL = timedelta(minutes=5)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_URL): cv.string,
vol.Optional(CONF_LATITUDE): cv.latitude,
vol.Optional(CONF_LONGITUDE): cv.longitude,
vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS_IN_KM): vol.Coerce(float),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_CATEGORIES, default=[]):
vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_UNIT_OF_MEASUREMENT,
default=DEFAULT_UNIT_OF_MEASUREMENT): cv.string,
})
def setup_platform(hass, config, add_entities, discovery_info=None):
latitude = config.get(CONF_LATITUDE, hass.config.latitude)
longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
url = config.get(CONF_URL)
radius_in_km = config.get(CONF_RADIUS)
name = config.get(CONF_NAME)
categories = config.get(CONF_CATEGORIES)
unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT)
_LOGGER.debug("latitude=%s, longitude=%s, url=%s, radius=%s",
latitude, longitude, url, radius_in_km)
devices = []
if not categories:
device = GeoRssServiceSensor((latitude, longitude), url,
radius_in_km, None, name,
unit_of_measurement)
devices.append(device)
else:
for category in categories:
device = GeoRssServiceSensor((latitude, longitude), url,
radius_in_km, category, name,
unit_of_measurement)
devices.append(device)
add_entities(devices, True)
class GeoRssServiceSensor(Entity):
    """Sensor whose state is the number of GeoRSS feed entries near home.

    One instance covers either a single feed category or (when
    ``category`` is None) all categories of the configured feed.
    """

    def __init__(self, coordinates, url, radius, category, service_name,
                 unit_of_measurement):
        """Initialize the sensor and its underlying GeoRSS feed client."""
        self._category = category
        self._service_name = service_name
        self._state = None
        self._state_attributes = None
        self._unit_of_measurement = unit_of_measurement
        # Imported lazily so the dependency is only needed when the
        # platform is actually set up.
        from georss_client.generic_feed import GenericFeed
        self._feed = GenericFeed(coordinates, url, filter_radius=radius,
                                 filter_categories=None if not category
                                 else [category])

    @property
    def name(self):
        """Return the entity name: '<service> <category>' or '<service> Any'."""
        return '{} {}'.format(self._service_name,
                              'Any' if self._category is None
                              else self._category)

    @property
    def state(self):
        """Return the number of matching feed entries from the last update."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the configured unit of measurement."""
        return self._unit_of_measurement

    @property
    def icon(self):
        """Return the icon to use in the frontend."""
        return DEFAULT_ICON

    @property
    def device_state_attributes(self):
        """Return a mapping of entry title -> distance-from-home string."""
        return self._state_attributes

    def update(self):
        """Refresh the feed and recompute state and attributes."""
        import georss_client
        status, feed_entries = self._feed.update()
        if status == georss_client.UPDATE_OK:
            _LOGGER.debug("Adding events to sensor %s: %s", self.entity_id,
                          feed_entries)
            self._state = len(feed_entries)
            # Attribute per entry: title -> rounded distance in km.
            matrix = {}
            for entry in feed_entries:
                matrix[entry.title] = '{:.0f}km'.format(
                    entry.distance_to_home)
            self._state_attributes = matrix
        elif status == georss_client.UPDATE_OK_NO_DATA:
            # Feed unchanged; keep the previous state/attributes.
            _LOGGER.debug("Update successful, but no data received from %s",
                          self._feed)
        else:
            _LOGGER.warning("Update not successful, no data received from %s",
                            self._feed)
            # If no events were found due to an error then just set state to
            # zero.
            self._state = 0
            self._state_attributes = {}
| true | true |
f71a60d6ac54cd0f6a8035a072455dd7fe920d40 | 3,670 | py | Python | akshare/stock/stock_rank_forecast.py | J-Z-Z/akshare | 0a9ca71b381a272e2f56211e455ff2493dfed17a | [
"MIT"
] | 721 | 2021-09-21T12:10:33.000Z | 2022-03-31T09:47:01.000Z | akshare/stock/stock_rank_forecast.py | J-Z-Z/akshare | 0a9ca71b381a272e2f56211e455ff2493dfed17a | [
"MIT"
] | 135 | 2021-09-21T12:07:54.000Z | 2022-03-31T14:15:36.000Z | akshare/stock/stock_rank_forecast.py | J-Z-Z/akshare | 0a9ca71b381a272e2f56211e455ff2493dfed17a | [
"MIT"
] | 234 | 2021-09-21T12:16:27.000Z | 2022-03-31T09:47:04.000Z | #!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
Date: 2021/9/12 18:29
Desc: 巨潮资讯-数据中心-评级预测-投资评级
http://webapi.cninfo.com.cn/#/thematicStatistics?name=%E6%8A%95%E8%B5%84%E8%AF%84%E7%BA%A7
"""
import time
from py_mini_racer import py_mini_racer
import requests
import pandas as pd
js_str = """
function mcode(input) {
var keyStr = "ABCDEFGHIJKLMNOP" + "QRSTUVWXYZabcdef" + "ghijklmnopqrstuv" + "wxyz0123456789+/" + "=";
var output = "";
var chr1, chr2, chr3 = "";
var enc1, enc2, enc3, enc4 = "";
var i = 0;
do {
chr1 = input.charCodeAt(i++);
chr2 = input.charCodeAt(i++);
chr3 = input.charCodeAt(i++);
enc1 = chr1 >> 2;
enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
enc4 = chr3 & 63;
if (isNaN(chr2)) {
enc3 = enc4 = 64;
} else if (isNaN(chr3)) {
enc4 = 64;
}
output = output + keyStr.charAt(enc1) + keyStr.charAt(enc2)
+ keyStr.charAt(enc3) + keyStr.charAt(enc4);
chr1 = chr2 = chr3 = "";
enc1 = enc2 = enc3 = enc4 = "";
} while (i < input.length);
return output;
}
"""
def stock_rank_forecast_cninfo(date: str = "20210910") -> pd.DataFrame:
    """
    CNINFO data center - rating forecast - investment ratings for one day.
    http://webapi.cninfo.com.cn/#/thematicStatistics?name=%E6%8A%95%E8%B5%84%E8%AF%84%E7%BA%A7
    :param date: query date formatted as YYYYMMDD
    :type date: str
    :return: investment ratings published on that day
    :rtype: pandas.DataFrame
    :raises requests.HTTPError: if the server rejects the request
    """
    url = "http://webapi.cninfo.com.cn/api/sysapi/p_sysapi1089"
    params = {"tdate": "-".join([date[:4], date[4:6], date[6:]])}
    # The endpoint requires an "mcode" header: the current unix timestamp
    # encoded by the site's own JS routine (replicated in js_str above).
    random_time_str = str(int(time.time()))
    js_code = py_mini_racer.MiniRacer()
    js_code.eval(js_str)
    mcode = js_code.call("mcode", random_time_str)
    headers = {
        "Accept": "*/*",
        "Accept-Encoding": "gzip, deflate",
        "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
        "Cache-Control": "no-cache",
        "Content-Length": "0",
        "Host": "webapi.cninfo.com.cn",
        "mcode": mcode,
        "Origin": "http://webapi.cninfo.com.cn",
        "Pragma": "no-cache",
        "Proxy-Connection": "keep-alive",
        "Referer": "http://webapi.cninfo.com.cn/",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.36",
        "X-Requested-With": "XMLHttpRequest",
    }
    r = requests.post(url, params=params, headers=headers)
    # Fail fast with a clear HTTPError instead of a confusing JSON decode
    # or KeyError further down when the server rejects the request.
    r.raise_for_status()
    data_json = r.json()
    temp_df = pd.DataFrame(data_json["records"])
    # Rename the API's positional columns to their display names.
    temp_df.columns = [
        "证券简称",
        "发布日期",
        "前一次投资评级",
        "评级变化",
        "目标价格-上限",
        "是否首次评级",
        "投资评级",
        "研究员名称",
        "研究机构简称",
        "目标价格-下限",
        "证券代码",
    ]
    # Reorder the columns for presentation.
    temp_df = temp_df[[
        "证券代码",
        "证券简称",
        "发布日期",
        "研究机构简称",
        "研究员名称",
        "投资评级",
        "是否首次评级",
        "评级变化",
        "前一次投资评级",
        "目标价格-下限",
        "目标价格-上限",
    ]]
    # Target prices arrive as strings; coerce unparsable values to NaN.
    temp_df["目标价格-上限"] = pd.to_numeric(temp_df["目标价格-上限"], errors="coerce")
    temp_df["目标价格-下限"] = pd.to_numeric(temp_df["目标价格-下限"], errors="coerce")
    return temp_df
if __name__ == "__main__":
stock_rank_forecast_cninfo_df = stock_rank_forecast_cninfo(date="20210907")
print(stock_rank_forecast_cninfo_df)
| 33.063063 | 139 | 0.495368 |
import time
from py_mini_racer import py_mini_racer
import requests
import pandas as pd
js_str = """
function mcode(input) {
var keyStr = "ABCDEFGHIJKLMNOP" + "QRSTUVWXYZabcdef" + "ghijklmnopqrstuv" + "wxyz0123456789+/" + "=";
var output = "";
var chr1, chr2, chr3 = "";
var enc1, enc2, enc3, enc4 = "";
var i = 0;
do {
chr1 = input.charCodeAt(i++);
chr2 = input.charCodeAt(i++);
chr3 = input.charCodeAt(i++);
enc1 = chr1 >> 2;
enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
enc4 = chr3 & 63;
if (isNaN(chr2)) {
enc3 = enc4 = 64;
} else if (isNaN(chr3)) {
enc4 = 64;
}
output = output + keyStr.charAt(enc1) + keyStr.charAt(enc2)
+ keyStr.charAt(enc3) + keyStr.charAt(enc4);
chr1 = chr2 = chr3 = "";
enc1 = enc2 = enc3 = enc4 = "";
} while (i < input.length);
return output;
}
"""
def stock_rank_forecast_cninfo(date: str = "20210910") -> pd.DataFrame:
    """Fetch CNINFO investment-rating forecasts published on the given day.

    :param date: query date formatted as YYYYMMDD
    :return: one row per published rating, as a pandas DataFrame
    """
    url = "http://webapi.cninfo.com.cn/api/sysapi/p_sysapi1089"
    params = {"tdate": "-".join([date[:4], date[4:6], date[6:]])}
    # The endpoint requires an "mcode" header: the current unix timestamp
    # encoded by the site's JS routine (replicated in js_str above).
    random_time_str = str(int(time.time()))
    js_code = py_mini_racer.MiniRacer()
    js_code.eval(js_str)
    mcode = js_code.call("mcode", random_time_str)
    headers = {
        "Accept": "*/*",
        "Accept-Encoding": "gzip, deflate",
        "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
        "Cache-Control": "no-cache",
        "Content-Length": "0",
        "Host": "webapi.cninfo.com.cn",
        "mcode": mcode,
        "Origin": "http://webapi.cninfo.com.cn",
        "Pragma": "no-cache",
        "Proxy-Connection": "keep-alive",
        "Referer": "http://webapi.cninfo.com.cn/",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.36",
        "X-Requested-With": "XMLHttpRequest",
    }
    r = requests.post(url, params=params, headers=headers)
    data_json = r.json()
    temp_df = pd.DataFrame(data_json["records"])
    # Rename the API's positional columns to their display names.
    temp_df.columns = [
        "证券简称",
        "发布日期",
        "前一次投资评级",
        "评级变化",
        "目标价格-上限",
        "是否首次评级",
        "投资评级",
        "研究员名称",
        "研究机构简称",
        "目标价格-下限",
        "证券代码",
    ]
    # Reorder the columns for presentation.
    temp_df = temp_df[[
        "证券代码",
        "证券简称",
        "发布日期",
        "研究机构简称",
        "研究员名称",
        "投资评级",
        "是否首次评级",
        "评级变化",
        "前一次投资评级",
        "目标价格-下限",
        "目标价格-上限",
    ]]
    # Target prices arrive as strings; coerce unparsable values to NaN.
    temp_df["目标价格-上限"] = pd.to_numeric(temp_df["目标价格-上限"], errors="coerce")
    temp_df["目标价格-下限"] = pd.to_numeric(temp_df["目标价格-下限"], errors="coerce")
    return temp_df
if __name__ == "__main__":
stock_rank_forecast_cninfo_df = stock_rank_forecast_cninfo(date="20210907")
print(stock_rank_forecast_cninfo_df)
| true | true |
f71a61f85926c5c06fd0a3030685cd6256d6daab | 7,369 | py | Python | coremltools/converters/mil/mil/passes/conv_scale_fusion.py | LaudateCorpus1/coremltools | 777a4460d6823e5e91dea4fa3eacb0b11c7d5dfc | [
"BSD-3-Clause"
] | null | null | null | coremltools/converters/mil/mil/passes/conv_scale_fusion.py | LaudateCorpus1/coremltools | 777a4460d6823e5e91dea4fa3eacb0b11c7d5dfc | [
"BSD-3-Clause"
] | null | null | null | coremltools/converters/mil/mil/passes/conv_scale_fusion.py | LaudateCorpus1/coremltools | 777a4460d6823e5e91dea4fa3eacb0b11c7d5dfc | [
"BSD-3-Clause"
] | null | null | null | # Copyright (c) 2021, Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can be
# found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
import numpy as np
from coremltools.converters.mil.mil.passes.pass_registry import register_pass
from coremltools.converters.mil.mil.passes.graph_pass import AbstractGraphPass
from coremltools.converters.mil.mil import Builder as mb
def _try_to_transform(conv_op, scale_op, block):
    """Fold a constant mul/real_div (``scale_op``) into the preceding
    conv/conv_transpose (``conv_op``) by rescaling its weight and bias.

    Rewrites ``block`` in place (new conv op, old ops removed) and returns
    True on success; returns False when the pattern cannot be fused
    (non-constant scale, missing conv weights, or a scale shape that does
    not broadcast over the output channels).
    """
    # get the scale
    if scale_op.x.val is None and scale_op.y.val is None:
        return False
    scale_var = scale_op.x if scale_op.x.val is not None else scale_op.y
    scale = scale_var.val
    # for the scalar case, the scalar can be either
    # 1. a python int/float
    # 2. a 0d numpy array
    # 3. a 1d numpy array with shape (1,)
    is_scalar = True
    if isinstance(scale, np.ndarray):
        if scale.shape == ():
            scale = scale.tolist()
        # NOTE(review): `scale.shape == (1)` compares a tuple with the int 1
        # and is always False; the `(1,)` test is the one that matches.
        elif scale.shape == (1) or scale.shape == (1,):
            scale = scale[0]
        else:
            is_scalar = False
    # get weight and bias and groups from conv layer
    if conv_op.weight.val is None:
        return False
    conv_weight = conv_op.weight.val
    conv_bias = conv_op.bias
    groups = conv_op.groups.val
    # get type of the conv layer
    is_deconv = conv_op.op_type == 'conv_transpose'
    is_conv_1d = len(conv_weight.shape) == 3
    # D_in denotes the spatial dimensions for conv kernel weight
    # for conv_transpose, conv_weight has shape [Cin, Cout / groups, *D_in]
    # for conv, conv_weight has shape [Cout, Cin / groups, *D_in]
    if is_deconv:
        Cout = conv_weight.shape[1] * groups
        Cin = conv_weight.shape[0]
    else:
        Cout = conv_weight.shape[0]
        Cin = conv_weight.shape[1] * groups
    # for the vector scale case, check if the shape is broacastable
    if not is_scalar:
        if not np.product(scale.shape) == Cout:
            return False
        if len(scale.shape) == len(conv_weight.shape):
            if not scale.shape[1] == Cout:
                return False
        elif len(scale.shape) == len(conv_weight.shape) - 1:
            if not scale.shape[0] == Cout:
                return False
        else:
            return False
    # transform the scale to 1./scale for the real_div case
    # NOTE(review): this assumes the constant is the divisor (y) of
    # real_div; a constant numerator (const / conv_output) is not a linear
    # scale of the conv output — confirm the match pattern guarantees this.
    if scale_op.op_type == "real_div":
        scale = 1./scale
    # get the type of the conv weight
    conv_weight_type = conv_weight.dtype
    # create bias for conv if not exist
    if conv_bias is None:
        conv_bias = np.zeros(Cout)
    else:
        conv_bias = conv_bias.val
    conv_bias = conv_bias.astype(conv_weight_type)
    # get the original shape of weight and bias
    origin_weight_shape = conv_weight.shape
    origin_bias_shape = conv_bias.shape
    # update the weight/bias for conv layer
    if is_scalar:
        new_conv_bias = np.array(conv_bias * scale).astype(conv_weight_type)
        new_conv_weight = np.array(conv_weight * scale).astype(conv_weight_type)
    else:
        scale = np.reshape(scale, (Cout))
        new_conv_bias = np.array(conv_bias * scale).astype(conv_weight_type)
        new_conv_weight = []
        # For conv_transpose the output-channel axis is axis 1, so bring it
        # to the front before scaling per channel, then restore afterwards.
        if is_deconv:
            conv_weight = np.transpose(conv_weight, [1, 0, 2] if is_conv_1d else [1, 0, 2, 3])
            conv_weight = np.reshape(conv_weight, [Cout, Cin // groups] + list(conv_weight.shape[2:]))
        for i in range(Cout):
            _conv_weight = conv_weight[i] * scale[i]
            new_conv_weight.append(_conv_weight)
        new_conv_weight = np.array(new_conv_weight).astype(conv_weight_type)
        if is_deconv:
            new_conv_weight = np.reshape(new_conv_weight, [Cout // groups, Cin] + list(new_conv_weight.shape[2:]))
            new_conv_weight = np.transpose(new_conv_weight, [1, 0, 2] if is_conv_1d else [1, 0, 2, 3])
    # make sure the updated weight and bias have the same shape as the original ones
    assert new_conv_weight.shape == origin_weight_shape, "conv weight should have the same shape before and after the fuse_conv_scale pass."
    assert new_conv_bias.shape == origin_bias_shape, "conv bias should have the same shape before and after the fuse_conv_scale pass."
    # create a new conv op with the new weight, bias value, copying rest of the attributes
    out_name = scale_op.outputs[0].name
    conv_kargs = {"weight": new_conv_weight, "bias": new_conv_bias, "name": out_name, "before_op": conv_op}
    for k, v in conv_op.inputs.items():
        if k in ["weight", "bias"]:
            continue
        conv_kargs[k] = v
    if is_deconv:
        x = mb.conv_transpose(**conv_kargs)
    else:
        x = mb.conv(**conv_kargs)
    scale_op.enclosing_block.replace_uses_of_var_after_op(
        anchor_op=scale_op, old_var=scale_op.outputs[0], new_var=x
    )
    # Remove all the ops at once
    block.remove_ops([conv_op, scale_op])
    return True
@register_pass(namespace="common")
class fuse_conv_scale(AbstractGraphPass):
    """
    Fold mul/div into conv/conv_transpose by updating the weight/bias of the convolution layers.
    The scale const can be a single number (scalar) or a vector with a broacasable shape,
    for instance, if the output of the conv/deconv layer is (B, Cout, H, W),
    const of shape (Cout, 1, 1) and (1, Cout, 1, 1) are allowed.
    Given:
        %2 = conv(%1)
        ...
        %3 = mul(%2, constant) # where constant is the scale constant
        ...
    Result:
        %3 = conv(%1)
        ...
    """
    def __init__(self):
        # Ops listed here are left untouched by the pass.
        self.ops_to_skip = set()
    def set_ops_to_skip(self, prog):
        """Hook for subclasses to populate ``ops_to_skip``; no-op here."""
        pass
    def _fuse_conv_scale_block(self, block):
        """Fuse at most one conv+scale pair in ``block`` (recursing into
        nested blocks first). Returns True if a fusion happened."""
        def _match_pattern(op):
            # Return the downstream scale op (mul/real_div) if `op` is a
            # conv/conv_transpose whose single consumer is one; else None.
            if op.op_type == "conv" or op.op_type == "conv_transpose":
                # abort fusion if op output is also a block output
                if op.outputs[0] in op.enclosing_block.outputs:
                    return None
                # the conv output must have exactly one consumer
                child_ops = op.outputs[0].child_ops
                if len(child_ops) == 1:
                    scale_op_candidate = list(child_ops)[0]
                    if scale_op_candidate.op_type in ["mul", "real_div"]:
                        return scale_op_candidate
            return None
        fusion_occurred = False
        for op in list(block.operations):
            for b in op.blocks:
                block_changed = True
                while block_changed:
                    block_changed = self._fuse_conv_scale_block(b)
            if len(op.blocks) > 0:
                # This op can't be conv or conv_transpose
                continue
            scale_op = _match_pattern(op)
            if op in self.ops_to_skip or scale_op in self.ops_to_skip:
                continue
            if scale_op is not None:
                with block:
                    fusion_occurred = _try_to_transform(op, scale_op, block)
                # has to break as the downstream iterator is affected.
                if fusion_occurred:
                    return fusion_occurred
        return fusion_occurred
    def apply(self, prog):
        """Run the fusion to a fixed point on every function in ``prog``."""
        self.set_ops_to_skip(prog)
        for f in prog.functions.values():
            block_changed = True
            while block_changed:
                block_changed = self._fuse_conv_scale_block(f)
| 36.122549 | 140 | 0.627086 |
import numpy as np
from coremltools.converters.mil.mil.passes.pass_registry import register_pass
from coremltools.converters.mil.mil.passes.graph_pass import AbstractGraphPass
from coremltools.converters.mil.mil import Builder as mb
def _try_to_transform(conv_op, scale_op, block):
if scale_op.x.val is None and scale_op.y.val is None:
return False
scale_var = scale_op.x if scale_op.x.val is not None else scale_op.y
scale = scale_var.val
is_scalar = True
if isinstance(scale, np.ndarray):
if scale.shape == ():
scale = scale.tolist()
elif scale.shape == (1) or scale.shape == (1,):
scale = scale[0]
else:
is_scalar = False
if conv_op.weight.val is None:
return False
conv_weight = conv_op.weight.val
conv_bias = conv_op.bias
groups = conv_op.groups.val
is_deconv = conv_op.op_type == 'conv_transpose'
is_conv_1d = len(conv_weight.shape) == 3
if is_deconv:
Cout = conv_weight.shape[1] * groups
Cin = conv_weight.shape[0]
else:
Cout = conv_weight.shape[0]
Cin = conv_weight.shape[1] * groups
if not is_scalar:
if not np.product(scale.shape) == Cout:
return False
if len(scale.shape) == len(conv_weight.shape):
if not scale.shape[1] == Cout:
return False
elif len(scale.shape) == len(conv_weight.shape) - 1:
if not scale.shape[0] == Cout:
return False
else:
return False
if scale_op.op_type == "real_div":
scale = 1./scale
conv_weight_type = conv_weight.dtype
if conv_bias is None:
conv_bias = np.zeros(Cout)
else:
conv_bias = conv_bias.val
conv_bias = conv_bias.astype(conv_weight_type)
origin_weight_shape = conv_weight.shape
origin_bias_shape = conv_bias.shape
if is_scalar:
new_conv_bias = np.array(conv_bias * scale).astype(conv_weight_type)
new_conv_weight = np.array(conv_weight * scale).astype(conv_weight_type)
else:
scale = np.reshape(scale, (Cout))
new_conv_bias = np.array(conv_bias * scale).astype(conv_weight_type)
new_conv_weight = []
if is_deconv:
conv_weight = np.transpose(conv_weight, [1, 0, 2] if is_conv_1d else [1, 0, 2, 3])
conv_weight = np.reshape(conv_weight, [Cout, Cin // groups] + list(conv_weight.shape[2:]))
for i in range(Cout):
_conv_weight = conv_weight[i] * scale[i]
new_conv_weight.append(_conv_weight)
new_conv_weight = np.array(new_conv_weight).astype(conv_weight_type)
if is_deconv:
new_conv_weight = np.reshape(new_conv_weight, [Cout // groups, Cin] + list(new_conv_weight.shape[2:]))
new_conv_weight = np.transpose(new_conv_weight, [1, 0, 2] if is_conv_1d else [1, 0, 2, 3])
assert new_conv_weight.shape == origin_weight_shape, "conv weight should have the same shape before and after the fuse_conv_scale pass."
assert new_conv_bias.shape == origin_bias_shape, "conv bias should have the same shape before and after the fuse_conv_scale pass."
out_name = scale_op.outputs[0].name
conv_kargs = {"weight": new_conv_weight, "bias": new_conv_bias, "name": out_name, "before_op": conv_op}
for k, v in conv_op.inputs.items():
if k in ["weight", "bias"]:
continue
conv_kargs[k] = v
if is_deconv:
x = mb.conv_transpose(**conv_kargs)
else:
x = mb.conv(**conv_kargs)
scale_op.enclosing_block.replace_uses_of_var_after_op(
anchor_op=scale_op, old_var=scale_op.outputs[0], new_var=x
)
block.remove_ops([conv_op, scale_op])
return True
@register_pass(namespace="common")
class fuse_conv_scale(AbstractGraphPass):
    """Graph pass: fold a constant mul/real_div into the preceding
    conv/conv_transpose by rescaling its weight and bias."""
    def __init__(self):
        # Ops listed here are left untouched by the pass.
        self.ops_to_skip = set()
    def set_ops_to_skip(self, prog):
        """Hook for subclasses to populate ``ops_to_skip``; no-op here."""
        pass
    def _fuse_conv_scale_block(self, block):
        """Fuse at most one conv+scale pair in ``block`` (recursing into
        nested blocks first). Returns True if a fusion happened."""
        def _match_pattern(op):
            # Return the downstream scale op (mul/real_div) if `op` is a
            # conv/conv_transpose whose single consumer is one; else None.
            if op.op_type == "conv" or op.op_type == "conv_transpose":
                if op.outputs[0] in op.enclosing_block.outputs:
                    return None
                child_ops = op.outputs[0].child_ops
                if len(child_ops) == 1:
                    scale_op_candidate = list(child_ops)[0]
                    if scale_op_candidate.op_type in ["mul", "real_div"]:
                        return scale_op_candidate
            return None
        fusion_occurred = False
        for op in list(block.operations):
            for b in op.blocks:
                block_changed = True
                while block_changed:
                    block_changed = self._fuse_conv_scale_block(b)
            if len(op.blocks) > 0:
                # An op with nested blocks cannot be conv/conv_transpose.
                continue
            scale_op = _match_pattern(op)
            if op in self.ops_to_skip or scale_op in self.ops_to_skip:
                continue
            if scale_op is not None:
                with block:
                    fusion_occurred = _try_to_transform(op, scale_op, block)
            # has to break as the downstream iterator is affected.
            if fusion_occurred:
                return fusion_occurred
        return fusion_occurred
    def apply(self, prog):
        """Run the fusion to a fixed point on every function in ``prog``."""
        self.set_ops_to_skip(prog)
        for f in prog.functions.values():
            block_changed = True
            while block_changed:
                block_changed = self._fuse_conv_scale_block(f)
| true | true |
f71a6261577109f2928b029f3952cbc9f28b4dcc | 997 | py | Python | kubernetes/test/test_v1_ceph_fs_volume_source.py | scele/kubernetes-client-python | 9e982cbdb5f19dc1a3935a75bdd92288f3b807fb | [
"Apache-2.0"
] | null | null | null | kubernetes/test/test_v1_ceph_fs_volume_source.py | scele/kubernetes-client-python | 9e982cbdb5f19dc1a3935a75bdd92288f3b807fb | [
"Apache-2.0"
] | null | null | null | kubernetes/test/test_v1_ceph_fs_volume_source.py | scele/kubernetes-client-python | 9e982cbdb5f19dc1a3935a75bdd92288f3b807fb | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_ceph_fs_volume_source import V1CephFSVolumeSource
class TestV1CephFSVolumeSource(unittest.TestCase):
    """Unit test stubs for the V1CephFSVolumeSource model."""

    def setUp(self):
        """No fixtures are required for these generated stubs."""
        pass

    def tearDown(self):
        """Nothing to clean up."""
        pass

    def testV1CephFSVolumeSource(self):
        """Placeholder test for constructing V1CephFSVolumeSource."""
        # FIXME: construct object with mandatory attributes with example values
        # model = kubernetes.client.models.v1_ceph_fs_volume_source.V1CephFSVolumeSource()
        pass
| 22.155556 | 105 | 0.719157 |
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_ceph_fs_volume_source import V1CephFSVolumeSource
class TestV1CephFSVolumeSource(unittest.TestCase):
    """Unit test stubs for the V1CephFSVolumeSource model."""
    def setUp(self):
        # No fixtures are required for these generated stubs.
        pass
    def tearDown(self):
        # Nothing to clean up.
        pass
    def testV1CephFSVolumeSource(self):
        # Placeholder: construction test not yet implemented.
        pass
if __name__ == '__main__':
unittest.main()
| true | true |
f71a62b2ff79265703f83e0534fed29f3684b334 | 14,167 | py | Python | notebooks/39.1-BDP-unbiased-clustering.py | zeou1/maggot_models | 4e1b518c2981ab1ca9607099c3813e8429d94ca4 | [
"BSD-3-Clause"
] | null | null | null | notebooks/39.1-BDP-unbiased-clustering.py | zeou1/maggot_models | 4e1b518c2981ab1ca9607099c3813e8429d94ca4 | [
"BSD-3-Clause"
] | null | null | null | notebooks/39.1-BDP-unbiased-clustering.py | zeou1/maggot_models | 4e1b518c2981ab1ca9607099c3813e8429d94ca4 | [
"BSD-3-Clause"
] | null | null | null | # %% [markdown]
# # Imports
import json
import os
import warnings
from operator import itemgetter
from pathlib import Path
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
from joblib import Parallel, delayed
from joblib.parallel import Parallel, delayed
from sklearn.metrics import adjusted_rand_score
import networkx as nx
from graspy.cluster import GaussianCluster, AutoGMMCluster
from graspy.embed import AdjacencySpectralEmbed, OmnibusEmbed
from graspy.models import DCSBMEstimator, SBMEstimator
from graspy.plot import heatmap, pairplot
from graspy.utils import binarize, cartprod, get_lcc, pass_to_ranks
from src.data import load_everything
from src.utils import export_skeleton_json, savefig
from src.visualization import clustergram, palplot, sankey
from src.hierarchy import signal_flow
warnings.simplefilter("ignore", category=FutureWarning)
FNAME = os.path.basename(__file__)[:-3]
print(FNAME)
# %% [markdown]
# # Parameters
BRAIN_VERSION = "2019-12-09"
GRAPH_TYPES = ["Gad", "Gaa", "Gdd", "Gda"]
GRAPH_TYPE_LABELS = [r"A $\to$ D", r"A $\to$ A", r"D $\to$ D", r"D $\to$ A"]
N_GRAPH_TYPES = len(GRAPH_TYPES)
SAVEFIGS = True
DEFAULT_FMT = "png"
DEFUALT_DPI = 150
SAVESKELS = False
MIN_CLUSTERS = 8
MAX_CLUSTERS = 8
N_INIT = 50
PTR = True
ONLY_RIGHT = True
embed = "LSE"
cluster = "GMM"
n_components = 4
if cluster == "GMM":
gmm_params = {"n_init": N_INIT, "covariance_type": "all"}
elif cluster == "AutoGMM":
gmm_params = {"max_agglom_size": None}
np.random.seed(23409857)
def stashfig(name, **kws):
    """Save the current figure under this notebook's folder, if enabled."""
    if not SAVEFIGS:
        return
    savefig(name, foldername=FNAME, fmt=DEFAULT_FMT, dpi=DEFUALT_DPI, **kws)
def stashskel(name, ids, colors, palette=None, **kws):
    """Export skeleton JSON for the given ids/colors when saving is enabled.

    Returns the exporter's result, or None when SAVESKELS is off.
    """
    if not SAVESKELS:
        return None
    return export_skeleton_json(
        name, ids, colors, palette=palette, foldername=FNAME, **kws
    )
def ase(adj, n_components):
    """Adjacency spectral embedding of ``adj``.

    Applies pass-to-ranks first when the module-level PTR flag is set.
    Returns a single latent-position matrix.
    """
    if PTR:
        adj = pass_to_ranks(adj)
    # Fix: the local used to be named `ase`, shadowing this function itself.
    embedder = AdjacencySpectralEmbed(n_components=n_components)
    latent = embedder.fit_transform(adj)
    # For a directed graph fit_transform returns (out, in) latent positions;
    # concatenate them into one feature matrix. Assumes adj is directed —
    # TODO confirm (an undirected input would return a single array here).
    latent = np.concatenate(latent, axis=-1)
    return latent
def to_laplace(graph, form="DAD", regularizer=None):
    """Convert a graph adjacency matrix to a graph Laplacian.

    With D the diagonal matrix of node degrees raised to the -1/2 power,
    I the identity, and A the adjacency matrix, the supported forms are:

    - 'I-DAD':  L = I - D*A*D
    - 'DAD':    L = D*A*D  (out-degrees on the left, in-degrees on the right)
    - 'R-DAD':  L = D_t*A*D_t where D_t = D + regularizer*I; the
                regularizer defaults to the average out-degree when None.

    Parameters
    ----------
    graph : array-like, (n_vertices, n_vertices)
        Adjacency matrix.
    form : {'I-DAD', 'DAD' (default), 'R-DAD'}, string, optional
        Which Laplacian normalization to compute.
    regularizer : int, float or None, optional
        Constant added to the degree diagonal; must be >= 0. Only used
        when ``form`` == 'R-DAD'.

    Returns
    -------
    numpy.ndarray
        2D (n_vertices, n_vertices) Laplacian of the requested form.

    References
    ----------
    .. [1] Qin, Tai, and Karl Rohe. "Regularized spectral clustering
       under the degree-corrected stochastic blockmodel." NeurIPS 2013.
    """
    if form not in ("I-DAD", "DAD", "R-DAD"):
        raise TypeError("Unsuported Laplacian normalization")
    adj = graph
    in_degree = np.sum(adj, axis=0)
    out_degree = np.sum(adj, axis=1)
    if form == "R-DAD":
        # Validate the regularizer, defaulting to 1x the average out-degree.
        if regularizer is None:
            regularizer = 1
        elif not isinstance(regularizer, (int, float)):
            raise TypeError(
                "Regularizer must be a int or float, not {}".format(type(regularizer))
            )
        elif regularizer < 0:
            raise ValueError("Regularizer must be greater than or equal to 0")
        regularizer = regularizer * np.mean(out_degree)
        in_degree = in_degree + regularizer
        out_degree = out_degree + regularizer
    # Inverse square roots of the degrees; isolated vertices map to 0.
    with np.errstate(divide="ignore"):
        in_root = 1 / np.sqrt(in_degree)
        out_root = 1 / np.sqrt(out_degree)
    in_root[np.isinf(in_root)] = 0
    out_root[np.isinf(out_root)] = 0
    in_root = np.diag(in_root)
    out_root = np.diag(out_root)
    if form == "I-DAD":
        lap = in_root @ (np.diag(in_degree) - adj) @ in_root
    else:  # 'DAD' or 'R-DAD'
        lap = out_root @ adj @ in_root
    return lap
def lse(adj, n_components, regularizer=None):
    """Regularized Laplacian spectral embedding (R-DAD form).

    Applies pass-to-ranks first when the module-level PTR flag is set.
    ``regularizer`` controls the R-DAD degree regularization; None keeps
    the default (average out-degree).
    """
    if PTR:
        adj = pass_to_ranks(adj)
    # Fix: `regularizer` used to be accepted but silently ignored; pass it
    # through (None preserves the previous default behavior).
    lap = to_laplace(adj, form="R-DAD", regularizer=regularizer)
    embedder = AdjacencySpectralEmbed(n_components=n_components)
    latent = embedder.fit_transform(lap)
    # Concatenate the (out, in) latent positions into one feature matrix.
    latent = np.concatenate(latent, axis=-1)
    return latent
def omni(adjs, n_components):
    """Omnibus embedding of several graphs into one concatenated matrix.

    Applies pass-to-ranks to each graph when the PTR flag is set; splits
    the component budget evenly across the graphs.
    """
    if PTR:
        adjs = [pass_to_ranks(graph) for graph in adjs]
    embedder = OmnibusEmbed(n_components=n_components // len(adjs))
    latent = embedder.fit_transform(adjs)
    # First concat merges the in/out halves; second merges the per-graph
    # embeddings into a single feature matrix.
    latent = np.concatenate(np.concatenate(latent, axis=-1), axis=-1)
    return latent
def ase_concatenate(adjs, n_components):
    """ASE each graph separately, then concatenate all latent positions.

    Applies pass-to-ranks to each graph when the PTR flag is set; splits
    the component budget evenly across the graphs.
    """
    if PTR:
        adjs = [pass_to_ranks(a) for a in adjs]
    # Fix: the local used to be named `ase`, shadowing the module-level
    # `ase` embedding function.
    embedder = AdjacencySpectralEmbed(n_components=n_components // len(adjs))
    graph_latents = []
    for a in adjs:
        # Merge the (out, in) halves of each graph's embedding.
        latent = np.concatenate(embedder.fit_transform(a), axis=-1)
        graph_latents.append(latent)
    return np.concatenate(graph_latents, axis=-1)
def sub_ari(known_inds, true_labels, pred_labels):
    """Adjusted Rand index restricted to vertices with a known true class."""
    return adjusted_rand_score(
        true_labels[known_inds], pred_labels[known_inds]
    )
# Set up plotting constants
plt.style.use("seaborn-white")
sns.set_palette("deep")
sns.set_context("talk", font_scale=1)
# %% [markdown]
# # Load the data
adj, class_labels, side_labels, skeleton_labels = load_everything(
"Gad",
version=BRAIN_VERSION,
return_keys=["Merge Class", "Hemisphere"],
return_ids=True,
)
# select the right hemisphere
if ONLY_RIGHT:
side = "right hemisphere"
right_inds = np.where(side_labels == "R")[0]
adj = adj[np.ix_(right_inds, right_inds)]
class_labels = class_labels[right_inds]
skeleton_labels = skeleton_labels[right_inds]
else:
side = "full brain"
# sort by number of synapses
degrees = adj.sum(axis=0) + adj.sum(axis=1)
sort_inds = np.argsort(degrees)[::-1]
adj = adj[np.ix_(sort_inds, sort_inds)]
class_labels = class_labels[sort_inds]
skeleton_labels = skeleton_labels[sort_inds]
# remove disconnected nodes
adj, lcc_inds = get_lcc(adj, return_inds=True)
class_labels = class_labels[lcc_inds]
skeleton_labels = skeleton_labels[lcc_inds]
# remove pendants
degrees = np.count_nonzero(adj, axis=0) + np.count_nonzero(adj, axis=1)
not_pendant_mask = degrees != 1
not_pendant_inds = np.array(range(len(degrees)))[not_pendant_mask]
adj = adj[np.ix_(not_pendant_inds, not_pendant_inds)]
class_labels = class_labels[not_pendant_inds]
skeleton_labels = skeleton_labels[not_pendant_inds]
# plot degree sequence
d_sort = np.argsort(degrees)[::-1]
degrees = degrees[d_sort]
plt.figure(figsize=(10, 5))
sns.scatterplot(x=range(len(degrees)), y=degrees, s=30, linewidth=0)
known_inds = np.where(class_labels != "Unk")[0]
# %% [markdown]
# # Run clustering using LSE on the sum graph
n_verts = adj.shape[0]
latent = lse(adj, n_components, regularizer=None)
pairplot(latent, labels=class_labels, title=embed)
k_list = list(range(MIN_CLUSTERS, MAX_CLUSTERS + 1))
n_runs = len(k_list)
out_dicts = []
bin_adj = binarize(adj)
last_pred_labels = np.zeros(n_verts)
if cluster == "GMM":
ClusterModel = GaussianCluster
elif cluster == "AutoGMM":
ClusterModel = AutoGMMCluster
for k in k_list:
run_name = f"k = {k}, {cluster}, {embed}, {side} (A to D), PTR, raw"
print(run_name)
print()
# Do clustering
# TODO: make this autogmm instead
gmm = ClusterModel(min_components=k, max_components=k, **gmm_params)
gmm.fit(latent)
pred_labels = gmm.predict(latent)
# Score unsupervised metrics
base_dict = {
"K": k,
"Cluster": cluster,
"Embed": embed,
"Method": f"{cluster} o {embed}",
}
# GMM likelihood
score = gmm.model_.score(latent)
temp_dict = base_dict.copy()
temp_dict["Metric"] = "GMM likelihood"
temp_dict["Score"] = score
out_dicts.append(temp_dict)
# GMM BIC
score = gmm.model_.bic(latent)
temp_dict = base_dict.copy()
temp_dict["Metric"] = "GMM BIC"
temp_dict["Score"] = score
out_dicts.append(temp_dict)
# SBM likelihood
sbm = SBMEstimator(directed=True, loops=False)
sbm.fit(bin_adj, y=pred_labels)
score = sbm.score(bin_adj)
temp_dict = base_dict.copy()
temp_dict["Metric"] = "SBM likelihood"
temp_dict["Score"] = score
out_dicts.append(temp_dict)
# DCSBM likelihood
dcsbm = DCSBMEstimator(directed=True, loops=False)
dcsbm.fit(bin_adj, y=pred_labels)
score = dcsbm.score(bin_adj)
temp_dict = base_dict.copy()
temp_dict["Metric"] = "DCSBM likelihood"
temp_dict["Score"] = score
out_dicts.append(temp_dict)
# ARI of the subset with labels
score = sub_ari(known_inds, class_labels, pred_labels)
temp_dict = base_dict.copy()
temp_dict["Metric"] = "Simple ARI"
temp_dict["Score"] = score
out_dicts.append(temp_dict)
# ARI vs K - 1
score = adjusted_rand_score(last_pred_labels, pred_labels)
temp_dict = base_dict.copy()
temp_dict["Metric"] = "K-1 ARI"
temp_dict["Score"] = score
out_dicts.append(temp_dict)
last_pred_labels = pred_labels
save_name = f"k{k}-{cluster}-{embed}-right-ad-PTR-raw"
# Plot embedding
# pairplot(latent, labels=pred_labels, title=run_name)
# stashfig("latent-" + save_name)
# Plot everything else
clustergram(adj, class_labels, pred_labels)
stashfig("clustergram-" + save_name)
# New plot
# - Compute signal flow
# - Get the centroid of each cluster and project to 1d
# - Alternatively, just take the first dimension
# - For each cluster plot as a node
# output skeletons
if SAVESKELS:
_, colormap, pal = stashskel(
save_name, skeleton_labels, pred_labels, palette="viridis", multiout=True
)
palplot(k, cmap="viridis")
stashfig("palplot-" + save_name)
# save dict colormapping
filename = (
Path("./maggot_models/notebooks/outs")
/ Path(FNAME)
/ str("colormap-" + save_name + ".json")
)
with open(filename, "w") as fout:
json.dump(colormap, fout)
stashskel(
save_name, skeleton_labels, pred_labels, palette="viridis", multiout=False
)
# %% [markdown]
# # Plot results of unsupervised metrics
result_df = pd.DataFrame(out_dicts)
fg = sns.FacetGrid(result_df, col="Metric", col_wrap=3, sharey=False, height=4)
fg.map(sns.lineplot, "K", "Score")
stashfig(f"metrics-{cluster}-{embed}-right-ad-PTR-raw")
# Modifications i need to make to the above
# - Increase the height of the sankey diagram overall
# - Look into color maps that could be better
# - Color the cluster labels by what gets written to the JSON
# - Plot the clusters as nodes in a small network
# %% [markdown]
# # try graph flow
node_signal_flow = signal_flow(adj)
mean_sf = np.zeros(k)
for i in np.unique(pred_labels):
inds = np.where(pred_labels == i)[0]
mean_sf[i] = np.mean(node_signal_flow[inds])
cluster_mean_latent = gmm.model_.means_[:, 0]
block_probs = SBMEstimator().fit(bin_adj, y=pred_labels).block_p_
block_prob_df = pd.DataFrame(data=block_probs, index=range(k), columns=range(k))
block_g = nx.from_pandas_adjacency(block_prob_df, create_using=nx.DiGraph)
plt.figure(figsize=(10, 10))
# don't ever let em tell you you're too pythonic
pos = dict(zip(range(k), zip(cluster_mean_latent, mean_sf)))
# nx.draw_networkx_nodes(block_g, pos=pos)
labels = nx.get_edge_attributes(block_g, "weight")
# nx.draw_networkx_edge_labels(block_g, pos, edge_labels=labels)
from matplotlib.cm import ScalarMappable
import matplotlib as mpl
norm = mpl.colors.LogNorm(vmin=0.01, vmax=0.1)
sm = ScalarMappable(cmap="Reds", norm=norm)
cmap = sm.to_rgba(np.array(list(labels.values())) + 0.01)
nx.draw_networkx(
block_g,
pos,
edge_cmap="Reds",
edge_color=cmap,
connectionstyle="arc3,rad=0.2",
width=1.5,
)
# %% [markdown]
# # signal flow marginals
signal_flow_marginal(adj, pred_labels)
# %% [markdown]
# #
def signal_flow_marginal(adj, labels, col_wrap=5, palette="tab20"):
    """Plot the per-class marginal distribution of node signal flow.

    Classes are laid out as facet columns ordered by descending median
    signal flow. Returns the seaborn FacetGrid.
    """
    node_sf = signal_flow(adj)
    classes = np.unique(labels)
    # Median signal flow per class, used only to order the facet columns.
    class_medians = [np.median(node_sf[labels == c]) for c in classes]
    col_order = classes[np.argsort(class_medians)[::-1]]
    plot_df = pd.DataFrame({"Signal flow": node_sf, "Class": labels})
    fg = sns.FacetGrid(
        plot_df,
        col="Class",
        aspect=1.5,
        palette=palette,
        col_order=col_order,
        sharey=False,
        col_wrap=col_wrap,
        xlim=(-3, 3),
    )
    fg = fg.map(sns.distplot, "Signal flow")  # bins=np.linspace(-2.2, 2.2))
    fg.set(yticks=[], yticklabels=[])
    plt.tight_layout()
    return fg
signal_flow_marginal(adj, class_labels)
stashfig("known-class-sf-marginal")
# tomorrow
# DEFINITELY
# run with unsupervised metrics from k=2-50
# IF TIME
# run hgmm
| 29.150206 | 88 | 0.673678 |
son
import os
import warnings
from operator import itemgetter
from pathlib import Path
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
from joblib import Parallel, delayed
from joblib.parallel import Parallel, delayed
from sklearn.metrics import adjusted_rand_score
import networkx as nx
from graspy.cluster import GaussianCluster, AutoGMMCluster
from graspy.embed import AdjacencySpectralEmbed, OmnibusEmbed
from graspy.models import DCSBMEstimator, SBMEstimator
from graspy.plot import heatmap, pairplot
from graspy.utils import binarize, cartprod, get_lcc, pass_to_ranks
from src.data import load_everything
from src.utils import export_skeleton_json, savefig
from src.visualization import clustergram, palplot, sankey
from src.hierarchy import signal_flow
warnings.simplefilter("ignore", category=FutureWarning)
FNAME = os.path.basename(__file__)[:-3]
print(FNAME)
ON = "2019-12-09"
GRAPH_TYPES = ["Gad", "Gaa", "Gdd", "Gda"]
GRAPH_TYPE_LABELS = [r"A $\to$ D", r"A $\to$ A", r"D $\to$ D", r"D $\to$ A"]
N_GRAPH_TYPES = len(GRAPH_TYPES)
SAVEFIGS = True
DEFAULT_FMT = "png"
DEFUALT_DPI = 150
SAVESKELS = False
MIN_CLUSTERS = 8
MAX_CLUSTERS = 8
N_INIT = 50
PTR = True
ONLY_RIGHT = True
embed = "LSE"
cluster = "GMM"
n_components = 4
if cluster == "GMM":
gmm_params = {"n_init": N_INIT, "covariance_type": "all"}
elif cluster == "AutoGMM":
gmm_params = {"max_agglom_size": None}
np.random.seed(23409857)
def stashfig(name, **kws):
if SAVEFIGS:
savefig(name, foldername=FNAME, fmt=DEFAULT_FMT, dpi=DEFUALT_DPI, **kws)
def stashskel(name, ids, colors, palette=None, **kws):
if SAVESKELS:
return export_skeleton_json(
name, ids, colors, palette=palette, foldername=FNAME, **kws
)
def ase(adj, n_components):
if PTR:
adj = pass_to_ranks(adj)
ase = AdjacencySpectralEmbed(n_components=n_components)
latent = ase.fit_transform(adj)
latent = np.concatenate(latent, axis=-1)
return latent
def to_laplace(graph, form="DAD", regularizer=None):
valid_inputs = ["I-DAD", "DAD", "R-DAD"]
if form not in valid_inputs:
raise TypeError("Unsuported Laplacian normalization")
A = graph
in_degree = np.sum(A, axis=0)
out_degree = np.sum(A, axis=1)
if form == "R-DAD":
if regularizer is None:
regularizer = 1
elif not isinstance(regularizer, (int, float)):
raise TypeError(
"Regularizer must be a int or float, not {}".format(type(regularizer))
)
elif regularizer < 0:
raise ValueError("Regularizer must be greater than or equal to 0")
regularizer = regularizer * np.mean(out_degree)
in_degree += regularizer
out_degree += regularizer
with np.errstate(divide="ignore"):
in_root = 1 / np.sqrt(in_degree)
out_root = 1 / np.sqrt(out_degree)
in_root[np.isinf(in_root)] = 0
out_root[np.isinf(out_root)] = 0
in_root = np.diag(in_root)
out_root = np.diag(out_root)
if form == "I-DAD":
L = np.diag(in_degree) - A
L = in_root @ L @ in_root
elif form == "DAD" or form == "R-DAD":
L = out_root @ A @ in_root
ularizer=None):
if PTR:
adj = pass_to_ranks(adj)
lap = to_laplace(adj, form="R-DAD")
ase = AdjacencySpectralEmbed(n_components=n_components)
latent = ase.fit_transform(lap)
latent = np.concatenate(latent, axis=-1)
return latent
def omni(adjs, n_components):
if PTR:
adjs = [pass_to_ranks(a) for a in adjs]
omni = OmnibusEmbed(n_components=n_components // len(adjs))
latent = omni.fit_transform(adjs)
latent = np.concatenate(latent, axis=-1)
latent = np.concatenate(latent, axis=-1)
return latent
def ase_concatenate(adjs, n_components):
if PTR:
adjs = [pass_to_ranks(a) for a in adjs]
ase = AdjacencySpectralEmbed(n_components=n_components // len(adjs))
graph_latents = []
for a in adjs:
latent = ase.fit_transform(a)
latent = np.concatenate(latent, axis=-1)
graph_latents.append(latent)
latent = np.concatenate(graph_latents, axis=-1)
return latent
def sub_ari(known_inds, true_labels, pred_labels):
true_known_labels = true_labels[known_inds]
pred_known_labels = pred_labels[known_inds]
ari = adjusted_rand_score(true_known_labels, pred_known_labels)
return ari
plt.style.use("seaborn-white")
sns.set_palette("deep")
sns.set_context("talk", font_scale=1)
abels, side_labels, skeleton_labels = load_everything(
"Gad",
version=BRAIN_VERSION,
return_keys=["Merge Class", "Hemisphere"],
return_ids=True,
)
if ONLY_RIGHT:
side = "right hemisphere"
right_inds = np.where(side_labels == "R")[0]
adj = adj[np.ix_(right_inds, right_inds)]
class_labels = class_labels[right_inds]
skeleton_labels = skeleton_labels[right_inds]
else:
side = "full brain"
degrees = adj.sum(axis=0) + adj.sum(axis=1)
sort_inds = np.argsort(degrees)[::-1]
adj = adj[np.ix_(sort_inds, sort_inds)]
class_labels = class_labels[sort_inds]
skeleton_labels = skeleton_labels[sort_inds]
adj, lcc_inds = get_lcc(adj, return_inds=True)
class_labels = class_labels[lcc_inds]
skeleton_labels = skeleton_labels[lcc_inds]
degrees = np.count_nonzero(adj, axis=0) + np.count_nonzero(adj, axis=1)
not_pendant_mask = degrees != 1
not_pendant_inds = np.array(range(len(degrees)))[not_pendant_mask]
adj = adj[np.ix_(not_pendant_inds, not_pendant_inds)]
class_labels = class_labels[not_pendant_inds]
skeleton_labels = skeleton_labels[not_pendant_inds]
d_sort = np.argsort(degrees)[::-1]
degrees = degrees[d_sort]
plt.figure(figsize=(10, 5))
sns.scatterplot(x=range(len(degrees)), y=degrees, s=30, linewidth=0)
known_inds = np.where(class_labels != "Unk")[0]
, n_components, regularizer=None)
pairplot(latent, labels=class_labels, title=embed)
k_list = list(range(MIN_CLUSTERS, MAX_CLUSTERS + 1))
n_runs = len(k_list)
out_dicts = []
bin_adj = binarize(adj)
last_pred_labels = np.zeros(n_verts)
if cluster == "GMM":
ClusterModel = GaussianCluster
elif cluster == "AutoGMM":
ClusterModel = AutoGMMCluster
for k in k_list:
run_name = f"k = {k}, {cluster}, {embed}, {side} (A to D), PTR, raw"
print(run_name)
print()
gmm = ClusterModel(min_components=k, max_components=k, **gmm_params)
gmm.fit(latent)
pred_labels = gmm.predict(latent)
base_dict = {
"K": k,
"Cluster": cluster,
"Embed": embed,
"Method": f"{cluster} o {embed}",
}
score = gmm.model_.score(latent)
temp_dict = base_dict.copy()
temp_dict["Metric"] = "GMM likelihood"
temp_dict["Score"] = score
out_dicts.append(temp_dict)
score = gmm.model_.bic(latent)
temp_dict = base_dict.copy()
temp_dict["Metric"] = "GMM BIC"
temp_dict["Score"] = score
out_dicts.append(temp_dict)
sbm = SBMEstimator(directed=True, loops=False)
sbm.fit(bin_adj, y=pred_labels)
score = sbm.score(bin_adj)
temp_dict = base_dict.copy()
temp_dict["Metric"] = "SBM likelihood"
temp_dict["Score"] = score
out_dicts.append(temp_dict)
dcsbm = DCSBMEstimator(directed=True, loops=False)
dcsbm.fit(bin_adj, y=pred_labels)
score = dcsbm.score(bin_adj)
temp_dict = base_dict.copy()
temp_dict["Metric"] = "DCSBM likelihood"
temp_dict["Score"] = score
out_dicts.append(temp_dict)
score = sub_ari(known_inds, class_labels, pred_labels)
temp_dict = base_dict.copy()
temp_dict["Metric"] = "Simple ARI"
temp_dict["Score"] = score
out_dicts.append(temp_dict)
score = adjusted_rand_score(last_pred_labels, pred_labels)
temp_dict = base_dict.copy()
temp_dict["Metric"] = "K-1 ARI"
temp_dict["Score"] = score
out_dicts.append(temp_dict)
last_pred_labels = pred_labels
save_name = f"k{k}-{cluster}-{embed}-right-ad-PTR-raw"
clustergram(adj, class_labels, pred_labels)
stashfig("clustergram-" + save_name)
if SAVESKELS:
_, colormap, pal = stashskel(
save_name, skeleton_labels, pred_labels, palette="viridis", multiout=True
)
palplot(k, cmap="viridis")
stashfig("palplot-" + save_name)
filename = (
Path("./maggot_models/notebooks/outs")
/ Path(FNAME)
/ str("colormap-" + save_name + ".json")
)
with open(filename, "w") as fout:
json.dump(colormap, fout)
stashskel(
save_name, skeleton_labels, pred_labels, palette="viridis", multiout=False
)
g = sns.FacetGrid(result_df, col="Metric", col_wrap=3, sharey=False, height=4)
fg.map(sns.lineplot, "K", "Score")
stashfig(f"metrics-{cluster}-{embed}-right-ad-PTR-raw")
low = signal_flow(adj)
mean_sf = np.zeros(k)
for i in np.unique(pred_labels):
inds = np.where(pred_labels == i)[0]
mean_sf[i] = np.mean(node_signal_flow[inds])
cluster_mean_latent = gmm.model_.means_[:, 0]
block_probs = SBMEstimator().fit(bin_adj, y=pred_labels).block_p_
block_prob_df = pd.DataFrame(data=block_probs, index=range(k), columns=range(k))
block_g = nx.from_pandas_adjacency(block_prob_df, create_using=nx.DiGraph)
plt.figure(figsize=(10, 10))
pos = dict(zip(range(k), zip(cluster_mean_latent, mean_sf)))
labels = nx.get_edge_attributes(block_g, "weight")
from matplotlib.cm import ScalarMappable
import matplotlib as mpl
norm = mpl.colors.LogNorm(vmin=0.01, vmax=0.1)
sm = ScalarMappable(cmap="Reds", norm=norm)
cmap = sm.to_rgba(np.array(list(labels.values())) + 0.01)
nx.draw_networkx(
block_g,
pos,
edge_cmap="Reds",
edge_color=cmap,
connectionstyle="arc3,rad=0.2",
width=1.5,
)
adj, pred_labels)
def signal_flow_marginal(adj, labels, col_wrap=5, palette="tab20"):
sf = signal_flow(adj)
uni_labels = np.unique(labels)
medians = []
for i in uni_labels:
inds = np.where(labels == i)[0]
medians.append(np.median(sf[inds]))
sort_inds = np.argsort(medians)[::-1]
col_order = uni_labels[sort_inds]
plot_df = pd.DataFrame()
plot_df["Signal flow"] = sf
plot_df["Class"] = labels
fg = sns.FacetGrid(
plot_df,
col="Class",
aspect=1.5,
palette=palette,
col_order=col_order,
sharey=False,
col_wrap=col_wrap,
xlim=(-3, 3),
)
fg = fg.map(sns.distplot, "Signal flow")
fg.set(yticks=[], yticklabels=[])
plt.tight_layout()
return fg
signal_flow_marginal(adj, class_labels)
stashfig("known-class-sf-marginal")
| true | true |
f71a6368df82f8cba23fa6c4aacdc3254b4af1ca | 702 | py | Python | Cklib/Filter.py | kamphaus/HPCGrunner | 1885ee87bf02bab51cc71d560d86217c79c5f46b | [
"MIT"
] | null | null | null | Cklib/Filter.py | kamphaus/HPCGrunner | 1885ee87bf02bab51cc71d560d86217c79c5f46b | [
"MIT"
] | null | null | null | Cklib/Filter.py | kamphaus/HPCGrunner | 1885ee87bf02bab51cc71d560d86217c79c5f46b | [
"MIT"
] | null | null | null | import copy
def filterRemaining(remaining, environment):
    """Return the entries of *remaining* compatible with *environment*.

    An entry survives when none of the keys it shares with the environment
    holds a contradicting value and at least one dict in ``entry['runs']``
    matches the environment on every key (runs are expected to contain all
    environment keys). Surviving entries are returned as deep copies whose
    ``'runs'`` list references only the matching original run dicts; the
    input structures are never mutated.
    """
    kept = []
    for entry in remaining:
        # Reject the entry outright on any contradicting shared key.
        if any(entry[key] != environment[key] for key in environment if key in entry):
            continue
        # Keep only the runs that agree with the environment everywhere.
        matching_runs = [
            run for run in entry['runs']
            if all(run[key] == environment[key] for key in environment)
        ]
        if not matching_runs:
            continue
        survivor = copy.deepcopy(entry)
        survivor['runs'] = matching_runs
        kept.append(survivor)
    return kept
| 30.521739 | 73 | 0.474359 | import copy
def filterRemaining(remaining, environment):
returned = copy.copy(remaining)
for i in range(len(returned)-1, -1, -1):
r = returned[i]
if any(not(r[e]==environment[e]) for e in environment if e in r):
del returned[i]
else:
runs = copy.copy(r['runs'])
for j in range(len(runs)-1, -1, -1):
u = runs[j]
if any(not(u[e]==environment[e]) for e in environment):
del runs[j]
if len(runs)==0:
del returned[i]
else:
r = copy.deepcopy(r)
r['runs'] = runs
returned[i] = r
return returned
| true | true |
f71a637927490a1a25d4576addd9a32c1d6e1ce3 | 2,617 | py | Python | acregnet/data.py | luoyi1hao/ACRN_Chest_X-ray_IA | b2ecaf88e6b1bb59101fd2d611bf9d1e6716367a | [
"MIT"
] | 1 | 2021-09-23T10:37:53.000Z | 2021-09-23T10:37:53.000Z | acregnet/data.py | luoyi1hao/ACRN_Chest_X-ray_IA | b2ecaf88e6b1bb59101fd2d611bf9d1e6716367a | [
"MIT"
] | null | null | null | acregnet/data.py | luoyi1hao/ACRN_Chest_X-ray_IA | b2ecaf88e6b1bb59101fd2d611bf9d1e6716367a | [
"MIT"
] | null | null | null | import os
import numpy as np
from sklearn.model_selection import train_test_split
import cv2
class DataHandler(object):
    """Static helpers that load grayscale image/label batches with OpenCV
    and write train/validation/test filename splits to disk."""

    @staticmethod  # was missing; the method is only ever called as DataHandler._load_data(...)
    def _load_data(im_fnames, add_channel_dim=True):
        """Read every file in *im_fnames* as a grayscale image into one batch.

        All images are assumed to share the shape of the first one. Returns
        an array of shape (N, H, W, 1) when *add_channel_dim* is true,
        otherwise (N, H, W).
        """
        im0 = cv2.imread(im_fnames[0], 0)  # flag 0 -> load single-channel
        im_batch = np.zeros((len(im_fnames),) + im0.shape)
        im_batch[0] = im0
        for i, fname in enumerate(im_fnames[1:], 1):
            im_batch[i] = cv2.imread(fname, 0)
        if add_channel_dim:
            return np.expand_dims(im_batch, axis=-1)
        return im_batch

    @staticmethod
    def load_images(_file, normalize=True):
        """Load the images listed (one path per line) in *_file*.

        Returns (batch, filenames); pixel values are scaled to [0, 1]
        when *normalize* is true.
        """
        im_fnames = list(np.loadtxt(_file, dtype='str'))
        im_batch = DataHandler._load_data(im_fnames).astype(np.float32)
        if normalize:
            im_batch = im_batch / 255.
        return im_batch, im_fnames

    @staticmethod
    def load_labels(_file):
        """Load label images listed in *_file*, remapping label values to a
        dense 0..K-1 range. Returns (int32 batch, filenames)."""
        lb_fnames = list(np.loadtxt(_file, dtype='str'))
        lb_batch = DataHandler._load_data(lb_fnames).astype(np.int32)
        cur_labels = np.unique(lb_batch)
        new_labels = range(np.unique(lb_batch).shape[0])
        if not np.array_equal(cur_labels, new_labels):
            # np.unique returns sorted values, so this in-place remap is
            # stable (each old value maps to its rank).
            for cur_l, new_l in zip(cur_labels, new_labels):
                lb_batch[lb_batch == cur_l] = new_l
        return lb_batch, lb_fnames

    @staticmethod
    def train_test_split(data_dir, out_dir,
                         test_size=0.2, seed=1):
        """Split the files of *data_dir* into train/test filename lists and
        save them as 'train_fnames' / 'test_fnames' under *out_dir*.

        Inside this method, ``train_test_split`` resolves to sklearn's
        module-level function (class scope is not visible in method bodies).
        """
        data_fnames = [
            os.path.join(data_dir, f) for f in sorted(os.listdir(data_dir))]
        # sklearn's split options are keyword-only (signature is
        # (*arrays, **options)); the old positional call passed them as
        # extra arrays. Mapping assumed from the old positional order:
        # shuffle=True, random_state=seed -- TODO confirm against callers.
        train_fnames, test_fnames = train_test_split(
            data_fnames, test_size=test_size, shuffle=True, random_state=seed)
        np.savetxt(os.path.join(out_dir, 'train_fnames'),
                   np.array(train_fnames), fmt='%s')
        np.savetxt(os.path.join(out_dir, 'test_fnames'),
                   np.array(test_fnames), fmt='%s')

    @staticmethod
    def train_valid_test_split(data_dir, out_dir, valid_size=0.1,
                               test_size=0.2, seed=1):
        """Split *data_dir* files into train/valid/test filename lists
        (*valid_size* and *test_size* are fractions of the whole set) and
        save them under *out_dir*."""
        data_fnames = [
            os.path.join(data_dir, f) for f in sorted(os.listdir(data_dir))]
        train_fnames, test_fnames = train_test_split(
            data_fnames, test_size=test_size, shuffle=True, random_state=seed)
        # valid_size is a fraction of the whole set, so rescale it relative
        # to the remaining train portion before the second split.
        train_fnames, valid_fnames = train_test_split(
            train_fnames, test_size=valid_size / (1 - test_size),
            shuffle=False, random_state=seed + 1)
        np.savetxt(os.path.join(out_dir, 'train_fnames'),
                   np.array(train_fnames), fmt='%s')
        np.savetxt(os.path.join(out_dir, 'valid_fnames'),
                   np.array(valid_fnames), fmt='%s')
        np.savetxt(os.path.join(out_dir, 'test_fnames'),
                   np.array(test_fnames), fmt='%s')
| 34.893333 | 76 | 0.610623 | import os
import numpy as np
from sklearn.model_selection import train_test_split
import cv2
class DataHandler(object):
def _load_data(im_fnames, add_channel_dim=True):
im0 = cv2.imread(im_fnames[0], 0)
im_batch = np.zeros((len(im_fnames),) + im0.shape)
im_batch[0] = im0
for i, fname in enumerate(im_fnames[1:], 1):
im_batch[i] = cv2.imread(fname, 0)
if add_channel_dim:
return np.expand_dims(im_batch, axis=-1)
return im_batch
@staticmethod
def load_images(_file, normalize=True):
im_fnames = list(np.loadtxt(_file, dtype='str'))
im_batch = DataHandler._load_data(im_fnames).astype(np.float32)
if normalize:
im_batch = im_batch / 255.
return im_batch, im_fnames
@staticmethod
def load_labels(_file):
lb_fnames = list(np.loadtxt(_file, dtype='str'))
lb_batch = DataHandler._load_data(lb_fnames).astype(np.int32)
cur_labels = np.unique(lb_batch)
new_labels = range(np.unique(lb_batch).shape[0])
if not np.array_equal(cur_labels, new_labels):
for cur_l, new_l in zip(cur_labels, new_labels):
lb_batch[lb_batch == cur_l] = new_l
return lb_batch, lb_fnames
@staticmethod
def train_test_split(data_dir, out_dir,
test_size=0.2, seed=1):
data_fnames = [
os.path.join(data_dir, f) for f in sorted(os.listdir(data_dir))]
train_fnames, test_fnames = train_test_split(
data_fnames, test_size, True, seed)
np.savetxt(os.path.join(out_dir, 'train_fnames'),
np.array(train_fnames), fmt='%s')
np.savetxt(os.path.join(out_dir, 'test_fnames'),
np.array(test_fnames), fmt='%s')
@staticmethod
def train_valid_test_split(data_dir, out_dir, valid_size=0.1,
test_size=0.2, seed=1):
data_fnames = [
os.path.join(data_dir, f) for f in sorted(os.listdir(data_dir))]
train_fnames, test_fnames = train_test_split(
data_fnames, test_size, True, seed)
train_fnames, valid_fnames = train_test_split(
train_fnames, valid_size/(1 - test_size), False, seed + 1)
np.savetxt(os.path.join(out_dir, 'train_fnames'),
np.array(train_fnames), fmt='%s')
np.savetxt(os.path.join(out_dir, 'valid_fnames'),
np.array(valid_fnames), fmt='%s')
np.savetxt(os.path.join(out_dir, 'test_fnames'),
np.array(test_fnames), fmt='%s')
| true | true |
f71a63e9f0ba5a8f65374e8816ee7e58d28c35bf | 4,706 | py | Python | gatenlp/processing/annotator.py | gitter-badger/python-gatenlp | bfed863b404cfd62c98a6cb08ad287c3b4b6ccae | [
"Apache-2.0"
] | null | null | null | gatenlp/processing/annotator.py | gitter-badger/python-gatenlp | bfed863b404cfd62c98a6cb08ad287c3b4b6ccae | [
"Apache-2.0"
] | null | null | null | gatenlp/processing/annotator.py | gitter-badger/python-gatenlp | bfed863b404cfd62c98a6cb08ad287c3b4b6ccae | [
"Apache-2.0"
] | null | null | null | """
Module with the base class and supporting functions for all annotators.
Any callable that can be called by passing a document can be used as an annotator,
but the base class "Annotator" defined in here is designed to allow for a more
flexible approach to do things.
"""
from abc import ABC, abstractmethod
__pdoc__ = {"Annotator.__call__": True}
class Annotator(ABC):
    """Abstract base class for document annotators.

    Concrete subclasses implement ``__call__`` to process a single document;
    the remaining hooks (``pipe``, ``start``, ``finish``, ``reduce``) have
    default implementations that subclasses may override.
    """

    @abstractmethod
    def __call__(self, doc, **kwargs):
        """Process a single document and return the result.

        Must be implemented by concrete subclasses. Implementations may
        return the (possibly modified) document, ``None``, or a list of zero
        or more documents, and must accept arbitrary keyword arguments,
        which parametrize processing and are passed on to any
        sub-annotators.

        Args:
            doc: the document to process
            kwargs: arbitrary keyword arguments for this annotator or its
                sub-annotators

        Returns:
            a document, ``None``, or a possibly empty list of documents
        """
        raise Exception("This method must be implemented!")

    def pipe(self, documents, **kwargs):
        """Process an iterable of documents, yielding the processed ones.

        Override to enable batching, caching or other stream-level
        optimizations. The default applies ``__call__`` to every
        non-``None`` element.

        Args:
            documents: an iterable over documents
            **kwargs: arbitrary keyword arguments, forwarded to ``__call__``

        Yields:
            processed documents
        """
        for document in documents:
            if document is None:
                continue
            yield self.__call__(document, **kwargs)

    def start(self):
        """Hook invoked by an executor before the first document of a batch.

        Distinct from construction: initialization may load large resources
        that are reused across several runs of the same annotator instance.
        """
        pass

    def finish(self):
        """Hook invoked by an executor after the last document of a batch.

        Returns:
            an overall result for the whole batch, which may be ``None``
        """
        pass

    def reduce(self, results):
        """Combine several per-batch results into a single overall result.

        Should behave like a static method (use no instance state). Invoked
        by an executor when more than one annotator instance processed
        disjoint parts of a corpus; with a single instance the value
        returned from ``finish`` is used directly.

        Args:
            results: an iterable of individual results, possibly ``None`` or
                all-``None``

        Returns:
            the combined result, or ``None`` when there is nothing to combine
        """
        return results
class AnnotatorFunction(Annotator):
    """Adapter that wraps a plain callable as an :class:`Annotator`."""

    def __init__(self, funct):
        # funct: callable taking (doc, **kwargs) and returning the processed
        # document (or None / list of documents, per the Annotator contract).
        self.funct = funct

    def __call__(self, doc, **kwargs):
        """Process ``doc`` by delegating to the wrapped callable."""
        return self.funct(doc, **kwargs)
| 41.280702 | 116 | 0.679558 | from abc import ABC, abstractmethod
__pdoc__ = {"Annotator.__call__": True}
class Annotator(ABC):
@abstractmethod
def __call__(self, doc, **kwargs):
raise Exception("This method must be implemented!")
def pipe(self, documents, **kwargs):
for el in documents:
if el is not None:
doc = self.__call__(el, **kwargs)
yield doc
def start(self):
pass
def finish(self):
pass
def reduce(self, results):
return results
class AnnotatorFunction(Annotator):
def __init__(self, funct):
self.funct = funct
def __call__(self, doc, **kwargs):
return self.funct(doc, **kwargs)
| true | true |
f71a650b60dea15af020b9d6037cca6aa1d1b85d | 3,943 | py | Python | muti_thread.py | fanlushuai/jd-assistant | ac9fce2cc87d2a6702743c28d4a3eeb3ee99f9ac | [
"MIT"
] | 2 | 2021-01-13T00:16:30.000Z | 2021-01-31T01:34:57.000Z | muti_thread.py | fanlushuai/jd-assistant | ac9fce2cc87d2a6702743c28d4a3eeb3ee99f9ac | [
"MIT"
] | null | null | null | muti_thread.py | fanlushuai/jd-assistant | ac9fce2cc87d2a6702743c28d4a3eeb3ee99f9ac | [
"MIT"
] | 1 | 2020-12-16T12:10:06.000Z | 2020-12-16T12:10:06.000Z | import functools
import queue
import random
import time
from concurrent.futures import ThreadPoolExecutor
from itertools import repeat
from log import logger
shut_down_pool_queue = queue.Queue()
# sys_thread_pool = ThreadPoolExecutor(max_workers=2)
def shutdown_listener():
    """Block on the shutdown queue forever, shutting down each pool received."""
    for _ in repeat(None):  # infinite loop
        t_pool = shut_down_pool_queue.get()  # blocks until a pool is queued
        t_pool.shutdown()
        logger.info("shutdown")
# sys_thread_pool.submit(shutdown_listener)
# 根据一系列逻辑,估算出来的整个流程,任务不等待,情况下的合理线程数
no_task_wait_size_assessed = 35
concurrent_pool_assessed = ThreadPoolExecutor(max_workers=no_task_wait_size_assessed)
def do_nothing():
    # Sleep 5 s so the pool is forced to create new threads instead of
    # reusing an idle one (used only to warm up the pool).
    time.sleep(5)
    return
def pre_concurrent_pool():
    # Warm up the threads in the pool (one no-op task per worker).
    t = time.perf_counter()
    for i in range(no_task_wait_size_assessed):
        concurrent_pool_assessed.submit(do_nothing)
    time.sleep(5)  # convenient when debugging with expiry/timeout values
    logger.info("预热线程池,耗时%s", time.perf_counter() - t)
def threads(concurrent_size=1, try_times=1, try_internal=0.05):
    """
    Concurrency decorator.

    :param concurrent_size: number of concurrent attempts per retry round
    :param try_times: number of retry rounds
    :param try_internal: interval between retry rounds, in seconds
    :return: among all the concurrent/retried tasks, the first result
        obtained; ``None`` if none of them produced one
    """
    def decorate(func):
        @functools.wraps(func)
        def wrapper(*args, **kw):
            re = Job(concurrent_size, try_times, try_internal).run(func, *args, **kw)
            logger.info("threads tool return %s", re)
            return re
        return wrapper
    return decorate
class Job(object):
    """
    Concurrent processing helper.

    Launches a round of concurrent attempts per retry cycle, and a cycle's
    tasks never delay the next cycle: cycle 1 starts at t1, cycle 2 at
    t2 = t1 + try_internal.

    Problem solved: with a traditional for-loop retry, cycle 2 starts at
    t2 = t1 + task-duration + try_internal, so there is no real concurrency
    (single-threaded retry) and the retry interval is distorted by the
    previous task's run time -- strictly speaking that interval parameter is
    meaningless, especially for I/O-bound work.
    """

    def __init__(self, concurrent_size=1, try_times=1, try_internal=0.05):
        # concurrent_size: tasks submitted per retry round
        # try_times: number of retry rounds
        # try_internal: seconds between rounds
        self.concurrent_size = concurrent_size
        self.try_times = try_times
        self.try_internal = try_internal
        self.futures = []
        # The whole pipeline shares this one module-level thread pool.
        self.thread_pool = concurrent_pool_assessed
        # Cleared once a truthy result has been obtained.
        self.loop = True

    def run(self, fn, *args, **kwargs):
        """Submit the retry loop asynchronously, then synchronously poll the
        futures and return the first truthy result (``None`` if every
        attempt returned a falsy value)."""
        # Launch the submission loop on a background thread.
        self.thread_pool.submit(self._loop, fn, *args, **kwargs)
        logger.info("同步等待结果……")
        # Synchronously collect the return values.
        try_return_count = 0
        for _ in repeat(None):
            futures = self.futures
            for future in futures:
                if future.done():
                    re = future.result()
                    if re:
                        self.loop = False
                        # !!! The decorated function MUST return an explicit
                        # value (None or otherwise), or polling never ends.
                        shut_down_pool_queue.put(self.thread_pool)
                        return re
                    else:
                        try_return_count += 1
                        # NOTE(review): removing from the list while
                        # iterating it can skip entries -- confirm intended.
                        futures.remove(future)
            if try_return_count >= self.try_times * self.concurrent_size:
                return None

    def _loop(self, fn, *args, **kwargs):
        for try_count in range(self.try_times):
            for i in range(self.concurrent_size):
                self.futures.append(self.thread_pool.submit(fn, *args, **kwargs))
                logger.info("启动线程")
            if not self.loop:
                # loop runs until a result is obtained or all rounds finish,
                # i.e. try_times * concurrent_size submissions.
                logger.debug("获取到结果,结束")
                return
            if not self.loop:
                logger.debug("获取到结果,结束")
                # NOTE(review): duplicate of the check above (only useful if
                # self.loop flips between the two reads); kept as-is.
                return
            time.sleep(self.try_internal)
@threads(concurrent_size=3, try_times=100, try_internal=0.1)
def test_g():
    # Demo task: sleep a random duration, then return a string result.
    t = random.choice([0.1, 0.2, 0.3, 0.4, 0.5, 1])
    logger.info("run%s", t)
    time.sleep(t)
    return "java{}".format(t)
if __name__ == '__main__':
    pre_concurrent_pool()  # warm the pool before the timing-sensitive demo
    logger.info("拿到结果%s", test_g())
| 29.425373 | 88 | 0.609942 | import functools
import queue
import random
import time
from concurrent.futures import ThreadPoolExecutor
from itertools import repeat
from log import logger
shut_down_pool_queue = queue.Queue()
def shutdown_listener():
for _ in repeat(None):
t_pool = shut_down_pool_queue.get()
t_pool.shutdown()
logger.info("shutdown")
no_task_wait_size_assessed = 35
concurrent_pool_assessed = ThreadPoolExecutor(max_workers=no_task_wait_size_assessed)
def do_nothing():
time.sleep(5)
return
def pre_concurrent_pool():
t = time.perf_counter()
for i in range(no_task_wait_size_assessed):
concurrent_pool_assessed.submit(do_nothing)
time.sleep(5)
logger.info("预热线程池,耗时%s", time.perf_counter() - t)
def threads(concurrent_size=1, try_times=1, try_internal=0.05):
def decorate(func):
@functools.wraps(func)
def wrapper(*args, **kw):
re = Job(concurrent_size, try_times, try_internal).run(func, *args, **kw)
logger.info("threads tool return %s", re)
return re
return wrapper
return decorate
class Job(object):
def __init__(self, concurrent_size=1, try_times=1, try_internal=0.05):
self.concurrent_size = concurrent_size
self.try_times = try_times
self.try_internal = try_internal
self.futures = []
self.thread_pool = concurrent_pool_assessed
self.loop = True
def run(self, fn, *args, **kwargs):
self.thread_pool.submit(self._loop, fn, *args, **kwargs)
logger.info("同步等待结果……")
try_return_count = 0
for _ in repeat(None):
futures = self.futures
for future in futures:
if future.done():
re = future.result()
if re:
self.loop = False
shut_down_pool_queue.put(self.thread_pool)
return re
else:
try_return_count += 1
futures.remove(future)
if try_return_count >= self.try_times * self.concurrent_size:
return None
def _loop(self, fn, *args, **kwargs):
for try_count in range(self.try_times):
for i in range(self.concurrent_size):
self.futures.append(self.thread_pool.submit(fn, *args, **kwargs))
logger.info("启动线程")
if not self.loop:
logger.debug("获取到结果,结束")
return
if not self.loop:
logger.debug("获取到结果,结束")
return
time.sleep(self.try_internal)
@threads(concurrent_size=3, try_times=100, try_internal=0.1)
def test_g():
t = random.choice([0.1, 0.2, 0.3, 0.4, 0.5, 1])
logger.info("run%s", t)
time.sleep(t)
return "java{}".format(t)
if __name__ == '__main__':
pre_concurrent_pool()
logger.info("拿到结果%s", test_g())
| true | true |
f71a66863303bb27d7b14ce461ffa23d7ac9b033 | 534 | py | Python | web_api/api/migrations/0103_gateway_mqtt_password.py | IoT-BA/project_noe-backend | 4b63b4604dd9f3d53a1bdb6ad8e6ad20fe53ebd9 | [
"MIT"
] | 2 | 2017-02-27T07:41:18.000Z | 2017-03-05T22:13:39.000Z | web_api/api/migrations/0103_gateway_mqtt_password.py | IoT-BA/lorawan-sk-backend | 4b63b4604dd9f3d53a1bdb6ad8e6ad20fe53ebd9 | [
"MIT"
] | null | null | null | web_api/api/migrations/0103_gateway_mqtt_password.py | IoT-BA/lorawan-sk-backend | 4b63b4604dd9f3d53a1bdb6ad8e6ad20fe53ebd9 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-01-22 09:20
from __future__ import unicode_literals
import api.models
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: add the nullable ``mqtt_password``
    CharField to the ``gateway`` model."""

    dependencies = [
        ('api', '0102_auto_20170121_2038'),
    ]

    operations = [
        migrations.AddField(
            model_name='gateway',
            name='mqtt_password',
            # default is a callable, so each new row gets its own freshly
            # generated password instead of one shared value.
            field=models.CharField(blank=True, default=api.models.generate_mqtt_password, max_length=16, null=True),
        ),
    ]
| 24.272727 | 116 | 0.651685 |
from __future__ import unicode_literals
import api.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0102_auto_20170121_2038'),
]
operations = [
migrations.AddField(
model_name='gateway',
name='mqtt_password',
field=models.CharField(blank=True, default=api.models.generate_mqtt_password, max_length=16, null=True),
),
]
| true | true |
f71a677c5c16ac76e38db599d0a5eac2507bf63b | 747 | py | Python | ScriptEngine/app.py | daizhaolin/scriptengine | eb3aee0381193d5550d31b59574ca60a4706cb25 | [
"BSD-3-Clause"
] | null | null | null | ScriptEngine/app.py | daizhaolin/scriptengine | eb3aee0381193d5550d31b59574ca60a4706cb25 | [
"BSD-3-Clause"
] | null | null | null | ScriptEngine/app.py | daizhaolin/scriptengine | eb3aee0381193d5550d31b59574ca60a4706cb25 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: UTF-8 -*-
'''
Created on 2020-03-08
@author: daizhaolin
'''
from .config import Config
from .helper import cached_property
from .logging import create_logger
class ScriptEngine(object):
    """Minimal application object: holds configuration, extensions and a
    queue of controller callables that :meth:`run` invokes in order."""

    def __init__(self):
        self.name = __name__
        self.config = Config({
            'DEBUG': False
        })
        self.extensions = {}
        self.controller_queue = []

    @property
    def debug(self):
        """Whether the engine runs in debug mode (the ``DEBUG`` config flag)."""
        return self.config['DEBUG']

    @cached_property
    def logger(self):
        """Engine-wide logger, created lazily on first access."""
        return create_logger(self)

    def register_controller(self, controller):
        """Queue *controller* (a callable taking the engine) for execution."""
        self.controller_queue.append(controller)

    def run(self):
        """Invoke every registered controller, in registration order."""
        for queued_controller in self.controller_queue:
            queued_controller(self)
| 19.153846 | 48 | 0.630522 |
from .config import Config
from .helper import cached_property
from .logging import create_logger
class ScriptEngine(object):
def __init__(self):
self.name = __name__
self.config = Config({
'DEBUG': False
})
self.extensions = dict()
self.controller_queue = list()
@property
def debug(self):
return self.config['DEBUG']
@cached_property
def logger(self):
return create_logger(self)
def register_controller(self, controller):
self.controller_queue.append(controller)
def run(self):
for controller in self.controller_queue:
controller(self)
| true | true |
f71a679ff4b8d5cbe23ab5310c5a07b000075f19 | 8,622 | py | Python | examples/tutorials/advanced/websockets-example-MNIST-parallel/run_websocket_client.py | theoptips/PySyft | 4b68c3c6fbe0c18cdf87dfe6ddc3c2071a71f1cc | [
"Apache-2.0"
] | 1 | 2019-07-14T01:18:34.000Z | 2019-07-14T01:18:34.000Z | examples/tutorials/advanced/websockets-example-MNIST-parallel/run_websocket_client.py | theoptips/PySyft | 4b68c3c6fbe0c18cdf87dfe6ddc3c2071a71f1cc | [
"Apache-2.0"
] | null | null | null | examples/tutorials/advanced/websockets-example-MNIST-parallel/run_websocket_client.py | theoptips/PySyft | 4b68c3c6fbe0c18cdf87dfe6ddc3c2071a71f1cc | [
"Apache-2.0"
] | 1 | 2021-02-12T12:11:44.000Z | 2021-02-12T12:11:44.000Z | import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision import transforms, datasets
import logging
import argparse
import sys
import asyncio
import numpy as np
import syft as sy
from syft import workers
from syft.frameworks.torch.federated import utils
logger = logging.getLogger(__name__)
LOG_INTERVAL = 25
# Loss function
@torch.jit.script
def loss_fn(pred, target):
    """Negative log-likelihood loss between log-probabilities and targets."""
    return F.nll_loss(pred, target)
# Model
class Net(nn.Module):
    """LeNet-style CNN for 28x28 single-channel MNIST digits.

    Two conv+max-pool stages followed by two fully connected layers; the
    forward pass returns per-class log-probabilities.
    """

    def __init__(self):
        super(Net, self).__init__()
        # Layer creation order is kept identical so RNG-seeded
        # initialization stays reproducible.
        self.conv1 = nn.Conv2d(1, 20, 5, 1)
        self.conv2 = nn.Conv2d(20, 50, 5, 1)
        self.fc1 = nn.Linear(4 * 4 * 50, 500)
        self.fc2 = nn.Linear(500, 10)

    def forward(self, x):
        out = F.max_pool2d(F.relu(self.conv1(x)), 2, 2)
        out = F.max_pool2d(F.relu(self.conv2(out)), 2, 2)
        out = out.view(-1, 4 * 4 * 50)  # flatten to (N, 800)
        out = self.fc2(F.relu(self.fc1(out)))
        return F.log_softmax(out, dim=1)
def define_and_get_arguments(args=sys.argv[1:]):
parser = argparse.ArgumentParser(
description="Run federated learning using websocket client workers."
)
parser.add_argument("--batch_size", type=int, default=32, help="batch size of the training")
parser.add_argument(
"--test_batch_size", type=int, default=128, help="batch size used for the test data"
)
parser.add_argument(
"--training_rounds", type=int, default=40, help="number of federated learning rounds"
)
parser.add_argument(
"--federate_after_n_batches",
type=int,
default=10,
help="number of training steps performed on each remote worker before averaging",
)
parser.add_argument("--lr", type=float, default=0.1, help="learning rate")
parser.add_argument("--cuda", action="store_true", help="use cuda")
parser.add_argument("--seed", type=int, default=1, help="seed used for randomization")
parser.add_argument("--save_model", action="store_true", help="if set, model will be saved")
parser.add_argument(
"--verbose",
"-v",
action="store_true",
help="if set, websocket client workers will be started in verbose mode",
)
args = parser.parse_args(args=args)
return args
async def fit_model_on_worker(
worker: workers.WebsocketClientWorker,
traced_model: torch.jit.ScriptModule,
batch_size: int,
curr_round: int,
max_nr_batches: int,
lr: float,
):
"""Send the model to the worker and fit the model on the worker's training data.
Args:
worker: Remote location, where the model shall be trained.
traced_model: Model which shall be trained.
batch_size: Batch size of each training step.
curr_round: Index of the current training round (for logging purposes).
max_nr_batches: If > 0, training on worker will stop at min(max_nr_batches, nr_available_batches).
lr: Learning rate of each training step.
Returns:
A tuple containing:
* worker_id: Union[int, str], id of the worker.
* improved model: torch.jit.ScriptModule, model after training at the worker.
* loss: Loss on last training batch, torch.tensor.
"""
train_config = sy.TrainConfig(
model=traced_model,
loss_fn=loss_fn,
batch_size=batch_size,
shuffle=True,
max_nr_batches=max_nr_batches,
epochs=1,
lr=lr,
)
train_config.send(worker)
logger.info(
"Training round %s, calling fit on worker: %s, lr = %s",
curr_round,
worker.id,
"{:.3f}".format(train_config.lr),
)
loss = await worker.async_fit(dataset_key="mnist", return_ids=[0])
logger.info("Training round: %s, worker: %s, avg_loss: %s", curr_round, worker.id, loss.mean())
model = train_config.model_ptr.get().obj
return worker.id, model, loss
def evaluate_models_on_test_data(test_loader, results):
np.set_printoptions(formatter={"float": "{: .0f}".format})
for worker_id, worker_model, _ in results:
evaluate_model(worker_id, worker_model, "cpu", test_loader, print_target_hist=False)
def evaluate_model(worker_id, model, device, test_loader, print_target_hist=False):
model.eval()
test_loss = 0.0
correct = 0
hist_target = np.zeros(10)
hist_pred = np.zeros(10)
with torch.no_grad():
for data, target in test_loader:
data, target = data.to(device), target.to(device)
hist, _ = np.histogram(target, bins=10, range=(0, 10))
hist_target += hist
output = model(data)
test_loss += loss_fn(output, target).item() # sum up batch loss
pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
hist, _ = np.histogram(pred, bins=10, range=(0, 10))
hist_pred += hist
correct += pred.eq(target.view_as(pred)).sum().item()
test_loss /= len(test_loader.dataset)
if print_target_hist:
logger.info("Target histogram: %s", hist_target)
logger.info("Prediction hist.: %s", hist_pred)
logger.info(
"%s: Test set: Average loss: %s, Accuracy: %s/%s (%s)",
worker_id,
"{:.4f}".format(test_loss),
correct,
len(test_loader.dataset),
"{:.2f}".format(100.0 * correct / len(test_loader.dataset)),
)
async def main():
args = define_and_get_arguments()
hook = sy.TorchHook(torch)
kwargs_websocket = {"host": "localhost", "hook": hook, "verbose": args.verbose}
alice = workers.WebsocketClientWorker(id="alice", port=8777, **kwargs_websocket)
bob = workers.WebsocketClientWorker(id="bob", port=8778, **kwargs_websocket)
charlie = workers.WebsocketClientWorker(id="charlie", port=8779, **kwargs_websocket)
worker_instances = [alice, bob, charlie]
use_cuda = args.cuda and torch.cuda.is_available()
torch.manual_seed(args.seed)
device = torch.device("cuda" if use_cuda else "cpu")
kwargs = {"num_workers": 1, "pin_memory": True} if use_cuda else {}
test_loader = torch.utils.data.DataLoader(
datasets.MNIST(
"../data",
train=False,
transform=transforms.Compose(
[transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]
),
),
batch_size=args.test_batch_size,
shuffle=False,
drop_last=False,
**kwargs,
)
model = Net().to(device)
(data, target) = test_loader.__iter__().next()
traced_model = torch.jit.trace(model, data)
learning_rate = args.lr
for curr_round in range(1, args.training_rounds + 1):
logger.info("Starting training round %s/%s", curr_round, args.training_rounds)
results = await asyncio.gather(
*[
fit_model_on_worker(
worker=worker,
traced_model=traced_model,
batch_size=args.batch_size,
curr_round=curr_round,
max_nr_batches=args.federate_after_n_batches,
lr=learning_rate,
)
for worker in worker_instances
]
)
models = {}
loss_values = {}
test_models = curr_round % 10 == 1 or curr_round == args.training_rounds
if test_models:
evaluate_models_on_test_data(test_loader, results)
for worker_id, worker_model, worker_loss in results:
if worker_model is not None:
models[worker_id] = worker_model
loss_values[worker_id] = worker_loss
traced_model = utils.federated_avg(models)
if test_models:
evaluate_model(
"Federated model", traced_model, "cpu", test_loader, print_target_hist=True
)
# decay learning rate
learning_rate = max(0.98 * learning_rate, args.lr * 0.01)
if args.save_model:
torch.save(model.state_dict(), "mnist_cnn.pt")
if __name__ == "__main__":
# Logging setup
logger = logging.getLogger("run_websocket_server")
FORMAT = "%(asctime)s %(levelname)s %(filename)s(l:%(lineno)d, p:%(process)d) - %(message)s"
logging.basicConfig(format=FORMAT)
logger.setLevel(level=logging.DEBUG)
# Websockets setup
websockets_logger = logging.getLogger("websockets")
websockets_logger.setLevel(logging.INFO)
websockets_logger.addHandler(logging.StreamHandler())
# Run main
asyncio.get_event_loop().run_until_complete(main())
| 33.034483 | 106 | 0.63164 | import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision import transforms, datasets
import logging
import argparse
import sys
import asyncio
import numpy as np
import syft as sy
from syft import workers
from syft.frameworks.torch.federated import utils
logger = logging.getLogger(__name__)
LOG_INTERVAL = 25
@torch.jit.script
def loss_fn(pred, target):
return F.nll_loss(input=pred, target=target)
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.conv1 = nn.Conv2d(1, 20, 5, 1)
self.conv2 = nn.Conv2d(20, 50, 5, 1)
self.fc1 = nn.Linear(4 * 4 * 50, 500)
self.fc2 = nn.Linear(500, 10)
def forward(self, x):
x = F.relu(self.conv1(x))
x = F.max_pool2d(x, 2, 2)
x = F.relu(self.conv2(x))
x = F.max_pool2d(x, 2, 2)
x = x.view(-1, 4 * 4 * 50)
x = F.relu(self.fc1(x))
x = self.fc2(x)
return F.log_softmax(x, dim=1)
def define_and_get_arguments(args=sys.argv[1:]):
parser = argparse.ArgumentParser(
description="Run federated learning using websocket client workers."
)
parser.add_argument("--batch_size", type=int, default=32, help="batch size of the training")
parser.add_argument(
"--test_batch_size", type=int, default=128, help="batch size used for the test data"
)
parser.add_argument(
"--training_rounds", type=int, default=40, help="number of federated learning rounds"
)
parser.add_argument(
"--federate_after_n_batches",
type=int,
default=10,
help="number of training steps performed on each remote worker before averaging",
)
parser.add_argument("--lr", type=float, default=0.1, help="learning rate")
parser.add_argument("--cuda", action="store_true", help="use cuda")
parser.add_argument("--seed", type=int, default=1, help="seed used for randomization")
parser.add_argument("--save_model", action="store_true", help="if set, model will be saved")
parser.add_argument(
"--verbose",
"-v",
action="store_true",
help="if set, websocket client workers will be started in verbose mode",
)
args = parser.parse_args(args=args)
return args
async def fit_model_on_worker(
worker: workers.WebsocketClientWorker,
traced_model: torch.jit.ScriptModule,
batch_size: int,
curr_round: int,
max_nr_batches: int,
lr: float,
):
train_config = sy.TrainConfig(
model=traced_model,
loss_fn=loss_fn,
batch_size=batch_size,
shuffle=True,
max_nr_batches=max_nr_batches,
epochs=1,
lr=lr,
)
train_config.send(worker)
logger.info(
"Training round %s, calling fit on worker: %s, lr = %s",
curr_round,
worker.id,
"{:.3f}".format(train_config.lr),
)
loss = await worker.async_fit(dataset_key="mnist", return_ids=[0])
logger.info("Training round: %s, worker: %s, avg_loss: %s", curr_round, worker.id, loss.mean())
model = train_config.model_ptr.get().obj
return worker.id, model, loss
def evaluate_models_on_test_data(test_loader, results):
np.set_printoptions(formatter={"float": "{: .0f}".format})
for worker_id, worker_model, _ in results:
evaluate_model(worker_id, worker_model, "cpu", test_loader, print_target_hist=False)
def evaluate_model(worker_id, model, device, test_loader, print_target_hist=False):
model.eval()
test_loss = 0.0
correct = 0
hist_target = np.zeros(10)
hist_pred = np.zeros(10)
with torch.no_grad():
for data, target in test_loader:
data, target = data.to(device), target.to(device)
hist, _ = np.histogram(target, bins=10, range=(0, 10))
hist_target += hist
output = model(data)
test_loss += loss_fn(output, target).item()
pred = output.argmax(dim=1, keepdim=True)
hist, _ = np.histogram(pred, bins=10, range=(0, 10))
hist_pred += hist
correct += pred.eq(target.view_as(pred)).sum().item()
test_loss /= len(test_loader.dataset)
if print_target_hist:
logger.info("Target histogram: %s", hist_target)
logger.info("Prediction hist.: %s", hist_pred)
logger.info(
"%s: Test set: Average loss: %s, Accuracy: %s/%s (%s)",
worker_id,
"{:.4f}".format(test_loss),
correct,
len(test_loader.dataset),
"{:.2f}".format(100.0 * correct / len(test_loader.dataset)),
)
async def main():
args = define_and_get_arguments()
hook = sy.TorchHook(torch)
kwargs_websocket = {"host": "localhost", "hook": hook, "verbose": args.verbose}
alice = workers.WebsocketClientWorker(id="alice", port=8777, **kwargs_websocket)
bob = workers.WebsocketClientWorker(id="bob", port=8778, **kwargs_websocket)
charlie = workers.WebsocketClientWorker(id="charlie", port=8779, **kwargs_websocket)
worker_instances = [alice, bob, charlie]
use_cuda = args.cuda and torch.cuda.is_available()
torch.manual_seed(args.seed)
device = torch.device("cuda" if use_cuda else "cpu")
kwargs = {"num_workers": 1, "pin_memory": True} if use_cuda else {}
test_loader = torch.utils.data.DataLoader(
datasets.MNIST(
"../data",
train=False,
transform=transforms.Compose(
[transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]
),
),
batch_size=args.test_batch_size,
shuffle=False,
drop_last=False,
**kwargs,
)
model = Net().to(device)
(data, target) = test_loader.__iter__().next()
traced_model = torch.jit.trace(model, data)
learning_rate = args.lr
for curr_round in range(1, args.training_rounds + 1):
logger.info("Starting training round %s/%s", curr_round, args.training_rounds)
results = await asyncio.gather(
*[
fit_model_on_worker(
worker=worker,
traced_model=traced_model,
batch_size=args.batch_size,
curr_round=curr_round,
max_nr_batches=args.federate_after_n_batches,
lr=learning_rate,
)
for worker in worker_instances
]
)
models = {}
loss_values = {}
test_models = curr_round % 10 == 1 or curr_round == args.training_rounds
if test_models:
evaluate_models_on_test_data(test_loader, results)
for worker_id, worker_model, worker_loss in results:
if worker_model is not None:
models[worker_id] = worker_model
loss_values[worker_id] = worker_loss
traced_model = utils.federated_avg(models)
if test_models:
evaluate_model(
"Federated model", traced_model, "cpu", test_loader, print_target_hist=True
)
learning_rate = max(0.98 * learning_rate, args.lr * 0.01)
if args.save_model:
torch.save(model.state_dict(), "mnist_cnn.pt")
if __name__ == "__main__":
logger = logging.getLogger("run_websocket_server")
FORMAT = "%(asctime)s %(levelname)s %(filename)s(l:%(lineno)d, p:%(process)d) - %(message)s"
logging.basicConfig(format=FORMAT)
logger.setLevel(level=logging.DEBUG)
websockets_logger = logging.getLogger("websockets")
websockets_logger.setLevel(logging.INFO)
websockets_logger.addHandler(logging.StreamHandler())
asyncio.get_event_loop().run_until_complete(main())
| true | true |
f71a67e87a44037f0e910996ddb201d1c1d0ca36 | 373 | py | Python | Lib/site-packages/spyder/plugins/layout/__init__.py | hirorin-demon/hirorin-streamlit | 03fbb6f03ec94f909d451e708a3b30b177607695 | [
"0BSD"
] | 1 | 2021-06-20T14:52:40.000Z | 2021-06-20T14:52:40.000Z | spyder/plugins/layout/__init__.py | Pancakerr/spyder | 34a9878bba97f427fbdd7b4a6d77ac0651327565 | [
"MIT"
] | 1 | 2020-11-02T21:11:19.000Z | 2020-11-02T21:11:19.000Z | spyder/plugins/layout/__init__.py | Pancakerr/spyder | 34a9878bba97f427fbdd7b4a6d77ac0651327565 | [
"MIT"
] | 1 | 2020-06-14T07:03:50.000Z | 2020-06-14T07:03:50.000Z | # -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""
spyder.plugins.layout
=====================
Layout plugin.
"""
from spyder.plugins.layout.plugin import Layout
# The following statement is required to be able to grab internal plugins.
PLUGIN_CLASSES = [Layout]
| 20.722222 | 74 | 0.699732 |
from spyder.plugins.layout.plugin import Layout
PLUGIN_CLASSES = [Layout]
| true | true |
f71a685556aab5e675c6c3f4e360e0b1d91795d0 | 5,029 | py | Python | nezzle/graphics/arrows/basearrow.py | dwgoon/nezzle | c69d111ae5e57ee2a7db85e14299c23d3b98a6d5 | [
"MIT"
] | 2 | 2021-10-06T08:54:02.000Z | 2021-10-06T16:17:18.000Z | nezzle/graphics/arrows/basearrow.py | dwgoon/nezzle | c69d111ae5e57ee2a7db85e14299c23d3b98a6d5 | [
"MIT"
] | null | null | null | nezzle/graphics/arrows/basearrow.py | dwgoon/nezzle | c69d111ae5e57ee2a7db85e14299c23d3b98a6d5 | [
"MIT"
] | null | null | null | from qtpy.QtCore import QPointF
from nezzle.utils import TriggerDict
class BaseArrow(object):
ITEM_TYPE = 'BASE_HEAD'
DEFAULT_OFFSET = 4
def __init__(self, width, height, offset):
self._attr = TriggerDict()
self._attr['ITEM_TYPE'] = self.ITEM_TYPE
self._offset = offset
self._height = height
self._width = width
self._attr.set_trigger('WIDTH', self._trigger_set_width, when='set')
self._attr.set_trigger('HEIGHT', self._trigger_set_height, when='set')
self._attr.set_trigger('OFFSET', self._trigger_set_offset, when='set')
self._attr['WIDTH'] = width
self._attr['HEIGHT'] = height
self._attr['OFFSET'] = offset
# Read-write properties
@property
def parent(self):
return self._parent
@parent.setter
def parent(self, obj):
self._parent = obj
@property
def width(self):
return self._width
@width.setter
def width(self, val):
self._attr['WIDTH'] = val
self.update()
def _trigger_set_width(self, key, value):
self._width = value
return value
@property
def height(self):
return self._height
@height.setter
def height(self, val):
self._attr['HEIGHT'] = val
self.update()
def _trigger_set_height(self, key, value):
self._height = value
return value
@property
def offset(self):
return self._offset
@offset.setter
def offset(self, val):
if not hasattr(self, "_parent") or not self._parent:
raise ValueError("A edge should be assigned for this arrow before setting offset.")
self._attr['OFFSET'] = val
self.update()
def _trigger_set_offset(self, key, value):
self._offset = value
return value
def update(self):
self.parent.update()
def identify_points(self, head, edge_body_width, angle=None):
raise NotImplementedError("identify_pos should be implemented!")
def to_dict(self):
dict_head = {}
dict_head['ITEM_TYPE'] = self.ITEM_TYPE
dict_head['WIDTH'] = self.width
dict_head['HEIGHT'] = self.height
dict_head['OFFSET'] = self.offset
dict_head.update(self._attr)
return dict_head
@classmethod
def from_dict(cls, dict_head):
width = dict_head['WIDTH']
height = dict_head['HEIGHT']
offset = dict_head['OFFSET']
return cls(width, height, offset=offset)
class Triangle(BaseArrow):
ITEM_TYPE = "TRIANGLE"
DEFAULT_WIDTH = 10
DEFAULT_HEIGHT = 10
DEFAULT_OFFSET = 4
def __init__(self, width=None, height=None, offset=None, *args, **kwargs):
if not width:
width = Triangle.DEFAULT_WIDTH
if not height:
height = Triangle.DEFAULT_HEIGHT
if not offset:
offset = Triangle.DEFAULT_OFFSET
super().__init__(width, height, offset, *args, **kwargs)
def identify_points(self, head, edge_body_width, transform=None):
neck1 = head + QPointF(0, -edge_body_width/2)
neck2 = head + QPointF(0, +edge_body_width/2)
face1 = head + QPointF(0.0, -self.width/2)
face2 = head + QPointF(0.0, +self.width/2)
top = head + QPointF(self.height, 0)
points = [neck1, face1, top, face2, neck2]
# transform is a callable object, which defines its own transformation in __call__.
if transform:
for i, pt in enumerate(points):
points[i] = transform(pt, head)
return points
# end of def identify_pos
def set_size_from_edge(self, edge_width):
self.width = 5*edge_width
self.height = 5*edge_width
self.parent.update()
class Hammer(BaseArrow):
ITEM_TYPE = "HAMMER"
DEFAULT_WIDTH = 14
DEFAULT_HEIGHT = 2
DEFAULT_OFFSET = 4
def __init__(self, width=None, height=None, offset=None, *args, **kwargs):
if not width:
width = Hammer.DEFAULT_WIDTH
if not height:
height = Hammer.DEFAULT_HEIGHT
if not offset:
offset = Hammer.DEFAULT_OFFSET
super().__init__(width, height, offset, *args, **kwargs)
def identify_points(self, head, edge_body_width, transform=None):
neck1 = head + QPointF(0, -edge_body_width/2)
neck2 = head + QPointF(0, +edge_body_width/2)
face1 = head + QPointF(0, -self.width/2)
face2 = head + QPointF(self.height, -self.width/2)
face3 = head + QPointF(self.height, +self.width/2)
face4 = head + QPointF(0, +self.width/2)
points = [neck1, face1, face2, face3, face4, neck2]
if transform:
for i, pt in enumerate(points):
points[i] = transform(pt, head)
return points
# end of def identify_pos
def set_size_from_edge(self, edge_width):
self.width = 7*edge_width
self.height = edge_width
self.parent.update() | 26.329843 | 95 | 0.611652 | from qtpy.QtCore import QPointF
from nezzle.utils import TriggerDict
class BaseArrow(object):
ITEM_TYPE = 'BASE_HEAD'
DEFAULT_OFFSET = 4
def __init__(self, width, height, offset):
self._attr = TriggerDict()
self._attr['ITEM_TYPE'] = self.ITEM_TYPE
self._offset = offset
self._height = height
self._width = width
self._attr.set_trigger('WIDTH', self._trigger_set_width, when='set')
self._attr.set_trigger('HEIGHT', self._trigger_set_height, when='set')
self._attr.set_trigger('OFFSET', self._trigger_set_offset, when='set')
self._attr['WIDTH'] = width
self._attr['HEIGHT'] = height
self._attr['OFFSET'] = offset
@property
def parent(self):
return self._parent
@parent.setter
def parent(self, obj):
self._parent = obj
@property
def width(self):
return self._width
@width.setter
def width(self, val):
self._attr['WIDTH'] = val
self.update()
def _trigger_set_width(self, key, value):
self._width = value
return value
@property
def height(self):
return self._height
@height.setter
def height(self, val):
self._attr['HEIGHT'] = val
self.update()
def _trigger_set_height(self, key, value):
self._height = value
return value
@property
def offset(self):
return self._offset
@offset.setter
def offset(self, val):
if not hasattr(self, "_parent") or not self._parent:
raise ValueError("A edge should be assigned for this arrow before setting offset.")
self._attr['OFFSET'] = val
self.update()
def _trigger_set_offset(self, key, value):
self._offset = value
return value
def update(self):
self.parent.update()
def identify_points(self, head, edge_body_width, angle=None):
raise NotImplementedError("identify_pos should be implemented!")
def to_dict(self):
dict_head = {}
dict_head['ITEM_TYPE'] = self.ITEM_TYPE
dict_head['WIDTH'] = self.width
dict_head['HEIGHT'] = self.height
dict_head['OFFSET'] = self.offset
dict_head.update(self._attr)
return dict_head
@classmethod
def from_dict(cls, dict_head):
width = dict_head['WIDTH']
height = dict_head['HEIGHT']
offset = dict_head['OFFSET']
return cls(width, height, offset=offset)
class Triangle(BaseArrow):
ITEM_TYPE = "TRIANGLE"
DEFAULT_WIDTH = 10
DEFAULT_HEIGHT = 10
DEFAULT_OFFSET = 4
def __init__(self, width=None, height=None, offset=None, *args, **kwargs):
if not width:
width = Triangle.DEFAULT_WIDTH
if not height:
height = Triangle.DEFAULT_HEIGHT
if not offset:
offset = Triangle.DEFAULT_OFFSET
super().__init__(width, height, offset, *args, **kwargs)
def identify_points(self, head, edge_body_width, transform=None):
neck1 = head + QPointF(0, -edge_body_width/2)
neck2 = head + QPointF(0, +edge_body_width/2)
face1 = head + QPointF(0.0, -self.width/2)
face2 = head + QPointF(0.0, +self.width/2)
top = head + QPointF(self.height, 0)
points = [neck1, face1, top, face2, neck2]
if transform:
for i, pt in enumerate(points):
points[i] = transform(pt, head)
return points
def set_size_from_edge(self, edge_width):
self.width = 5*edge_width
self.height = 5*edge_width
self.parent.update()
class Hammer(BaseArrow):
ITEM_TYPE = "HAMMER"
DEFAULT_WIDTH = 14
DEFAULT_HEIGHT = 2
DEFAULT_OFFSET = 4
def __init__(self, width=None, height=None, offset=None, *args, **kwargs):
if not width:
width = Hammer.DEFAULT_WIDTH
if not height:
height = Hammer.DEFAULT_HEIGHT
if not offset:
offset = Hammer.DEFAULT_OFFSET
super().__init__(width, height, offset, *args, **kwargs)
def identify_points(self, head, edge_body_width, transform=None):
neck1 = head + QPointF(0, -edge_body_width/2)
neck2 = head + QPointF(0, +edge_body_width/2)
face1 = head + QPointF(0, -self.width/2)
face2 = head + QPointF(self.height, -self.width/2)
face3 = head + QPointF(self.height, +self.width/2)
face4 = head + QPointF(0, +self.width/2)
points = [neck1, face1, face2, face3, face4, neck2]
if transform:
for i, pt in enumerate(points):
points[i] = transform(pt, head)
return points
def set_size_from_edge(self, edge_width):
self.width = 7*edge_width
self.height = edge_width
self.parent.update() | true | true |
f71a69117f18301e660b95414a5b6b4799351cfc | 14,078 | py | Python | glance/tests/functional/test_api.py | ilay09/glance | 60814cb577401c121d5d786980b3b801be5f4e9e | [
"Apache-2.0"
] | null | null | null | glance/tests/functional/test_api.py | ilay09/glance | 60814cb577401c121d5d786980b3b801be5f4e9e | [
"Apache-2.0"
] | null | null | null | glance/tests/functional/test_api.py | ilay09/glance | 60814cb577401c121d5d786980b3b801be5f4e9e | [
"Apache-2.0"
] | null | null | null | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Version-independent api tests"""
import httplib2
from oslo_serialization import jsonutils
from six.moves import http_client
from glance.tests import functional
class TestApiVersions(functional.FunctionalTest):
def test_version_configurations(self):
"""Test that versioning is handled properly through all channels"""
# v1 and v2 api enabled
self.start_servers(**self.__dict__.copy())
url = 'http://127.0.0.1:%d/v%%s/' % self.api_port
versions = {'versions': [
{
'id': 'v2.5',
'status': 'CURRENT',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.4',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.3',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.2',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.1',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.0',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v1.1',
'status': 'DEPRECATED',
'links': [{'rel': 'self', 'href': url % '1'}],
},
{
'id': 'v1.0',
'status': 'DEPRECATED',
'links': [{'rel': 'self', 'href': url % '1'}],
},
]}
versions_json = jsonutils.dumps(versions)
# Verify version choices returned.
path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(versions_json, content)
def test_v2_api_configuration(self):
self.api_server.enable_v1_api = False
self.api_server.enable_v2_api = True
self.start_servers(**self.__dict__.copy())
url = 'http://127.0.0.1:%d/v%%s/' % self.api_port
versions = {'versions': [
{
'id': 'v2.5',
'status': 'CURRENT',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.4',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.3',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.2',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.1',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.0',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
]}
versions_json = jsonutils.dumps(versions)
# Verify version choices returned.
path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(versions_json, content)
def test_v1_api_configuration(self):
self.api_server.enable_v1_api = True
self.api_server.enable_v2_api = False
self.start_servers(**self.__dict__.copy())
url = 'http://127.0.0.1:%d/v%%s/' % self.api_port
versions = {'versions': [
{
'id': 'v1.1',
'status': 'DEPRECATED',
'links': [{'rel': 'self', 'href': url % '1'}],
},
{
'id': 'v1.0',
'status': 'DEPRECATED',
'links': [{'rel': 'self', 'href': url % '1'}],
},
]}
versions_json = jsonutils.dumps(versions)
# Verify version choices returned.
path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(versions_json, content)
class TestApiPaths(functional.FunctionalTest):
def setUp(self):
super(TestApiPaths, self).setUp()
self.start_servers(**self.__dict__.copy())
url = 'http://127.0.0.1:%d/v%%s/' % self.api_port
versions = {'versions': [
{
'id': 'v2.5',
'status': 'CURRENT',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.4',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.3',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.2',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.1',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.0',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v1.1',
'status': 'DEPRECATED',
'links': [{'rel': 'self', 'href': url % '1'}],
},
{
'id': 'v1.0',
'status': 'DEPRECATED',
'links': [{'rel': 'self', 'href': url % '1'}],
},
]}
self.versions_json = jsonutils.dumps(versions)
images = {'images': []}
self.images_json = jsonutils.dumps(images)
def test_get_root_path(self):
"""Assert GET / with `no Accept:` header.
Verify version choices returned.
Bug lp:803260 no Accept header causes a 500 in glance-api
"""
path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(self.versions_json, content)
def test_get_images_path(self):
"""Assert GET /images with `no Accept:` header.
Verify version choices returned.
"""
path = 'http://%s:%d/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(self.versions_json, content)
def test_get_v1_images_path(self):
"""GET /v1/images with `no Accept:` header.
Verify empty images list returned.
"""
path = 'http://%s:%d/v1/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.OK, response.status)
def test_get_root_path_with_unknown_header(self):
"""Assert GET / with Accept: unknown header
Verify version choices returned. Verify message in API log about
unknown accept header.
"""
path = 'http://%s:%d/' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
headers = {'Accept': 'unknown'}
response, content = http.request(path, 'GET', headers=headers)
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(self.versions_json, content)
def test_get_root_path_with_openstack_header(self):
"""Assert GET / with an Accept: application/vnd.openstack.images-v1
Verify empty image list returned
"""
path = 'http://%s:%d/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
headers = {'Accept': 'application/vnd.openstack.images-v1'}
response, content = http.request(path, 'GET', headers=headers)
self.assertEqual(http_client.OK, response.status)
self.assertEqual(self.images_json, content)
def test_get_images_path_with_openstack_header(self):
"""Assert GET /images with a
`Accept: application/vnd.openstack.compute-v1` header.
Verify version choices returned. Verify message in API log
about unknown accept header.
"""
path = 'http://%s:%d/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
headers = {'Accept': 'application/vnd.openstack.compute-v1'}
response, content = http.request(path, 'GET', headers=headers)
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(self.versions_json, content)
def test_get_v10_images_path(self):
"""Assert GET /v1.0/images with no Accept: header
Verify version choices returned
"""
path = 'http://%s:%d/v1.a/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
def test_get_v1a_images_path(self):
"""Assert GET /v1.a/images with no Accept: header
Verify version choices returned
"""
path = 'http://%s:%d/v1.a/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
def test_get_va1_images_path(self):
"""Assert GET /va.1/images with no Accept: header
Verify version choices returned
"""
path = 'http://%s:%d/va.1/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(self.versions_json, content)
def test_get_versions_path(self):
"""Assert GET /versions with no Accept: header
Verify version choices returned
"""
path = 'http://%s:%d/versions' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.OK, response.status)
self.assertEqual(self.versions_json, content)
def test_get_versions_path_with_openstack_header(self):
"""Assert GET /versions with the
`Accept: application/vnd.openstack.images-v1` header.
Verify version choices returned.
"""
path = 'http://%s:%d/versions' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
headers = {'Accept': 'application/vnd.openstack.images-v1'}
response, content = http.request(path, 'GET', headers=headers)
self.assertEqual(http_client.OK, response.status)
self.assertEqual(self.versions_json, content)
def test_get_v1_versions_path(self):
"""Assert GET /v1/versions with `no Accept:` header
Verify 404 returned
"""
path = 'http://%s:%d/v1/versions' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.NOT_FOUND, response.status)
def test_get_versions_choices(self):
"""Verify version choices returned"""
path = 'http://%s:%d/v10' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(self.versions_json, content)
def test_get_images_path_with_openstack_v2_header(self):
"""Assert GET /images with a
`Accept: application/vnd.openstack.compute-v2` header.
Verify version choices returned. Verify message in API log
about unknown version in accept header.
"""
path = 'http://%s:%d/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
headers = {'Accept': 'application/vnd.openstack.images-v10'}
response, content = http.request(path, 'GET', headers=headers)
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(self.versions_json, content)
def test_get_v12_images_path(self):
"""Assert GET /v1.2/images with `no Accept:` header
Verify version choices returned
"""
path = 'http://%s:%d/v1.2/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(self.versions_json, content)
| 38.359673 | 78 | 0.533954 |
import httplib2
from oslo_serialization import jsonutils
from six.moves import http_client
from glance.tests import functional
class TestApiVersions(functional.FunctionalTest):
def test_version_configurations(self):
self.start_servers(**self.__dict__.copy())
url = 'http://127.0.0.1:%d/v%%s/' % self.api_port
versions = {'versions': [
{
'id': 'v2.5',
'status': 'CURRENT',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.4',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.3',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.2',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.1',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.0',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v1.1',
'status': 'DEPRECATED',
'links': [{'rel': 'self', 'href': url % '1'}],
},
{
'id': 'v1.0',
'status': 'DEPRECATED',
'links': [{'rel': 'self', 'href': url % '1'}],
},
]}
versions_json = jsonutils.dumps(versions)
path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(versions_json, content)
def test_v2_api_configuration(self):
self.api_server.enable_v1_api = False
self.api_server.enable_v2_api = True
self.start_servers(**self.__dict__.copy())
url = 'http://127.0.0.1:%d/v%%s/' % self.api_port
versions = {'versions': [
{
'id': 'v2.5',
'status': 'CURRENT',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.4',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.3',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.2',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.1',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.0',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
]}
versions_json = jsonutils.dumps(versions)
path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(versions_json, content)
def test_v1_api_configuration(self):
self.api_server.enable_v1_api = True
self.api_server.enable_v2_api = False
self.start_servers(**self.__dict__.copy())
url = 'http://127.0.0.1:%d/v%%s/' % self.api_port
versions = {'versions': [
{
'id': 'v1.1',
'status': 'DEPRECATED',
'links': [{'rel': 'self', 'href': url % '1'}],
},
{
'id': 'v1.0',
'status': 'DEPRECATED',
'links': [{'rel': 'self', 'href': url % '1'}],
},
]}
versions_json = jsonutils.dumps(versions)
path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(versions_json, content)
class TestApiPaths(functional.FunctionalTest):
def setUp(self):
super(TestApiPaths, self).setUp()
self.start_servers(**self.__dict__.copy())
url = 'http://127.0.0.1:%d/v%%s/' % self.api_port
versions = {'versions': [
{
'id': 'v2.5',
'status': 'CURRENT',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.4',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.3',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.2',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.1',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.0',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v1.1',
'status': 'DEPRECATED',
'links': [{'rel': 'self', 'href': url % '1'}],
},
{
'id': 'v1.0',
'status': 'DEPRECATED',
'links': [{'rel': 'self', 'href': url % '1'}],
},
]}
self.versions_json = jsonutils.dumps(versions)
images = {'images': []}
self.images_json = jsonutils.dumps(images)
def test_get_root_path(self):
path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(self.versions_json, content)
def test_get_images_path(self):
path = 'http://%s:%d/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(self.versions_json, content)
def test_get_v1_images_path(self):
path = 'http://%s:%d/v1/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.OK, response.status)
def test_get_root_path_with_unknown_header(self):
path = 'http://%s:%d/' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
headers = {'Accept': 'unknown'}
response, content = http.request(path, 'GET', headers=headers)
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(self.versions_json, content)
def test_get_root_path_with_openstack_header(self):
path = 'http://%s:%d/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
headers = {'Accept': 'application/vnd.openstack.images-v1'}
response, content = http.request(path, 'GET', headers=headers)
self.assertEqual(http_client.OK, response.status)
self.assertEqual(self.images_json, content)
def test_get_images_path_with_openstack_header(self):
path = 'http://%s:%d/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
headers = {'Accept': 'application/vnd.openstack.compute-v1'}
response, content = http.request(path, 'GET', headers=headers)
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(self.versions_json, content)
def test_get_v10_images_path(self):
path = 'http://%s:%d/v1.a/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
def test_get_v1a_images_path(self):
path = 'http://%s:%d/v1.a/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
def test_get_va1_images_path(self):
path = 'http://%s:%d/va.1/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(self.versions_json, content)
def test_get_versions_path(self):
path = 'http://%s:%d/versions' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.OK, response.status)
self.assertEqual(self.versions_json, content)
def test_get_versions_path_with_openstack_header(self):
path = 'http://%s:%d/versions' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
headers = {'Accept': 'application/vnd.openstack.images-v1'}
response, content = http.request(path, 'GET', headers=headers)
self.assertEqual(http_client.OK, response.status)
self.assertEqual(self.versions_json, content)
def test_get_v1_versions_path(self):
path = 'http://%s:%d/v1/versions' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.NOT_FOUND, response.status)
def test_get_versions_choices(self):
path = 'http://%s:%d/v10' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(self.versions_json, content)
def test_get_images_path_with_openstack_v2_header(self):
path = 'http://%s:%d/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
headers = {'Accept': 'application/vnd.openstack.images-v10'}
response, content = http.request(path, 'GET', headers=headers)
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(self.versions_json, content)
def test_get_v12_images_path(self):
path = 'http://%s:%d/v1.2/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
self.assertEqual(self.versions_json, content)
| true | true |
f71a691a5ad95f1250cb884753aea776f113110d | 7,737 | py | Python | interface.py | Owiti-Charles/Password-Locker | 3e2a0fd883d033fe784af387b52d7360a1157d34 | [
"MIT"
] | 3 | 2019-08-31T08:48:15.000Z | 2021-12-14T08:21:05.000Z | interface.py | Owiti-Charles/Password-Locker | 3e2a0fd883d033fe784af387b52d7360a1157d34 | [
"MIT"
] | null | null | null | interface.py | Owiti-Charles/Password-Locker | 3e2a0fd883d033fe784af387b52d7360a1157d34 | [
"MIT"
] | 24 | 2020-03-09T10:42:17.000Z | 2022-02-20T19:25:56.000Z | #!/usr/bin/env python3.6
from passlock import User, Credentials
def function():
print(" ____ _____ _ ")
print(" | _ \ / ____|| | ")
print(" | |_) ) ____ ___ ___ / ____ | |__ _____ _ _ ____ ")
print(" | __/ / _ |/ __ / __ \___ \ | __) / _ \| '_|/ __ \ ")
print(" | | / (_| |\__ \ \__ \ ___ / | |___ ( (_) ) | | ___/ ")
print(" |_| \_____| ___/ ___/ |____/ |_____) \_____/|_| \____ ")
function()
def create_new_user(username,password):
'''
Function to create a new user with a username and password
'''
new_user = User(username,password)
return new_user
def save_user(user):
'''
Function to save a new user
'''
user.save_user()
def display_user():
"""
Function to display existing user
"""
return User.display_user()
def login_user(username,password):
"""
function that checks whether a user exist and then login the user in.
"""
check_user = Credentials.verify_user(username,password)
return check_user
def create_new_credential(account,userName,password):
"""
Function that creates new credentials for a given user account
"""
new_credential = Credentials(account,userName,password)
return new_credential
def save_credentials(credentials):
"""
Function to save Credentials to the credentials list
"""
credentials. save_details()
def display_accounts_details():
"""
Function that returns all the saved credential.
"""
return Credentials.display_credentials()
def delete_credential(credentials):
"""
Function to delete a Credentials from credentials list
"""
credentials.delete_credentials()
def find_credential(account):
"""
Function that finds a Credentials by an account name and returns the Credentials that belong to that account
"""
return Credentials.find_credential(account)
def check_credendtials(account):
"""
Function that check if a Credentials exists with that account name and return true or false
"""
return Credentials.if_credential_exist(account)
def generate_Password():
'''
generates a random password for the user.
'''
auto_password=Credentials.generatePassword()
return auto_password
def copy_password(account):
"""
A funct that copies the password using the pyperclip framework
We import the framework then declare a function that copies the emails.
"""
return Credentials.copy_password(account)
def passlocker():
print("Hello Welcome to your Accounts Password Store...\n Please enter one of the following to proceed.\n CA --- Create New Account \n LI --- Have An Account \n")
short_code=input("").lower().strip()
if short_code == "ca":
print("Sign Up")
print('*' * 50)
username = input("User_name: ")
while True:
print(" TP - To type your own pasword:\n GP - To generate random Password")
password_Choice = input().lower().strip()
if password_Choice == 'tp':
password = input("Enter Password\n")
break
elif password_Choice == 'gp':
password = generate_Password()
break
else:
print("Invalid password please try again")
save_user(create_new_user(username,password))
print("*"*85)
print(f"Hello {username}, Your account has been created succesfully! Your password is: {password}")
print("*"*85)
elif short_code == "li":
print("*"*50)
print("Enter your User name and your Password to log in:")
print('*' * 50)
username = input("User name: ")
password = input("password: ")
login = login_user(username,password)
if login_user == login:
print(f"Hello {username}.Welcome To PassWord Locker Manager")
print('\n')
while True:
print("Use these short codes:\n CC - Create a new credential \n DC - Display Credentials \n FC - Find a credential \n GP - Generate A randomn password \n D - Delete credential \n EX - Exit the application \n")
short_code = input().lower().strip()
if short_code == "cc":
print("Create New Credential")
print("."*20)
print("Account name ....")
account = input().lower()
print("Your Account username")
userName = input()
while True:
print(" TP - To type your own pasword if you already have an account:\n GP - To generate random Password")
password_Choice = input().lower().strip()
if password_Choice == 'tp':
password = input("Enter Your Own Password\n")
break
elif password_Choice == 'gp':
password = generate_Password()
break
else:
print("Invalid password please try again")
save_credentials(create_new_credential(account,userName,password))
print('\n')
print(f"Account Credential for: {account} - UserName: {userName} - Password:{password} created succesfully")
print('\n')
elif short_code == "dc":
if display_accounts_details():
print("Here's your list of acoounts: ")
print('*' * 30)
print('_'* 30)
for account in display_accounts_details():
print(f" Account:{account.account} \n User Name:{username}\n Password:{password}")
print('_'* 30)
print('*' * 30)
else:
print("You don't have any credentials saved yet..........")
elif short_code == "fc":
print("Enter the Account Name you want to search for")
search_name = input().lower()
if find_credential(search_name):
search_credential = find_credential(search_name)
print(f"Account Name : {search_credential.account}")
print('-' * 50)
print(f"User Name: {search_credential.userName} Password :{search_credential.password}")
print('-' * 50)
else:
print("That Credential does not exist")
print('\n')
elif short_code == "d":
print("Enter the account name of the Credentials you want to delete")
search_name = input().lower()
if find_credential(search_name):
search_credential = find_credential(search_name)
print("_"*50)
search_credential.delete_credentials()
print('\n')
print(f"Your stored credentials for : {search_credential.account} successfully deleted!!!")
print('\n')
else:
print("That Credential you want to delete does not exist in your store yet")
elif short_code == 'gp':
password = generate_Password()
print(f" {password} Has been generated succesfull. You can proceed to use it to your account")
elif short_code == 'ex':
print("Thanks for using passwords store manager.. See you next time!")
break
else:
print("Wrong entry... Check your entry again and let it match those in the menu")
else:
print("Please enter a valid input to continue")
if __name__ == '__main__':
passlocker() | 39.676923 | 217 | 0.569342 |
from passlock import User, Credentials
def function():
print(" ____ _____ _ ")
print(" | _ \ / ____|| | ")
print(" | |_) ) ____ ___ ___ / ____ | |__ _____ _ _ ____ ")
print(" | __/ / _ |/ __ / __ \___ \ | __) / _ \| '_|/ __ \ ")
print(" | | / (_| |\__ \ \__ \ ___ / | |___ ( (_) ) | | ___/ ")
print(" |_| \_____| ___/ ___/ |____/ |_____) \_____/|_| \____ ")
function()
def create_new_user(username,password):
new_user = User(username,password)
return new_user
def save_user(user):
user.save_user()
def display_user():
return User.display_user()
def login_user(username,password):
check_user = Credentials.verify_user(username,password)
return check_user
def create_new_credential(account,userName,password):
new_credential = Credentials(account,userName,password)
return new_credential
def save_credentials(credentials):
credentials. save_details()
def display_accounts_details():
return Credentials.display_credentials()
def delete_credential(credentials):
credentials.delete_credentials()
def find_credential(account):
return Credentials.find_credential(account)
def check_credendtials(account):
return Credentials.if_credential_exist(account)
def generate_Password():
auto_password=Credentials.generatePassword()
return auto_password
def copy_password(account):
return Credentials.copy_password(account)
def passlocker():
print("Hello Welcome to your Accounts Password Store...\n Please enter one of the following to proceed.\n CA --- Create New Account \n LI --- Have An Account \n")
short_code=input("").lower().strip()
if short_code == "ca":
print("Sign Up")
print('*' * 50)
username = input("User_name: ")
while True:
print(" TP - To type your own pasword:\n GP - To generate random Password")
password_Choice = input().lower().strip()
if password_Choice == 'tp':
password = input("Enter Password\n")
break
elif password_Choice == 'gp':
password = generate_Password()
break
else:
print("Invalid password please try again")
save_user(create_new_user(username,password))
print("*"*85)
print(f"Hello {username}, Your account has been created succesfully! Your password is: {password}")
print("*"*85)
elif short_code == "li":
print("*"*50)
print("Enter your User name and your Password to log in:")
print('*' * 50)
username = input("User name: ")
password = input("password: ")
login = login_user(username,password)
if login_user == login:
print(f"Hello {username}.Welcome To PassWord Locker Manager")
print('\n')
while True:
print("Use these short codes:\n CC - Create a new credential \n DC - Display Credentials \n FC - Find a credential \n GP - Generate A randomn password \n D - Delete credential \n EX - Exit the application \n")
short_code = input().lower().strip()
if short_code == "cc":
print("Create New Credential")
print("."*20)
print("Account name ....")
account = input().lower()
print("Your Account username")
userName = input()
while True:
print(" TP - To type your own pasword if you already have an account:\n GP - To generate random Password")
password_Choice = input().lower().strip()
if password_Choice == 'tp':
password = input("Enter Your Own Password\n")
break
elif password_Choice == 'gp':
password = generate_Password()
break
else:
print("Invalid password please try again")
save_credentials(create_new_credential(account,userName,password))
print('\n')
print(f"Account Credential for: {account} - UserName: {userName} - Password:{password} created succesfully")
print('\n')
elif short_code == "dc":
if display_accounts_details():
print("Here's your list of acoounts: ")
print('*' * 30)
print('_'* 30)
for account in display_accounts_details():
print(f" Account:{account.account} \n User Name:{username}\n Password:{password}")
print('_'* 30)
print('*' * 30)
else:
print("You don't have any credentials saved yet..........")
elif short_code == "fc":
print("Enter the Account Name you want to search for")
search_name = input().lower()
if find_credential(search_name):
search_credential = find_credential(search_name)
print(f"Account Name : {search_credential.account}")
print('-' * 50)
print(f"User Name: {search_credential.userName} Password :{search_credential.password}")
print('-' * 50)
else:
print("That Credential does not exist")
print('\n')
elif short_code == "d":
print("Enter the account name of the Credentials you want to delete")
search_name = input().lower()
if find_credential(search_name):
search_credential = find_credential(search_name)
print("_"*50)
search_credential.delete_credentials()
print('\n')
print(f"Your stored credentials for : {search_credential.account} successfully deleted!!!")
print('\n')
else:
print("That Credential you want to delete does not exist in your store yet")
elif short_code == 'gp':
password = generate_Password()
print(f" {password} Has been generated succesfull. You can proceed to use it to your account")
elif short_code == 'ex':
print("Thanks for using passwords store manager.. See you next time!")
break
else:
print("Wrong entry... Check your entry again and let it match those in the menu")
else:
print("Please enter a valid input to continue")
if __name__ == '__main__':
passlocker() | true | true |
f71a697a4e4fb47cb796149291e6b50fd45b68f7 | 2,233 | py | Python | v1.0.0.test/toontown/toon/NPCForceAcknowledge.py | TTOFFLINE-LEAK/ttoffline | bb0e91704a755d34983e94288d50288e46b68380 | [
"MIT"
] | 4 | 2019-07-01T15:46:43.000Z | 2021-07-23T16:26:48.000Z | v1.0.0.test/toontown/toon/NPCForceAcknowledge.py | TTOFFLINE-LEAK/ttoffline | bb0e91704a755d34983e94288d50288e46b68380 | [
"MIT"
] | 1 | 2019-06-29T03:40:05.000Z | 2021-06-13T01:15:16.000Z | v1.0.0.test/toontown/toon/NPCForceAcknowledge.py | TTOFFLINE-LEAK/ttoffline | bb0e91704a755d34983e94288d50288e46b68380 | [
"MIT"
] | 4 | 2019-07-28T21:18:46.000Z | 2021-02-25T06:37:25.000Z | from panda3d.core import *
from toontown.toontowngui import TTDialog
from toontown.toonbase import TTLocalizer
from direct.gui import DirectLabel
from toontown.quest import Quests
class NPCForceAcknowledge:
def __init__(self, doneEvent):
self.doneEvent = doneEvent
self.dialog = None
return
def enter(self):
doneStatus = {}
questHistory = base.localAvatar.getQuestHistory()
imgScale = 0.5
if questHistory != [] and questHistory != [1000] and questHistory != [101, 110]:
doneStatus['mode'] = 'complete'
messenger.send(self.doneEvent, [doneStatus])
elif len(base.localAvatar.quests) > 1 or len(base.localAvatar.quests) == 0:
doneStatus['mode'] = 'complete'
messenger.send(self.doneEvent, [doneStatus])
elif base.localAvatar.quests[0][0] != Quests.TROLLEY_QUEST_ID:
doneStatus['mode'] = 'complete'
messenger.send(self.doneEvent, [doneStatus])
else:
base.localAvatar.b_setAnimState('neutral', 1)
doneStatus['mode'] = 'incomplete'
self.doneStatus = doneStatus
imageModel = loader.loadModel('phase_4/models/gui/tfa_images')
if Quests.avatarHasTrolleyQuest(base.localAvatar):
if base.localAvatar.quests[0][4] != 0:
imgNodePath = imageModel.find('**/hq-dialog-image')
imgPos = (0, 0, -0.02)
msg = TTLocalizer.NPCForceAcknowledgeMessage2
else:
imgNodePath = imageModel.find('**/trolley-dialog-image')
imgPos = (0, 0, 0.04)
msg = TTLocalizer.NPCForceAcknowledgeMessage
self.dialog = TTDialog.TTDialog(text=msg, command=self.handleOk, style=TTDialog.Acknowledge)
imgLabel = DirectLabel.DirectLabel(parent=self.dialog, relief=None, pos=imgPos, scale=TTLocalizer.NPCFimgLabel, image=imgNodePath, image_scale=imgScale)
return
def exit(self):
if self.dialog:
self.dialog.cleanup()
self.dialog = None
return
def handleOk(self, value):
messenger.send(self.doneEvent, [self.doneStatus]) | 42.942308 | 164 | 0.617555 | from panda3d.core import *
from toontown.toontowngui import TTDialog
from toontown.toonbase import TTLocalizer
from direct.gui import DirectLabel
from toontown.quest import Quests
class NPCForceAcknowledge:
def __init__(self, doneEvent):
self.doneEvent = doneEvent
self.dialog = None
return
def enter(self):
doneStatus = {}
questHistory = base.localAvatar.getQuestHistory()
imgScale = 0.5
if questHistory != [] and questHistory != [1000] and questHistory != [101, 110]:
doneStatus['mode'] = 'complete'
messenger.send(self.doneEvent, [doneStatus])
elif len(base.localAvatar.quests) > 1 or len(base.localAvatar.quests) == 0:
doneStatus['mode'] = 'complete'
messenger.send(self.doneEvent, [doneStatus])
elif base.localAvatar.quests[0][0] != Quests.TROLLEY_QUEST_ID:
doneStatus['mode'] = 'complete'
messenger.send(self.doneEvent, [doneStatus])
else:
base.localAvatar.b_setAnimState('neutral', 1)
doneStatus['mode'] = 'incomplete'
self.doneStatus = doneStatus
imageModel = loader.loadModel('phase_4/models/gui/tfa_images')
if Quests.avatarHasTrolleyQuest(base.localAvatar):
if base.localAvatar.quests[0][4] != 0:
imgNodePath = imageModel.find('**/hq-dialog-image')
imgPos = (0, 0, -0.02)
msg = TTLocalizer.NPCForceAcknowledgeMessage2
else:
imgNodePath = imageModel.find('**/trolley-dialog-image')
imgPos = (0, 0, 0.04)
msg = TTLocalizer.NPCForceAcknowledgeMessage
self.dialog = TTDialog.TTDialog(text=msg, command=self.handleOk, style=TTDialog.Acknowledge)
imgLabel = DirectLabel.DirectLabel(parent=self.dialog, relief=None, pos=imgPos, scale=TTLocalizer.NPCFimgLabel, image=imgNodePath, image_scale=imgScale)
return
def exit(self):
if self.dialog:
self.dialog.cleanup()
self.dialog = None
return
def handleOk(self, value):
messenger.send(self.doneEvent, [self.doneStatus]) | true | true |
f71a6b6d7ebfa629b63064b6a06dfb7bca79a040 | 2,157 | py | Python | htmltreediff/edit_script_runner.py | nomadicfm/htmltreediff | 02a27b2339d5a9a96902eed5d12bca1b755bb109 | [
"BSD-3-Clause"
] | 3 | 2015-04-04T20:35:17.000Z | 2021-08-06T16:51:09.000Z | htmltreediff/edit_script_runner.py | tex/htmltreediff | ce5a94edd0cfb05ed5130aaed3f06c63668df127 | [
"BSD-3-Clause"
] | 14 | 2015-01-15T16:03:14.000Z | 2020-03-23T16:29:02.000Z | htmltreediff/edit_script_runner.py | tex/htmltreediff | ce5a94edd0cfb05ed5130aaed3f06c63668df127 | [
"BSD-3-Clause"
] | 2 | 2017-05-16T04:17:46.000Z | 2018-04-30T20:05:32.000Z | from xml.dom import Node
from htmltreediff.util import (
get_child,
get_location,
remove_node,
insert_or_append,
)
class EditScriptRunner(object):
def __init__(self, dom, edit_script):
self.dom = dom
self.edit_script = edit_script
self.del_nodes = []
self.ins_nodes = []
# edit script actions #
def action_delete(self, node):
parent = node.parentNode
next_sibling = node.nextSibling
remove_node(node)
node.orig_parent = parent
node.orig_next_sibling = next_sibling
self.del_nodes.append(node)
def action_insert(
self,
parent,
child_index,
node_type=None,
node_name=None,
node_value=None,
attributes=None,
):
node = None
if node_type == Node.ELEMENT_NODE:
node = self.dom.createElement(node_name)
if attributes:
for key, value in attributes.items():
node.setAttribute(key, value)
elif node_type == Node.TEXT_NODE:
node = self.dom.createTextNode(node_value)
if node is not None:
self.action_insert_node(parent, child_index, node)
def action_insert_node(self, parent, child_index, node):
next_sibling = get_child(parent, child_index)
insert_or_append(parent, node, next_sibling)
# add node to ins_nodes
assert node.parentNode is not None
node.orig_parent = parent
node.orig_next_sibling = next_sibling
self.ins_nodes.append(node)
# script running #
def run_edit_script(self):
"""
Run an xml edit script, and return the new html produced.
"""
for action, location, properties in self.edit_script:
if action == 'delete':
node = get_location(self.dom, location)
self.action_delete(node)
elif action == 'insert':
parent = get_location(self.dom, location[:-1])
child_index = location[-1]
self.action_insert(parent, child_index, **properties)
return self.dom
| 30.814286 | 69 | 0.601298 | from xml.dom import Node
from htmltreediff.util import (
get_child,
get_location,
remove_node,
insert_or_append,
)
class EditScriptRunner(object):
def __init__(self, dom, edit_script):
self.dom = dom
self.edit_script = edit_script
self.del_nodes = []
self.ins_nodes = []
def action_delete(self, node):
parent = node.parentNode
next_sibling = node.nextSibling
remove_node(node)
node.orig_parent = parent
node.orig_next_sibling = next_sibling
self.del_nodes.append(node)
def action_insert(
self,
parent,
child_index,
node_type=None,
node_name=None,
node_value=None,
attributes=None,
):
node = None
if node_type == Node.ELEMENT_NODE:
node = self.dom.createElement(node_name)
if attributes:
for key, value in attributes.items():
node.setAttribute(key, value)
elif node_type == Node.TEXT_NODE:
node = self.dom.createTextNode(node_value)
if node is not None:
self.action_insert_node(parent, child_index, node)
def action_insert_node(self, parent, child_index, node):
next_sibling = get_child(parent, child_index)
insert_or_append(parent, node, next_sibling)
assert node.parentNode is not None
node.orig_parent = parent
node.orig_next_sibling = next_sibling
self.ins_nodes.append(node)
def run_edit_script(self):
for action, location, properties in self.edit_script:
if action == 'delete':
node = get_location(self.dom, location)
self.action_delete(node)
elif action == 'insert':
parent = get_location(self.dom, location[:-1])
child_index = location[-1]
self.action_insert(parent, child_index, **properties)
return self.dom
| true | true |
f71a6bcaeb8ae82f35824738ce05e63e951e4767 | 4,632 | py | Python | archives/src/episode7/he_is_back.py | NovelBox/sherlock-no-adventure | 9fe59ade8446d5c27e7bd390de9de42e26fc63a1 | [
"MIT"
] | null | null | null | archives/src/episode7/he_is_back.py | NovelBox/sherlock-no-adventure | 9fe59ade8446d5c27e7bd390de9de42e26fc63a1 | [
"MIT"
] | null | null | null | archives/src/episode7/he_is_back.py | NovelBox/sherlock-no-adventure | 9fe59ade8446d5c27e7bd390de9de42e26fc63a1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
'''
Episode 7-3
'''
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
sys.path.append('storybuilder')
from storybuilder.builder.world import World
# DEFINE
TITLE = "英雄の帰還"
# NOTE: outlines
ABSTRACT = """
変身して$sherlockたちを追い詰める$jake。しかし$sherlockの機転で工場に穴を開け、日光を浴びせかけることで$jakeは皮膚から大量に出血し、爆発した。
その爆音を聞いて$limeたちが駆けつける。$maryが身を挺して$sherlockを守っていたが、$maryは大怪我を負ってしまった。入院することになる$mary。
戻った$sherlockは、一旦$wilsonの家で$limeたちに事情を語る。
$morianoとの対決により滝壺に落下し、死を覚悟した$sherlockだったが、$maryが繕ってくれた服の裾が引っかかり、何とか死だけは免れた。
ただ大怪我をしており、そこを助けてくれたのが、$jackだった。彼女の別荘で回復するまで休養しながら各国の情報を集め、$moriano配下の動向を追いかけていた。
未だに$sherlockを探す動きが見えたので、おびき出すために空き家の件をでっち上げた。だがそれを利用した$jakeにより$maryがおびき出された、というのが今回の一件だった。
$sherlockは$maryに預けておいた$blue_stoneを取り戻す必要があると言う。
しかし$sherlockたちが病院に駆けつけると、$maryの姿が消えていた。
"""
# Episode
def main(w: World):
    """Build and return the episode object for this chapter.

    Assembles the episode from plot beats (setup / turn points / resolution)
    and a long list of plot notes, all expressed as storybuilder calls on the
    ``World`` instance.  The ``$name`` placeholders inside the strings are
    resolved by the storybuilder framework at render time, so the Japanese
    text below is runtime data and must not be edited as documentation.
    """
    # NOTE
    return w.episode(TITLE,
        w.plot_setup("連続殺人犯$jakeは$maryを殺そうとする"),
        w.plot_turnpoint("そこにホームレスが助けに入る"),
        w.plot_develop("$sherlockは$jakeがどんな人生を歩んできたかを全て言い当て$jakeの牙を無力化しようとする"),
        w.plot_turnpoint("$transformした$maryにより$sherlockが守られるが、彼女が負傷する"),
        w.plot_resolve("$sherlockが呼んでおいた警察により$jakeは捕らえられた。$maryは入院し、$sherlockも治療を受ける"),
        w.plot_turnpoint("入院している$maryから$blue_stoneを貰おうと思ったが$patosonにより連れ出された後だった"),
        w.plot_note("$maryは病室で目覚める"),
        w.plot_note("そこには$patsonの姿があった"),
        w.plot_note("$maryは$sherlockは? と尋ねるが、わからないと言われる"),
        w.plot_note("$patsonは$maryへの事情聴取を行う"),
        w.plot_note("一体あそこで何を見たのか"),
        w.plot_note("$maryはその黒焦げの遺体が、連続猟奇殺人事件の犯人だと証言した"),
        w.plot_note("$patsonは$jakeがそう告白したのか? と尋ねた"),
        #
        w.plot_note("$limeは$ignesたちから$maryが爆発現場で発見されたと聞く"),
        w.plot_note("その$ignesはホームレスと仲良さそうに話している"),
        w.plot_note("その男こそ$sherlockだった"),
        w.plot_note("$limeは驚き、事情を聞く"),
        w.plot_note("$sherlockは実はずいぶん前に国内に戻ってきていて、$ignesは事情を知らされていた"),
        w.plot_note("$sherlockを狙う連中をごまかすために、色々と嘘の情報をばらまいていた"),
        w.plot_note("空き家情報も嘘のものだったが、それを使って猟奇殺人犯の$jakeが細工をし、$maryをおびき出した"),
        w.plot_note("それを先導した人間が誰かいる、と$sherlockは言う"),
        w.plot_note("滝壺から落ちたあと、$jackに助けられ、彼女の隠れ家で治療をしてもらっていた"),
        w.plot_note("今回殺害されていた$ronaldが所有していた最後の$black_stoneが盗まれたことがわかり、戻ってきた"),
        w.plot_note("四つ$stoneを揃えられるとまずい、と$shserlockは言う"),
        w.plot_note("ひとまず$maryの様子を見に行くことにし、タクシーを拾う(これが$jack)"),
        #
        w.plot_note("病院にやってくると先に様子をみにきていた$refiがいる"),
        w.plot_note("$refiは泣きそうになって、$maryを$patsonが連れ出したという"),
        w.plot_note("$sherlockはそれで理解し、すぐに大聖堂に向かうと"),
        w.plot_note("しかし$wilsonがいない。タクシー運転手に頼んで向かってもらう"),
        #
        w.plot_note("車内で説明する$sherlock"),
        w.plot_note("四つの$stoneは$boss復活の儀式に必要な祭具だった"),
        w.plot_note("かつて$bossを倒した$heroたちの神器にはまっていたものだが、$bossの力を吸収し、封じ込めたもの"),
        w.plot_note("それが時代を経て、売られたり、盗まれたりし、行方不明になった"),
        w.plot_note("今ある多くはレプリカだという"),
        w.plot_note("実際に四つ揃え、かつての$boss城があった場所で儀式を行う"),
        w.plot_note("それが大聖堂だという"),
        w.plot_note("$boss城を封じる目的であの場所に建っていたのだ"),
        w.plot_note("昨年春にあった地震は儀式の失敗だという"),
        w.plot_note("その頃はまだ何が必要なのか、すべて判明していなかった。だが$stein教授により解明された"),
        w.plot_note("その資料は$morianoにより盗まれ、紛失している"),
        w.plot_note("実際にどういうものなのかは$sherlockも知らない"),
        #
        "$wilsonは最後に登場",
        w.plot_note("大聖堂にやってくると、何があったのか警官($parkerたち)が警備していた"),
        w.plot_note("巨大な爆弾が見つかったというのでみんなを避難させるように言われたと"),
        w.plot_note("そこに$restradeもやってきて、困惑している"),
        w.plot_note("一体何をやってるんだ、$patsonはと"),
        w.plot_note("$sherlockはすぐ$patsonの家を調べるように言う。彼が$cultXの手先だった"),
        w.plot_note("$sherlockは中に入る"),
        #
        w.plot_note("大聖堂の中は人がいなくなり、静まり返っていた"),
        w.plot_note("聖堂を進む"),
        w.plot_note("偉人たちの墓が並ぶ聖廟でもあった"),
        w.plot_note("その一つが開けられている。中身はない"),
        w.plot_note("扉があり、奥にいくと地下への階段"),
        w.plot_note("地下に降りていく$sherlockたち"),
        w.plot_note("そこには巨大なホールが広がっていた"),
        w.plot_note("祭壇には四つの$stoneが供えられ、$patsonが儀式を始めようとしている"),
        w.plot_note("誰も入れるなと言ったのに、と不敵な顔の$patson"),
        w.plot_note("$maryは倒れていた。服が少し破れている。中に$stoneを身に着けていたからだ"),
        w.plot_note("$sherlockがすぐにやめるように忠告する"),
        w.plot_note("儀式は失敗すると言った"),
        w.plot_note("しかし$patsonは儀式を行うべく、祝詞をとなえる"),
        w.plot_note("その$patsonを現れた$wilsonが$gunで撃ち抜いた"),
        w.plot_note("「間に合ってよかったよ」という$wilson"),
        outline=ABSTRACT)
| 44.970874 | 91 | 0.655009 |
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
sys.path.append('storybuilder')
from storybuilder.builder.world import World
TITLE = "英雄の帰還"
ABSTRACT = """
変身して$sherlockたちを追い詰める$jake。しかし$sherlockの機転で工場に穴を開け、日光を浴びせかけることで$jakeは皮膚から大量に出血し、爆発した。
その爆音を聞いて$limeたちが駆けつける。$maryが身を挺して$sherlockを守っていたが、$maryは大怪我を負ってしまった。入院することになる$mary。
戻った$sherlockは、一旦$wilsonの家で$limeたちに事情を語る。
$morianoとの対決により滝壺に落下し、死を覚悟した$sherlockだったが、$maryが繕ってくれた服の裾が引っかかり、何とか死だけは免れた。
ただ大怪我をしており、そこを助けてくれたのが、$jackだった。彼女の別荘で回復するまで休養しながら各国の情報を集め、$moriano配下の動向を追いかけていた。
未だに$sherlockを探す動きが見えたので、おびき出すために空き家の件をでっち上げた。だがそれを利用した$jakeにより$maryがおびき出された、というのが今回の一件だった。
$sherlockは$maryに預けておいた$blue_stoneを取り戻す必要があると言う。
しかし$sherlockたちが病院に駆けつけると、$maryの姿が消えていた。
"""
def main(w: World):
return w.episode(TITLE,
w.plot_setup("連続殺人犯$jakeは$maryを殺そうとする"),
w.plot_turnpoint("そこにホームレスが助けに入る"),
w.plot_develop("$sherlockは$jakeがどんな人生を歩んできたかを全て言い当て$jakeの牙を無力化しようとする"),
w.plot_turnpoint("$transformした$maryにより$sherlockが守られるが、彼女が負傷する"),
w.plot_resolve("$sherlockが呼んでおいた警察により$jakeは捕らえられた。$maryは入院し、$sherlockも治療を受ける"),
w.plot_turnpoint("入院している$maryから$blue_stoneを貰おうと思ったが$patosonにより連れ出された後だった"),
w.plot_note("$maryは病室で目覚める"),
w.plot_note("そこには$patsonの姿があった"),
w.plot_note("$maryは$sherlockは? と尋ねるが、わからないと言われる"),
w.plot_note("$patsonは$maryへの事情聴取を行う"),
w.plot_note("一体あそこで何を見たのか"),
w.plot_note("$maryはその黒焦げの遺体が、連続猟奇殺人事件の犯人だと証言した"),
w.plot_note("$patsonは$jakeがそう告白したのか? と尋ねた"),
w.plot_note("$limeは$ignesたちから$maryが爆発現場で発見されたと聞く"),
w.plot_note("その$ignesはホームレスと仲良さそうに話している"),
w.plot_note("その男こそ$sherlockだった"),
w.plot_note("$limeは驚き、事情を聞く"),
w.plot_note("$sherlockは実はずいぶん前に国内に戻ってきていて、$ignesは事情を知らされていた"),
w.plot_note("$sherlockを狙う連中をごまかすために、色々と嘘の情報をばらまいていた"),
w.plot_note("空き家情報も嘘のものだったが、それを使って猟奇殺人犯の$jakeが細工をし、$maryをおびき出した"),
w.plot_note("それを先導した人間が誰かいる、と$sherlockは言う"),
w.plot_note("滝壺から落ちたあと、$jackに助けられ、彼女の隠れ家で治療をしてもらっていた"),
w.plot_note("今回殺害されていた$ronaldが所有していた最後の$black_stoneが盗まれたことがわかり、戻ってきた"),
w.plot_note("四つ$stoneを揃えられるとまずい、と$shserlockは言う"),
w.plot_note("ひとまず$maryの様子を見に行くことにし、タクシーを拾う(これが$jack)"),
w.plot_note("病院にやってくると先に様子をみにきていた$refiがいる"),
w.plot_note("$refiは泣きそうになって、$maryを$patsonが連れ出したという"),
w.plot_note("$sherlockはそれで理解し、すぐに大聖堂に向かうと"),
w.plot_note("しかし$wilsonがいない。タクシー運転手に頼んで向かってもらう"),
w.plot_note("車内で説明する$sherlock"),
w.plot_note("四つの$stoneは$boss復活の儀式に必要な祭具だった"),
w.plot_note("かつて$bossを倒した$heroたちの神器にはまっていたものだが、$bossの力を吸収し、封じ込めたもの"),
w.plot_note("それが時代を経て、売られたり、盗まれたりし、行方不明になった"),
w.plot_note("今ある多くはレプリカだという"),
w.plot_note("実際に四つ揃え、かつての$boss城があった場所で儀式を行う"),
w.plot_note("それが大聖堂だという"),
w.plot_note("$boss城を封じる目的であの場所に建っていたのだ"),
w.plot_note("昨年春にあった地震は儀式の失敗だという"),
w.plot_note("その頃はまだ何が必要なのか、すべて判明していなかった。だが$stein教授により解明された"),
w.plot_note("その資料は$morianoにより盗まれ、紛失している"),
w.plot_note("実際にどういうものなのかは$sherlockも知らない"),
"$wilsonは最後に登場",
w.plot_note("大聖堂にやってくると、何があったのか警官($parkerたち)が警備していた"),
w.plot_note("巨大な爆弾が見つかったというのでみんなを避難させるように言われたと"),
w.plot_note("そこに$restradeもやってきて、困惑している"),
w.plot_note("一体何をやってるんだ、$patsonはと"),
w.plot_note("$sherlockはすぐ$patsonの家を調べるように言う。彼が$cultXの手先だった"),
w.plot_note("$sherlockは中に入る"),
w.plot_note("大聖堂の中は人がいなくなり、静まり返っていた"),
w.plot_note("聖堂を進む"),
w.plot_note("偉人たちの墓が並ぶ聖廟でもあった"),
w.plot_note("その一つが開けられている。中身はない"),
w.plot_note("扉があり、奥にいくと地下への階段"),
w.plot_note("地下に降りていく$sherlockたち"),
w.plot_note("そこには巨大なホールが広がっていた"),
w.plot_note("祭壇には四つの$stoneが供えられ、$patsonが儀式を始めようとしている"),
w.plot_note("誰も入れるなと言ったのに、と不敵な顔の$patson"),
w.plot_note("$maryは倒れていた。服が少し破れている。中に$stoneを身に着けていたからだ"),
w.plot_note("$sherlockがすぐにやめるように忠告する"),
w.plot_note("儀式は失敗すると言った"),
w.plot_note("しかし$patsonは儀式を行うべく、祝詞をとなえる"),
w.plot_note("その$patsonを現れた$wilsonが$gunで撃ち抜いた"),
w.plot_note("「間に合ってよかったよ」という$wilson"),
outline=ABSTRACT)
| true | true |
f71a6d9110d6e2d9754fc6dd198852e4d0c18cb8 | 14,416 | py | Python | tmapi/models/topic_map.py | ajenhl/django-tmapi | 02f009e1b508218cf330ca7748c3a1dd110f3e8d | [
"Apache-2.0"
] | 2 | 2015-03-22T03:23:36.000Z | 2017-01-08T10:57:18.000Z | tmapi/models/topic_map.py | ajenhl/django-tmapi | 02f009e1b508218cf330ca7748c3a1dd110f3e8d | [
"Apache-2.0"
] | null | null | null | tmapi/models/topic_map.py | ajenhl/django-tmapi | 02f009e1b508218cf330ca7748c3a1dd110f3e8d | [
"Apache-2.0"
] | 1 | 2020-12-28T04:40:34.000Z | 2020-12-28T04:40:34.000Z | # Copyright 2011 Jamie Norrish (jamie@artefact.org.nz)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.contrib.sites.models import Site
from django.db import models
from tmapi.exceptions import ModelConstraintException, \
UnsupportedOperationException
from tmapi.indices.literal_index import LiteralIndex
from tmapi.indices.scoped_index import ScopedIndex
from tmapi.indices.type_instance_index import TypeInstanceIndex
from association import Association
from construct_fields import BaseConstructFields
from identifier import Identifier
from item_identifier import ItemIdentifier
from locator import Locator
from reifiable import Reifiable
from subject_identifier import SubjectIdentifier
from subject_locator import SubjectLocator
from topic import Topic
from copy_utils import copy
class TopicMap (BaseConstructFields, Reifiable):
    """Represents a topic map item."""

    topic_map_system = models.ForeignKey('TopicMapSystem',
                                         related_name='topic_maps')
    iri = models.CharField(max_length=512)
    title = models.CharField(max_length=128, blank=True)
    base_address = models.CharField(max_length=512, blank=True)

    class Meta:
        app_label = 'tmapi'

    def __init__ (self, *args, **kwargs):
        super(TopicMap, self).__init__(*args, **kwargs)
        # Per-instance cache of index objects, keyed by index class.
        self._indices = {}

    def create_association (self, association_type, scope=None,
                            proxy=Association):
        """Creates an `Association` in this topic map with the
        specified type and scope.

        :param association_type: the association type
        :type association_type: `Topic`
        :param scope: scope
        :type scope: list of `Topic`s
        :param proxy: Django proxy model class
        :type proxy: class
        :rtype: `Association`

        """
        if association_type is None:
            raise ModelConstraintException(self, 'The type may not be None')
        if self != association_type.topic_map:
            raise ModelConstraintException(
                self, 'The type is not from this topic map')
        association = proxy(type=association_type, topic_map=self)
        association.save()
        if scope is None:
            scope = []
        for topic in scope:
            if self != topic.topic_map:
                raise ModelConstraintException(
                    self, 'The theme is not from this topic map')
            association.scope.add(topic)
        return association

    def create_empty_topic (self):
        """Returns a `Topic` instance with no other information.

        :rtype: `Topic`

        """
        topic = Topic(topic_map=self)
        topic.save()
        return topic

    def create_locator (self, reference):
        """Returns a `Locator` instance representing the specified IRI
        reference.

        The specified IRI reference is assumed to be absolute.

        :param reference: a string which uses the IRI notation
        :type reference: string
        :rtype: `Locator`

        """
        return Locator(reference)

    def create_topic (self, proxy=Topic):
        """Returns a `Topic` instance with an automatically generated
        item identifier.

        This method never returns an existing `Topic` but creates a
        new one with an automatically generated item identifier.

        Returns the newly created `Topic` instance with an automatically
        generated item identifier.

        :param proxy: Django proxy model class
        :type proxy: class
        :rtype: `Topic`

        """
        topic = proxy(topic_map=self)
        topic.save()
        # The topic's database id is only available after save(), so the
        # auto-generated item identifier is built afterwards.
        address = 'http://%s/tmapi/iid/auto/%d' % \
            (Site.objects.get_current().domain, topic.id)
        ii = ItemIdentifier(address=address, containing_topic_map=self)
        ii.save()
        topic.item_identifiers.add(ii)
        return topic

    def create_topic_by_item_identifier (self, item_identifier):
        """Returns a `Topic` instance with the specified item identifier.

        This method returns either an existing `Topic` or creates a
        new `Topic` instance with the specified item identifier.

        If a topic with the specified item identifier exists in the
        topic map, that topic is returned. If a topic with a subject
        identifier equal to the specified item identifier exists, the
        specified item identifier is added to that topic and the topic
        is returned. If neither a topic with the specified item
        identifier nor with a subject identifier equal to the subject
        identifier exists, a topic with the item identifier is
        created.

        :param item_identifier: the item identifier the topic should contain
        :type item_identifier: `Locator`
        :rtype: `Topic`

        """
        if item_identifier is None:
            raise ModelConstraintException(
                self, 'The item identifier may not be None')
        reference = item_identifier.to_external_form()
        try:
            topic = self.topic_constructs.get(
                item_identifiers__address=reference)
        except Topic.DoesNotExist:
            try:
                topic = self.topic_constructs.get(
                    subject_identifiers__address=reference)
            except Topic.DoesNotExist:
                topic = Topic(topic_map=self)
                topic.save()
            # Attach the item identifier both to a topic found by subject
            # identifier and to a freshly created topic.
            ii = ItemIdentifier(address=reference, containing_topic_map=self)
            ii.save()
            topic.item_identifiers.add(ii)
        return topic

    def create_topic_by_subject_identifier (self, subject_identifier):
        """Returns a `Topic` instance with the specified subject identifier.

        This method returns either an existing `Topic` or creates a
        new `Topic` instance with the specified subject identifier.

        If a topic with the specified subject identifier exists in
        this topic map, that topic is returned. If a topic with an
        item identifier equal to the specified subject identifier
        exists, the specified subject identifier is added to that
        topic and the topic is returned. If neither a topic with the
        specified subject identifier nor with an item identifier equal
        to the subject identifier exists, a topic with the subject
        identifier is created.

        :param subject_identifier: the subject identifier the topic
          should contain
        :type subject_identifier: `Locator`
        :rtype: `Topic`

        """
        if subject_identifier is None:
            raise ModelConstraintException(
                self, 'The subject identifier may not be None')
        reference = subject_identifier.to_external_form()
        try:
            topic = self.topic_constructs.get(
                subject_identifiers__address=reference)
        except Topic.DoesNotExist:
            try:
                topic = self.topic_constructs.get(
                    item_identifiers__address=reference)
            except Topic.DoesNotExist:
                topic = Topic(topic_map=self)
                topic.save()
            # Attach the subject identifier both to a topic found by item
            # identifier and to a freshly created topic.
            si = SubjectIdentifier(topic=topic, address=reference,
                                   containing_topic_map=self)
            si.save()
            topic.subject_identifiers.add(si)
        return topic

    def create_topic_by_subject_locator (self, subject_locator):
        """Returns a `Topic` instance with the specified subject locator.

        This method returns either an existing `Topic` or creates a
        new `Topic` instance with the specified subject locator.

        :param subject_locator: the subject locator the topic should
          contain
        :type subject_locator: `Locator`
        :rtype: `Topic`

        """
        if subject_locator is None:
            raise ModelConstraintException(
                self, 'The subject locator may not be None')
        reference = subject_locator.to_external_form()
        try:
            topic = self.topic_constructs.get(
                subject_locators__address=reference)
        except Topic.DoesNotExist:
            topic = Topic(topic_map=self)
            topic.save()
            sl = SubjectLocator(topic=topic, address=reference,
                                containing_topic_map=self)
            sl.save()
            topic.subject_locators.add(sl)
        return topic

    def get_associations (self):
        """Returns all `Association`s contained in this topic map.

        :rtype: `QuerySet` of `Association`s

        """
        return self.association_constructs.all()

    def get_construct_by_id (self, id, proxy=None):
        """Returns a `Construct` by its (system specific) identifier.

        Returns None when no construct with the identifier exists, or
        when the identifier is not a valid (numeric) id string.

        :param id: the identifier of the construct to be returned
        :type id: string
        :param proxy: Django proxy model
        :type proxy: class
        :rtype: `Construct`, proxy object, or None

        """
        try:
            # ValueError is caught as well: a non-numeric id string means
            # "no such construct" rather than an error, matching the
            # documented None return.
            identifier = Identifier.objects.get(pk=int(id),
                                                containing_topic_map=self)
            construct = identifier.get_construct()
            if proxy is not None and construct is not None:
                construct = proxy.objects.get(pk=construct.id)
        except (Identifier.DoesNotExist, ValueError):
            construct = None
        return construct

    def get_construct_by_item_identifier (self, item_identifier):
        """Returns a `Construct` by its item identifier.

        :param item_identifier: the item identifier of the construct
          to be returned
        :type item_identifier: `Locator`
        :rtype: a construct or None

        """
        address = item_identifier.to_external_form()
        try:
            ii = ItemIdentifier.objects.get(address=address,
                                            containing_topic_map=self)
            construct = ii.get_construct()
        except ItemIdentifier.DoesNotExist:
            construct = None
        return construct

    def get_index (self, index_interface):
        """Returns the specified index.

        :param index_interface: the index to return
        :type index_interface: class
        :rtype: `Index`

        """
        if index_interface not in (LiteralIndex, ScopedIndex,
                                   TypeInstanceIndex):
            raise UnsupportedOperationException(
                'This TMAPI implementation does not support that index')
        # Indices are created lazily and cached per topic map instance.
        if index_interface not in self._indices:
            self._indices[index_interface] = index_interface(self)
        return self._indices[index_interface]

    def get_locator (self):
        """Returns the `Locator` that was used to create the topic map.

        Note: The returned locator represents the storage address of
        the topic map and implies no further semantics.

        :rtype: `Locator`

        """
        return Locator(self.iri)

    def get_parent (self):
        """Returns None.

        :rtype: None

        """
        return None

    def get_topics (self):
        """Returns all `Topic`s contained in this topic map.

        :rtype: `QuerySet` of `Topic`s

        """
        return self.topic_constructs.all()

    def get_topic_by_subject_identifier (self, subject_identifier):
        """Returns a topic by its subject identifier.

        If no topic with the specified subject identifier exists, this
        method returns `None`.

        :param subject_identifier: the subject identifier of the topic
          to be returned
        :type subject_identifier: `Locator`
        :rtype: `Topic` or `None`

        """
        reference = subject_identifier.to_external_form()
        try:
            topic = self.topic_constructs.get(
                subject_identifiers__address=reference)
        except Topic.DoesNotExist:
            topic = None
        return topic

    def get_topic_by_subject_locator (self, subject_locator):
        """Returns a topic by its subject locator.

        If no topic with the specified subject locator exists, this
        method returns `None`.

        :param subject_locator: the subject locator of the topic to be
          returned
        :type subject_locator: `Locator`
        :rtype: `Topic` of `None`

        """
        reference = subject_locator.to_external_form()
        try:
            topic = self.topic_constructs.get(
                subject_locators__address=reference)
        except Topic.DoesNotExist:
            topic = None
        return topic

    def get_topic_map (self):
        """Returns self.

        :rtype: `TopicMap`

        """
        return self

    def merge_in (self, other):
        """Merges the topic map `other` into this topic map.

        All `Topic`s and `Association`s and all of their contents in
        `other` will be added to this topic map.

        All information items in `other` will be merged into this
        topic map as defined by the Topic Maps - Data Model (TMDM)
        merging rules.

        The merge process will not modify `other` in any way.

        If this topic map equals `other`, no changes are made to the
        topic map.

        :param other: the topic map to be merged with this topic map
          instance
        :type other: `TopicMap`

        """
        if other is None:
            raise ModelConstraintException(
                self, 'The topic map to merge in may not be None')
        copy(other, self)

    def remove (self):
        self.delete()

    def __eq__ (self, other):
        # NOTE(review): two unsaved instances (id is None on both) compare
        # equal here — confirm callers never compare unsaved topic maps.
        if isinstance(other, TopicMap) and self.id == other.id:
            return True
        return False

    def __ne__ (self, other):
        return not(self.__eq__(other))

    def __unicode__ (self):
        name = self.title or 'Topic map'
        return u'%s (%s)' % (name, self.iri)
| 34.821256 | 77 | 0.629231 |
from django.contrib.sites.models import Site
from django.db import models
from tmapi.exceptions import ModelConstraintException, \
UnsupportedOperationException
from tmapi.indices.literal_index import LiteralIndex
from tmapi.indices.scoped_index import ScopedIndex
from tmapi.indices.type_instance_index import TypeInstanceIndex
from association import Association
from construct_fields import BaseConstructFields
from identifier import Identifier
from item_identifier import ItemIdentifier
from locator import Locator
from reifiable import Reifiable
from subject_identifier import SubjectIdentifier
from subject_locator import SubjectLocator
from topic import Topic
from copy_utils import copy
class TopicMap (BaseConstructFields, Reifiable):
topic_map_system = models.ForeignKey('TopicMapSystem',
related_name='topic_maps')
iri = models.CharField(max_length=512)
title = models.CharField(max_length=128, blank=True)
base_address = models.CharField(max_length=512, blank=True)
class Meta:
app_label = 'tmapi'
def __init__ (self, *args, **kwargs):
super(TopicMap, self).__init__(*args, **kwargs)
self._indices = {}
def create_association (self, association_type, scope=None,
proxy=Association):
if association_type is None:
raise ModelConstraintException(self, 'The type may not be None')
if self != association_type.topic_map:
raise ModelConstraintException(
self, 'The type is not from this topic map')
association = proxy(type=association_type, topic_map=self)
association.save()
if scope is None:
scope = []
for topic in scope:
if self != topic.topic_map:
raise ModelConstraintException(
self, 'The theme is not from this topic map')
association.scope.add(topic)
return association
def create_empty_topic (self):
topic = Topic(topic_map=self)
topic.save()
return topic
def create_locator (self, reference):
return Locator(reference)
def create_topic (self, proxy=Topic):
topic = proxy(topic_map=self)
topic.save()
address = 'http://%s/tmapi/iid/auto/%d' % \
(Site.objects.get_current().domain, topic.id)
ii = ItemIdentifier(address=address, containing_topic_map=self)
ii.save()
topic.item_identifiers.add(ii)
return topic
def create_topic_by_item_identifier (self, item_identifier):
if item_identifier is None:
raise ModelConstraintException(
self, 'The item identifier may not be None')
reference = item_identifier.to_external_form()
try:
topic = self.topic_constructs.get(
item_identifiers__address=reference)
except Topic.DoesNotExist:
try:
topic = self.topic_constructs.get(
subject_identifiers__address=reference)
except Topic.DoesNotExist:
topic = Topic(topic_map=self)
topic.save()
ii = ItemIdentifier(address=reference, containing_topic_map=self)
ii.save()
topic.item_identifiers.add(ii)
return topic
def create_topic_by_subject_identifier (self, subject_identifier):
if subject_identifier is None:
raise ModelConstraintException(
self, 'The subject identifier may not be None')
reference = subject_identifier.to_external_form()
try:
topic = self.topic_constructs.get(
subject_identifiers__address=reference)
except Topic.DoesNotExist:
try:
topic = self.topic_constructs.get(
item_identifiers__address=reference)
except Topic.DoesNotExist:
topic = Topic(topic_map=self)
topic.save()
si = SubjectIdentifier(topic=topic, address=reference,
containing_topic_map=self)
si.save()
topic.subject_identifiers.add(si)
return topic
def create_topic_by_subject_locator (self, subject_locator):
if subject_locator is None:
raise ModelConstraintException(
self, 'The subject locator may not be None')
reference = subject_locator.to_external_form()
try:
topic = self.topic_constructs.get(
subject_locators__address=reference)
except Topic.DoesNotExist:
topic = Topic(topic_map=self)
topic.save()
sl = SubjectLocator(topic=topic, address=reference,
containing_topic_map=self)
sl.save()
topic.subject_locators.add(sl)
return topic
def get_associations (self):
return self.association_constructs.all()
def get_construct_by_id (self, id, proxy=None):
try:
identifier = Identifier.objects.get(pk=int(id),
containing_topic_map=self)
construct = identifier.get_construct()
if proxy is not None and construct is not None:
construct = proxy.objects.get(pk=construct.id)
except Identifier.DoesNotExist:
construct = None
return construct
def get_construct_by_item_identifier (self, item_identifier):
address = item_identifier.to_external_form()
try:
ii = ItemIdentifier.objects.get(address=address,
containing_topic_map=self)
construct = ii.get_construct()
except ItemIdentifier.DoesNotExist:
construct = None
return construct
def get_index (self, index_interface):
if index_interface not in (LiteralIndex, ScopedIndex,
TypeInstanceIndex):
raise UnsupportedOperationException(
'This TMAPI implementation does not support that index')
if index_interface not in self._indices:
self._indices[index_interface] = index_interface(self)
return self._indices[index_interface]
def get_locator (self):
return Locator(self.iri)
def get_parent (self):
return None
def get_topics (self):
return self.topic_constructs.all()
def get_topic_by_subject_identifier (self, subject_identifier):
reference = subject_identifier.to_external_form()
try:
topic = self.topic_constructs.get(
subject_identifiers__address=reference)
except Topic.DoesNotExist:
topic = None
return topic
def get_topic_by_subject_locator (self, subject_locator):
reference = subject_locator.to_external_form()
try:
topic = self.topic_constructs.get(
subject_locators__address=reference)
except Topic.DoesNotExist:
topic = None
return topic
def get_topic_map (self):
return self
def merge_in (self, other):
if other is None:
raise ModelConstraintException(
self, 'The topic map to merge in may not be None')
copy(other, self)
def remove (self):
self.delete()
def __eq__ (self, other):
if isinstance(other, TopicMap) and self.id == other.id:
return True
return False
def __ne__ (self, other):
return not(self.__eq__(other))
def __unicode__ (self):
name = self.title or 'Topic map'
return u'%s (%s)' % (name, self.iri)
| true | true |
f71a6db30e3de5c2849fe9a5b19812ba331899e0 | 2,275 | py | Python | python-sdk/tutorials/automl-with-azureml/forecasting-recipes-univariate/forecasting_script.py | Ali-ry/azureml-examples | 817ae89d2766dcafd70937a22cb3a80f100a2906 | [
"MIT"
] | null | null | null | python-sdk/tutorials/automl-with-azureml/forecasting-recipes-univariate/forecasting_script.py | Ali-ry/azureml-examples | 817ae89d2766dcafd70937a22cb3a80f100a2906 | [
"MIT"
] | null | null | null | python-sdk/tutorials/automl-with-azureml/forecasting-recipes-univariate/forecasting_script.py | Ali-ry/azureml-examples | 817ae89d2766dcafd70937a22cb3a80f100a2906 | [
"MIT"
] | null | null | null | """
This is the script that is executed on the compute instance. It relies
on the model.pkl file which is uploaded along with this script to the
compute instance.
"""
import argparse
from azureml.core import Dataset, Run
from azureml.automl.core.shared.constants import TimeSeriesInternal
from sklearn.externals import joblib
parser = argparse.ArgumentParser()
parser.add_argument(
"--target_column_name",
type=str,
dest="target_column_name",
help="Target Column Name",
)
parser.add_argument(
"--test_dataset", type=str, dest="test_dataset", help="Test Dataset"
)
args = parser.parse_args()
target_column_name = args.target_column_name
test_dataset_id = args.test_dataset
run = Run.get_context()
ws = run.experiment.workspace
# get the input dataset by id
test_dataset = Dataset.get_by_id(ws, id=test_dataset_id)
X_test = (
test_dataset.drop_columns(columns=[target_column_name])
.to_pandas_dataframe()
.reset_index(drop=True)
)
y_test_df = (
test_dataset.with_timestamp_columns(None)
.keep_columns(columns=[target_column_name])
.to_pandas_dataframe()
)
# generate forecast
fitted_model = joblib.load("model.pkl")
# We have default quantiles values set as below(95th percentile)
quantiles = [0.025, 0.5, 0.975]
predicted_column_name = "predicted"
PI = "prediction_interval"
fitted_model.quantiles = quantiles
pred_quantiles = fitted_model.forecast_quantiles(X_test)
pred_quantiles[PI] = pred_quantiles[[min(quantiles), max(quantiles)]].apply(
lambda x: "[{}, {}]".format(x[0], x[1]), axis=1
)
X_test[target_column_name] = y_test_df[target_column_name]
X_test[PI] = pred_quantiles[PI]
X_test[predicted_column_name] = pred_quantiles[0.5]
# drop rows where prediction or actuals are nan
# happens because of missing actuals
# or at edges of time due to lags/rolling windows
clean = X_test[
X_test[[target_column_name, predicted_column_name]].notnull().all(axis=1)
]
clean.rename(columns={target_column_name: "actual"}, inplace=True)
file_name = "outputs/predictions.csv"
export_csv = clean.to_csv(file_name, header=True, index=False) # added Index
# Upload the predictions into artifacts
run.upload_file(name=file_name, path_or_stream=file_name)
| 32.042254 | 78 | 0.744176 |
import argparse
from azureml.core import Dataset, Run
from azureml.automl.core.shared.constants import TimeSeriesInternal
from sklearn.externals import joblib
parser = argparse.ArgumentParser()
parser.add_argument(
"--target_column_name",
type=str,
dest="target_column_name",
help="Target Column Name",
)
parser.add_argument(
"--test_dataset", type=str, dest="test_dataset", help="Test Dataset"
)
args = parser.parse_args()
target_column_name = args.target_column_name
test_dataset_id = args.test_dataset
run = Run.get_context()
ws = run.experiment.workspace
test_dataset = Dataset.get_by_id(ws, id=test_dataset_id)
X_test = (
test_dataset.drop_columns(columns=[target_column_name])
.to_pandas_dataframe()
.reset_index(drop=True)
)
y_test_df = (
test_dataset.with_timestamp_columns(None)
.keep_columns(columns=[target_column_name])
.to_pandas_dataframe()
)
fitted_model = joblib.load("model.pkl")
quantiles = [0.025, 0.5, 0.975]
predicted_column_name = "predicted"
PI = "prediction_interval"
fitted_model.quantiles = quantiles
pred_quantiles = fitted_model.forecast_quantiles(X_test)
pred_quantiles[PI] = pred_quantiles[[min(quantiles), max(quantiles)]].apply(
lambda x: "[{}, {}]".format(x[0], x[1]), axis=1
)
X_test[target_column_name] = y_test_df[target_column_name]
X_test[PI] = pred_quantiles[PI]
X_test[predicted_column_name] = pred_quantiles[0.5]
clean = X_test[
X_test[[target_column_name, predicted_column_name]].notnull().all(axis=1)
]
clean.rename(columns={target_column_name: "actual"}, inplace=True)
file_name = "outputs/predictions.csv"
export_csv = clean.to_csv(file_name, header=True, index=False)
run.upload_file(name=file_name, path_or_stream=file_name)
| true | true |
f71a6e91a09965fe94395d5877040ab4bd936107 | 4,760 | py | Python | matching/matching.py | nielsbril/best | 8a902293605f1bee1abf3ca66ae3708706658772 | [
"MIT"
] | 21 | 2019-07-02T05:54:22.000Z | 2021-04-07T13:52:50.000Z | matching/matching.py | nielsbril/best | 8a902293605f1bee1abf3ca66ae3708706658772 | [
"MIT"
] | 55 | 2019-07-03T18:59:26.000Z | 2020-12-15T08:10:00.000Z | matching/matching.py | nielsbril/best | 8a902293605f1bee1abf3ca66ae3708706658772 | [
"MIT"
] | 9 | 2019-09-10T13:38:46.000Z | 2021-09-01T08:02:42.000Z | import pandas as pd
import argparse
import logging
import sys
import json
def get_best_logger(log_file, verbose):
    """Return the module logger, writing to both *log_file* and the console.

    :param log_file: path of the file to log to
    :param verbose: when True, lower the logger level to DEBUG; otherwise
        the logger keeps its default (effective) level
    :rtype: logging.Logger
    """
    best_logger = logging.getLogger(__name__)
    if verbose:
        best_logger.setLevel('DEBUG')
    # One shared format for both destinations.
    fmt = logging.Formatter(
        '%(asctime)s - %(name)s : %(levelname)s - %(message)s')
    # File handler first, then console handler — same order as before.
    for handler in (logging.FileHandler(log_file), logging.StreamHandler()):
        handler.setFormatter(fmt)
        best_logger.addHandler(handler)
    return best_logger
def compare_addresses(args):
    """Compare the addresses of two files.

    Loads the BOSA address file and the comparison file (both csv), builds a
    lookup table keyed on the mapped BOSA columns, matches the comparison
    rows against it and writes the result to ``args.output_file``.  Exits
    the process on any I/O or column-mapping error.
    """
    def _load_csv(path, start_msg, done_msg):
        # Shared read-with-logging helper; aborts the run on I/O failure.
        logger.info(start_msg)
        try:
            frame = pd.read_csv(path)
            logger.info(done_msg)
            return frame
        except IOError as io:
            logger.fatal(io)
            sys.exit(1)

    bosa = _load_csv(args.input_file_1,
                     'Started reading BOSA address file',
                     'Read the BOSA address file')
    comparison = _load_csv(args.input_file_2,
                           'Started reading comparison file',
                           'Read the comparison file')

    # Resolve the column mapping: comparison-file keys paired with the
    # positional indices of the corresponding BOSA columns.
    comp_keys, bosa_ids = [], []
    for comp_key, bosa_key in args.mapping.items():
        comp_keys.append(comp_key)
        try:
            bosa_ids.append(bosa.columns.get_loc(bosa_key))
        except KeyError as ke:
            logger.error(
                'Column %s of column mapping (%s -> %s) not found in BOSA file', ke, comp_key, bosa_key)
            sys.exit(1)

    # Build the lookup table: lower-cased mapped values -> full BOSA row.
    address_dict = {}
    logger.info('Building data structure to perform matching')
    for i, row in enumerate(bosa.values):
        if i % 50_000 == 0:
            logger.info('Processed %i / %i addresses', i, len(bosa))
        lookup_key = tuple(
            value.lower() if type(value) == str else value
            for value in row[bosa_ids])
        address_dict[lookup_key] = row

    extended = perform_exact_matching(
        bosa, comparison, address_dict, comp_keys)
    try:
        extended.to_csv(args.output_file, index=False)
    except IOError as io:
        logger.fatal(io)
        sys.exit(1)
def perform_exact_matching(bosa, comparison, address_dict, comp_keys):
    """Match the addresses in the comparison file and add address_id and
    coordinates when matched.

    bosa: BOSA address DataFrame (provides the output column positions).
    comparison: DataFrame whose rows are matched against address_dict.
    address_dict: {key tuple -> BOSA row array} built by compare_addresses().
    comp_keys: comparison column names used to build each row's key.

    Returns a DataFrame of all comparison rows; matched rows gain an
    address_id and WGS84 (EPSG:4326) coordinates.
    """
    # column positions within the BOSA rows stored in address_dict:
    addr_id = bosa.columns.get_loc('address_id')
    lon_id = bosa.columns.get_loc('EPSG:4326_lon')
    lat_id = bosa.columns.get_loc('EPSG:4326_lat')
    extended = []
    logger.info('Performing matching')
    for i, row in comparison.iterrows():
        if i % 50_000 == 0:
            logger.info('Matched %i / %i addresses', i, len(comparison))
        try:
            # lowercase string fields for case-insensitive matching,
            # mirroring how the address_dict keys were built
            # (isinstance() instead of type() == str, per PEP 8)
            key = tuple(el.lower() if isinstance(el, str) else el
                        for el in row[comp_keys])
        except KeyError as ke:
            logger.error('Column %s not found in the comparison file', ke)
            sys.exit(1)
        if key in address_dict:
            # If the address is matched add address_id and coordinates to it
            data = address_dict[key]
            row['address_id'] = data[addr_id]
            row['EPSG:4326_lon'] = data[lon_id]
            row['EPSG:4326_lat'] = data[lat_id]
        extended.append(row)
    extended = pd.DataFrame(extended)
    # Convert column to nullable int type that can handle NaN (unmatched rows)
    extended['address_id'] = extended['address_id'].astype('Int64')
    return extended
if __name__ == "__main__":
    # Setup argument parser
    parser = argparse.ArgumentParser(
        description='Compare addresses between two csv files.')
    parser.add_argument(
        'input_file_1', help='BOSA address file, in csv format')
    parser.add_argument(
        'input_file_2', help='Address file to compare to BOSA address file, in csv format')
    parser.add_argument('output_file', help='Name of file to write output to')
    parser.add_argument('--mode', default='exact',
                        choices=['exact'], help='How to compare the addresses.')
    parser.add_argument(
        '--mapping', default={}, type=json.loads, help='Column names to consider in the comparison and how they map to the \
            column names of the BOSA address file. (as a json dict of {comparison_key: bosa_key})')
    parser.add_argument('--log_name', default="compare.log",
                        help='name of the log file')
    parser.add_argument('--verbose', action="store_true",
                        help="toggle verbose output", default=False)
    args = parser.parse_args()
    # Configure logging before running the comparison; logger is read as a
    # module-level global by compare_addresses/perform_exact_matching
    logger = get_best_logger(args.log_name, args.verbose)
    compare_addresses(args)
| 36.060606 | 124 | 0.640336 | import pandas as pd
import argparse
import logging
import sys
import json
def get_best_logger(log_file, verbose):
    """Return a logger writing to both log_file and the console."""
    logger = logging.getLogger(__name__)
    # DEBUG level only when requested; otherwise the root default applies
    if verbose:
        logger.setLevel('DEBUG')
    # one handler for the file, one for the console
    file_handler = logging.FileHandler(log_file)
    stream_handler = logging.StreamHandler()
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s : %(levelname)s - %(message)s')
    file_handler.setFormatter(formatter)
    stream_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    logger.addHandler(stream_handler)
    return logger
def compare_addresses(args):
    """Compare the addresses of two CSV files given on the command line."""
    logger.info('Started reading BOSA address file')
    try:
        bosa = pd.read_csv(args.input_file_1)
        logger.info('Read the BOSA address file')
    except IOError as io:
        logger.fatal(io)
        sys.exit(1)
    logger.info('Started reading comparison file')
    try:
        comparison = pd.read_csv(args.input_file_2)
        logger.info('Read the comparison file')
    except IOError as io:
        logger.fatal(io)
        sys.exit(1)
    # translate {comparison_key: bosa_key} mapping into comparison column
    # names and BOSA column positions:
    comp_keys = []
    bosa_ids = []
    for comp_key, bosa_key in args.mapping.items():
        try:
            comp_keys.append(comp_key)
            bosa_ids.append(bosa.columns.get_loc(bosa_key))
        except KeyError as ke:
            logger.error(
                'Column %s of column mapping (%s -> %s) not found in BOSA file', ke, comp_key, bosa_key)
            sys.exit(1)
    address_dict = {}
    logger.info('Building data structure to perform matching')
    for i, row in enumerate(bosa.values):
        if i % 50_000 == 0:
            logger.info('Processed %i / %i addresses', i, len(bosa))
        # key: tuple of mapped columns, lowercased for case-insensitive match
        address_dict[tuple(el.lower() if type(
            el) == str else el for el in row[bosa_ids])] = row
    extended = perform_exact_matching(
        bosa, comparison, address_dict, comp_keys)
    try:
        extended.to_csv(args.output_file, index=False)
    except IOError as io:
        logger.fatal(io)
        sys.exit(1)
def perform_exact_matching(bosa, comparison, address_dict, comp_keys):
    """Match comparison rows against address_dict; matched rows gain an
    address_id and EPSG:4326 coordinates."""
    # column positions within the BOSA rows stored in address_dict:
    addr_id = bosa.columns.get_loc('address_id')
    lon_id = bosa.columns.get_loc('EPSG:4326_lon')
    lat_id = bosa.columns.get_loc('EPSG:4326_lat')
    extended = []
    logger.info('Performing matching')
    for i, row in comparison.iterrows():
        if i % 50_000 == 0:
            logger.info('Matched %i / %i addresses', i, len(comparison))
        try:
            # same key construction (lowercased strings) as the lookup dict
            key = tuple(el.lower() if type(el) ==
                        str else el for el in row[comp_keys])
        except KeyError as ke:
            logger.error('Column %s not found in the comparison file', ke)
            sys.exit(1)
        if key in address_dict:
            # matched: copy id and coordinates from the BOSA row
            data = address_dict[key]
            row['address_id'] = data[addr_id]
            row['EPSG:4326_lon'] = data[lon_id]
            row['EPSG:4326_lat'] = data[lat_id]
        extended.append(row)
    extended = pd.DataFrame(extended)
    # nullable Int64 so unmatched rows can hold NaN
    extended['address_id'] = extended['address_id'].astype('Int64')
    return extended
if __name__ == "__main__":
    # Command-line interface: two input CSVs, one output CSV, plus options
    parser = argparse.ArgumentParser(
        description='Compare addresses between two csv files.')
    parser.add_argument(
        'input_file_1', help='BOSA address file, in csv format')
    parser.add_argument(
        'input_file_2', help='Address file to compare to BOSA address file, in csv format')
    parser.add_argument('output_file', help='Name of file to write output to')
    parser.add_argument('--mode', default='exact',
                        choices=['exact'], help='How to compare the addresses.')
    parser.add_argument(
        '--mapping', default={}, type=json.loads, help='Column names to consider in the comparison and how they map to the \
            column names of the BOSA address file. (as a json dict of {comparison_key: bosa_key})')
    parser.add_argument('--log_name', default="compare.log",
                        help='name of the log file')
    parser.add_argument('--verbose', action="store_true",
                        help="toggle verbose output", default=False)
    args = parser.parse_args()
    # logger is read as a module-level global by the functions above
    logger = get_best_logger(args.log_name, args.verbose)
    compare_addresses(args)
| true | true |
f71a6f98576f957a645a7ce60612e5c8ac44efe1 | 3,987 | py | Python | islykill2/parser.py | sindrig/islykill2 | 2ad9e0d249637d7bb03a3535f4e054f3570427b2 | [
"MIT"
] | 1 | 2019-08-24T23:59:32.000Z | 2019-08-24T23:59:32.000Z | islykill2/parser.py | sindrig/islykill2 | 2ad9e0d249637d7bb03a3535f4e054f3570427b2 | [
"MIT"
] | null | null | null | islykill2/parser.py | sindrig/islykill2 | 2ad9e0d249637d7bb03a3535f4e054f3570427b2 | [
"MIT"
] | 1 | 2021-06-25T11:15:23.000Z | 2021-06-25T11:15:23.000Z | import os
import traceback
import base64
import datetime
import logging
from xml.etree.ElementTree import XML
from signxml import xmldsig
__all__ = ['AuthenticationError', 'parse_saml']
def decode_response(resp):
    """Base64-decode a SAML response string into raw bytes."""
    raw = resp.encode('utf8')
    return base64.b64decode(raw)
# Getters
def get_xmldoc(xmlstring):
    """Parse an XML document string and return its root element."""
    root = XML(xmlstring)
    return root
def get_assertion(doc):
    """Return the SAML 2.0 Assertion element of doc, or None if absent."""
    tag = '{urn:oasis:names:tc:SAML:2.0:assertion}Assertion'
    return doc.find(tag)
def get_assertion_attributes(assertion):
    """Return a {Name: value} dict of the assertion's AttributeStatement.

    Each Attribute child contributes one entry, mapping its 'Name' XML
    attribute to the text of its AttributeValue child.
    """
    ns = '{urn:oasis:names:tc:SAML:2.0:assertion}'
    attributes = {}
    # Iterate the element directly: Element.getchildren() was deprecated and
    # removed in Python 3.9
    for attr in assertion.find('{}AttributeStatement'.format(ns)):
        val = attr.find('{}AttributeValue'.format(ns))
        attributes[attr.attrib['Name']] = val.text
    return attributes
def get_conditions(assertion):
    """Return the Conditions element of a SAML 2.0 assertion."""
    tag = '{urn:oasis:names:tc:SAML:2.0:assertion}Conditions'
    return assertion.find(tag)
def strptime(dtstr):
    """Parse an ISO-like UTC timestamp, e.g. '2014-01-18T11:10:44.9568516Z'.

    Fractional seconds and a trailing 'Z' are discarded. (The original
    implementation only split on '.', so a timestamp without fractional
    seconds kept its trailing 'Z' and failed to parse.)
    """
    dtstr = dtstr.rstrip('Z').split('.')[0]
    return datetime.datetime.strptime(dtstr, '%Y-%m-%dT%H:%M:%S')
# Verifications
def verify_ip(reported_ip, client_ip):
    """Return True if the IP reported in the SAML response matches the client's."""
    logger = logging.getLogger('islykill')
    logger.debug('Reported ip "%s" - client_ip "%s"', reported_ip, client_ip)
    match = reported_ip == client_ip
    return match
def verify_date_is_after(reported_date, current_date):
    """Return True if current_date falls strictly after reported_date
    (SAML 'NotBefore' check)."""
    return current_date > reported_date
def verify_date_is_before(reported_date, current_date):
    """Return True if current_date falls strictly before reported_date
    (SAML 'NotOnOrAfter' check)."""
    return current_date < reported_date
# Helper methods for import
class AuthenticationError(Exception):
    """Raised when a SAML response fails one of the verification checks."""
class SAMLResponse(object):
    """Thin result wrapper holding the authenticated user's SSN (kennitala)."""

    def __init__(self, kt):
        # kt: the national ID extracted from the assertion's UserSSN attribute
        self.kt = kt
def parse_saml(saml, ip, disable_checks=[], decode=True):
    """Verify and parse a signed SAML response.

    saml: the signed SAML XML response, base64-encoded unless decode=False.
    ip: client IP address, checked against the assertion's IPAddress.
    disable_checks: names of checks whose failure should be tolerated.
    decode: whether to base64-decode saml first.

    Returns a SAMLResponse holding the authenticated user's SSN (kennitala).
    Raises AuthenticationError if signature verification, the IP check, or
    either validity-window check fails (unless that check is disabled).
    """
    logger = logging.getLogger('islykill')
    logger.debug('Starting SAML authentication process')
    logger.debug(saml)
    try:
        logger.debug(saml.__class__)
        if decode:
            dec_resp = decode_response(saml)
        else:
            dec_resp = saml
        logger.debug(dec_resp.__class__)
        # verify the XML signature against the bundled CA chain:
        ca_pem_loc = os.path.dirname(os.path.abspath(__file__))
        ca_pem_file = os.path.join(ca_pem_loc, 'Oll_kedjan.pem')
        logger.debug('Using ca_pem_file: %s' % ca_pem_file)
        xmldsig(dec_resp).verify(ca_pem_file=ca_pem_file)
        logger.debug('verify OK')
        xml = get_xmldoc(dec_resp)
        assertion = get_assertion(xml)
        attributes = get_assertion_attributes(assertion)
        conditions = get_conditions(assertion)
        logger.debug('all XML fetched...')
        now = datetime.datetime.now()
        if not verify_ip(attributes['IPAddress'], ip):
            checkError('verify_ip failed', disable_checks)
        if not verify_date_is_after(
                strptime(conditions.attrib['NotBefore']), now):
            checkError('verify_date_is_after failed', disable_checks)
        if not verify_date_is_before(
                strptime(conditions.attrib['NotOnOrAfter']), now):
            checkError('verify_date_is_before', disable_checks)
            # only reached when the check is disabled; log the details:
            logger.warning(
                'NotOnOrAfter: %s',
                conditions.attrib['NotOnOrAfter'])
            logger.warning(
                'Parsed date: %s',
                strptime(conditions.attrib['NotOnOrAfter']))
            logger.warning(
                'Current date: %s',
                now)
        kt = attributes['UserSSN']
        logger.debug('authenticated successfully: %s', kt)
        return SAMLResponse(kt)
    except AuthenticationError as e:
        # Python 3 exceptions have no .message attribute (the original
        # 'e.message' raised AttributeError here); log str(e) instead
        logger.error('AuthenticationError: %s', e)
        raise
    except Exception:
        # top-level boundary: log, notify the admins, then report the failure
        logger.error('Unknown error occurred:')
        logger.error(traceback.format_exc())
        # imported lazily so this module doesn't hard-depend on Django:
        from django.core.mail import mail_admins
        mail_admins('SAML authentication error', traceback.format_exc())
        checkError('Unknown error', disable_checks)
def checkError(name, disable_checks=[]):
    """Raise AuthenticationError(name), unless that check name is disabled."""
    if name in disable_checks:
        return
    raise AuthenticationError(name)
| 28.683453 | 79 | 0.658641 | import os
import traceback
import base64
import datetime
import logging
from xml.etree.ElementTree import XML
from signxml import xmldsig
__all__ = ['AuthenticationError', 'parse_saml']
def decode_response(resp):
    """Base64-decode a SAML response string into bytes."""
    return base64.b64decode(resp.encode('utf8'))
def get_xmldoc(xmlstring):
    """Parse an XML string and return the root element."""
    return XML(xmlstring)
def get_assertion(doc):
    """Return the SAML 2.0 Assertion element of doc (None if absent)."""
    return doc.find('{urn:oasis:names:tc:SAML:2.0:assertion}Assertion')
def get_assertion_attributes(assertion):
    """Return a {Name: text} dict of the assertion's AttributeStatement."""
    ns = '{urn:oasis:names:tc:SAML:2.0:assertion}'
    attributes = {}
    # NOTE(review): Element.getchildren() was removed in Python 3.9; iterating
    # the element directly is the portable equivalent
    for attr in assertion.find(
            '{}AttributeStatement'.format(ns)).getchildren():
        val = attr.find('{}AttributeValue'.format(ns))
        attributes[attr.attrib['Name']] = val.text
    return attributes
def get_conditions(assertion):
    """Return the Conditions element of a SAML 2.0 assertion."""
    ns = '{urn:oasis:names:tc:SAML:2.0:assertion}'
    return assertion.find('{}Conditions'.format(ns))
def strptime(dtstr):
    """Parse e.g. '2014-01-18T11:10:44.9568516Z'; everything after the '.'
    (fraction and trailing 'Z') is discarded before parsing."""
    return datetime.datetime.strptime(dtstr.split('.')[0], '%Y-%m-%dT%H:%M:%S')
def verify_ip(reported_ip, client_ip):
    """Return True if the IP reported in the SAML response matches the client's."""
    logger = logging.getLogger('islykill')
    logger.debug('Reported ip "%s" - client_ip "%s"',
                 reported_ip, client_ip)
    return reported_ip == client_ip
def verify_date_is_after(reported_date, current_date):
    """Return True if current_date is after reported_date ('NotBefore' check)."""
    return reported_date < current_date
def verify_date_is_before(reported_date, current_date):
    """Return True if current_date is before reported_date ('NotOnOrAfter' check)."""
    return reported_date > current_date
class AuthenticationError(Exception):
    """Raised when a SAML response fails a verification check."""
    pass
class SAMLResponse(object):
    """Result wrapper holding the authenticated user's SSN (kennitala)."""
    def __init__(self, kt):
        # kt: national ID extracted from the assertion's UserSSN attribute
        self.kt = kt
def parse_saml(saml, ip, disable_checks=[], decode=True):
    """Verify and parse a signed SAML response.

    saml: signed SAML XML, base64-encoded unless decode=False.
    ip: client IP, checked against the assertion's IPAddress attribute.
    disable_checks: names of checks whose failure should be tolerated.

    Returns a SAMLResponse holding the authenticated SSN; raises
    AuthenticationError when a non-disabled check fails.
    """
    logger = logging.getLogger('islykill')
    logger.debug('Starting SAML authentication process')
    logger.debug(saml)
    try:
        logger.debug(saml.__class__)
        if decode:
            dec_resp = decode_response(saml)
        else:
            dec_resp = saml
        logger.debug(dec_resp.__class__)
        # verify the XML signature against the bundled CA chain:
        ca_pem_loc = os.path.dirname(os.path.abspath(__file__))
        ca_pem_file = os.path.join(ca_pem_loc, 'Oll_kedjan.pem')
        logger.debug('Using ca_pem_file: %s' % ca_pem_file)
        xmldsig(dec_resp).verify(ca_pem_file=ca_pem_file)
        logger.debug('verify OK')
        xml = get_xmldoc(dec_resp)
        assertion = get_assertion(xml)
        attributes = get_assertion_attributes(assertion)
        conditions = get_conditions(assertion)
        logger.debug('all XML fetched...')
        now = datetime.datetime.now()
        if not verify_ip(attributes['IPAddress'], ip):
            checkError('verify_ip failed', disable_checks)
        if not verify_date_is_after(
                strptime(conditions.attrib['NotBefore']), now):
            checkError('verify_date_is_after failed', disable_checks)
        if not verify_date_is_before(
                strptime(conditions.attrib['NotOnOrAfter']), now):
            checkError('verify_date_is_before', disable_checks)
            # only reached when the check is disabled; log the details:
            logger.warning(
                'NotOnOrAfter: %s',
                conditions.attrib['NotOnOrAfter'])
            logger.warning(
                'Parsed date: %s',
                strptime(conditions.attrib['NotOnOrAfter']))
            logger.warning(
                'Current date: %s',
                now)
        kt = attributes['UserSSN']
        logger.debug('authenticated successfully: %s', kt)
        return SAMLResponse(kt)
    except AuthenticationError as e:
        # NOTE(review): e.message is Python 2 only; under Python 3 this line
        # raises AttributeError — should log str(e) instead
        logger.error('AuthenticationError: %s', e.message)
        raise e
    except Exception:
        logger.error('Unknown error occurred:')
        logger.error(traceback.format_exc())
        # imported lazily so this module doesn't hard-depend on Django:
        from django.core.mail import mail_admins
        mail_admins('SAML authentication error', traceback.format_exc())
        checkError('Unknown error', disable_checks)
def checkError(name, disable_checks=[]):
    """Raise AuthenticationError(name) unless the check is in disable_checks."""
    if name not in disable_checks:
        raise AuthenticationError(name)
| true | true |
f71a703f2090876a8e79cf5a51d2bb5e3344842c | 153,793 | py | Python | spyke/sort.py | spyke/spyke | 20934521de9c557924911cf6190690ac1c6f8e80 | [
"CNRI-Python"
] | 22 | 2015-06-01T03:31:00.000Z | 2022-03-18T09:12:28.000Z | spyke/sort.py | spyke/spyke | 20934521de9c557924911cf6190690ac1c6f8e80 | [
"CNRI-Python"
] | 3 | 2017-03-24T19:16:02.000Z | 2021-01-27T14:34:30.000Z | spyke/sort.py | spyke/spyke | 20934521de9c557924911cf6190690ac1c6f8e80 | [
"CNRI-Python"
] | 6 | 2015-07-10T15:28:08.000Z | 2022-03-17T19:30:45.000Z | """Spike sorting classes and window"""
from __future__ import division
from __future__ import print_function
__authors__ = ['Martin Spacek', 'Reza Lotun']
import os
import sys
import time
import datetime
from copy import copy
import operator
import random
import shutil
import hashlib
import multiprocessing as mp
from PyQt4 import QtCore, QtGui
from PyQt4.QtCore import Qt
from PyQt4.QtGui import QAction, QIcon, QApplication
import numpy as np
import scipy
import scipy.signal
#from scipy.cluster.hierarchy import fclusterdata
import pylab as pl
import pyximport
pyximport.install(build_in_temp=False, inplace=True)
from . import util # .pyx file
from . import core
from .core import (WaveForm, Gaussian, MAXLONGLONG, R, toiter, intround, printflush, lstrip,
rstrip, lrstrip, pad, td2days, SpykeToolWindow, NList, NSList, dist,
USList, ClusterChange, SpikeSelectionSlider, lrrep2Darrstripis, rollwin2D)
from .detect import DEBUG
from .surf import EPOCH
from .plot import SpikeSortPanel, CLUSTERCOLOURDICT, WHITE
from .__version__ import __version__
#MAXCHANTOLERANCE = 100 # um
NSLISTWIDTH = 70 # minimize nslist width, enough for 7 digit spike IDs
PANELWIDTHPERCOLUMN = 120 # sort panel width per column of channels
PANELHEIGHTPERROW = 50 # sort panel height per row of channels
VSCROLLBARWIDTH = 14 # hack
SORTWINDOWHEIGHT = 1035 # TODO: this should be set programmatically
MINSORTWINDOWWIDTH = 566
MEANWAVEMAXSAMPLES = 2000
NPCSPERCHAN = 7
PCALIB = 'mdp'
ICALIB = 'sklearn'
DEFMINISI = 50 # default minimum ISI to check for on export, us
MAXGROUPISI = 100000 # us (100 ms)
MAXGROUPDT = 100000000 # us (100 s)
class Sort(object):
"""A spike sorting session, in which you can detect spikes and sort them into Neurons.
A .sort file is a single Python2-pickled Sort object. A .json file is a
jsonpickle-pickled Sort object"""
    def __init__(self, detector=None, stream=None, tw=None):
        """Initialize a new spike sorting session.

        detector: Detector object used for spike detection.
        stream: Stream (or MultiStream) of raw waveform data.
                NOTE(review): a non-None stream appears to be required despite
                the None default — stream.probe is read unconditionally below.
        tw: time window (us) relative to spike time.
        """
        self.__version__ = __version__
        self.fname = ''
        self.user = ''
        self.notes = ''
        self.detector = detector # this Sort's current Detector object
        self.tw = tw # time window (us) relative to spike time
        self.stream = stream # property setter; also computes twts/twi
        self.probe = stream.probe # only one probe design per sort allowed
        self.converter = stream.converter # AD <-> voltage conversion
        self.neurons = {} # nid -> Neuron
        self.clusters = {} # neurons with multidm params scaled for plotting
        self.norder = [] # stores order of neuron ids display in nlist
        self.npcsperchan = NPCSPERCHAN # PCs to keep per chan for clustering
def get_nextnid(self):
"""nextnid is used to retrieve the next unique single unit ID"""
nids = list(self.neurons)
if len(nids) == 0:
return 1 # single unit nids start at 1
else:
return max(max(nids) + 1, 1) # at least 1
nextnid = property(get_nextnid)
def get_nextmuid(self):
"""nextmuid is used to retrieve the next unique multiunit ID"""
nids = list(self.neurons)
if len(nids) == 0:
return -1 # multiunit ids start at -1
else:
return min(min(nids) - 1, -1) # at most -1
nextmuid = property(get_nextmuid)
def get_good(self):
"""Return array of nids marked by user as 'good'"""
good = []
for neuron in self.neurons.values():
try:
if neuron.good:
good.append(neuron.id)
except AttributeError: # neuron is from older sort, no .good attrib
neuron.good = False
return np.asarray(good)
def set_good(self, good):
"""Set good flag to True for nids in good, False otherwise"""
nids = list(self.neurons)
assert np.all([ nid in nids for nid in good ]) # make sure all nids in good exist
notgood = np.setdiff1d(nids, good)
for nid in notgood:
neuron = self.neurons[nid]
neuron.good = False
for nid in good:
neuron = self.neurons[nid]
neuron.good = True
good = property(get_good, set_good)
def get_stream(self):
try:
return self._stream
except AttributeError:
# this is likely a brand new sort, has yet to be assigned a Stream
return None
def set_stream(self, stream=None):
"""Check stream type and name and probe type, and restore filtmeth, car, sampfreq and
shcorrect to stream when binding/modifying stream to self"""
oldstream = self.stream
if stream != None and oldstream != None:
# do stream types match?
if type(stream) != type(oldstream):
raise ValueError("Stream types don't match: %s, %s"
% (type(oldstream), type(stream)))
# do stream probe types match?
if type(stream.probe) != type(oldstream.probe):
raise ValueError("Stream probe types don't match: %s, %s"
% (type(oldstream.probe), type(stream.probe)))
# is one stream fname a superset of the other?
if (stream.fname not in oldstream.fname) and (oldstream.fname not in stream.fname):
raise ValueError("Stream file names are not supersets of each other: %s, %s"
% (oldstream.fname, stream.fname))
else:
print('Stream file names are similar enough to proceed: %s, %s'
% (stream.fname, oldstream.fname))
try:
stream.filtmeth = self.filtmeth
stream.car = self.car
stream.sampfreq = self.sampfreq
stream.shcorrect = self.shcorrect
except AttributeError:
pass # one of the above aren't bound
self._stream = stream # set it
print('Bound stream %r to sort %r' % (stream.fname, self.fname))
# now that tres is known, calculate window timepoints wrt spike time:
self.calc_twts_twi()
stream = property(get_stream, set_stream)
def calc_twts_twi(self):
"""Calculate temporal window timepoints wrt spike time, and the indices of these
timepoints wrt spike time"""
tres = self.tres
tw = self.tw
twts = np.arange(tw[0], tw[1], tres)
twts += twts[0] % tres # get rid of mod, so twts go through zero
self.twts = twts
self.twi = intround(twts[0] / tres), intround(twts[-1] / tres)
#info('twi = %s' % (self.twi,))
def update_tw(self, tw):
"""Update tw and everything that depends on it. Note that this shouldn't
be called directly by the user. Call SpykeWindow.update_spiketw() instead"""
oldtw = self.tw
self.tw = tw
self.calc_twts_twi()
dtw = np.asarray(tw) - np.asarray(oldtw) # new minus old
self.spikes['t0'] += dtw[0]
self.spikes['t1'] += dtw[1]
self.spikes['tis'] = self.spikes['tis'] - intround(dtw[0] / self.tres)
# recalculate any existing templates:
for neuron in self.neurons.values():
if neuron.wave.data != None:
neuron.update_wave()
print('WARNING: all spike waveforms need to be reloaded!')
def get_tres(self):
return self.stream.tres
tres = property(get_tres)
def __getstate__(self):
"""Get object state for pickling"""
# copy it cuz we'll be making changes, this is fast because it's just a shallow copy
d = self.__dict__.copy()
# Spikes and wavedata arrays are (potentially) saved separately.
# usids and PCs/ICs can be regenerated from the spikes array.
for attr in ['spikes', 'wavedata', 'usids', 'X', 'Xhash']:
# keep _stream during normal pickling for multiprocessing, but remove it
# manually when pickling to sort file
try: del d[attr]
except KeyError: pass
return d
def get_nspikes(self):
try: return len(self.spikes)
except AttributeError: return 0
nspikes = property(get_nspikes)
def update_usids(self):
"""Update usids, which is an array of indices of unsorted spikes"""
nids = self.spikes['nid']
self.usids, = np.where(nids == 0) # 0 means unclustered
def get_spikes_sortedby(self, attr='id'):
"""Return array of all spikes, sorted by attribute 'attr'"""
vals = self.spikes[attr]
spikes = self.spikes[vals.argsort()]
return spikes
def get_wave(self, sid):
"""Return WaveForm corresponding to spike sid"""
spikes = self.spikes
nchans = spikes['nchans'][sid]
chans = spikes['chans'][sid, :nchans]
t0 = spikes['t0'][sid]
t1 = spikes['t1'][sid]
wavedata = self.wavedata[sid, 0:nchans]
ts = np.arange(t0, t1, self.tres) # build them up
return WaveForm(data=wavedata, ts=ts, chans=chans, tres=self.tres)
def get_maxchan_wavedata(self, sid=None, nid=None):
"""Return wavedata of maxchan of spike sid or neuron nid"""
if sid != None:
assert nid == None
chani = self.spikes['chani'][sid]
return self.wavedata[sid, chani]
elif nid != None:
assert sid == None
neuron = self.neurons[nid]
chani, = np.where(neuron.chans == neuron.chan)
assert len(chani) == 1
chani = chani[0] # pull out of length 1 array
return neuron.wave.data[chani]
    def get_mean_wave(self, sids, nid=None):
        """Return the mean and std waveform of spike waveforms in sids.

        If len(sids) exceeds MEANWAVEMAXSAMPLES, only an evenly-strided
        subsample is used. Only channels that at least half the (sampled)
        spikes contributed to are kept in the returned WaveForm.
        nid is used only to label the subsampling message.
        """
        spikes = self.spikes
        nsids = len(sids)
        if nsids > MEANWAVEMAXSAMPLES:
            step = nsids // MEANWAVEMAXSAMPLES + 1
            s = ("get_mean_wave() sampling every %d spikes instead of all %d"
                 % (step, nsids))
            if nid != None:
                s = "neuron %d: " % nid + s
            print(s)
            sids = sids[::step]
            nsids = len(sids) # update
        chanss = spikes['chans'][sids]
        nchanss = spikes['nchans'][sids]
        chanslist = [ chans[:nchans] for chans, nchans in zip(chanss, nchanss) ] # list of arrays
        chanpopulation = np.concatenate(chanslist)
        groupchans = np.unique(chanpopulation) # comes out sorted
        wavedata = self.wavedata[sids]
        if wavedata.ndim == 2: # should be 3, get only 2 if nsids == 1
            wavedata.shape = 1, wavedata.shape[0], wavedata.shape[1] # give it a singleton 3rd dim
        nt = wavedata.shape[-1]
        maxnchans = len(groupchans)
        data = np.zeros((maxnchans, nt))
        # all spikes have same nt, but not necessarily same nchans, keep track of
        # how many spikes contributed to each of the group's chans
        nspikes = np.zeros((maxnchans, 1), dtype=int)
        for chans, wd in zip(chanslist, wavedata):
            chanis = groupchans.searchsorted(chans) # each spike's chans is a subset of groupchans
            data[chanis] += wd[:len(chans)] # accumulate
            nspikes[chanis] += 1 # inc spike count for this spike's chans
        #t0 = time.time()
        data /= nspikes # normalize all data points appropriately, this is now the mean
        # second pass: accumulate squared deviations from the mean per chan
        var = np.zeros((maxnchans, nt))
        for chans, wd in zip(chanslist, wavedata):
            chanis = groupchans.searchsorted(chans) # each spike's chans is a subset of groupchans
            var[chanis] += (wd[:len(chans)] - data[chanis]) ** 2 # accumulate 2nd moment
        var /= nspikes # normalize all data points appropriately, this is now the variance
        std = np.sqrt(var)
        # keep only those chans that at least 1/2 the spikes contributed to
        bins = list(groupchans) + [np.inf] # concatenate rightmost bin edge
        hist, bins = np.histogram(chanpopulation, bins=bins)
        chans = groupchans[hist >= nsids/2]
        chanis = groupchans.searchsorted(chans)
        data = data[chanis]
        std = std[chanis]
        return WaveForm(data=data, std=std, chans=chans)
def check_ISIs(self, nids='good'):
"""Check that interspike intervals of spikes in each nid never fall below DEFMINISI"""
print('Checking inter-spike intervals')
if nids == 'good':
nids = self.good
elif nids == 'all':
nids = sorted(self.neurons)
for nid in nids:
neuron = self.neurons[nid]
spikets = self.spikes['t'][neuron.sids] # should be a sorted copy
assert spikets.flags['OWNDATA'] # safe to modify in place
spikets.sort() # just in case it isn't perfectly sorted
ndupl = (np.diff(spikets) < DEFMINISI).sum()
if ndupl > 0:
msg = ('n%d has %d duplicate spikes (given DEFMINISI=%d us).\n'
'Remove duplicate spikes with the ISI tool in the Verify tab'
% (nid, ndupl, DEFMINISI))
raise RuntimeError(msg)
    def check_wavealign(self, nids='good', maxdti=1):
        """Check that each neurons's primary peak on the max chan is no more than +/- maxdti
        timepoints away from the t=0 alignment timepoint.

        nids: 'good', 'all', or an explicit sequence of nids to check.
        Raises RuntimeError for the first misaligned neuron found.
        """
        print('Checking neuron mean waveform alignment')
        if nids == 'good':
            nids = self.good
        elif nids == 'all':
            nids = sorted(self.neurons)
        nt = self.twi[1] - self.twi[0] + 1 # expected number of points of each chan's wavedata
        for nid in nids:
            neuron = self.neurons[nid]
            wd = self.get_maxchan_wavedata(nid=nid)
            assert len(wd) == nt
            # find biggest positive and negative peaks, check which comes first, ensure
            # the primary peak is within maxdti of t=0 alignment timepoint:
            ppeakis, _ = scipy.signal.find_peaks(wd) # positive peak indices
            npeakis, _ = scipy.signal.find_peaks(-wd) # negative peak indices
            pmaxi = ppeakis[wd[ppeakis].argmax()] # max positive peak index
            nmaxi = npeakis[wd[npeakis].argmin()] # max negative peak index
            if nmaxi < pmaxi: # usual case: -ve then +ve peak
                peak1i = nmaxi
            else: # less common: +ve then -ve peak, make sure +ve peak is worthy of alignment
                pmax, nmax = wd[pmaxi], wd[nmaxi]
                if pmax > abs(nmax): # +ve peak is bigger than -ve peak, align to +ve peak
                    peak1i = pmaxi
                else:
                    peak1i = nmaxi # default to -ve peak
            alignti = 0 - self.twi[0] # +ve; index of the t=0 timepoint
            dti = peak1i - alignti
            #print("n%d: dti=%d" % (nid, dti))
            if abs(dti) > maxdti:
                peak1uV = self.converter.AD2uV(wd[peak1i])
                peak1us = intround(self.tres*(peak1i-alignti))
                msg = ('Primary peak (%+d uV @ t=%d us) of n%d is %+d timepoints away from '
                       'the t=0 us alignment point. Shift it closer and try again'
                       % (peak1uV, peak1us, nid, dti))
                raise RuntimeError(msg)
    def check_wavepadding(self, nids='good', npad=2):
        """Check if any spikes are edge padded, presumably due to being shifted but not
        reloaded. For robustness, check for consistent signs of padding across all channels.
        An edge is considered padded if it does not change over npad datapoints.

        nids: 'good', 'all', or an explicit sequence of nids to check.
        Raises RuntimeError for the first padded-looking spike found.
        """
        print('Checking spike waveform padding')
        assert npad >= 2 # need at least 2 points to do a diff
        if nids == 'good':
            nids = self.good
        elif nids == 'all':
            nids = sorted(self.neurons)
        for nid in nids:
            neuron = self.neurons[nid]
            for sid in neuron.sids:
                wd = self.wavedata[sid] # multichannel waveform data
                # are left and right edges of wavedata identical for npad number of points?
                l, r = wd[:, :npad], wd[:, -npad:] # shape (nchans, npad)
                leftpadded = (np.diff(l, axis=1) == 0).all()
                rightpadded = (np.diff(r, axis=1) == 0).all()
                # handle case where spike is right after or right before a 0-padded
                # region of data due to gaps between experiments:
                if leftpadded:
                    if (wd[:, 0] == 0).all():
                        leftpadded = False
                if rightpadded:
                    if (wd[:, -1] == 0).all():
                        rightpadded = False
                if leftpadded or rightpadded:
                    msg = ('n%d has s%d that looks like it has been padded.\n'
                           'leftpadded, rightpadded = %r, %r\n'
                           'Reload s%d or n%d or all spikes and try again'
                           % (nid, sid, leftpadded, rightpadded, sid, nid))
                    raise RuntimeError(msg)
def check_contiguous_nids(self):
"""Check that neuron IDs are contiguous (no gaps)"""
print('Checking that neuron IDs are contiguous')
nids = np.array(list(self.neurons))
nids = nids[nids > 0] # only consider +ve nids
nids.sort()
if (np.diff(nids) != 1).any():
raise RuntimeError('Neuron IDs are not contiguous, renumber all and try again')
    def exportptcsfiles(self, basepath, sortpath, user='', notes=''):
        """Export spike data to binary .ptcs files under basepath, one file per recording.

        Runs the full set of sanity checks first; any failure raises before
        anything is written.
        """
        # First check to make sure various things are OK before exporting:
        self.check_ISIs()
        self.check_wavealign()
        self.check_wavepadding()
        self.check_contiguous_nids()
        spikes = self.spikes # NOTE(review): this local is unused here
        exportdt = str(datetime.datetime.now()) # get an export datetime stamp
        exportdt = exportdt.split('.')[0] # ditch the us
        if self.stream.is_multi(): # self.stream is a MultiStream
            streams = self.stream.streams
        else: # self.stream is a single Stream
            streams = [self.stream]
        print('Exporting "good" clusters to:')
        # do a separate export for each recording:
        # absolute start and stop times of all streams, rounded to nearest raw timepoint:
        tranges = self.stream.tranges
        t0 = tranges[0, 0] # absolute start time of first stream
        for stream, trange in zip(streams, tranges):
            abst0 = trange[0] # absolute start time of this stream relative to t0
            # time delta between this stream and first stream, to nearest raw timepoint, us:
            dt = abst0 - t0
            dt = intround(dt) # to nearest int us
            self.exportptcsfile(stream, basepath, dt, exportdt, sortpath,
                                user=user, notes=notes)
    def exportptcsfile(self, stream, basepath, dt, exportdt, sortpath, user='', notes=''):
        """Export spike data of all "good" spikes to binary .ptcs file in basepath.
        Constrain to spikes in stream, and undo any time delta in spike times.
        dt is the integer time difference between start of stream and start of first stream in
        the track, rounded to the nearest us (spike times are stored as int64 us in .ptcs)"""
        # build up list of PTCSNeuronRecords that have spikes in this stream,
        # and tally their spikes
        nsamplebytes = 4 # float32
        nrecs = []
        nspikes = 0
        # only export neurons marked as "good", could be single or multi unit:
        for nid in sorted(self.good):
            neuron = self.neurons[nid]
            spikets = self.spikes['t'][neuron.sids] # should be a sorted copy
            assert spikets.flags['OWNDATA'] # safe to modify in place
            spikets.sort() # just in case it isn't perfectly sorted
            spikets -= dt # export spike times relative to t=0 of this recording
            # only include spikes that occurred during this recording
            lo, hi = spikets.searchsorted([stream.t0, stream.t1])
            spikets = spikets[lo:hi]
            if len(spikets) == 0:
                continue # don't save empty neurons
            nrec = PTCSNeuronRecord(neuron, spikets, nsamplebytes, descr='')
            nrecs.append(nrec)
            nspikes += len(spikets)
        nneurons = len(nrecs)
        # create the header and write everything to file:
        path = os.path.join(basepath, stream.srcfnameroot)
        try: os.mkdir(path)
        except OSError: pass # path already exists?
        fname = stream.srcfnameroot + '.ptcs'
        fullfname = os.path.join(path, fname)
        header = PTCSHeader(self, sortpath, stream, nneurons, nspikes, nsamplebytes,
                            fullfname, exportdt, user=user, notes=notes)
        # header first, then one neuron record per "good" neuron with spikes:
        with open(fullfname, 'wb') as f:
            header.write(f)
            for nrec in nrecs:
                nrec.write(f)
        print(fullfname)
    def exportcsv(self, fname):
        """Export all "good" spikes to a .csv file with time (s), nid, and maxchan as the
        columns"""
        sids = []
        #chans = []
        for nid in sorted(self.good):
            neuron = self.neurons[nid]
            sids.append(neuron.sids)
            # the alternative is to export each spike's unit's channel:
            #chans.append(np.tile(neuron.chan, neuron.nspikes))
        sids = np.hstack(sids)
        spikes = self.spikes[sids]
        tsecs = spikes['t'] / 1e6 # convert from us to s
        nids = spikes['nid']
        chans = spikes['chan'] # per-spike maxchan
        #chans = np.hstack(chans)
        data = np.column_stack([tsecs, nids, chans])
        print('Exporting (tsec, nid, chan) of all spikes marked as "good" to %s' % fname)
        np.savetxt(fname, data, fmt='%.6f, %d, %d')
    def exporttschid(self, basepath):
        """Export int64 (timestamp, channel, neuron id) 3 tuples to binary file"""
        raise NotImplementedError('Needs to be redone to work with multiple streams')
        # NOTE(review): everything below is unreachable and references
        # undefined names (srffnameroot, path); kept for reference only
        spikes = self.spikes[self.spikes['nid'] > 0] # don't export unsorted/multiunit spikes
        dt = str(datetime.datetime.now()) # get an export timestamp
        dt = dt.split('.')[0] # ditch the us
        dt = dt.replace(' ', '_')
        dt = dt.replace(':', '.')
        srffnameroot = srffnameroot.replace(' ', '_')
        tschidfname = dt + '_' + srffnameroot + '.tschid'
        tschid = np.empty((len(spikes), 3), dtype=np.int64)
        tschid[:, 0] = spikes['t']
        tschid[:, 1] = spikes['chan']
        tschid[:, 2] = spikes['nid']
        tschid.tofile(os.path.join(path, tschidfname)) # save it
        print(tschidfname)
    def exportdin(self, basepath):
        """Export stimulus din(s) to binary .din file(s) in basepath.

        One subdirectory is created per stream (named after its source file), and
        one .din file is written per displayrecord (experiment) in that stream.
        Each .din file holds (TimeStamp, SVal) int64 pairs. Streams without
        stimulus records are silently skipped."""
        if self.stream.is_multi(): # self.stream is a MultiStream
            streams = self.stream.streams
        else: # self.stream is a single Stream
            streams = [self.stream]
        dinfiledtype=[('TimeStamp', '<i8'), ('SVal', '<i8')] # pairs of int64s
        print('Exporting DIN(s) to:')
        for stream in streams:
            try: # neither of these attribs should exist for recordings with no stimuli:
                svrecs = stream.srff.digitalsvalrecords
                dsprecs = stream.srff.displayrecords
            except AttributeError:
                continue # no din to export for this stream
            if len(svrecs) == 0 or stream.srff.ndigitalsvalrecords == 0:
                raise ValueError("digitalsvalrecords are empty for stream %r. Attribute "
                                 "shouldn't exist" % stream.fname)
            path = os.path.join(basepath, stream.srcfnameroot)
            try: os.mkdir(path)
            except OSError: pass # path already exists?
            # upcast SVal field from uint16 to int64, creates a copy,
            # but it's not too expensive:
            svrecs = svrecs.astype(dinfiledtype)
            # convert to normal n x 2 int64 array
            svrecs = svrecs.view(np.int64).reshape(-1, 2)
            # Some old recordings (<= ptc15) contain multiple experiments.
            # To deal with this, iterate over stream.srff.displayrecords, export one .din
            # per displayrecord. Append experiment ID to each .din filename, if necessary.
            svrects = svrecs[:, 0] # sval record timestamps
            dsprects = [ dsprec.TimeStamp for dsprec in dsprecs ] # displayrecord timestamps
            svalrecis = svrects.searchsorted(dsprects)
            assert svalrecis[0] == 0
            svalrecis = svalrecis[1:] # exclude the trivial 0 index
            # split sval records according to displayrecord timestamps:
            dins = np.split(svrecs, svalrecis)
            assert len(dins) == len(dsprecs)
            for eid, din in enumerate(dins): # eid: experiment ID
                if eid == 0 and len(dins) == 1:
                    eidstr = '' # single experiment: no ID suffix needed
                elif len(dins) < 10:
                    eidstr = '.%d' % eid
                else: # include leading zero to maintain alphabetical fname order
                    eidstr = '.%02d' % eid
                dinfname = stream.srcfnameroot + eidstr + '.din'
                fullfname = os.path.join(path, dinfname)
                din.tofile(fullfname) # save it
                print(fullfname)
def exporttextheader(self, basepath):
"""Export stimulus text header(s) to .textheader file(s) in basepath"""
if self.stream.is_multi(): # self.stream is a MultiStream
streams = self.stream.streams
else: # self.stream is a single Stream
streams = [self.stream]
print('Exporting text header(s) to:')
for stream in streams:
try:
dsprecs = stream.srff.displayrecords
except AttributeError: # no textheader to export for this stream
continue
if len(dsprecs) == 0:
raise ValueError("displayrecords are empty for stream %r. Attribute "
"shouldn't exist" % stream.fname)
path = os.path.join(basepath, stream.srcfnameroot)
try: os.mkdir(path)
except OSError: pass # path already exists?
# Some old recordings (<= ptc15) contain multiple experiments.
# To deal with this, iterate over stream.srff.displayrecords, export one
# .textheader per displayrecord. Append experiment ID to each .textheader
# filename, if necessary.
for eid, dsprec in enumerate(dsprecs):
textheader = dsprec.Header.python_tbl
if eid == 0 and len(dsprecs) == 1:
eidstr = ''
elif len(dsprecs) < 10:
eidstr = '.%d' % eid
else: # include leading zero to maintain alphabetical fname order
eidstr = '.%02d' % eid
textheaderfname = stream.srcfnameroot + eidstr + '.textheader'
fullfname = os.path.join(path, textheaderfname)
with open(fullfname, 'w') as f:
f.write(textheader) # save it
print(fullfname)
def exportall(self, basepath, sortpath):
"""Export spike data, stimulus din and textheader to basepath"""
self.exportptcsfiles(basepath, sortpath)
self.exportdin(basepath)
self.exporttextheader(basepath)
    def exportspikewaves(self, sids, selchans, tis, fname, format):
        """Export spike waveform data of selected sids, selchans and tis to binary
        .spikes.zip file or text .spikes.csv file.

        sids: spike IDs to export
        selchans: desired chans; only those common to all sids are used
        tis: (ti0, ti1) timepoint index range to slice from each waveform
        fname: output file name
        format: 'binary' (compressed .npz with metadata arrays) or 'text' (CSV of
        flattened integer waveforms); anything else raises ValueError"""
        nspikes = len(sids)
        chans, chanslist = self.get_common_chans(sids, selchans)
        nchans = len(chans)
        ti0, ti1 = tis
        nt = ti1 - ti0
        # fill in 3D data array:
        dtype = self.wavedata.dtype
        data = np.zeros((nspikes, nchans, nt), dtype=dtype)
        for sii, sid in enumerate(sids):
            spikechans = chanslist[sii]
            spikechanis = spikechans.searchsorted(chans)
            data[sii] = self.wavedata[sid][spikechanis, ti0:ti1]
        if format == 'text': # flatten timepoints of all chans into columns
            data.shape = nspikes, nchans*nt
        stream = self.stream
        assert stream.kind == 'highpass' # should be the only type ever saved to self
        if format == 'binary':
            nids = self.spikes['nid'][sids]
            spiketimes = self.spikes['t'][sids]
            chanpos = stream.probe.siteloc_arr()
            uVperAD = stream.converter.AD2uV(1) # convert 1 AD unit to uV
            with open(fname, 'wb') as f:
                np.savez_compressed(f, data=data, sids=sids, nids=nids,
                                    spiketimes=spiketimes, chans=chans, tis=tis,
                                    chanpos=chanpos, uVperAD=uVperAD)
        elif format == 'text':
            np.savetxt(fname, data, fmt='%d', delimiter=',') # data should be int
        else:
            raise ValueError('Unknown format: %r' % format)
        print('Exported %d spikes on chans=%r and tis=%r to %s'
              % (nspikes, list(chans), list(tis), fname))
    def get_param_matrix(self, kind=None, sids=None, tis=None, selchans=None, norm=False,
                         dims=None, scale=True):
        """Organize dims parameters from sids into a data matrix, each column
        corresponding to a dim. To do PCA/ICA clustering on all spikes, one maxchan at
        a time, caller needs to call this multiple times, one for each set of
        maxchan unique spikes.

        Recognized dims: any spikes array field name; 'c0'..'cN' for dimension
        reduction components of the given kind; 'RMSerror' for per-spike RMS error
        vs. the cluster mean. If scale is True, every column is zero-meaned and
        std-normalized (x0/y0 share the x0 std on multicolumn probes)."""
        spikes = self.spikes
        dtypefields = list(spikes.dtype.fields)
        if sids is None:
            sids = spikes['id'] # default to all spikes
        # component dims look like 'c0', 'c1', ...:
        comps = [ dim for dim in dims if dim.startswith('c') and dim[-1].isdigit() ]
        rmserror = np.any([ dim == 'RMSerror' for dim in dims ])
        ncomp = len(comps)
        hascomps = ncomp > 0
        if hascomps:
            X = self.get_component_matrix(kind, sids, tis=tis, chans=selchans,
                                          minncomp=ncomp, norm=norm)
        if rmserror:
            rms = self.get_rms_error(sids, tis=tis, chans=selchans)
        data = []
        for dim in dims:
            if dim in dtypefields:
                data.append( np.float32(spikes[dim][sids]) )
            elif dim.startswith('c') and dim[-1].isdigit():
                # NOTE(review): lstrip here looks like a project helper that strips a
                # prefix string (not the str method) — confirm it's imported from core
                compid = int(lstrip(dim, 'c'))
                data.append( np.float32(X[:, compid]) )
            elif dim == 'RMSerror':
                data.append( np.float32(rms) )
            else:
                raise RuntimeError('Unknown dim %r' % dim)
        # np.column_stack returns a copy, not modifying the original array
        data = np.column_stack(data)
        if scale:
            # ensure 0 mean, and unit variance/stdev
            for dim, d in zip(dims, data.T): # d iterates over columns
                d -= d.mean()
                if dim in ['x0', 'y0'] and self.probe.ncols > 1:
                    # compute x0std only once, on first use, then reuse it for both
                    # x0 and y0 columns via the NameError trick:
                    try: x0std # normalize spatial params by x0 std
                    except NameError: x0std = spikes['x0'].std()
                    if x0std != 0.0:
                        d /= x0std
                #elif dim == 't': # the longer the recording in hours, the greater the
                #    # scaling in time
                #    trange = d.max() - d.min()
                #    tscale = trange / (60*60*1e6)
                #    d *= tscale / d.std()
                else: # normalize all other dims by their std
                    dstd = d.std()
                    if dstd != 0.0:
                        d /= dstd
        return data
    def get_component_matrix(self, kind, sids, tis=None, chans=None, minncomp=None,
                             norm=False):
        """Find set of chans common to all sids, and do PCA/ICA on those waveforms. Or,
        if chans are specified, limit PCA/ICA to them. Return component matrix with at
        least minncomp dimensions.

        kind: 'PCA', 'sPCA', 'mbsPCA', 'NMF', 'tSNE' or 'ICA'
        sids: spike IDs to reduce (at least 2 required)
        tis: (ti0, ti1) timepoint index range; defaults to the full waveform
        chans: restrict to these chans; defaults to chans common to all sids
        minncomp: minimum acceptable number of output components
        norm: if True, normalize each spike by the Vpp of its biggest chan

        Results are cached in self.X keyed by a hash of the args; self.Xhash is set
        to the key of the most recent result. Also updates the component-space
        position of each (non-junk) cluster spanned by sids."""
        spikes = self.spikes
        nt = self.wavedata.shape[2]
        if tis is None: # use full waveform
            tis = np.asarray([0, nt])
        #print('tis: %r' % (tis,))
        ti0, ti1 = tis
        assert ti0 < ti1 <= nt
        nt = ti1 - ti0
        chans, chanslist = self.get_common_chans(sids, chans)
        nchans = len(chans)
        nspikes = len(sids)
        if nspikes < 2:
            raise RuntimeError("Need at least 2 spikes for %s" % kind)
        if nchans == 0:
            raise RuntimeError("Spikes have no common chans for %s" % kind)
        # check if desired components have already been calculated (cache hit):
        Xhash = self.get_Xhash(kind, sids, tis, chans, self.npcsperchan, norm)
        self.Xhash = Xhash # save as key to most recent component matrix in self.X
        try: self.X
        except AttributeError: self.X = {} # init the dimension reduction cache attrib
        if Xhash in self.X:
            print('Cache hit, using cached %ss from tis=%r, chans=%r of %d spikes' %
                  (kind[:-1], list(tis), list(chans), nspikes))
            return self.X[Xhash] # no need to recalculate
        print('Cache miss, (re)calculating %ss' % kind[:-1])
        # collect data between tis from chans from all spikes:
        print('Doing %s on tis=%r, chans=%r of %d spikes' %
              (kind, list(tis), list(chans), nspikes))
        # MDP complains of roundoff errors with float32 for large covariance matrices
        data = np.zeros((nspikes, nchans, nt), dtype=np.float64)
        for sii, sid in enumerate(sids):
            spikechans = chanslist[sii]
            spikechanis = spikechans.searchsorted(chans)
            spikedata = self.wavedata[sid][spikechanis, ti0:ti1]
            if norm:
                # normalize by Vpp of chan with max Vpp:
                maxptp = spikedata.ptp(axis=1).max()
                if maxptp != 0: # prevent div by 0
                    spikedata = spikedata / maxptp
            data[sii] = spikedata
        print('Input shape for %s: %r' % (kind, data.shape))
        t0 = time.time()
        data.shape = nspikes, nchans*nt # flatten timepoints of all chans into columns
        print('Reshaped input for %s: %r' % (kind, data.shape))
        if kind == 'PCA': # principal components analysis
            if PCALIB == 'mdp':
                import mdp # delay as late as possible
                X = mdp.pca(data, output_dim=5, svd=False) # svd=False is default
            elif PCALIB == 'sklearn':
                # sklearn's PCA is about 8x slower than mdp.pca, I think because it
                # doesn't tap into scipy.linalg.eig compiled code. RandomizedPCA is faster
                # than PCA, but isn't deterministic, and is still 2-3x slower than mdp.pca
                from sklearn.decomposition import PCA
                pca = PCA(n_components=5)
                X = pca.fit_transform(data) # do both the fit and the transform
            else:
                raise ValueError('Invalid PCALIB %r' % PCALIB)
            if X.shape[1] < minncomp:
                raise RuntimeError("Can't satisfy minncomp=%d request" % minncomp)
        elif kind == 'sPCA': # sparse principal components analysis
            from sklearn.decomposition import SparsePCA
            n_components = 5
            alpha = 1 # sparseness parameter
            n_jobs = mp.cpu_count()
            spca = SparsePCA(n_components=n_components, alpha=alpha, n_jobs=n_jobs)
            X = spca.fit_transform(data) # do both the fit and the transform
        elif kind == 'mbsPCA': # mini batch sparse principal components analysis
            from sklearn.decomposition import MiniBatchSparsePCA
            n_components = 5
            alpha = 1 # sparseness parameter
            n_jobs = mp.cpu_count()
            mbspca = MiniBatchSparsePCA(n_components=n_components, alpha=alpha, n_jobs=n_jobs)
            X = mbspca.fit_transform(data) # do both the fit and the transform
        elif kind == 'NMF': # non-negative matrix factorization
            from sklearn.decomposition import NMF
            n_components = 5
            init = None # 'random', 'nndsvd', 'nndsvda', 'nndsvdar', 'custom'
            nmf = NMF(n_components=n_components, init=init)
            X = nmf.fit_transform(data) # do both the fit and the transform
        elif kind == 'tSNE': # t-distributed stochastic neighbor embedding
            # limit number of PCs to feed into t-SNE, keep up to npcsperchan components
            # per chan on average:
            ncomp = min((self.npcsperchan*nchans, data.shape[1]))
            print('ncomp: %d' % ncomp)
            import mdp # delay as late as possible
            # do PCA first, to reduce dimensionality and speed up t-SNE:
            data = mdp.pca(data, output_dim=ncomp)
            from sklearn.manifold import TSNE
            n_components = 3 # not suited for any more than 3, according to the paper
            #init = 'random', 'pca'
            tsne = TSNE(n_components=n_components)
            X = tsne.fit_transform(data) # do both the fit and the transform
        elif kind == 'ICA': # independent components analysis
            # ensure nspikes >= ndims**2 for good ICA convergence
            maxncomp = intround(np.sqrt(nspikes))
            if maxncomp < minncomp:
                raise RuntimeError("Can't satisfy minncomp=%d request" % minncomp)
            if data.shape[0] <= data.shape[1]:
                raise RuntimeError('Need more observations than dimensions for ICA')
            # limit number of PCs to feed into ICA, keep up to npcsperchan components per
            # chan on average:
            ncomp = min((self.npcsperchan*nchans, maxncomp, data.shape[1]))
            if ICALIB == 'mdp':
                import mdp # delay as late as possible
                # do PCA first, to reduce dimensionality and speed up ICA:
                print('ncomp: %d' % ncomp)
                data = mdp.pca(data, output_dim=ncomp)
                # nonlinearity g='pow3', ie x**3. tanh seems to separate better,
                # but is a bit slower. gaus seems to be slower still, and no better
                # than tanh, but these are just vague impressions.
                # defaults to whitened=False, ie assumes data isn't whitened
                node = mdp.nodes.FastICANode(g='pow3')
                X = node(data)
                pm = node.get_projmatrix()
                X = X[:, np.any(pm, axis=0)] # keep only the non zero columns
            elif ICALIB == 'sklearn':
                from sklearn.decomposition import FastICA
                # when whiten=True (default), FastICA preprocesses the data using PCA, and
                # n_components is the number of PCs that are kept before doing ICA.
                alg = 'parallel' # parallel or deflation, default is parallel
                fun = 'logcosh' # logcosh, exp, or cube, default is logcosh
                maxiter = 100 # default is 200
                tol = 0.5 # default is 0.0001, seems need >~ 0.1 to exit faster
                ## TODO: make FastICA algorithm (parallel, deflation), nonlinearity (logcosh,
                ## exp, cube) and IC sort method (abs(kurtosis) vs. negentropy) GUI options
                print('ncomp=%d, alg=%r, fun=%r, maxiter=%d, tol=%g'
                      % (ncomp, alg, fun, maxiter, tol))
                fastica = FastICA(n_components=ncomp, algorithm=alg,
                                  whiten=True, fun=fun, fun_args=None,
                                  max_iter=maxiter, tol=tol, w_init=None,
                                  random_state=None)
                X = fastica.fit_transform(data) # do both the fit and the transform
                #pm = fastica.components_
                print('fastica niters: %d' % (fastica.n_iter_))
            else:
                raise ValueError('Invalid ICALIB %r' % ICALIB)
            if X.shape[1] < 3:
                raise RuntimeError('Need at least 3 columns')
            # Sort ICs by decreasing kurtosis or negentropy. For kurtosis, see Scholz2004 (or
            # rather, opposite to their approach, which picked ICs with most negative
            # kurtosis). For methods of estimating negentropy, see Hyvarinen1997.
            '''
            # sort by abs(kurtosis) of each IC (column)
            k = scipy.stats.kurtosis(X, axis=0)
            ki = abs(k).argsort()[::-1] # decreasing order of abs(kurtosis)
            print('Sort by abs(kurtosis):')
            print(k[ki])
            X = X[:, ki] # sort the ICs
            '''
            # sort by negentropy of each IC (column), this seems to work better than kurtosis
            # at separating clusters of similar size:
            ne = core.negentropy(X, axis=0)
            assert (ne > 0).all()
            nei = ne.argsort()[::-1] # decreasing order of negentropy
            print('Sort by negentropy:')
            print(ne[nei])
            X = X[:, nei] # sort the ICs
            '''
            import pylab as pl
            pl.figure()
            pl.imshow(pm)
            pl.colorbar()
            pl.title('original projmatrix')
            pl.figure()
            pl.imshow(pm[:, ki])
            pl.colorbar()
            pl.title('decreasing abs(kurtosis) projmatrix')
            pl.figure()
            pl.imshow(pm[:, nei])
            pl.colorbar()
            pl.title('decreasing negentropy projmatrix')
            '''
        else:
            raise ValueError('Unknown kind %r' % kind)
        print('Output shape for %s: %r' % (kind, X.shape))
        self.X[Xhash] = X # cache for fast future retrieval
        print('%s took %.3f sec' % (kind, time.time()-t0))
        unids = list(np.unique(spikes['nid'][sids])) # set of all nids that sids span
        for nid in unids:
            # don't update pos of junk cluster, if any, since it might not have any chans
            # common to all its spikes, and therefore can't have PCA/ICA done on it
            if nid != 0:
                self.clusters[nid].update_comppos(X, sids)
        return X
def get_rms_error(self, sids, tis=None, chans=None):
"""Calculate RMS error of spike waveforms (all from the same cluster) relative to
their cluster's mean waveform. Consider only selected tis and chans"""
spikes = self.spikes
nids = np.unique(spikes['nid'][sids])
nid = nids[0]
if len(nids) > 1 or nid == 0:
raise RuntimeError("Spikes must all belong to the same (non-junk) cluster for "
"RMS error calculation")
nt = self.wavedata.shape[2]
if tis is None: # use full waveform
tis = np.asarray([0, nt])
#print('tis: %r' % (tis,))
ti0, ti1 = tis
assert ti0 < ti1 <= nt
nt = ti1 - ti0
chans, chanslist = self.get_common_chans(sids, chans)
nchans = len(chans)
nspikes = len(sids)
if nchans == 0:
raise RuntimeError("Spikes have no common chans for RMS error")
# collect data between tis from chans from all spikes:
print('Getting RMS error on tis=%r, chans=%r of %d spikes' %
(list(tis), list(chans), nspikes))
data = np.zeros((nspikes, nchans, nt), dtype=np.float64)
for sii, sid in enumerate(sids):
spikechans = chanslist[sii]
spikechanis = spikechans.searchsorted(chans)
data[sii] = self.wavedata[sid][spikechanis, ti0:ti1]
# get cluster mean waveform between tis on chans:
wave = self.neurons[nid].get_wave()
chanis = wave.chans.searchsorted(chans)
meandata = np.float64(wave.data[chanis, ti0:ti1])
# calculate RMS error between each spike and the cluster mean waveform:
se = (data - meandata) ** 2 # squared error
# take mean across timepoints and chans, but not across spikes:
mse = se.mean(axis=2).mean(axis=1) # mean squared error
return np.sqrt(mse)
def get_common_chans(self, sids, chans=None):
"""Find channels common to all sids, and optionally to chans as well. Also,
return chanslist, ie list of arrays of chans of sids"""
spikes = self.spikes
chanss = spikes['chans'][sids]
nchanss = spikes['nchans'][sids]
#t0 = time.time()
chanslist = [ cs[:ncs] for cs, ncs in zip(chanss, nchanss) ] # list of arrays
#print('Building chanslist took %.3f sec' % (time.time()-t0))
commonchans = util.intersect1d_uint8(chanslist) # find intersection
if chans is not None and len(chans) > 0:
# values in chans but not in commonchans:
diffchans = np.setdiff1d(chans, commonchans)
commonchans = np.intersect1d(chans, commonchans) # values in both
if len(diffchans) > 0:
print('WARNING: ignored chans %r not common to all spikes' % list(diffchans))
return commonchans, chanslist
def get_Xhash(self, kind, sids, tis, chans, npcsperchan, norm):
"""Return MD5 hex digest of args, for uniquely identifying the matrix resulting
from dimension reduction of spike data"""
h = hashlib.md5()
h.update(kind.encode())
h.update(sids)
h.update(tis)
h.update(chans)
if kind == 'ICA': # consider npcsperchan only if doing ICA
h.update(str(npcsperchan).encode())
h.update(str(norm).encode())
return h.hexdigest()
def create_neuron(self, id=None, inserti=None):
"""Create and return a new Neuron with a unique ID"""
if id == None:
id = self.nextnid
if id in self.neurons:
raise RuntimeError('Neuron %d already exists' % id)
id = int(id) # get rid of numpy ints
neuron = Neuron(self, id)
# add neuron to self
self.neurons[neuron.id] = neuron
if inserti == None:
self.norder.append(neuron.id)
else:
self.norder.insert(inserti, neuron.id)
return neuron
def remove_neuron(self, id):
try:
del self.neurons[id] # may already be removed due to recursive call
del self.clusters[id]
self.norder.remove(id)
except (KeyError, ValueError):
pass
    def shift(self, sids, nt):
        """Shift sid waveforms by nt timepoints: -ve shifts waveforms left, +ve shifts right.
        For speed, pad waveforms with edge values at the appropriate end.

        Updates spike time fields (t, t0, t1) and within-waveform peak indices (tis)
        to match the shift. Caller should treat all sids as dirty afterwards."""
        spikes = self.spikes
        wd = self.wavedata
        for sid in sids: # maybe there's a more efficient way than iterating over sids
            core.shiftpad(wd[sid], nt) # modifies wd in-place
        # update spike parameters:
        dt = intround(nt * self.tres) # amount of time to shift by, signed, in us
        # so we can later reload the wavedata accurately, shifting the waveform right and
        # padding it on its left requires decrementing the associated timepoints
        # (and vice versa)
        spikes['t'][sids] -= dt
        spikes['t0'][sids] -= dt
        spikes['t1'][sids] -= dt
        # might result in some out of bounds tis because the original peaks
        # have shifted off the ends. Opposite sign wrt timepoints above, referencing within
        # wavedata:
        spikes['tis'][sids] = spikes['tis'][sids] + nt
        # this in-place operation raises a TypeError in numpy 1.11.2, something related to
        # subtracting an int from an unsigned int:
        #spikes['tis'][sid] += nt
        # caller should treat all sids as dirty
'''
# replaced by util.alignbest_cy():
def alignbest(self, sids, tis, chans):
"""Align all sids between tis on chans by best fit according to mean squared error.
chans are assumed to be a subset of channels of sids. Return sids
that were actually moved and therefore need to be marked as dirty"""
spikes = self.spikes
nspikes = len(sids)
nchans = len(chans)
wd = self.wavedata
nt = wd.shape[2] # num timepoints in each waveform
ti0, ti1 = tis
subnt = ti1 - ti0 # num timepoints to slice from each waveform
# TODO: make maxshift a f'n of interpolation factor
maxshift = 2 # shift +/- this many timepoints
subntdiv2 = subnt // 2
#print('subntdiv2 on either side of t=0: %d' % subntdiv2)
if subntdiv2 < maxshift:
raise ValueError("Selected waveform duration too short")
#maxshiftus = maxshift * self.stream.tres
# NOTE: in this case, it may be faster to keep shifts and sti0s and sti1s as lists
# of ints instead of np int arrays, maybe because their values are faster to iterate
# over or index with in python loops and lists:
shifts = range(-maxshift, maxshift+1) # from -maxshift to maxshift, inclusive
nshifts = len(shifts)
sti0s = [ ti0+shifti for shifti in range(nshifts) ] # shifted ti0 values
sti1s = [ ti1+shifti for shifti in range(nshifts) ] # shifted ti1 values
sti0ssti1s = zip(sti0s, sti1s)
print("Padding waveforms with up to +/- %d points of fake data" % maxshift)
# not worth subsampling here while calculating meandata, since all this
# stuff in this loop is needed in the shift loop below
subsd = np.zeros((nspikes, nchans, subnt), dtype=wd.dtype) # subset of spike data
spikechanis = np.zeros((nspikes, nchans), dtype=np.int64)
t0 = time.time()
for sidi, sid in enumerate(sids):
spike = spikes[sid]
nspikechans = spike['nchans']
spikechans = spike['chans'][:nspikechans]
spikechanis[sidi] = spikechans.searchsorted(chans)
subsd[sidi] = wd[sid, spikechanis[sidi], ti0:ti1]
print('Mean prep loop for best shift took %.3f sec' % (time.time()-t0))
t0 = time.time()
meandata = subsd.mean(axis=0) # float64
print('Mean for best shift took %.3f sec' % (time.time()-t0))
# choose best shifted waveform for each spike
# widesd holds current spike data plus padding on either side
# to allow for full width slicing for all time shifts:
maxnchans = spikes['nchans'].max() # of all spikes in sort
widesd = np.zeros((maxnchans, maxshift+nt+maxshift), dtype=wd.dtype)
shiftedsubsd = subsd.copy() # init
tempsubshifts = np.zeros((nshifts, nchans, subnt), dtype=wd.dtype)
dirtysids = []
t0 = time.time()
for sidi, sid in enumerate(sids):
# for speed, instead of adding real data, pad start and end with fake values
chanis = spikechanis[sidi]
sd = wd[sid] # sid's spike data
widesd[:, maxshift:-maxshift] = sd # 2D
widesd[:, :maxshift] = sd[:, 0, None] # pad start with first point per chan
widesd[:, -maxshift:] = sd[:, -1, None] # pad end with last point per chan
wideshortsd = widesd[chanis] # sid's padded spike data on chanis, 2D
# keep this inner loop as fast as possible:
for shifti, (sti0, sti1) in enumerate(sti0ssti1s):
tempsubshifts[shifti] = wideshortsd[:, sti0:sti1] # len: subnt
errors = tempsubshifts - meandata # (nshifts, nchans, subnt) - (nchans, subnt)
# get sum squared errors by taking sum across highest two dims - for purpose
# of error comparison, don't need to take mean or square root. Also, order
# of summation along axes doesn't matter, as long as it's done on the highest two:
sserrors = (errors**2).sum(axis=2).sum(axis=1) # nshifts long
bestshifti = sserrors.argmin()
bestshift = shifts[bestshifti]
if bestshift != 0: # no need to update sort.wavedata[sid] if there's no shift
# update time values:
dt = bestshift * self.tres # time to shift by, signed, in us
spikes['t'][sid] += dt # should remain halfway between t0 and t1
spikes['t0'][sid] += dt
spikes['t1'][sid] += dt
# might result in some out of bounds tis because the original peaks
# have shifted off the ends. Opposite sign, referencing within wavedata:
spikes['tis'][sid] -= bestshift
# update sort.wavedata
wd[sid] = widesd[:, bestshifti:bestshifti+nt]
shiftedsubsd[sidi] = tempsubshifts[bestshifti]
dirtysids.append(sid) # mark sid as dirty
print('Shifting loop took %.3f sec' % (time.time()-t0))
AD2uV = self.converter.AD2uV
stdevbefore = AD2uV(subsd.std(axis=0).mean())
stdevafter = AD2uV(shiftedsubsd.std(axis=0).mean())
print('stdev went from %.3f to %.3f uV' % (stdevbefore, stdevafter))
return dirtysids
'''
    def alignminmax(self, sids, to):
        """Align sids by their min or max. Return those that were actually moved
        and therefore need to be marked as dirty.

        to: 'min' or 'max', the peak to align each spike to. Spikes already
        aligned to the requested peak are left untouched. Wavedata of all shifted
        spikes is reloaded from the open stream."""
        if not self.stream.is_open():
            raise RuntimeError("No open stream to reload spikes from")
        spikes = self.spikes
        V0s = spikes['V0'][sids]
        V1s = spikes['V1'][sids]
        Vss = np.column_stack((V0s, V1s))
        alignis = spikes['aligni'][sids]
        b = np.column_stack((alignis==0, alignis==1)) # 2D boolean array
        # Vss[b] picks each spike's currently-aligned peak voltage; its sign tells
        # whether the spike is currently aligned to a max (+ve) or min (-ve) peak:
        if to == 'min':
            i = Vss[b] > 0 # indices into sids of spikes aligned to the max peak
        elif to == 'max':
            i = Vss[b] < 0 # indices into sids of spikes aligned to the min peak
        else:
            raise ValueError('Unknown to %r' % to)
        sids = sids[i] # sids that need realigning
        nspikes = len(sids)
        print("Realigning %d spikes" % nspikes)
        if nspikes == 0: # nothing to do
            return [] # no sids to mark as dirty
        multichantis = spikes['tis'][sids] # nspikes x nchans x 2 arr
        chanis = spikes['chani'][sids] # nspikes arr of max chanis
        # peak tis on max chan of each spike, convert from uint8 to int32 for safe math
        tis = np.int32(multichantis[np.arange(nspikes), chanis]) # nspikes x 2 arr
        # NOTE: tis aren't always in temporal order!
        dpeaktis = tis[:, 1] - tis[:, 0] # could be +ve or -ve
        dpeaks = spikes['dt'][sids] # stored as +ve
        # for each spike, decide whether to add or subtract dpeak to/from its temporal values
        ordered = dpeaktis > 0 # in temporal order (NB: shadows the reversed builtin below)
        reversed = dpeaktis < 0 # in reversed temporal order
        alignis = spikes['aligni'][sids]
        alignis0 = alignis == 0
        alignis1 = alignis == 1
        dpeaki = np.zeros(nspikes, dtype=int)
        # add dpeak to temporal values to align to later peak
        dpeaki[ordered & alignis0 | reversed & alignis1] = 1
        # subtract dpeak from temporal values to align to earlier peak
        dpeaki[ordered & alignis1 | reversed & alignis0] = -1
        # upcast aligni from 1 byte to an int before doing arithmetic on it:
        #dalignis = -np.int32(alignis)*2 + 1
        dts = dpeaki * dpeaks
        dtis = -dpeaki * abs(dpeaktis)
        # shift values
        spikes['t'][sids] += dts
        spikes['t0'][sids] += dts
        spikes['t1'][sids] += dts
        spikes['tis'][sids] = spikes['tis'][sids] + dtis[:, None, None] # update wrt new t0i
        # flip each realigned spike's aligni to the other peak:
        spikes['aligni'][sids[alignis0]] = 1
        spikes['aligni'][sids[alignis1]] = 0
        # update wavedata for each shifted spike
        self.reload_spikes(sids)
        return sids # mark all sids as dirty
    def choose_new_meanchans(self, sids):
        """Get mean waveform of all sids, then find the mean's chan with max Vpp, then
        choose det.maxnchansperspike channels around that maxchan.
        Return meanchans, furthestchan, and furthestchani.

        meanchans: sorted array of the chosen channel IDs
        furthestchan: the chosen chan furthest from the maxchan (the candidate for
        replacement by a spike's own maxchan during reloading)
        furthestchani: index of furthestchan within meanchans"""
        print('Choosing new channel set for all selected spikes')
        det = self.detector
        meanwave = self.get_mean_wave(sids)
        # mean chan with max Vpp:
        maxchan = meanwave.chans[meanwave.data.ptp(axis=1).argmax()]
        maxchani = det.chans.searchsorted(maxchan)
        # det.dm.data presumably holds interchannel distances — confirm in detector code:
        distances = det.dm.data[maxchani]
        # keep the maxnchansperspike closest chans to maxchan, including maxchan:
        chanis = distances.argsort()[:det.maxnchansperspike]
        meanchans = det.chans[chanis]
        meanchans.sort() # keep them sorted
        print('meanchans: %r' % list(meanchans))
        furthestchan = det.chans[chanis[-1]]
        print('furthestchan: %d' % furthestchan)
        furthestchani = meanchans.searchsorted(furthestchan)
        # sanity checks:
        assert len(meanchans) == det.maxnchansperspike
        assert maxchan in meanchans
        return meanchans, furthestchan, furthestchani
    def reload_spikes(self, sids, usemeanchans=False):
        """Update wavedata of designated spikes from stream. Optionally fix incorrect
        time values from .sort 0.3 files. Optionally choose new set of channels for all
        sids based on the chans closest to the mean of the sids. It's the caller's
        responsibility to mark sids as dirty and trigger resaving of .wave file.

        Spikes are batched into groups of temporally-close spikes (gaps < MAXGROUPISI,
        spans <= MAXGROUPDT) so each group can be loaded from the stream in one call."""
        ## TODO: add findmaxchan=False and recenteronmaxchan=False kwargs
        nsids = len(sids)
        print('(Re)loading %d spikes' % nsids)
        stream = self.stream
        if not stream.is_open():
            raise RuntimeError("No open stream to reload spikes from")
        spikes = self.spikes
        det = self.detector
        ver_lte_03 = float(self.__version__) <= 0.3
        if ver_lte_03:
            print('Fixing potentially incorrect time values during spike reloading')
            nfixed = 0
        treload = time.time()
        if usemeanchans:
            if ver_lte_03:
                raise RuntimeError("Best not to choose new chans from mean until after "
                                   "converting to .sort >= 0.4")
            meanchans, furthestchan, furthestchani = self.choose_new_meanchans(sids)
            nmeanchans = len(meanchans)
        # split up sids into groups efficient for loading from stream:
        ts = spikes[sids]['t'] # noncontig, not a copy
        # ensure they're in temporal order:
        if not (np.diff(ts) >= 0).all():
            print("Selected sids aren't in temporal order, sorting by time...")
            tsis = ts.argsort()
            sids = sids[tsis]
            print("Done sorting sids by time")
        # break up spikes by ISIs >= MAXGROUPISI:
        splitis = np.where(np.diff(ts) >= MAXGROUPISI)[0] + 1
        groups = np.split(sids, splitis)
        # limit each group of sids to no more than MAXGROUPDT:
        groupi = 0
        while groupi < len(groups):
            group = groups[groupi] # group of sids all with ISIs < MAXGROUPISI
            ## TODO: not a copy: is this the optimal way to get the times in this case?
            relts = spikes[group]['t'] - spikes[group[0]]['t']
            splitis = np.where(np.diff(relts // MAXGROUPDT) > 0)[0] + 1
            nsubgroups = len(splitis) + 1
            if nsubgroups > 1:
                # del original group, replace with subgroups
                del groups[groupi]
                subgroups = np.split(group, splitis)
                groups[groupi:groupi] = subgroups
                groupi += len(subgroups)
            else:
                groupi += 1
        print('ngroups: %d' % len(groups))
        # process each group:
        sidi = 0 # init sid index across all groups, used as status counter
        for groupi, group in enumerate(groups):
            printflush('<%d>' % groupi, end='')
            assert len(group) > 0 # otherwise something went wrong above
            t0 = spikes[group[0]]['t0']
            t1 = spikes[group[-1]]['t1']
            if ver_lte_03:
                # load a little extra, in case we need to reload misaligned first and/or
                # last spike in this group
                t0 -= 5000 # -5 ms
                t1 += 5000 # +5 ms
            """
            Find union of chans of sids in this group, ask Stream for only those such that no
            unnecessary resampling takes place on unneeded chans. Note that this doesn't make
            a difference when CAR is enabled in the stream, because the full set of enabled
            chans have to be maintained in Stream.__call__ until the very end. Don't bother
            cutting out the correct nchans for each sid. At worst, chan 0 (the "empty" chans
            array value) will be unnecessarily added to unionchans, and we'll retrieve one
            extra chan when creating tempwave, which will then later be discarded:
            """
            unionchans = np.unique(spikes['chans'][group])
            if usemeanchans:
                # now that we have the original unionchans of this group,
                # update this group's spikes array entries with meanchans:
                spikes['nchans'][group] = nmeanchans
                # we're using the max num chans, so assign the full array:
                spikes['chans'][group] = meanchans
                # now update unionchans as well:
                unionchans = np.unique(np.hstack((unionchans, meanchans)))
            if 0 not in stream.chans: # if chan 0 is disabled in stream
                # remove 0 from unionchans, otherwise an error would be raised when
                # calling stream()
                unionchans = unionchans[unionchans != 0]
            # load and resample only what's needed for this group:
            tempwave = stream(t0, t1, unionchans)
            # slice out each spike's reloaded data from tempwave:
            for sid in group:
                # print status:
                if sidi % 10000 == 0:
                    printflush(sidi, end='')
                elif sidi % 1000 == 0:
                    printflush('.', end='')
                if usemeanchans: # already checked above that ver_lte_03 == False
                    # this spike's chans have been set to meanchans, now
                    # check that each spike's maxchan is in meanchans:
                    chan = spikes[sid]['chan']
                    if chan not in meanchans:
                        # replace furthest chan with spike's maxchan:
                        print("spike %d: replacing furthestchan %d with spike's maxchan %d"
                              % (sid, furthestchan, chan))
                        nchans = spikes[sid]['nchans']
                        chans = spikes[sid]['chans'][:nchans]
                        # replace furthest chan with max chan, modifies spikes array in-place:
                        chans[furthestchani] = chan
                        # make sure chans remain sorted:
                        chans.sort()
                        # this isn't necessary, because all the above was in-place:
                        #spikes['chans'][sid][:nchans] = chans
                spike = spikes[sid]
                nchans = spike['nchans']
                chans = spike['chans'][:nchans]
                rd = tempwave[spike['t0']:spike['t1']][chans].data # reloaded data
                if ver_lte_03: # fix potentially incorrect spike tis
                    result = self.reload_spike_ver_lte_03(sid, nchans, tempwave, rd)
                    if result == None:
                        sidi += 1 # inc status counter
                        continue # rollwin2D won't work, skip to next sid
                    else:
                        rd, fixed = result
                        if fixed:
                            nfixed += 1
                nt = rd.shape[1]
                self.wavedata[sid, :nchans, :nt] = rd # update wavedata
                sidi += 1 # inc status counter
        print()
        if ver_lte_03:
            print('Fixed time values of %d spikes' % nfixed)
        print('(Re)loaded %d spikes, took %.3f sec' % (len(sids), time.time()-treload))
def reload_spike_ver_lte_03(self, sid, nchans, tempwave, rd):
"""In sort.__version__ <= 0.3, t, t0, t1, and tis were not updated
during alignbest() calls. To fix this, load new data with old potentially
incorrect t0 and t1 values, and compare this new data to existing old data
in wavedata array. Find where the non-repeating parts of the old data fits
into the new, and calculate the correction needed to fix the time values.
Finally, reload new data according to these corrected time values."""
#print('Reloading sid from ver_lte_03: %d' % sid)
od = self.wavedata[sid, :nchans] # old data
# indices that strip const values from left and right ends:
lefti, righti = lrrep2Darrstripis(od)
od = od[:, lefti:righti] # stripped old data
# reloaded data rd uses old incorrect t0 and t1, but they should be
# wide enough to encompass the non-repeating parts of the old data
width = od.shape[1] # rolling window width
if not width <= rd.shape[1]:
print('') # newline
print("WARNING: od.shape[1]=%d > rd.shape[1]=%d for sid %d" %
(od.shape[1], rd.shape[1], sid))
#import pdb; pdb.set_trace()
return
odinndis = np.where((rollwin2D(rd, width) == od).all(axis=1).all(axis=1))[0]
if len(odinndis) == 0: # no hits of old data in new
dnt = 0 # reload data based on current timepoints
elif len(odinndis) == 1: # exactly 1 hit of old data in new
odinndi = odinndis[0] # pull it out
dnt = odinndi - lefti # num timepoints to correct by, signed
else:
raise RuntimeError("Multiple hits of old data in new, don't know "
"how to reload spike %d" % sid)
newrd, fixed = rd, False
if dnt != 0:
dt = intround(dnt * self.tres) # time to correct by, signed, in us
spikes['t'][sid] += dt # should remain halfway between t0 and t1
spikes['t0'][sid] += dt
spikes['t1'][sid] += dt
# might result in some out of bounds tis because the original peaks
# have shifted off the ends. Use opposite sign because we're
# referencing within wavedata:
# in versions <= 0.3, 'tis' were named 'phasetis':
spikes['phasetis'][sid] = spikes['phasetis'][sid] - dnt
spike = spikes[sid]
# reslice tempwave again now that t0 and t1 have changed
newrd = tempwave[spike['t0']:spike['t1']][chans].data
fixed = True
#printflush('F', end='')
return newrd, fixed
def reload_spikes_and_templates(self, sids, usemeanchans=False):
self.reload_spikes(sids, usemeanchans=usemeanchans)
# update neuron templates:
unids = np.unique(self.spikes['nid'][sids])
unids = unids[unids != 0] # exclude junk cluster, which doesn't have a neuron
neurons = [ self.neurons[nid] for nid in unids ]
for neuron in neurons:
neuron.update_wave() # update affected mean waveforms
def init_spike_alignment(self):
"""Set initial spike alignment points according to alignment points of each
spike's neuron"""
print('Setting initial spike alignment points')
ntis, nalignis = {}, {} # tis and aligni derived from each neuron's mean waveform
for neuron in self.neurons.values():
nwave = neuron.get_wave() # update and return mean waveform
mintis = nwave.data.argmin(axis=1)
maxtis = nwave.data.argmax(axis=1)
ntis[neuron.id] = np.column_stack([mintis, maxtis])
# choose aligni with least variance:
nalignis[neuron.id] = np.argmin([mintis.std(), maxtis.std()])
AD2uV = self.converter.AD2uV
for s, wd in zip(self.spikes, self.wavedata):
sid = s['id']
# print out progress on a regular basis:
if sid % 100000 == 0:
printflush(sid, end='')
elif sid % 10000 == 0:
printflush('.', end='')
nid = s['nid']
#chan = s['chan']
nchans = s['nchans']
chans = s['chans'][:nchans]
neuronchans = self.neurons[nid].wave.chans
assert (chans == neuronchans).all()
s['tis'][:nchans] = ntis[nid] # set according to its neuron, wrt t0i=0
s['aligni'] = nalignis[nid] # set according to its neuron
maxchani = s['chani']
t0i, t1i = int(s['tis'][maxchani, 0]), int(s['tis'][maxchani, 1])
s['dt'] = abs(t1i - t0i) / self.sampfreq * 1e6 # us
# note that V0 and V1 might not be of opposite sign, because tis are derived
# from mean neuron waveform, not from each individual spike:
s['V0'], s['V1'] = AD2uV(wd[maxchani, t0i]), wd[maxchani, t1i] # uV
s['Vpp'] = abs(s['V1'] - s['V0']) # uV
print()
def spatially_localize_spikes(self, sortwin, method='fit'):
"""Assuming that wavedata have been extracted and neuron mean waveforms calculated,
find tis and perform spatial localization of every spike in self"""
det = self.detector
weights2f = self.extractor.weights2spatial
weights2spatialmean = self.extractor.weights2spatialmean
f = self.extractor.f
nreject = 0 # number spikes rejected during spatial localization
print('Running spatial localization on all %d spikes' % self.nspikes)
tstart = time.clock()
## TODO: chan this be multithreaded/processed?
for s, wd in zip(self.spikes, self.wavedata):
# Get Vpp at each inclchan's tis, use as spatial weights:
# see core.rowtake() or util.rowtake_cy() for indexing explanation:
sid = s['id']
# print out progress on a regular basis:
if sid % 10000 == 0:
printflush(sid, end='')
elif sid % 1000 == 0:
printflush('.', end='')
chan = s['chan']
nchans = s['nchans']
chans = s['chans'][:nchans]
maxchani = s['chani']
chanis = det.chans.searchsorted(chans)
w = np.float32(wd[np.arange(s['nchans'])[:, None], s['tis'][:nchans]]) # nchans x 2
w = abs(w).sum(axis=1) # Vpp for each chan, measured at t0i and t1i
x = det.siteloc[chanis, 0] # 1D array (row)
y = det.siteloc[chanis, 1]
if method == 'fit':
# localize by fitting extractor.f function to wavedata
params = weights2f(f, w, x, y, maxchani)
elif method == 'mean':
# set localization to Vpp-weighted spatial mean and 0 sigma:
x0, y0 = weights2spatialmean(w, x, y)
# a very ad-hoc guess for spatial sigma:
sx = 2 * dist((x0, y0), self.probe.SiteLoc[chan])
params = x0, y0, sx, sx
else:
print('Unknown method %r' % method)
if params == None: # presumably a non-localizable many-channel noise event
#printflush('X', end='') # to indicate a rejected spike
if DEBUG:
spiket = intround(s['t']) # nearest us
det.log("Reject spike %d at t=%d based on fit params" % (sid, spiket))
neuron = self.neurons[s['nid']]
# remove from its neuron, add to unsorted list of spikes:
sortwin.MoveSpikes2List(neuron, [sid], update=False)
# manually set localization params to Vpp-weighted spatial mean and 0 sigma:
x0, y0 = weights2spatialmean(w, x, y)
# set sigma to 0 um, and then later round lockr up to 1 um so that only one
# raster tick shows up for each rejected spike, reducing clutter
params = x0, y0, 0, 0
nreject += 1
# Save spatial fit params, and "lockout" only the channels within lockrx*sx
# of the fit spatial location of the spike, up to a max of inclr. "Lockout"
# in this case only refers to which channels are highlighted with a raster tick
# for each spike:
s['x0'], s['y0'], s['sx'], s['sy'] = params
x0, y0 = s['x0'], s['y0']
# lockout radius for this spike:
lockr = min(det.lockrx*s['sx'], det.inclr) # in um
lockr = max(lockr, 1) # at least 1 um, so at least the maxchan gets a tick
# test y coords of chans in y array, ylockchaniis can be used to index
# into x, y and chans:
ylockchaniis, = np.where(np.abs(y - y0) <= lockr) # convert bool arr to int
# test Euclid distance from x0, y0 for each ylockchani:
lockchaniis = ylockchaniis.copy()
for ylockchanii in ylockchaniis:
if dist((x[ylockchanii], y[ylockchanii]), (x0, y0)) > lockr:
# Euclidean distance is too great, remove ylockchanii from lockchaniis:
lockchaniis = lockchaniis[lockchaniis != ylockchanii]
lockchans = chans[lockchaniis]
nlockchans = len(lockchans)
s['lockchans'][:nlockchans], s['nlockchans'] = lockchans, nlockchans
print('Spatial localization of spikes took %.3f s' % (time.clock() - tstart))
return nreject
'''
def get_component_matrix(self, dims=None, weighting=None):
"""Convert spike param matrix into pca/ica data for clustering"""
import mdp # can't delay this any longer
X = self.get_param_matrix(dims=dims)
if weighting == None:
return X
if weighting.lower() == 'ica':
node = mdp.nodes.FastICANode()
elif weighting.lower() == 'pca':
node = mdp.nodes.PCANode()
else:
raise ValueError, 'unknown weighting %r' % weighting
node.train(X)
features = node.execute(X) # returns all available components
#self.node = node
#self.weighting = weighting
#self.features = features
return features
def get_ids(self, cids, spikes):
"""Convert a list of cluster ids into 2 dicts: n2sids maps neuron IDs to
spike IDs; s2nids maps spike IDs to neuron IDs"""
cids = np.asarray(cids)
cids = cids - cids.min() # make sure cluster IDs are 0-based
uniquecids = set(cids)
nclusters = len(uniquecids)
# neuron ID to spike IDs (plural) mapping
n2sids = dict(zip(uniquecids, [ [] for i in range(nclusters) ]))
s2nids = {} # spike ID to neuron ID mapping
for spike, nid in zip(spikes, cids):
s2nids[spike['id']] = nid
n2sids[nid].append(spike['id'])
return n2sids, s2nids
def write_spc_input(self):
"""Generate input data file to SPC"""
X = self.get_component_matrix()
# write to space-delimited .dat file. Each row is a spike, each column a param
spykedir = os.path.dirname(__file__)
dt = str(datetime.datetime.now())
dt = dt.split('.')[0] # ditch the us
dt = dt.replace(' ', '_')
dt = dt.replace(':', '.')
self.spcdatfname = os.path.join(spykedir, 'spc', dt+'.dat')
# not sure why spc adds the dg_01 part:
self.spclabfname = os.path.join(spykedir, 'spc', dt+'.dg_01.lab')
f = open(self.spcdatfname, 'w')
for params in X: # write text data to file, one row at a time
params.tofile(f, sep=' ', format='%.6f')
f.write('\n')
f.close()
def parse_spc_lab_file(self, fname=None):
"""Parse output .lab file from SPC. Each row in the file is the assignment of each
spin (datapoint) to a cluster, one row per temperature datapoint. First column is
temperature run number (0-based). 2nd column is the temperature. All remaining
columns correspond to the datapoints in the order presented in the input .dat file.
Returns (Ts, cids)"""
#spikes = self.get_spikes_sortedby('id')
if fname == None:
defaultDir = r"C:\Documents and Settings\Administrator\Desktop\Charlie\From"
dlg = wx.FileDialog(None, message="Open SPC .lab file",
defaultDir=defaultDir, defaultFile='',
wildcard="All files (*.*)|*.*|.lab files (*.lab)|*.lab|",
style=wx.OPEN)
if dlg.ShowModal() == wx.ID_OK:
fname = dlg.GetPath()
dlg.Destroy()
data = np.loadtxt(fname, dtype=np.float32)
Ts = data[:, 1] # 2nd column
cids = np.int32(data[:, 2:]) # 3rd column on
print('Parsed %r' % fname)
return Ts, cids
def parse_charlies_output(self, fname=None):
if fname == None:
fname = (r'C:\Documents and Settings\Administrator\Desktop\Charlie\'
'From\2009-07-20\clustered_events_coiflet_T0.125.txt')
nids = np.loadtxt(fname, dtype=int) # one neuron id per spike
return nids
def write_spc_app_input(self):
"""Generate input data file to spc_app"""
spikes = self.get_spikes_sortedby('id')
X = self.get_component_matrix()
# write to tab-delimited data file. Each row is a param, each column a spike
# (this is the transpose of X)
# first row has labels "AFFX", "NAME", and then spike ids
# first col has labels "AFFX", and then param names
f = open(r'C:\home\mspacek\Desktop\Work\SPC\Weizmann\spc_app\spc_app_input.txt', 'w')
f.write('AFFX\tNAME\t')
for spike in spikes:
f.write('s%d\t' % spike['id'])
f.write('\n')
for parami, param in enumerate(['Vpp', 'dt', 'x0', 'y0', 'sx', 'sy', 'theta']):
f.write(param+'\t'+param+'\t')
for val in X[:, parami]:
f.write('%f\t' % val)
f.write('\n')
f.close()
def hcluster(self, t=1.0):
"""Hierarchically cluster self.spikes
TODO: consider doing multiple cluster runs. First, cluster by spatial location (x0,
y0). Then split those clusters up by Vpp. Then those by spatial distrib (sy/sx,
theta), then by temporal distrib (dt, s1, s2). This will ensure that the lousier
params will only be considered after the best ones already have, and therefore that
you start off with pretty good clusters that are then only slightly refined using
the lousy params
"""
spikes = self.get_spikes_sortedby('id')
X = self.get_component_matrix()
print(X)
# try 'weighted' or 'average' with 'mahalanobis'
cids = fclusterdata(X, t=t, method='single', metric='euclidean')
n2sids, s2nids = self.get_ids(cids, spikes)
return n2sids
def export2Charlie(self, fname='spike_data', onlymaxchan=False, nchans=3, npoints=32):
"""Export spike data to a text file, one spike per row.
Columns are x0, y0, followed by most prominent npoints datapoints
(1/4, 3/4 wrt spike time) of each nearest nchans. This is to
give to Charlie to do WPD and SPC on"""
if onlymaxchan:
nchans = 1
assert np.log2(npoints) % 1 == 0, 'npoints is not a power of 2'
# get ti - time index each spike is assumed to be centered on
self.spikes[0].update_wave(self.stream) # make sure it has a wave
ti = intround(self.spikes[0].wave.data.shape[-1] / 4) # 13 for 50 kHz, 6 for 25 kHz
dims = self.nspikes, 2+nchans*npoints
output = np.empty(dims, dtype=np.float32)
dm = self.detector.dm
chanis = np.arange(len(dm.data))
coords = np.asarray(dm.coords)
xcoords = coords[:, 0]
ycoords = coords[:, 1]
sids = list(self.spikes) # self.spikes is a dict!
sids.sort()
for sid in sids:
spike = self.spikes[sid]
chani = spike.chani # max chani
x0, y0 = spike.x0, spike.y0
if onlymaxchan:
nearestchanis = np.asarray([chani])
else:
# find closest chans to x0, y0
d2s = (xcoords - x0)**2 + (ycoords - y0)**2 # squared distances
sortis = d2s.argsort()
nearestchanis = chanis[sortis][0:nchans] # pick the first nchan nearest chans
if chani not in nearestchanis:
print("WARNING: max chani %d is not among the %d chanis nearest "
"(x0, y0) = (%.1f, %.1f) for spike %d at t=%d"
% (chani, nchans, x0, y0, sid, spike.t))
if spike.wave.data is None:
spike.update_wave(self.stream)
row = [x0, y0]
for chani in nearestchanis:
chan = dm.chans[chani] # dereference
try:
data = spike.wave[chan].data[0] # pull out singleton dimension
except IndexError: # empty array
data = np.zeros(data.shape[-1], data.dtype)
row.extend(data[ti-npoints/4:ti+npoints*3/4])
output[sid] = row
dt = str(datetime.datetime.now())
dt = dt.split('.')[0] # ditch the us
dt = dt.replace(' ', '_')
dt = dt.replace(':', '.')
fname += '.' + dt + '.txt'
np.savetxt(fname, output, fmt='%.1f', delimiter=' ')
def match(self, templates=None, weighting='signal', sort=True):
"""Match templates to all .spikes with nearby maxchans,
save error values to respective templates.
Note: slowest step by far is loading in the wave data from disk.
(First match is slow, subsequent ones are ~ 15X faster.)
Unless something's done about that in advance, don't bother optimizing here much.
Right now, once waves are loaded, performance is roughly 20000 matches/sec
TODO: Nick's alternative to gaussian distance weighting: have two templates: a mean
template, and an stdev template, and weight the error between each matched
spike and the mean on each chan at each timepoint by the corresponding stdev value
(divide the error by the stdev, so that timepoints with low stdev are more sensitive
to error)
TODO: looks like I still need to make things more nonlinear - errors at high signal
values aren't penalized enough, while errors at small signal values are penalized
too much. Try cubing both signals, then taking sum(err**2)
DONE: maybe even better, instead of doing an elaborate cubing of signal, followed by
a rather elaborate gaussian spatiotemporal weighting of errors, just take difference
of signals, and weight the error according to the abs(template_signal) at each point
in time and across chans. That way, error in parts of the signal far from zero are
considered more important than deviance of perhaps similar absolute value for signal
close to zero
"""
# None defaults to matching all templates:
templates = templates or self.templates.values()
sys.stdout.write('matching')
t0 = time.time()
nspikes = len(self.spikes)
dm = self.detector.dm
for template in templates:
template.err = [] # overwrite any existing .err attrib
tw = template.tw
templatewave = template.wave[template.chans] # pull out template's enabled chans
#stdev = template.get_stdev()[template.chans] # pull out template's enabled chans
# replace any 0s with 1s - TODO: what's best way to avoid singularities?:
#stdev[stdev == 0] = 1
# Gaussian weighting in space and/or time:
weights = template.get_weights(weighting=weighting, sstdev=self.detector.slock/2,
tstdev=self.detector.tlock/2)
for spike in self.spikes.values():
# check if spike.maxchan is outside some minimum distance from template.maxchan
if dm[template.maxchan, spike.maxchan] > MAXCHANTOLERANCE: # um
continue # don't even bother
if spike.wave.data is None or template.tw != TW: # make sure their data line up
spike.update_wave(tw) # this slows things down a lot, but is necessary
# slice template's enabled chans out of spike, calculate sum of
# squared weighted error
# first impression is that dividing by stdev makes separation worse, not better
# low stdev means more sensitive to error:
#err = (templatewave.data - spike.wave[template.chans].data) / stdev * weights
# pull out template's enabled chans from spike:
spikewave = spike.wave[template.chans]
if weighting == 'signal':
tsdata = np.asarray([templatewave.data, spikewave.data])
# take elementwise max of abs of template and spike data:
weights = np.abs(tsdata).max(axis=0)
err = (templatewave.data - spikewave.data) * weights # weighted error
err = (err**2).sum(axis=None) # sum of squared weighted error
template.err.append((spike.id, intround(err)))
template.err = np.asarray(template.err, dtype=np.int64)
if sort and len(template.err) != 0:
i = template.err[:, 1].argsort() # row indices that sort by error
template.err = template.err[i]
sys.stdout.write('.')
print('\nmatch took %.3f sec' % (time.time()-t0))
'''
class Neuron(object):
    """A collection of spikes that have been deemed somehow, whether manually
    or automatically, to have come from the same cell. A Neuron's waveform
    is the mean of its member spikes"""
    def __init__(self, sort, id=None):
        self.sort = sort
        self.id = id # neuron id
        self.wave = WaveForm() # init to empty waveform
        self.sids = np.array([], dtype=int) # indices of spikes that make up this neuron
        # relative reference timestamp, here for symmetry with fellow spike rec
        # (obj.t comes up sometimes):
        self.t = 0
        self.plt = None # Plot currently holding self
        self.cluster = None
        self.good = False # user can mark this neuron as "good" if so desired
        #self.fname # not here, let's allow neurons to have spikes from different files?

    def get_chans(self):
        """Enabled chans of self's mean waveform, computing it if necessary"""
        if self.wave.data is None:
            self.update_wave()
        return self.wave.chans # self.chans just refers to self.wave.chans

    chans = property(get_chans)

    def get_chan(self):
        """Max chan: the enabled chan with the largest Vpp in the mean waveform"""
        if self.wave.data is None:
            self.update_wave()
        return self.wave.chans[self.wave.data.ptp(axis=1).argmax()] # chan with max Vpp

    chan = property(get_chan)

    def get_nspikes(self):
        return len(self.sids)

    nspikes = property(get_nspikes)

    def __getstate__(self):
        """Get object state for pickling"""
        d = self.__dict__.copy()
        # don't save any calculated PCs/ICs:
        #d.pop('X', None)
        #d.pop('Xhash', None)
        # don't save plot self is assigned to, since that'll change anyway on unpickle
        d['plt'] = None
        return d

    def get_wave(self):
        """Check for valid mean and std waveform before returning it"""
        # many neuron waveforms saved in old .sort files won't have a wave.std field:
        try:
            self.wave.std
        except AttributeError:
            return self.update_wave()
        # fix: use 'is None' instead of '== None' -- identity is what's meant,
        # and '==' could invoke a custom WaveForm.__eq__:
        if self.wave is None or self.wave.data is None or self.wave.std is None:
            return self.update_wave()
        else:
            return self.wave # return existing waveform

    def update_wave(self):
        """Update mean and std of self's waveform"""
        sort = self.sort
        if len(self.sids) == 0: # no member spikes, perhaps I should be deleted?
            raise RuntimeError("n%d has no spikes and its waveform can't be updated" % self.id)
        meanwave = sort.get_mean_wave(self.sids, nid=self.id)
        # update self's Waveform object
        self.wave.data = meanwave.data
        self.wave.std = meanwave.std
        self.wave.ts = sort.twts.copy() # meanwave has no .ts, copy for clean jsonpickle
        self.wave.chans = meanwave.chans
        self.wave.tres = sort.tres # meanwave has no .tres
        return self.wave

    def __sub__(self, other):
        """Return difference array between self and other neurons' waveforms
        on common channels"""
        selfwavedata, otherwavedata = self.getCommonWaveData(other.chan, other.chans,
                                                             other.wave.data)
        return selfwavedata - otherwavedata

    def getCommonWaveData(self, otherchan, otherchans, otherwavedata):
        """Return waveform data common to self's chans and otherchans, while
        requiring that both include the other's maxchan"""
        chans = np.intersect1d(self.chans, otherchans, assume_unique=True)
        if len(chans) == 0:
            raise ValueError('No common chans')
        if self.chan not in chans or otherchan not in chans:
            raise ValueError("maxchans aren't part of common chans")
        selfchanis = self.chans.searchsorted(chans)
        otherchanis = otherchans.searchsorted(chans)
        return self.wave.data[selfchanis], otherwavedata[otherchanis]
'''
def get_stdev(self):
"""Return 2D array of stddev of each timepoint of each chan of member spikes.
Assumes self.update_wave has already been called"""
data = []
# TODO: speed this up by pre-allocating memory and then filling in the array
for spike in self.spikes:
data.append(spike.wave.data) # collect spike's data
stdev = np.asarray(data).std(axis=0)
return stdev
def get_weights(self, weighting=None, sstdev=None, tstdev=None):
"""Returns unity, spatial, temporal, or spatiotemporal Gaussian weights
for self's enabled chans in self.wave.data, given spatial and temporal
stdevs"""
nchans = len(self.wave.chans)
nt = len(self.wave.data[0]) # assume all chans have the same number of timepoints
if weighting == None:
weights = 1
elif weighting == 'spatial':
weights = self.get_gaussian_spatial_weights(sstdev) # vector
elif weighting == 'temporal':
weights = self.get_gaussian_temporal_weights(tstdev) # vector
elif weighting == 'spatiotemporal':
sweights = self.get_gaussian_spatial_weights(sstdev)
tweights = self.get_gaussian_temporal_weights(tstdev)
weights = np.outer(sweights, tweights) # matrix, outer product of the two
elif weighting == 'signal':
weights = None # this is handled by caller
#print('\nweights:\n%r' % weights)
return weights
def get_gaussian_spatial_weights(self, stdev):
"""Return a vector that weights self.chans according to a 2D gaussian
centered on self.maxchan with standard deviation stdev in um"""
g = Gaussian(mean=0, stdev=stdev)
# distances between maxchan and all enabled chans:
d = self.sort.detector.dm[self.maxchan, self.chans]
weights = g[d]
weights.shape = (-1, 1) # vertical vector with nchans rows, 1 column
return weights
def get_gaussian_temporal_weights(self, stdev):
"""Return a vector that weights timepoints in self's mean waveform
by a gaussian centered on t=0, with standard deviation stdev in us"""
g = Gaussian(mean=0, stdev=stdev)
ts = self.wave.ts # template mean timepoints relative to t=0 spike time
weights = g[ts] # horizontal vector with 1 row, nt timepoints
return weights
'''
class PTCSHeader(object):
    """
    Polytrode clustered spikes file header:

    formatversion: int64 (currently version 3)
    ndescrbytes: uint64 (nbytes, keep as multiple of 8 for nice alignment)
    descr: ndescrbytes of ASCII text
        (padded with null bytes if needed for 8 byte alignment)

    nneurons: uint64 (number of neurons)
    nspikes: uint64 (total number of spikes)
    nsamplebytes: uint64 (number of bytes per template waveform sample)
    samplerate: uint64 (Hz)

    npttypebytes: uint64 (nbytes, keep as multiple of 8 for nice alignment)
    pttype: npttypebytes of ASCII text
        (padded with null bytes if needed for 8 byte alignment)
    nptchans: uint64 (total num chans in polytrode)
    chanpos: nptchans * 2 * float64
        (array of (x, y) positions, in um, relative to top of polytrode,
         indexed by 0-based channel IDs)
    nsrcfnamebytes: uint64 (nbytes, keep as multiple of 8 for nice alignment)
    srcfname: nsrcfnamebytes of ASCII text
        (source file name, probably .srf, padded with null bytes if needed for
         8 byte alignment)
    datetime: float64
        (absolute datetime corresponding to t=0 us timestamp, stored as days since
         epoch: December 30, 1899 at 00:00)
    ndatetimestrbytes: uint64
    datetimestr: ndatetimestrbytes of ASCII text
        (human readable string representation of datetime, preferrably ISO 8601,
         padded with null bytes if needed for 8 byte alignment)
    """
    FORMATVERSION = 3 # overall .ptcs file format version, not header format version
    def __init__(self, sort, sortpath, stream, nneurons, nspikes, nsamplebytes,
                 fullfname, exportdt, user='', notes=''):
        """Collect and pre-format all header fields.

        sort/stream: source Sort and Stream objects; sortpath: dir of the .sort
        file; fullfname: output .ptcs path; exportdt: export timestamp string;
        nsamplebytes: bytes per waveform sample. All variable-length string
        fields are null-padded to 8 byte alignment via pad() at construction
        time, so write() can emit them verbatim."""
        self.sort = sort
        self.stream = stream
        self.nneurons = nneurons
        self.nspikes = nspikes
        self.nsamplebytes = nsamplebytes
        # strip the home-dir prefix so the stored name is machine-independent:
        homelessfullfname = lstrip(fullfname, os.path.expanduser('~'))
        sortfname = sort.fname
        sortfullfname = os.path.join(sortpath, sortfname)
        # modification time of the .sort file, as a string:
        sortfmoddt = str(datetime.datetime.fromtimestamp(os.path.getmtime(sortfullfname)))
        sortfmoddt = sortfmoddt.split('.')[0] # ditch the us
        sortfsize = os.path.getsize(sortfullfname) # in bytes
        # free-form descr dict, stored as its str() representation:
        d = {'file_type': '.ptcs (polytrode clustered spikes) file',
             'original_fname': homelessfullfname, 'export_time': exportdt,
             'sort': {'fname': sortfname, 'path': sortpath,
                      'fmtime': sortfmoddt, 'fsize': sortfsize},
             'user': user, 'notes': notes}
        descr = str(d)
        self.descr = pad(descr, align=8)
        self.srcfname = pad(lstrip(stream.fname, '../'), align=8)
        self.pttype = pad(stream.probe.name, align=8)
        self.dt = stream.datetime
        self.dtstr = pad(self.dt.isoformat(), align=8)

    def write(self, f):
        """Write the header to open binary file f, in exactly the field order
        documented in the class docstring (numeric fields via ndarray.tofile)."""
        s = self.sort
        np.int64(self.FORMATVERSION).tofile(f) # formatversion
        np.uint64(len(self.descr)).tofile(f) # ndescrbytes
        f.write(self.descr) # descr

        np.uint64(self.nneurons).tofile(f) # nneurons
        np.uint64(self.nspikes).tofile(f) # nspikes
        np.uint64(self.nsamplebytes).tofile(f) # nsamplebytes
        np.uint64(s.sampfreq).tofile(f) # samplerate

        np.uint64(len(self.pttype)).tofile(f) # npttypebytes
        f.write(self.pttype) # pttype
        np.uint64(s.stream.probe.nchans).tofile(f) # nptchans
        np.float64(s.stream.probe.siteloc_arr()).tofile(f) # chanpos
        np.uint64(len(self.srcfname)).tofile(f) # nsrcfnamebytes
        f.write(self.srcfname) # srcfname
        np.float64(td2days(self.dt - EPOCH)).tofile(f) # datetime (in days)
        np.uint64(len(self.dtstr)).tofile(f) # ndatetimestrbytes
        f.write(self.dtstr)
class PTCSNeuronRecord(object):
    """
    Polytrode clustered spikes file neuron record:

    nid: int64 (signed neuron id, could be -ve, could be non-contiguous with previous)
    ndescrbytes: uint64 (nbytes, keep as multiple of 8 for nice alignment, defaults to 0)
    descr: ndescrbytes of ASCII text
        (padded with null bytes if needed for 8 byte alignment)
    clusterscore: float64
    xpos: float64 (um)
    ypos: float64 (um)
    sigma: float64 (um) (Gaussian spatial sigma)
    nchans: uint64 (num chans in template waveforms)
    chanids: nchans * uint64 (0 based IDs of channels in template waveforms)
    maxchanid: uint64 (0 based ID of max channel in template waveforms)
    nt: uint64 (num timepoints per template waveform channel)
    nwavedatabytes: uint64 (nbytes, keep as multiple of 8 for nice alignment)
    wavedata: nwavedatabytes of nsamplebytes sized floats
        (template waveform data, laid out as nchans * nt, in uV,
         padded with null bytes if needed for 8 byte alignment)
    nwavestdbytes: uint64 (nbytes, keep as multiple of 8 for nice alignment)
    wavestd: nwavestdbytes of nsamplebytes sized floats
        (template waveform standard deviation, laid out as nchans * nt, in uV,
         padded with null bytes if needed for 8 byte alignment)
    nspikes: uint64 (number of spikes in this neuron)
    spike timestamps: nspikes * uint64 (us, should be sorted)
    """
    def __init__(self, neuron, spikets=None, nsamplebytes=None, descr=''):
        """Pre-format one neuron's record fields.

        neuron: the Neuron to export; spikets: its spike timestamps (us);
        nsamplebytes: 2, 4 or 8, selecting the float width of the waveform
        samples; descr: optional free-form text, null-padded to 8 bytes."""
        n = neuron
        AD2uV = n.sort.converter.AD2uV # converts raw AD values to uV
        self.neuron = neuron
        self.spikets = spikets # constrained to stream range, may be < neuron.sids
        # map sample width in bytes to the numpy float dtype used on disk:
        self.wavedtype = {2: np.float16, 4: np.float32, 8: np.float64}[nsamplebytes]
        if n.wave.data is None or n.wave.std is None: # some may have never been displayed
            n.update_wave()
        # wavedata and wavestd are nchans * nt * nsamplebytes long:
        self.wavedata = pad(self.wavedtype(AD2uV(n.wave.data)), align=8)
        self.wavestd = pad(self.wavedtype(AD2uV(n.wave.std)), align=8)
        self.descr = pad(descr, align=8)

    def write(self, f):
        """Write this neuron record to open binary file f, in exactly the field
        order documented in the class docstring (numeric fields via
        ndarray.tofile)."""
        n = self.neuron
        np.int64(n.id).tofile(f) # nid
        np.uint64(len(self.descr)).tofile(f) # ndescrbytes
        f.write(self.descr) # descr, bytes
        np.float64(np.nan).tofile(f) # clusterscore
        np.float64(n.cluster.pos['x0']).tofile(f) # xpos (um)
        np.float64(n.cluster.pos['y0']).tofile(f) # ypos (um)
        np.float64(n.cluster.pos['sx']).tofile(f) # sigma (um)
        np.uint64(len(n.wave.chans)).tofile(f) # nchans
        np.uint64(n.wave.chans).tofile(f) # chanids
        np.uint64(n.chan).tofile(f) # maxchanid
        np.uint64(len(n.wave.ts)).tofile(f) # nt
        np.uint64(self.wavedata.nbytes).tofile(f) # nwavedatabytes
        self.wavedata.tofile(f) # wavedata
        np.uint64(self.wavestd.nbytes).tofile(f) # nwavestdbytes
        self.wavestd.tofile(f) # wavestd
        np.uint64(len(self.spikets)).tofile(f) # nspikes
        np.uint64(self.spikets).tofile(f) # spike timestamps (us)
class PanelScrollArea(QtGui.QScrollArea):
    """A scroll area for the spikesortpanel"""
    def keyPressEvent(self, event):
        # ENTER/RETURN has to be intercepted here to trigger a plot directly,
        # unlike in sortwin where the event is left for the list widgets:
        if event.key() not in (Qt.Key_Enter, Qt.Key_Return):
            QtGui.QScrollArea.keyPressEvent(self, event) # pass it on
            return
        sortwin = self.topLevelWidget()
        sortwin.parent().ui.plotButton.click()
class SortWindow(SpykeToolWindow):
"""Sort window"""
    def __init__(self, parent, pos=None):
        """Build the sort window: a horizontal splitter with the neuron/spike
        lists (plus the spike-selection slider) on the left and a scrollable
        spike-sort panel on the right.

        parent: the main spyke window; pos: (x, y) window position tuple.
        NOTE(review): pos has a default of None but is immediately unpacked via
        self.move(*pos) -- presumably callers always pass a tuple; confirm."""
        SpykeToolWindow.__init__(self, parent, flags=QtCore.Qt.Tool)
        self.spykewindow = parent
        ncols = self.sort.probe.ncols
        nrows = self.sort.probe.nrows
        # try and allow the same amount of horizontal space per column for 2 and 3 col probes:
        if ncols <= 2:
            self.MAINSPLITTERPOS = 300
        else:
            self.MAINSPLITTERPOS = 265 # move it more to the left
        # make horizontal sort slider use as little vertical space as possible
        self.VSPLITTERPOS = 1
        # panel dimensions scale with the probe's channel layout:
        panelwidth = PANELWIDTHPERCOLUMN * ncols
        panelheight = PANELHEIGHTPERROW * nrows
        # leave room for the vertical scrollbar, but never go below the minimum:
        width = max(self.MAINSPLITTERPOS + panelwidth + VSCROLLBARWIDTH, MINSORTWINDOWWIDTH)
        size = (width, SORTWINDOWHEIGHT)
        self.setWindowTitle('Sort Window')
        self.move(*pos)
        self.resize(*size)
        self._source = None # source cluster for comparison
        # slider that scrubs the sliding spike-selection time window:
        self.slider = SpikeSelectionSlider(Qt.Horizontal, self)
        self.slider.setInvertedControls(True)
        self.slider.setToolTip('Position of sliding spike selection time window')
        self.connect(self.slider, QtCore.SIGNAL('valueChanged(int)'),
                     self.on_slider_valueChanged)
        self.connect(self.slider, QtCore.SIGNAL('sliderPressed()'),
                     self.on_slider_sliderPressed)
        # the three list views: neurons, sorted spikes, unsorted spikes:
        self.nlist = NList(self)
        self.nlist.setToolTip('Neuron list')
        self.nslist = NSList(self)
        self.nslist.setToolTip('Sorted spike list')
        self.uslist = USList(self) # should really be multicolumn tableview
        self.uslist.setToolTip('Unsorted spike list')
        tw = self.spykewindow.sort.tw
        # the waveform panel, wrapped in a scroll area so it can exceed the window:
        self.panel = SpikeSortPanel(self, tw=tw)
        self.panel.setMinimumSize(QtCore.QSize(panelwidth, panelheight))
        self.panelscrollarea = PanelScrollArea(self)
        self.panelscrollarea.setWidget(self.panel)
        self.panelscrollarea.setMinimumWidth(panelwidth + VSCROLLBARWIDTH)
        self.panelscrollarea.setWidgetResizable(True) # allows panel to size bigger than min
        # vertical splitter stacks slider + the three lists:
        self.vsplitter = QtGui.QSplitter(Qt.Vertical)
        self.vsplitter.addWidget(self.slider)
        self.vsplitter.addWidget(self.nlist)
        self.vsplitter.addWidget(self.nslist)
        self.vsplitter.addWidget(self.uslist)
        # main splitter: lists on the left, panel scroll area on the right:
        self.mainsplitter = QtGui.QSplitter(Qt.Horizontal)
        self.mainsplitter.addWidget(self.vsplitter)
        self.mainsplitter.addWidget(self.panelscrollarea)
        self.layout = QtGui.QVBoxLayout()
        self.layout.setContentsMargins(0, 0, 0, 0)
        self.layout.addWidget(self.mainsplitter)
        mainwidget = QtGui.QWidget(self)
        mainwidget.setLayout(self.layout)
        self.setCentralWidget(mainwidget)
        self.toolbar = self.setupToolbar()
        self.addToolBar(self.toolbar)
    def setupToolbar(self):
        """Build and return the Sort window's toolbar.

        Wires up all cluster/spike editing actions (delete, merge, good-flag,
        split, renumber, find, align, shift, reload, save) and the widgets
        controlling random sample size, waveform gain, included time range
        (inclt) and number of PCs per channel. Each action's handler is an
        on_action*_triggered method on this window."""
        toolbar = QtGui.QToolBar(self)
        toolbar.setObjectName('toolbar')
        toolbar.setFloatable(True)
        toolbar.setIconSize(QtCore.QSize(16, 16)) # like in main spyke window
        # --- deletion ---
        actionDelete = QAction(QIcon('res/edit-delete.svg'), 'Del', self)
        tt = ('<nobr><b>Del</b> Delete selected spikes or clusters</nobr>\n'
              '<nobr><b>CTRL+Del</b> Delete selected spikes</nobr>')
        actionDelete.setToolTip(tt)
        self.connect(actionDelete, QtCore.SIGNAL('triggered()'),
                     self.on_actionDelete_triggered)
        toolbar.addAction(actionDelete)
        # --- cluster editing: merge, good flag, split, multiunit labelling ---
        actionMergeClusters = QAction('M', self)
        tt = '<nobr><b>M</b> Merge clusters</nobr>'
        actionMergeClusters.setToolTip(tt)
        self.connect(actionMergeClusters, QtCore.SIGNAL('triggered()'),
                     self.on_actionMergeClusters_triggered)
        toolbar.addAction(actionMergeClusters)
        #actionToggleClustersGood = QAction(QIcon('res/dialog-apply.svg'), 'G', self)
        actionToggleClustersGood = QAction('G', self)
        tt = '<nobr><b>G</b> Toggle clusters as "good"</nobr>'
        actionToggleClustersGood.setToolTip(tt)
        self.connect(actionToggleClustersGood, QtCore.SIGNAL('triggered()'),
                     self.on_actionToggleClustersGood_triggered)
        toolbar.addAction(actionToggleClustersGood)
        actionSplit = QAction('+', self)
        tt = '<nobr><b>+</b> Split off selected spikes</nobr>'
        actionSplit.setToolTip(tt)
        self.connect(actionSplit, QtCore.SIGNAL('triggered()'),
                     self.on_actionSplit_triggered)
        toolbar.addAction(actionSplit)
        actionLabelMultiunit = QAction('-', self)
        tt = '<nobr><b>-</b> Label clusters as multiunit</nobr>'
        actionLabelMultiunit.setToolTip(tt)
        self.connect(actionLabelMultiunit, QtCore.SIGNAL('triggered()'),
                     self.on_actionLabelMultiunit_triggered)
        toolbar.addAction(actionLabelMultiunit)
        # --- cluster splitting variants ---
        actionChanSplitClusters = QAction('/', self)
        tt = '<nobr><b>/</b> Split clusters by channels</nobr>'
        actionChanSplitClusters.setToolTip(tt)
        self.connect(actionChanSplitClusters, QtCore.SIGNAL('triggered()'),
                     self.on_actionChanSplitClusters_triggered)
        toolbar.addAction(actionChanSplitClusters)
        actionDensitySplit = QAction('P', self)
        tt = ('<nobr><b>P</b> Split cluster pair by density along line between '
              'their centers</nobr>')
        actionDensitySplit.setToolTip(tt)
        self.connect(actionDensitySplit, QtCore.SIGNAL('triggered()'),
                     self.on_actionDensitySplit_triggered)
        toolbar.addAction(actionDensitySplit)
        actionRandomSplit = QAction('\\', self)
        tt = ('<nobr><b>\\</b> Randomly split each selected cluster in half</nobr>')
        actionRandomSplit.setToolTip(tt)
        self.connect(actionRandomSplit, QtCore.SIGNAL('triggered()'),
                     self.on_actionRandomSplit_triggered)
        toolbar.addAction(actionRandomSplit)
        # --- renumbering and finding ---
        #actionRenumber = QAction(QIcon('res/gtk-edit.svg'), '#', self)
        actionRenumber = QAction('#', self)
        tt = ('<nobr><b>#</b> Renumber all clusters in vertical spatial order</nobr>\n'
              '<nobr><b>CTRL+#</b> Renumber selected cluster</nobr>')
        actionRenumber.setToolTip(tt)
        self.connect(actionRenumber, QtCore.SIGNAL('triggered()'),
                     self.on_actionRenumber_triggered)
        toolbar.addAction(actionRenumber)
        actionFind = QAction(QIcon('res/edit-find.svg'), 'Find', self)
        tt = ('<nobr><b>CTRL+F</b> Find spike in cluster plot</nobr>')
        actionFind.setToolTip(tt)
        self.connect(actionFind, QtCore.SIGNAL('triggered()'),
                     self.on_actionFind_triggered)
        toolbar.addAction(actionFind)
        # --- spike selection / display controls ---
        actionSelectRandomSpikes = QAction('R', self)
        tt = '<nobr><b>R</b> Select random sample of spikes of current clusters</nobr>'
        actionSelectRandomSpikes.setToolTip(tt)
        self.connect(actionSelectRandomSpikes, QtCore.SIGNAL('triggered()'),
                     self.on_actionSelectRandomSpikes_triggered)
        toolbar.addAction(actionSelectRandomSpikes)
        actionToggleErrors = QAction('E', self)
        actionToggleErrors.setCheckable(True)
        # initial checked state mirrors the panel's current fill visibility
        actionToggleErrors.setChecked(self.panel.enable_fills)
        tt = '<nobr><b>CTRL+E</b> Toggle visibility of template error limits</nobr>'
        actionToggleErrors.setToolTip(tt)
        self.connect(actionToggleErrors, QtCore.SIGNAL('toggled(bool)'),
                     self.on_actionToggleErrors_toggled)
        toolbar.addAction(actionToggleErrors)
        self.actionToggleErrors = actionToggleErrors
        # number of spikes to randomly sample per cluster
        nsamplesComboBox = QtGui.QComboBox(self)
        nsamplesComboBox.setToolTip('Number of spikes per cluster to randomly select')
        nsamplesComboBox.setFocusPolicy(Qt.NoFocus)
        nsamplesComboBox.addItems(['100', '50', '20', '10', '5', '1'])
        nsamplesComboBox.setCurrentIndex(2)
        toolbar.addWidget(nsamplesComboBox)
        self.connect(nsamplesComboBox, QtCore.SIGNAL('activated(int)'),
                     self.on_actionSelectRandomSpikes_triggered)
        self.nsamplesComboBox = nsamplesComboBox
        # waveform display gain
        gainComboBox = QtGui.QComboBox(self)
        gainComboBox.setToolTip('Waveform gain (default: 1.5)')
        gainComboBox.setFocusPolicy(Qt.NoFocus)
        gainComboBox.addItems(['4', '3.75', '3.5', '3.25', '3', '2.75', '2.5', '2.25', '2',
                               '1.75', '1.5', '1.25', '1', '0.75', '0.5', '0.25'])
        gainComboBox.setCurrentIndex(3)
        toolbar.addWidget(gainComboBox)
        self.connect(gainComboBox, QtCore.SIGNAL('activated(int)'),
                     self.on_gainComboBox_triggered)
        self.gainComboBox = gainComboBox
        # --- spike alignment ---
        #actionAlignMin = QAction(QIcon('res/go-bottom.svg'), 'Min', self)
        actionAlignMin = QAction('Min', self)
        actionAlignMin.setToolTip('Align selected spikes to min')
        self.connect(actionAlignMin, QtCore.SIGNAL('triggered()'),
                     self.on_actionAlignMin_triggered)
        toolbar.addAction(actionAlignMin)
        #actionAlignMax = QAction(QIcon('res/go-top.svg'), 'Max', self)
        actionAlignMax = QAction('Max', self)
        actionAlignMax.setToolTip('Align selected spikes to max')
        self.connect(actionAlignMax, QtCore.SIGNAL('triggered()'),
                     self.on_actionAlignMax_triggered)
        toolbar.addAction(actionAlignMax)
        #actionAlignBest = QAction(QIcon('res/emblem-OK.png'), 'Best', self)
        actionAlignBest = QAction('B', self)
        tt = '<nobr><b>B</b> Align selected spikes by best fit</nobr>'
        actionAlignBest.setToolTip(tt)
        self.connect(actionAlignBest, QtCore.SIGNAL('triggered()'),
                     self.on_actionAlignBest_triggered)
        toolbar.addAction(actionAlignBest)
        actionShiftLeft = QAction('[', self)
        tt = ('<nobr><b>[</b> Shift selected spikes 2 points left</nobr>\n'
              '<nobr><b>CTRL+[</b> Shift selected spikes 1 point left</nobr>')
        actionShiftLeft.setToolTip(tt)
        self.connect(actionShiftLeft, QtCore.SIGNAL('triggered()'),
                     self.on_actionShiftLeft_triggered)
        toolbar.addAction(actionShiftLeft)
        actionShiftRight = QAction(']', self)
        tt = ('<nobr><b>]</b> Shift selected spikes 2 points right</nobr>\n'
              '<nobr><b>CTRL+]</b> Shift selected spikes 1 point right</nobr>')
        actionShiftRight.setToolTip(tt)
        self.connect(actionShiftRight, QtCore.SIGNAL('triggered()'),
                     self.on_actionShiftRight_triggered)
        toolbar.addAction(actionShiftRight)
        # --- component analysis controls ---
        incltComboBox = QtGui.QComboBox(self)
        incltComboBox.setToolTip("Waveform duration (us) to include for component "
                                 "analysis,\nasymmetric around spike time")
        incltComboBox.setFocusPolicy(Qt.NoFocus)
        dtw = self.sort.tw[1] - self.sort.tw[0] # spike time window width
        incltstep = intround(dtw / 10) # evenly spaced inclt values
        incltvals = np.arange(dtw, 0, -incltstep)
        incltComboBox.addItems([ str(incltval) for incltval in incltvals ])
        incltComboBox.setCurrentIndex(0)
        toolbar.addWidget(incltComboBox)
        self.connect(incltComboBox, QtCore.SIGNAL('activated(int)'),
                     self.on_incltComboBox_triggered)
        self.incltComboBox = incltComboBox
        #incltunitsLabel = QtGui.QLabel('us', self)
        #toolbar.addWidget(incltunitsLabel)
        nPCsPerChanSpinBox = QtGui.QSpinBox(self)
        nPCsPerChanSpinBox.setToolTip("Number of PCs to use per channel to feed into ICA")
        nPCsPerChanSpinBox.setFocusPolicy(Qt.NoFocus)
        toolbar.addWidget(nPCsPerChanSpinBox)
        nPCsPerChanSpinBox.setMinimum(1)
        self.connect(nPCsPerChanSpinBox, QtCore.SIGNAL('valueChanged(int)'),
                     self.on_nPCsPerChanSpinBox_valueChanged)
        nPCsPerChanSpinBox.setValue(self.sort.npcsperchan)
        self.nPCsPerChanSpinBox = nPCsPerChanSpinBox
        # --- cluster similarity navigation ---
        #actionFindPrevMostSimilar = QAction(QIcon('res/go-previous.svg'), '<', self)
        actionFindPrevMostSimilar = QAction('<', self)
        tt = '<nobr><b><</b> Find previous most similar cluster</nobr>'
        actionFindPrevMostSimilar.setToolTip(tt)
        self.connect(actionFindPrevMostSimilar, QtCore.SIGNAL('triggered()'),
                     self.on_actionFindPrevMostSimilar_triggered)
        toolbar.addAction(actionFindPrevMostSimilar)
        #actionFindNextMostSimilar = QAction(QIcon('res/go-next.svg'), '>', self)
        actionFindNextMostSimilar = QAction('>', self)
        tt = '<nobr><b>></b> Find next most similar cluster</nobr>'
        actionFindNextMostSimilar.setToolTip(tt)
        self.connect(actionFindNextMostSimilar, QtCore.SIGNAL('triggered()'),
                     self.on_actionFindNextMostSimilar_triggered)
        toolbar.addAction(actionFindNextMostSimilar)
        # --- reload and save ---
        actionReloadSpikes = QAction(QIcon('res/view-refresh.svg'), 'Reload', self)
        tt = ('<nobr><b>F5</b> Reload waveforms of selected spikes. '
              'If none selected, reload all</nobr>\n'
              '<nobr><b>CTRL+F5</b> Use mean waveform to choose chans to reload</nobr>')
        actionReloadSpikes.setToolTip(tt)
        self.connect(actionReloadSpikes, QtCore.SIGNAL('triggered()'),
                     self.on_actionReloadSpikes_triggered)
        toolbar.addAction(actionReloadSpikes)
        actionSave = QAction(QIcon('res/document-save.svg'), '&Save', self)
        actionSave.setToolTip('Save sort panel to file')
        self.connect(actionSave, QtCore.SIGNAL('triggered()'),
                     self.on_actionSave_triggered)
        toolbar.addAction(actionSave)
        return toolbar
    def get_sort(self):
        """Return the parent spyke window's current Sort object"""
        return self.spykewindow.sort

    sort = property(get_sort) # make this a property for proper behaviour after unpickling
    def closeEvent(self, event):
        """On window close, let the main spyke window hide the Sort window
        rather than handling the close locally"""
        self.spykewindow.HideWindow('Sort')
def mousePressEvent(self, event):
"""These are mostly passed on up from spyke list views and sort panel. Left
clicks are (or should be) filtered out"""
buttons = event.buttons()
if buttons == QtCore.Qt.MiddleButton:
#self.on_actionSelectRandomSpikes_triggered()
self.spykewindow.ui.plotButton.click() # same as hitting ENTER in nslist
elif buttons == QtCore.Qt.RightButton:
self.clear()
    def keyPressEvent(self, event):
        """Alpha character keypresses are by default caught by the child lists for quickly
        scrolling down to and selecting list items. However, the appropriate alpha
        keypresses have been set in the child lists to be ignored, so they propagate
        up to here.

        Dispatches each shortcut key to the matching toolbar action handler or
        spyke window control; anything unhandled is passed up to
        SpykeToolWindow.keyPressEvent."""
        key = event.key()
        modifiers = event.modifiers()
        ctrl = modifiers & Qt.ControlModifier # ctrl is down
        spw = self.spykewindow
        if key == Qt.Key_A: # ignored in SpykeListViews
            spw.ui.plotButton.click() # same as hitting ENTER in nslist
        elif key == Qt.Key_X: # ignored in SpykeListViews
            spw.ui.plotXcorrsButton.click()
        elif key == Qt.Key_N: # ignored in SpykeListViews
            spw.ui.normButton.click()
        elif key == Qt.Key_Escape: # deselect all spikes and all clusters
            self.clear()
        elif key == Qt.Key_Delete:
            self.on_actionDelete_triggered()
        elif key == Qt.Key_M: # ignored in SpykeListViews
            self.on_actionMergeClusters_triggered()
        elif key == Qt.Key_G: # ignored in SpykeListViews
            self.on_actionToggleClustersGood_triggered()
        elif key == Qt.Key_Equal: # ignored in SpykeListViews
            self.on_actionSplit_triggered()
        elif key == Qt.Key_Minus: # ignored in SpykeListViews
            self.on_actionLabelMultiunit_triggered()
        elif key == Qt.Key_Slash: # ignored in SpykeListViews
            self.on_actionChanSplitClusters_triggered()
        elif key == Qt.Key_P: # ignored in SpykeListViews
            self.on_actionDensitySplit_triggered()
        elif key == Qt.Key_Backslash: # ignored in SpykeListViews
            self.on_actionRandomSplit_triggered()
        elif key == Qt.Key_NumberSign: # ignored in SpykeListViews
            self.on_actionRenumber_triggered()
        elif key == Qt.Key_F: # ignored in SpykeListViews
            # CTRL+F finds a spike, plain F finds a cluster
            if ctrl:
                self.FindSpike()
            else:
                self.FindCluster()
        elif key == Qt.Key_R: # ignored in SpykeListViews
            self.on_actionSelectRandomSpikes_triggered()
        elif key == Qt.Key_Space: # ignored in SpykeListViews
            if ctrl:
                SpykeToolWindow.keyPressEvent(self, event) # pass it on
            else:
                spw.on_clusterButton_clicked()
        elif key == Qt.Key_B: # ignored in SpykeListViews
            self.on_actionAlignBest_triggered()
        elif key == Qt.Key_BracketLeft: # ignored in SpykeListViews
            self.on_actionShiftLeft_triggered()
        elif key == Qt.Key_BracketRight: # ignored in SpykeListViews
            self.on_actionShiftRight_triggered()
        elif key == Qt.Key_Comma: # ignored in SpykeListViews
            self.on_actionFindPrevMostSimilar_triggered()
        elif key == Qt.Key_Period: # ignored in SpykeListViews
            self.on_actionFindNextMostSimilar_triggered()
        elif key == Qt.Key_F5: # ignored in SpykeListViews
            self.on_actionReloadSpikes_triggered()
        elif key == Qt.Key_E: # ignored in SpykeListViews
            if ctrl:
                self.actionToggleErrors.toggle()
            else:
                self.clear() # E is synonymous with ESC
        elif key == Qt.Key_C: # toggle between PCA and ICA, ignored in SpykeListViews
            c = str(spw.ui.componentAnalysisComboBox.currentText())
            if c == 'PCA':
                index = spw.ui.componentAnalysisComboBox.findText('ICA')
                spw.ui.componentAnalysisComboBox.setCurrentIndex(index)
            elif c == 'ICA':
                index = spw.ui.componentAnalysisComboBox.findText('PCA')
                spw.ui.componentAnalysisComboBox.setCurrentIndex(index)
            spw.on_plotButton_clicked()
        elif key == Qt.Key_T: # toggle plotting against time, ignored in SpykeListViews
            z = str(spw.ui.zDimComboBox.currentText())
            if z == 't':
                spw.on_c0c1c2Button_clicked() # plot in pure component analysis space
            else:
                spw.on_c0c1tButton_clicked() # plot against time
        elif key == Qt.Key_W: # toggle plotting against RMSError, ignored in SpykeListViews
            z = str(spw.ui.zDimComboBox.currentText())
            if z == 'RMSerror':
                spw.on_c0c1c2Button_clicked() # plot in pure component analysis space
            else:
                spw.ui.zDimComboBox.setCurrentIndex(3)
                spw.on_plotButton_clicked() # plot against RMSError
        elif key in [Qt.Key_Enter, Qt.Key_Return]:
            # this is handled at a lower level by on_actionItem_triggered
            # in the various listview controls
            pass
        else:
            SpykeToolWindow.keyPressEvent(self, event) # pass it on
    def clear(self):
        """Clear selections in this order: unsorted spikes, sorted spikes,
        cluster automatically selected for comparison, cluster 0, clusters.

        Only the first applicable level is cleared per call, so repeated
        ESC presses peel away selections one level at a time."""
        spw = self.spykewindow
        clusters = spw.GetClusters()
        if len(self.uslist.selectedIndexes()) > 0: # 1) unsorted spikes
            self.uslist.clearSelection()
        elif self.nslist.nrowsSelected > 0: # 2) sorted spikes
            self.nslist.clearSelection()
        elif len(clusters) == 2 and self._source in clusters:
            # 3) deselect the comparison destination, keep the source selected
            clusters.remove(self._source)
            spw.SelectClusters(clusters, on=False)
        elif 0 in spw.GetClusterIDs(): # 4) junk cluster 0
            for cluster in spw.GetClusters():
                if cluster.id == 0:
                    spw.SelectClusters([cluster], on=False)
                    break
        else: # 5) all remaining selected clusters
            self.nlist.clearSelection()
        # reset colours in cluster plot:
        gw = spw.windows['Cluster'].glWidget
        gw.colour()
        gw.updateGL()
def on_actionDelete_triggered(self):
"""Delete explicity selected spikes, or clusters"""
selsids = self.spykewindow.GetSpikes() # IDs of explicitly selected spikes
nselsids = len(selsids)
if (QApplication.instance().keyboardModifiers() & Qt.ControlModifier
or nselsids > 0):
self.delete_spikes()
else:
self.delete_clusters()
    def delete_clusters(self):
        """Del button press/click: delete all currently selected clusters,
        recording the change on the undo/redo stack"""
        spw = self.spykewindow
        clusters = spw.GetClusters()
        s = self.sort
        spikes = s.spikes
        # collect spike ids of all clusters to be deleted
        sids = []
        for cluster in clusters:
            sids.append(cluster.neuron.sids)
        sids = np.concatenate(sids)
        # save some undo/redo stuff
        message = 'delete clusters %r' % [ c.id for c in clusters ]
        cc = ClusterChange(sids, spikes, message)
        cc.save_old(clusters, s.norder, s.good)
        # deselect and delete clusters
        spw.DelClusters(clusters)
        if len(s.clusters) > 0:
            # select cluster that replaces the first of the deleted clusters in norder
            selrows = [ cc.oldnorder.index(oldunid) for oldunid in cc.oldunids ]
            if len(selrows) > 0:
                selrow = selrows[0]
                nlist = spw.windows['Sort'].nlist
                nlist.selectRows(selrow) # TODO: this sets selection, but not focus
            #else: # first of deleted clusters was last in norder, don't select anything
        # save more undo/redo stuff; no new clusters result from a deletion
        newclusters = []
        cc.save_new(newclusters, s.norder, s.good)
        spw.AddClusterChangeToStack(cc)
        print(cc.message)
    def delete_spikes(self):
        """CTRL+Del button press/click"""
        # delete=True: selected spikes are deleted rather than split off
        self.spykewindow.SplitSpikes(delete=True)
    def on_actionSplit_triggered(self):
        """+ button click. Split off selected spikes into their own cluster"""
        # delete=False: selected spikes are split off into a new cluster
        self.spykewindow.SplitSpikes(delete=False)
def on_actionMergeClusters_triggered(self):
"""Merge button (M) click. Merge selected clusters. Easier to use than
running gac() on selected clusters using a really big sigma to force
them to all merge"""
spw = self.spykewindow
clusters = spw.GetClusters()
s = self.sort
spikes = s.spikes
sids = [] # spikes to merge
for cluster in clusters:
sids.append(cluster.neuron.sids)
# merge any selected usids as well
sids.append(spw.GetUnsortedSpikes())
sids = np.concatenate(sids)
if len(sids) == 0:
return
# save some undo/redo stuff
message = 'merge clusters %r' % [ c.id for c in clusters ]
cc = ClusterChange(sids, spikes, message)
cc.save_old(clusters, s.norder, s.good)
# decide on newnid and where to insert it into norder
newnid = None # merge by default into a new highest numbered nid
inserti = None # order new cluster by default to end of nlist
if len(clusters) == 1:
# keep same position of this one nid in norder, regardless of whether it's
# single-unit, multiunit, or junk
inserti = s.norder.index(clusters[0].id)
elif len(clusters) > 1:
oldunids = np.asarray(cc.oldunids)
suids = oldunids[oldunids > 0] # selected single unit nids
if len(suids) > 0: # merge into largest selected single unit nid:
spikecounts = np.asarray([ s.neurons[suid].nspikes for suid in suids ])
newnid = suids[spikecounts.argmax()]
inserti = s.norder.index(newnid)
# correct for shift due to deletion of oldunids that precede newnid in norder:
inserti -= sum([ s.norder.index(oldunid) < inserti for oldunid in oldunids])
# delete selected clusters and deselect selected usids
spw.DelClusters(clusters, update=False)
self.uslist.clearSelection()
# create new cluster
#t0 = time.time()
newcluster = spw.CreateCluster(update=False, id=newnid, inserti=inserti)
neuron = newcluster.neuron
self.MoveSpikes2Neuron(sids, neuron, update=False)
plotdims = spw.GetClusterPlotDims()
newcluster.update_pos()
# save more undo/redo stuff
cc.save_new([newcluster], s.norder, s.good)
spw.AddClusterChangeToStack(cc)
# now do some final updates
spw.UpdateClustersGUI()
spw.ColourPoints(newcluster)
#print('applying clusters to plot took %.3f sec' % (time.time()-t0))
# select newly created cluster
spw.SelectClusters(newcluster)
cc.message += ' into cluster %d' % newcluster.id
print(cc.message)
def on_actionToggleClustersGood_triggered(self):
"""'Good' button (G) click. Toggle 'good' flag of all selected clusters"""
spw = self.spykewindow
clusters = spw.GetClusters()
cids = []
for cluster in clusters:
cluster.neuron.good = not cluster.neuron.good
cids.append(cluster.id)
self.nlist.updateAll() # nlist item colouring will change as a result
print("Toggled 'good' flag of clusters %r" % cids)
    def on_actionLabelMultiunit_triggered(self):
        """- button click. Label all selected clusters as multiunit by deleting them
        and creating new ones with -ve IDs. The change is recorded on the
        undo/redo stack."""
        spw = self.spykewindow
        clusters = spw.GetClusters()
        s = self.sort
        spikes = s.spikes
        # only relabel single unit clusters:
        clusters = [ cluster for cluster in clusters if cluster.id > 0 ]
        if len(clusters) == 0:
            return
        sids = []
        for cluster in clusters:
            sids.append(cluster.neuron.sids)
        sids = np.concatenate(sids)
        # save some undo/redo stuff
        message = 'label as multiunit clusters %r' % [ c.id for c in clusters ]
        cc = ClusterChange(sids, spikes, message)
        cc.save_old(clusters, s.norder, s.good)
        # delete old clusters; new ones are inserted starting where the first
        # old cluster sat in norder
        inserti = s.norder.index(clusters[0].id)
        # collect cluster sids before cluster deletion
        sidss = [ cluster.neuron.sids for cluster in clusters ]
        spw.DelClusters(clusters, update=False)
        # create new multiunit clusters, one per old cluster
        newclusters = []
        for sids in sidss:
            muid = s.get_nextmuid() # next available -ve multiunit ID
            newcluster = spw.CreateCluster(update=False, id=muid, inserti=inserti)
            neuron = newcluster.neuron
            self.MoveSpikes2Neuron(sids, neuron, update=False)
            newcluster.update_pos()
            newclusters.append(newcluster)
            inserti += 1
        # select newly labelled multiunit clusters
        spw.SelectClusters(newclusters)
        # save more undo/redo stuff
        cc.save_new(newclusters, s.norder, s.good)
        spw.AddClusterChangeToStack(cc)
        print(cc.message)
    def on_actionChanSplitClusters_triggered(self):
        """Split by channels button (/) click. Delegates to the main window's
        max-channel split implementation."""
        ## TODO: make sure this works on .srf files! Why was chancombosplit being used?
        self.spykewindow.maxchansplit()
        #self.spykewindow.chancombosplit()
    def on_actionDensitySplit_triggered(self):
        """Split cluster pair by density along line between their centers"""
        self.spykewindow.densitysplit()
    def on_actionRandomSplit_triggered(self):
        """Randomly split each selected cluster in half"""
        self.spykewindow.randomsplit()
def on_actionRenumber_triggered(self):
if QApplication.instance().keyboardModifiers() & Qt.ControlModifier:
self.renumber_selected_cluster()
else:
self.renumber_all_clusters()
    def renumber_selected_cluster(self):
        """Renumber a single selected cluster to whatever free ID the user wants, for
        colouring purposes.

        Prompts with the next free ID as default, refuses existing IDs, and
        clears the undo/redo stack because renumbering is not undoable."""
        spw = self.spykewindow
        s = self.sort
        spikes = s.spikes
        cluster = spw.GetCluster() # exactly one selected cluster
        oldid = cluster.id
        newid = max(s.norder) + 1 # default suggestion: next free ID
        newid, ok = QtGui.QInputDialog.getInt(self, "Renumber cluster",
                    "This will clear the undo/redo stack, and is not undoable.\n"
                    "Enter new ID:", value=newid)
        if not ok:
            return
        if newid in s.norder:
            print("Choose a non-existing nid to renumber to")
            return
        # deselect cluster
        spw.SelectClusters(cluster, on=False)
        # rename to newid
        cluster.id = newid # this indirectly updates neuron.id
        # update cluster and neuron dicts, and spikes array
        s.clusters[newid] = cluster
        s.neurons[newid] = cluster.neuron
        sids = cluster.neuron.sids
        spikes['nid'][sids] = newid
        # remove duplicate oldid dict entries
        del s.clusters[oldid]
        del s.neurons[oldid]
        # replace oldid with newid in norder
        s.norder[s.norder.index(oldid)] = newid
        # update colour of any relevant points in cluster plot
        spw.ColourPoints(cluster)
        # reselect cluster
        spw.SelectClusters(cluster)
        # some cluster changes in stack may no longer be applicable, reset cchanges
        del spw.cchanges[:]
        spw.cci = -1
        print('Renumbered neuron %d to %d' % (oldid, newid))
def renumber_all_clusters(self):
"""Renumber single unit clusters consecutively from 1, ordered by y position. Do the
same for multiunit (-ve number) clusters, starting from -1. Sorting by y position
makes user inspection of clusters more orderly, makes the presence of duplicate
clusters more obvious, and allows for maximal spatial separation between clusters of
the same colour, reducing colour conflicts"""
val = QtGui.QMessageBox.question(self.panel, "Renumber all clusters",
"Are you sure? This will clear the undo/redo stack, and is not undoable.",
QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
if val == QtGui.QMessageBox.No:
return
spw = self.spykewindow
s = self.sort
spikes = s.spikes
# get spatially and numerically ordered lists of new ids
oldids = np.asarray(s.norder)
oldsuids = oldids[oldids > 0]
oldmuids = oldids[oldids < 0]
# this is a bit confusing: find indices that would sort old ids by y pos, but then
# what you really want is to find the y pos *rank* of each old id, so you need to
# take argsort again:
newsuids = np.asarray([ s.clusters[cid].pos['y0']
for cid in oldsuids ]).argsort().argsort() + 1
newmuids = np.asarray([ s.clusters[cid].pos['y0']
for cid in oldmuids ]).argsort().argsort() + 1
newmuids = -newmuids
# multiunit, followed by single unit, no 0 junk cluster. Can't seem to do it the other
# way around as of Qt 4.7.2 - it seems QListViews don't like having a -ve value in
# the last entry. Doing so causes all 2 digit values in the list to become blank,
# suggests a spacing calculation bug. Reproduce by making last entry multiunit,
# undoing then redoing. Actually, maybe the bug is it doesn't like having a number
# in the last entry with fewer digits than the preceding entry. Only seems to be a
# problem when setting self.setUniformItemSizes(True).
newids = np.concatenate([newmuids, newsuids])
# test
if np.all(oldids == newids):
print('Nothing to renumber: cluster IDs already ordered in y0 and contiguous')
return
# update for replacing oldids with newids
oldids = np.concatenate([oldmuids, oldsuids])
# deselect current selections
selclusters = spw.GetClusters()
oldselids = [ cluster.id for cluster in selclusters ]
spw.SelectClusters(selclusters, on=False)
# delete junk cluster, if it exists
if 0 in s.clusters:
s.remove_neuron(0)
print('Deleted junk cluster 0')
if 0 in oldselids:
oldselids.remove(0)
# replace old ids with new ids
cw = spw.windows['Cluster']
oldclusters = s.clusters.copy() # no need to deepcopy, just copy refs, not clusters
dims = spw.GetClusterPlotDims()
for oldid, newid in zip(oldids, newids):
newid = int(newid) # keep as Python int, not numpy int
if oldid == newid:
continue # no need to waste time removing and recreating this cluster
# change all occurences of oldid to newid
cluster = oldclusters[oldid]
cluster.id = newid # this indirectly updates neuron.id
# update cluster and neuron dicts
s.clusters[newid] = cluster
s.neurons[newid] = cluster.neuron
sids = cluster.neuron.sids
spikes['nid'][sids] = newid
# remove any orphaned cluster ids
for oldid in oldids:
if oldid not in newids:
del s.clusters[oldid]
del s.neurons[oldid]
# reset norder
s.norder = []
s.norder.extend(sorted([ int(newid) for newid in newmuids ])[::-1])
s.norder.extend(sorted([ int(newid) for newid in newsuids ]))
# now do some final updates
spw.UpdateClustersGUI()
spw.ColourPoints(s.clusters.values())
# reselect the previously selected (but now renumbered) clusters,
# helps user keep track
oldiis = [ list(oldids).index(oldselid) for oldselid in oldselids ]
newselids = newids[oldiis]
spw.SelectClusters([s.clusters[cid] for cid in newselids])
# all cluster changes in stack are no longer applicable, reset cchanges
del spw.cchanges[:]
spw.cci = -1
print('Renumbering complete')
def on_actionFind_triggered(self):
"""Find current cluster or spike"""
ctrl = QApplication.instance().keyboardModifiers() & Qt.ControlModifier
if ctrl:
self.FindSpike()
else:
self.FindCluster()
def FindCluster(self):
"""Move focus to location of currently selected (single) cluster"""
spw = self.spykewindow
try:
cluster = spw.GetCluster()
except RuntimeError as err:
print(err)
return
gw = spw.windows['Cluster'].glWidget
dims = spw.GetClusterPlotDims()
gw.focus = np.float32([ cluster.normpos[dim] for dim in dims ])
gw.panTo() # pan to new focus
gw.updateGL()
    def FindSpike(self):
        """Move focus to location of currently selected (single) spike"""
        spw = self.spykewindow
        try:
            sid = spw.GetSpike()
        except RuntimeError as err:
            print(err)
            return
        gw = spw.windows['Cluster'].glWidget
        # NOTE(review): searchsorted returns an insertion index, so this assumes
        # sid is actually present in gw.sids — confirm, otherwise the nearest
        # larger spike's point would be focused instead
        pointis = gw.sids.searchsorted(sid)
        gw.focus = gw.points[pointis]
        gw.panTo() # pan to new focus
        gw.updateGL()
def on_actionSelectRandomSpikes_triggered(self):
"""Select random sample of spikes in current cluster(s), or random sample
of unsorted spikes if no cluster(S) selected"""
nsamples = int(self.nsamplesComboBox.currentText())
if len(self.nslist.neurons) > 0:
slist = self.nslist
else:
slist = self.uslist
slist.clearSelection() # emits selectionChanged signal, .reset() doesn't
slist.selectRandom(nsamples)
    def on_gainComboBox_triggered(self):
        """Set gain of panel based on gainComboBox selection, then rebuild the
        panel layout and redraw everything at the new gain"""
        panel = self.panel
        panel.gain = float(self.gainComboBox.currentText())
        panel.do_layout() # resets axes lims and recalcs panel.pos
        panel._update_scale()
        panel.draw_refs()
        panel.updateAllItems()
    def on_actionAlignMin_triggered(self):
        # align selected spikes to their minimum
        self.Align('min')
    def on_actionAlignMax_triggered(self):
        # align selected spikes to their maximum
        self.Align('max')
    def on_actionAlignBest_triggered(self):
        # align selected spikes by best fit
        self.Align('best')
def on_actionShiftLeft_triggered(self):
if QApplication.instance().keyboardModifiers() & Qt.ControlModifier:
nt = -1
else:
nt = -2
self.Shift(nt)
def on_actionShiftRight_triggered(self):
if QApplication.instance().keyboardModifiers() & Qt.ControlModifier:
nt = 1
else:
nt = 2
self.Shift(nt)
    def on_incltComboBox_triggered(self):
        """Change length of chan selection lines, optionally trigger cluster replot"""
        self.panel.update_selvrefs()
        self.panel.draw_refs()
        #self.spykewindow.ui.plotButton.click()
    def get_inclt(self):
        """Return inclt value in incltComboBox: the waveform duration (us) to
        include for component analysis"""
        return float(self.incltComboBox.currentText()) # us
    inclt = property(get_inclt)
    def get_tis(self):
        """Return tis (start and end timepoint indices) of duration inclt, asymmetric around
        t=0 spike time. Note that any changes to the code here should also be made in the
        timepoint selection display code in SortPanel.update_selvrefs()"""
        s = self.sort
        inclt = self.inclt # duration to include, asymmetric around t=0 spike time (us)
        tw = self.panel.tw
        dtw = tw[1] - tw[0] # spike time window width
        # split inclt around t=0 in the same proportion as the panel's time window:
        left = intround(abs(tw[0]) / dtw * inclt) # left fraction wrt t=0 spike time
        right = inclt - left # right fraction wrt t=0 spike time
        tis = s.twts.searchsorted([-left, right])
        return tis
    tis = property(get_tis)
    def on_nPCsPerChanSpinBox_valueChanged(self, val):
        # propagate the new number of PCs per channel to the sort session
        self.sort.npcsperchan = val
def on_actionReloadSpikes_triggered(self):
spw = self.spykewindow
sids = spw.GetAllSpikes()
sort = self.sort
if len(sids) == 0:
# if no spikes specified, reload all spikes
sids = sort.spikes['id']
usemeanchans = False
if QApplication.instance().keyboardModifiers() & Qt.ControlModifier:
usemeanchans = True
sort.reload_spikes_and_templates(sids, usemeanchans=usemeanchans)
# add sids to the set of dirtysids to be resaved to .wave file:
spw.update_dirtysids(sids)
# auto-refresh all plots:
self.panel.updateAllItems()
    def on_actionFindPrevMostSimilar_triggered(self):
        # '<' button: step back through the similarity ranking
        self.findMostSimilarCluster('previous')
    def on_actionFindNextMostSimilar_triggered(self):
        # '>' button: step forward through the similarity ranking
        self.findMostSimilarCluster('next')
    def on_actionToggleErrors_toggled(self, checked):
        # show/hide the template error limit fills in the sort panel
        self.panel.showFills(checked)
    def on_slider_valueChanged(self, slideri):
        """Select a sliding window of nsamples spikes starting at slider position
        slideri. On first use, switch nslist from nid order to sid order and
        flag it as sliding."""
        self.nslist.clearSelection() # emits selectionChanged signal, .reset() doesn't
        if self.nslist.model().sliding == False:
            self.nslist.model().sids.sort() # change from nid order to sid order
            self.nslist.updateAll() # update to reflect new ordering
            self.nslist.model().sliding = True
        nsamples = int(self.nsamplesComboBox.currentText())
        rows = np.arange(slideri, slideri+nsamples)
        self.nslist.selectRows(rows)
def on_slider_sliderPressed(self):
"""Make slider click (without movement) highlight the first nsamples
or fewer spikes when slider is at 0 position"""
slideri = self.slider.value()
if slideri == 0:
nsamples = int(self.nsamplesComboBox.currentText())
nsamples = min(nsamples, self.nslist.model().nspikes)
rows = np.arange(nsamples)
self.nslist.selectRows(rows)
def update_slider(self):
"""Update slider limits and step sizes"""
nsamples = int(self.nsamplesComboBox.currentText())
nsids = len(self.nslist.sids)
ulim = max(nsids-nsamples, 1) # upper limit
self.slider.setRange(0, ulim)
self.slider.setSingleStep(1)
self.slider.setPageStep(nsamples)
    def findMostSimilarCluster(self, which='next'):
        """If no chans selected, compare source to next or previous most similar cluster
        based on chans the two have in common, while requiring the two have each others'
        max chans in common. If chans have been selected, use them as a starting set of
        chans to compare on. Also, use only the timepoint range selected in incltComboBox.

        which: 'next' or 'previous' -- direction to step through the
        RMS-error-sorted ranking of comparable clusters. Selects the chosen
        destination cluster and prints its RMS error vs the source."""
        try:
            source = self.getClusterComparisonSource()
        except RuntimeError as err:
            print(err)
            return
        destinations = list(self.sort.clusters.values())
        destinations.remove(source)
        selchans = np.sort(self.panel.chans_selected)
        if len(selchans) > 0:
            srcchans = np.intersect1d(source.neuron.wave.chans, selchans)
            if len(srcchans) == 0:
                print("Source cluster doesn't overlap with selected chans")
                return
        else:
            srcchans = source.neuron.wave.chans
        if self.spykewindow.ui.normButton.isChecked():
            print("NOTE: findMostSimilarCluster() doesn't currently take spike amplitude "
                  "normalization into account. To see the true amplitudes used to compare "
                  "neuron pairs, turn off normalization")
        errors = [] # rms error per comparable destination
        dests = [] # corresponding destination clusters
        t0i, t1i = self.tis # timepoint range selected in incltComboBox
        # try and compare source neuron waveform to all destination neuron waveforms
        for dest in destinations:
            if dest.neuron.wave.data is None: # hasn't been calculated yet
                dest.neuron.update_wave()
            dstchans = dest.neuron.wave.chans
            if len(selchans) > 0:
                # require the destination to cover all selected chans
                if not set(selchans).issubset(dstchans):
                    continue
                dstchans = selchans
            cmpchans = np.intersect1d(srcchans, dstchans)
            if len(cmpchans) == 0: # not comparable
                continue
            # ensure maxchan of both source and dest neuron are both in cmpchans
            if source.neuron.chan not in cmpchans or dest.neuron.chan not in cmpchans:
                continue
            srcwavedata = source.neuron.wave[cmpchans].data[:, t0i:t1i]
            dstwavedata = dest.neuron.wave[cmpchans].data[:, t0i:t1i]
            error = core.rms(srcwavedata - dstwavedata)
            errors.append(error)
            dests.append(dest)
        if len(errors) == 0:
            print("No sufficiently overlapping clusters on selected chans to compare to")
            return
        errors = np.asarray(errors)
        dests = np.asarray(dests)
        desterrsortis = errors.argsort() # ranking: most to least similar
        # step the comparison index through the ranking, clamped to valid range
        if which == 'next':
            self._cmpid += 1
        elif which == 'previous':
            self._cmpid -= 1
        else: raise ValueError('Unknown which: %r' % which)
        self._cmpid = max(self._cmpid, 0)
        self._cmpid = min(self._cmpid, len(dests)-1)
        dest = dests[desterrsortis][self._cmpid]
        self.spykewindow.SelectClusters(dest)
        desterr = errors[desterrsortis][self._cmpid]
        print('n%d to n%d rmserror: %.2f uV' %
              (source.id, dest.id, self.sort.converter.AD2uV(desterr)))
def getClusterComparisonSource(self):
selclusters = self.spykewindow.GetClusters()
errmsg = 'unclear which cluster to use as source for comparison'
if len(selclusters) == 1:
source = selclusters[0]
self._source = source
self._cmpid = -1 # init/reset
elif len(selclusters) == 2:
source = self._source
if source not in selclusters:
raise RuntimeError(errmsg)
# deselect old destination cluster:
selclusters.remove(source)
self.spykewindow.SelectClusters(selclusters, on=False)
else:
self._source = None # reset for tidiness
raise RuntimeError(errmsg)
return source
def Shift(self, nt):
"""Shift selected sids by nt timepoints"""
s = self.sort
spikes = s.spikes
spw = self.spykewindow
sids = np.concatenate((spw.GetClusterSpikes(), spw.GetUnsortedSpikes()))
self.sort.shift(sids, nt)
print('Shifted %d spikes by %d timepoints' % (len(sids), nt))
unids = np.unique(spikes['nid'][sids])
neurons = [ s.neurons[nid] for nid in unids ]
for neuron in neurons:
neuron.update_wave() # update affected mean waveforms
# add dirtysids to the set to be resaved to .wave file:
spw.update_dirtysids(sids)
# auto-refresh all plots
self.panel.updateAllItems()
    def Align(self, to):
        """Align all implicitly selected spikes to min or max, or best fit
        on selected chans.

        to : 'best', 'min' or 'max' — 'best' runs the cython best-fit
        aligner on the user-selected chans; anything else is passed through
        to sort.alignminmax().
        """
        s = self.sort
        spikes = s.spikes
        spw = self.spykewindow
        # implicit selection: all spikes of selected clusters, plus selected
        # unsorted spikes:
        sids = np.concatenate((spw.GetClusterSpikes(), spw.GetUnsortedSpikes()))
        if to == 'best':
            tis = self.tis
            # find which chans are common to all sids:
            commonchans = s.get_common_chans(sids)[0]
            # check selected chans
            selchans = spw.get_selchans(sids)
            for selchan in selchans:
                if selchan not in commonchans:
                    print("Chan %d not common to all spikes, pick from %r"
                          % (selchan, list(commonchans)))
                    return
            print('Best fit aligning %d spikes between tis=%r on chans=%r' %
                  (len(sids), list(tis), selchans))
            # numpy implementation:
            #dirtysids = s.alignbest(sids, tis, selchans)
            # cython implementation:
            dirtysids = util.alignbest_cy(s, sids, tis, np.int64(selchans))
        else: # to in ['min', 'max']
            print('Aligning %d spikes to %s' % (len(sids), to))
            dirtysids = s.alignminmax(sids, to)
        paligned = len(dirtysids) / len(sids) * 100
        print('Aligned %d/%d (%.1f%%) spikes' % (len(dirtysids), len(sids), paligned))
        unids = np.unique(spikes['nid'][dirtysids])
        neurons = [ s.neurons[nid] for nid in unids ]
        for neuron in neurons:
            neuron.update_wave() # update affected mean waveforms
        # add dirtysids to the set to be resaved to .wave file:
        spw.update_dirtysids(dirtysids)
        # auto-refresh all plots:
        self.panel.updateAllItems()
def RemoveNeuron(self, neuron, update=True):
"""Remove neuron and all its spikes from the GUI and the Sort"""
self.MoveSpikes2List(neuron, neuron.sids, update=update)
self.sort.remove_neuron(neuron.id)
if update:
self.nlist.updateAll()
def MoveSpikes2Neuron(self, sids, neuron=None, update=True):
"""Assign spikes from sort.spikes to a neuron, and trigger eventual update of
mean wave. If neuron is None, create a new one"""
sids = toiter(sids)
spikes = self.sort.spikes
if neuron == None:
neuron = self.sort.create_neuron()
neuron.sids = np.union1d(neuron.sids, sids) # update
spikes['nid'][sids] = neuron.id
if update:
self.sort.update_usids()
self.uslist.updateAll()
if neuron in self.nslist.neurons:
self.nslist.neurons = self.nslist.neurons # trigger nslist refresh
# TODO: selection doesn't seem to be working, always jumps to top of list
#self.uslist.Select(row) # automatically select the new item at that position
neuron.wave.data = None # trigger template mean update
return neuron
def MoveSpikes2List(self, neuron, sids, update=True):
"""Move spikes from a neuron back to the unsorted spike list control"""
sids = toiter(sids)
if len(sids) == 0:
return # nothing to do
spikes = self.sort.spikes
neuron.sids = np.setdiff1d(neuron.sids, sids) # return what's in 1st arr and not in 2nd
spikes['nid'][sids] = 0 # unbind neuron id of sids in spikes struct array
if update:
self.sort.update_usids()
self.uslist.updateAll()
# this only makes sense if the neuron is currently selected in the nlist:
if neuron in self.nslist.neurons:
self.nslist.neurons = self.nslist.neurons # this triggers a refresh
neuron.wave.data = None # triggers an update when it's actually needed
    def PlotClusterHistogram(self, X, nids):
        """Plot histogram of given clusters along a single dimension. If two clusters are
        given, project them onto axis connecting their centers, and calculate separation
        indices between them. Otherwise, plot the distribution of all given clusters
        (up to a limit) along the first dimension in X.

        X : (npoints, ndims) array of cluster-space points; nids : per-point
        neuron IDs, parallel to X's rows.
        """
        spw = self.spykewindow
        mplw = spw.OpenWindow('MPL')
        unids = np.unique(nids) # each unid corresponds to a cluster, except possibly unid 0
        nclusters = len(unids)
        if nclusters == 0:
            mplw.ax.clear()
            mplw.figurecanvas.draw()
            print("No spikes selected")
            return
        elif nclusters > 5: # to prevent slowdowns, don't plot too many
            mplw.ax.clear()
            mplw.figurecanvas.draw()
            print("Too many clusters selected for cluster histogram")
            return
        elif nclusters == 2:
            calc_measures = True
        else:
            calc_measures = False
            projdimi = 0
        ndims = X.shape[1]
        points = [] # list of projection of each cluster's points onto dimi
        for unid in unids:
            sidis, = np.where(nids == unid)
            # don't seem to need contig points for NDsepmetric, no need for copy:
            points.append(X[sidis])
            #points.append(np.ascontiguousarray(X[sidis]))
        if calc_measures:
            t0 = time.time()
            NDsep = util.NDsepmetric(*points, Nmax=20000)
            print('NDsep calc took %.3f sec' % (time.time()-t0))
            # centers of both clusters, use median:
            c0 = np.median(points[0], axis=0) # ndims vector
            c1 = np.median(points[1], axis=0)
            # line connecting the centers of the two clusters, wrt c0
            line = c1-c0
            line /= np.linalg.norm(line) # make it unit length
            #print('c0=%r, c1=%r, line=%r' % (c0, c1, line))
        else:
            # no measures: project everything onto the first dimension of X
            line = np.zeros(ndims)
            line[projdimi] = 1.0 # pick out just the one component
            c0 = 0.0 # set origin at 0
        # calculate projection of each cluster's points onto line
        projs = []
        for cpoints in points:
            projs.append(np.dot(cpoints-c0, line))
        if calc_measures:
            d = np.median(projs[1]) - np.median(projs[0])
            # measure whether centers are at least 3 of the bigger stdevs away from
            # each other:
            maxstd = max(projs[0].std(), projs[1].std())
            if maxstd == 0:
                oneDsep = 0 # not sure if this is ideal
            else:
                oneDsep = d / (3 * maxstd)
            #print('std0=%f, std1=%f, d=%f' % (projs[0].std(), projs[1].std(), d))
        # common bin edges for all clusters, from the pooled projections:
        proj = np.concatenate(projs)
        nbins = max(intround(np.sqrt(len(proj))), 2) # seems like a good heuristic
        #print('nbins = %d' % nbins)
        edges = np.histogram(proj, bins=nbins)[1]
        hists = []
        for i in range(nclusters):
            hists.append(np.histogram(projs[i], bins=edges)[0])
        hist = np.concatenate([hists]) # one cluster hist per row
        masses = np.asarray([ h.sum() for h in hist ])
        sortedmassis = masses.argsort()
        # Take the fraction of area that the two distribs overlap.
        # At each bin, take min value of the two distribs. Add up all those min values,
        # and divide by the mass of the smaller distrib.
        if calc_measures:
            overlaparearatio = hist.min(axis=0).sum() / masses[sortedmassis[0]]
            djs = core.DJS(hists[0], hists[1])
        # plotting:
        ledges = edges[:-1] # keep just the left edges, discard the last right edge
        assert len(ledges) == nbins
        binwidth = ledges[1] - ledges[0]
        # plot:
        a = mplw.ax
        a.clear()
        windowtitle = "clusters %r" % list(unids)
        print(windowtitle)
        mplw.setWindowTitle(windowtitle)
        if calc_measures:
            #title = ("sep index=%.3f, overlap area ratio=%.3f, DJS=%.3f, sqrt(DJS)=%.3f"
            #         % (oneDsep, overlaparearatio, djs, np.sqrt(djs)))
            title = ("%dDsep=%.3f, 1Dsep=%.3f, OAR=%.3f, DJS=%.3f"
                     % (ndims, NDsep, oneDsep, overlaparearatio, djs))
            print(title)
            a.set_title(title)
        cs = [ CLUSTERCOLOURDICT[unid] for unid in unids ]
        for i, c in enumerate(cs):
            # due to white background, replace white clusters with black:
            if c == WHITE:
                cs[i] = 'black'
        # plot the smaller cluster last, to maximize visibility:
        for i in sortedmassis[::-1]:
            a.bar(ledges, hist[i], width=binwidth, color=cs[i], edgecolor=cs[i])
        ## TODO: tight_layout call needs updating for MPL 2.2:
        #mplw.f.tight_layout(pad=0.3) # crop figure to contents
        mplw.figurecanvas.draw()
| 47.437693 | 98 | 0.600495 |
from __future__ import division
from __future__ import print_function
__authors__ = ['Martin Spacek', 'Reza Lotun']
import os
import sys
import time
import datetime
from copy import copy
import operator
import random
import shutil
import hashlib
import multiprocessing as mp
from PyQt4 import QtCore, QtGui
from PyQt4.QtCore import Qt
from PyQt4.QtGui import QAction, QIcon, QApplication
import numpy as np
import scipy
import scipy.signal
import pylab as pl
import pyximport
pyximport.install(build_in_temp=False, inplace=True)
from . import util
from . import core
from .core import (WaveForm, Gaussian, MAXLONGLONG, R, toiter, intround, printflush, lstrip,
rstrip, lrstrip, pad, td2days, SpykeToolWindow, NList, NSList, dist,
USList, ClusterChange, SpikeSelectionSlider, lrrep2Darrstripis, rollwin2D)
from .detect import DEBUG
from .surf import EPOCH
from .plot import SpikeSortPanel, CLUSTERCOLOURDICT, WHITE
from .__version__ import __version__
# GUI layout constants (presumably pixel sizes, hand-tuned — confirm against
# the window-building code):
LISTWIDTH = 70
PANELWIDTHPERCOLUMN = 120
PANELHEIGHTPERROW = 50
VSCROLLBARWIDTH = 14
SORTWINDOWHEIGHT = 1035
MINSORTWINDOWWIDTH = 566
# max number of spikes to sample when computing a neuron's mean waveform
# (see Sort.get_mean_wave):
MEANWAVEMAXSAMPLES = 2000
# default number of principal components per chan to feed into ICA
# (see Sort.get_component_matrix):
NPCSPERCHAN = 7
# which library to use for PCA and ICA decomposition:
PCALIB = 'mdp'
ICALIB = 'sklearn'
# min interspike interval (us); closer spikes count as duplicates in
# Sort.check_ISIs:
DEFMINISI = 50
# NOTE(review): MAXGROUPISI/MAXGROUPDT look like grouping limits in us, but
# their consumers aren't visible in this chunk — confirm before documenting
# further:
MAXGROUPISI = 100000
MAXGROUPDT = 100000000
class Sort(object):
    def __init__(self, detector=None, stream=None, tw=None):
        """A sorting session: a Detector, its stream, and the neurons
        (clusters) that spikes get sorted into.

        NOTE: self.tw must be bound before self.stream, because assigning
        self.stream goes through the stream property setter, which calls
        calc_twts_twi(), and that reads self.tw.
        """
        self.__version__ = __version__
        self.fname = ''
        self.user = ''
        self.notes = ''
        self.detector = detector
        self.tw = tw # time window (us) relative to spike time
        self.stream = stream # property setter; also binds .twts and .twi
        self.probe = stream.probe # only one probe design per sort allowed
        self.converter = stream.converter # AD <-> uV conversion
        self.neurons = {} # nid -> Neuron
        self.clusters = {} # neurons with multidm params scaled for plotting
        self.norder = [] # stores order of neuron ids display in nlist
        self.npcsperchan = NPCSPERCHAN # PCs per chan fed into ICA
def get_nextnid(self):
nids = list(self.neurons)
if len(nids) == 0:
return 1 # single unit nids start at 1
else:
return max(max(nids) + 1, 1) # at least 1
nextnid = property(get_nextnid)
def get_nextmuid(self):
nids = list(self.neurons)
if len(nids) == 0:
return -1 # multiunit ids start at -1
else:
return min(min(nids) - 1, -1) # at most -1
nextmuid = property(get_nextmuid)
def get_good(self):
good = []
for neuron in self.neurons.values():
try:
if neuron.good:
good.append(neuron.id)
except AttributeError: # neuron is from older sort, no .good attrib
neuron.good = False
return np.asarray(good)
def set_good(self, good):
nids = list(self.neurons)
assert np.all([ nid in nids for nid in good ]) # make sure all nids in good exist
notgood = np.setdiff1d(nids, good)
for nid in notgood:
neuron = self.neurons[nid]
neuron.good = False
for nid in good:
neuron = self.neurons[nid]
neuron.good = True
good = property(get_good, set_good)
def get_stream(self):
try:
return self._stream
except AttributeError:
# this is likely a brand new sort, has yet to be assigned a Stream
return None
    def set_stream(self, stream=None):
        """Bind a stream to this sort and recalculate everything that depends
        on it. When a stream is already bound, the replacement must be of the
        same type, use the same probe type, and have a related file name (one
        a substring of the other).
        NOTE(review): passing stream=None reaches the final print and fails on
        stream.fname — callers apparently always pass a real stream; confirm.
        """
        oldstream = self.stream
        if stream != None and oldstream != None:
            # do stream types match?
            if type(stream) != type(oldstream):
                raise ValueError("Stream types don't match: %s, %s"
                                 % (type(oldstream), type(stream)))
            if type(stream.probe) != type(oldstream.probe):
                raise ValueError("Stream probe types don't match: %s, %s"
                                 % (type(oldstream.probe), type(stream.probe)))
            # is one stream fname a superset of the other?
            if (stream.fname not in oldstream.fname) and (oldstream.fname not in stream.fname):
                raise ValueError("Stream file names are not supersets of each other: %s, %s"
                                 % (oldstream.fname, stream.fname))
            else:
                print('Stream file names are similar enough to proceed: %s, %s'
                      % (stream.fname, oldstream.fname))
        # carry the sort's signal conditioning settings over to the new
        # stream, when they're already bound to self:
        try:
            stream.filtmeth = self.filtmeth
            stream.car = self.car
            stream.sampfreq = self.sampfreq
            stream.shcorrect = self.shcorrect
        except AttributeError:
            pass # one of the above aren't bound
        self._stream = stream
        print('Bound stream %r to sort %r' % (stream.fname, self.fname))
        self.calc_twts_twi() # window timepoints depend on the stream's tres
    stream = property(get_stream, set_stream)
    def calc_twts_twi(self):
        """Calculate time window timepoints (us, relative to spike time) and
        the indices of the first and last of them (relative to the spike),
        from self.tw and the stream's sample period"""
        tres = self.tres
        tw = self.tw
        twts = np.arange(tw[0], tw[1], tres)
        # shift by the remainder so timepoints land on exact multiples of tres:
        twts += twts[0] % tres
        self.twts = twts
        self.twi = intround(twts[0] / tres), intround(twts[-1] / tres)
def update_tw(self, tw):
oldtw = self.tw
self.tw = tw
self.calc_twts_twi()
dtw = np.asarray(tw) - np.asarray(oldtw)
self.spikes['t0'] += dtw[0]
self.spikes['t1'] += dtw[1]
self.spikes['tis'] = self.spikes['tis'] - intround(dtw[0] / self.tres)
for neuron in self.neurons.values():
if neuron.wave.data != None:
neuron.update_wave()
print('WARNING: all spike waveforms need to be reloaded!')
def get_tres(self):
return self.stream.tres
tres = property(get_tres)
def __getstate__(self):
d = self.__dict__.copy()
for attr in ['spikes', 'wavedata', 'usids', 'X', 'Xhash']:
try: del d[attr]
except KeyError: pass
return d
def get_nspikes(self):
try: return len(self.spikes)
except AttributeError: return 0
nspikes = property(get_nspikes)
def update_usids(self):
nids = self.spikes['nid']
self.usids, = np.where(nids == 0)
def get_spikes_sortedby(self, attr='id'):
vals = self.spikes[attr]
spikes = self.spikes[vals.argsort()]
return spikes
def get_wave(self, sid):
spikes = self.spikes
nchans = spikes['nchans'][sid]
chans = spikes['chans'][sid, :nchans]
t0 = spikes['t0'][sid]
t1 = spikes['t1'][sid]
wavedata = self.wavedata[sid, 0:nchans]
ts = np.arange(t0, t1, self.tres)
return WaveForm(data=wavedata, ts=ts, chans=chans, tres=self.tres)
def get_maxchan_wavedata(self, sid=None, nid=None):
if sid != None:
assert nid == None
chani = self.spikes['chani'][sid]
return self.wavedata[sid, chani]
elif nid != None:
assert sid == None
neuron = self.neurons[nid]
chani, = np.where(neuron.chans == neuron.chan)
assert len(chani) == 1
chani = chani[0]
return neuron.wave.data[chani]
    def get_mean_wave(self, sids, nid=None):
        """Return the mean (and std) WaveForm of spikes sids, over the chans
        that at least half of the (possibly subsampled) spikes share. At most
        MEANWAVEMAXSAMPLES spikes are used; nid is only for log messages."""
        spikes = self.spikes
        nsids = len(sids)
        if nsids > MEANWAVEMAXSAMPLES:
            # subsample evenly to keep this affordable:
            step = nsids // MEANWAVEMAXSAMPLES + 1
            s = ("get_mean_wave() sampling every %d spikes instead of all %d"
                 % (step, nsids))
            if nid != None:
                s = "neuron %d: " % nid + s
            print(s)
            sids = sids[::step]
            nsids = len(sids)
        chanss = spikes['chans'][sids]
        nchanss = spikes['nchans'][sids]
        chanslist = [ chans[:nchans] for chans, nchans in zip(chanss, nchanss) ]
        chanpopulation = np.concatenate(chanslist)
        groupchans = np.unique(chanpopulation)
        wavedata = self.wavedata[sids]
        if wavedata.ndim == 2: # single spike, give it a leading spike axis
            wavedata.shape = 1, wavedata.shape[0], wavedata.shape[1]
        nt = wavedata.shape[-1]
        maxnchans = len(groupchans)
        data = np.zeros((maxnchans, nt))
        # nspikes[chani] is the number of spikes that contributed to chani:
        nspikes = np.zeros((maxnchans, 1), dtype=int)
        for chans, wd in zip(chanslist, wavedata):
            chanis = groupchans.searchsorted(chans) # each spike's chans is a subset of groupchans
            data[chanis] += wd[:len(chans)]
            nspikes[chanis] += 1
        #t0 = time.time()
        data /= nspikes # normalize all data points appropriately, this is now the mean
        var = np.zeros((maxnchans, nt))
        for chans, wd in zip(chanslist, wavedata):
            chanis = groupchans.searchsorted(chans) # each spike's chans is a subset of groupchans
            var[chanis] += (wd[:len(chans)] - data[chanis]) ** 2
        var /= nspikes
        std = np.sqrt(var)
        # keep only chans that at least half the spikes contributed to:
        bins = list(groupchans) + [np.inf]
        hist, bins = np.histogram(chanpopulation, bins=bins)
        chans = groupchans[hist >= nsids/2]
        chanis = groupchans.searchsorted(chans)
        data = data[chanis]
        std = std[chanis]
        return WaveForm(data=data, std=std, chans=chans)
    def check_ISIs(self, nids='good'):
        """Check that interspike intervals of the given neurons ('good',
        'all', or an explicit sequence of nids) all exceed DEFMINISI us;
        raise RuntimeError on the first neuron with duplicate spikes"""
        print('Checking inter-spike intervals')
        if nids == 'good':
            nids = self.good
        elif nids == 'all':
            nids = sorted(self.neurons)
        for nid in nids:
            neuron = self.neurons[nid]
            spikets = self.spikes['t'][neuron.sids]
            assert spikets.flags['OWNDATA'] # fancy indexing copies; safe to sort in place
            spikets.sort() # make sure they're sorted before diffing
            ndupl = (np.diff(spikets) < DEFMINISI).sum()
            if ndupl > 0:
                msg = ('n%d has %d duplicate spikes (given DEFMINISI=%d us).\n'
                       'Remove duplicate spikes with the ISI tool in the Verify tab'
                       % (nid, ndupl, DEFMINISI))
                raise RuntimeError(msg)
    def check_wavealign(self, nids='good', maxdti=1):
        """Check that the primary peak of each given neuron's mean waveform
        (on its max chan) lies within maxdti timepoints of the t=0 alignment
        point; raise RuntimeError on the first misaligned neuron"""
        print('Checking neuron mean waveform alignment')
        if nids == 'good':
            nids = self.good
        elif nids == 'all':
            nids = sorted(self.neurons)
        nt = self.twi[1] - self.twi[0] + 1 # expected number of points of each chan's wavedata
        for nid in nids:
            neuron = self.neurons[nid]
            wd = self.get_maxchan_wavedata(nid=nid)
            assert len(wd) == nt
            # find positive and negative peaks, then the biggest of each:
            ppeakis, _ = scipy.signal.find_peaks(wd) # positive peak indices
            npeakis, _ = scipy.signal.find_peaks(-wd) # negative peak indices
            pmaxi = ppeakis[wd[ppeakis].argmax()] # biggest positive peak
            nmaxi = npeakis[wd[npeakis].argmin()] # biggest negative peak
            # primary peak: the negative one when it comes first, otherwise
            # whichever has the bigger amplitude:
            if nmaxi < pmaxi:
                peak1i = nmaxi
            else:
                pmax, nmax = wd[pmaxi], wd[nmaxi]
                if pmax > abs(nmax):
                    peak1i = pmaxi
                else:
                    peak1i = nmaxi
            alignti = 0 - self.twi[0] # index of the t=0 us timepoint
            dti = peak1i - alignti
            if abs(dti) > maxdti:
                peak1uV = self.converter.AD2uV(wd[peak1i])
                peak1us = intround(self.tres*(peak1i-alignti))
                msg = ('Primary peak (%+d uV @ t=%d us) of n%d is %+d timepoints away from '
                       'the t=0 us alignment point. Shift it closer and try again'
                       % (peak1uV, peak1us, nid, dti))
                raise RuntimeError(msg)
    def check_wavepadding(self, nids='good', npad=2):
        """Check that no spike waveform of the given neurons has npad or more
        identical values at either end — constant edges suggest the waveform
        was edge-padded (presumably by shift()) without being reloaded. All-
        zero edges are excused. Raise RuntimeError on the first offender."""
        print('Checking spike waveform padding')
        assert npad >= 2 # need >= 2 points to detect a constant run via diff
        if nids == 'good':
            nids = self.good
        elif nids == 'all':
            nids = sorted(self.neurons)
        for nid in nids:
            neuron = self.neurons[nid]
            for sid in neuron.sids:
                wd = self.wavedata[sid] # (nchans, nt) waveform data
                l, r = wd[:, :npad], wd[:, -npad:]
                # an edge is "padded" when it's constant on every chan:
                leftpadded = (np.diff(l, axis=1) == 0).all()
                rightpadded = (np.diff(r, axis=1) == 0).all()
                # all-zero edges don't count as padding:
                if leftpadded:
                    if (wd[:, 0] == 0).all():
                        leftpadded = False
                if rightpadded:
                    if (wd[:, -1] == 0).all():
                        rightpadded = False
                if leftpadded or rightpadded:
                    msg = ('n%d has s%d that looks like it has been padded.\n'
                           'leftpadded, rightpadded = %r, %r\n'
                           'Reload s%d or n%d or all spikes and try again'
                           % (nid, sid, leftpadded, rightpadded, sid, nid))
                    raise RuntimeError(msg)
def check_contiguous_nids(self):
print('Checking that neuron IDs are contiguous')
nids = np.array(list(self.neurons))
nids = nids[nids > 0]
nids.sort()
if (np.diff(nids) != 1).any():
raise RuntimeError('Neuron IDs are not contiguous, renumber all and try again')
    def exportptcsfiles(self, basepath, sortpath, user='', notes=''):
        """Export "good" clusters to one .ptcs file per stream under basepath.
        All consistency checks run first; each raises RuntimeError on failure."""
        self.check_ISIs()
        self.check_wavealign()
        self.check_wavepadding()
        self.check_contiguous_nids()
        spikes = self.spikes # NOTE(review): unused local
        exportdt = str(datetime.datetime.now()) # export datetime stamp
        exportdt = exportdt.split('.')[0] # ditch the us
        if self.stream.is_multi(): # self.stream is a MultiStream
            streams = self.stream.streams
        else: # single Stream
            streams = [self.stream]
        print('Exporting "good" clusters to:')
        tranges = self.stream.tranges
        t0 = tranges[0, 0] # absolute start time of the first stream
        for stream, trange in zip(streams, tranges):
            abst0 = trange[0] # absolute start time of this stream
            # time delta between this stream's start and the first stream's
            # start; spike times are exported relative to each stream's t=0:
            dt = abst0 - t0
            dt = intround(dt) # to nearest int us
            self.exportptcsfile(stream, basepath, dt, exportdt, sortpath,
                                user=user, notes=notes)
    def exportptcsfile(self, stream, basepath, dt, exportdt, sortpath, user='', notes=''):
        """Write one .ptcs file for stream, containing a PTCSNeuronRecord for
        each "good" neuron that has spikes within the stream's time range.

        dt : int — us between this stream's start and the first stream's
        start; subtracted from spike times so they're relative to this
        recording's t=0. exportdt/sortpath/user/notes go into the PTCSHeader.
        """
        nsamplebytes = 4 # bytes per exported waveform sample — presumably float32
        nrecs = []
        nspikes = 0
        for nid in sorted(self.good):
            neuron = self.neurons[nid]
            spikets = self.spikes['t'][neuron.sids] # fancy indexing: a copy
            assert spikets.flags['OWNDATA'] # safe to modify in place
            spikets.sort() # just in case it isn't sorted
            spikets -= dt # export spike times relative to t=0 of this recording
            # only include spikes that occurred during this recording
            lo, hi = spikets.searchsorted([stream.t0, stream.t1])
            spikets = spikets[lo:hi]
            if len(spikets) == 0:
                continue # don't save empty neurons
            nrec = PTCSNeuronRecord(neuron, spikets, nsamplebytes, descr='')
            nrecs.append(nrec)
            nspikes += len(spikets)
        nneurons = len(nrecs)
        # one subfolder per recording:
        path = os.path.join(basepath, stream.srcfnameroot)
        try: os.mkdir(path)
        except OSError: pass # already exists
        fname = stream.srcfnameroot + '.ptcs'
        fullfname = os.path.join(path, fname)
        header = PTCSHeader(self, sortpath, stream, nneurons, nspikes, nsamplebytes,
                            fullfname, exportdt, user=user, notes=notes)
        with open(fullfname, 'wb') as f:
            header.write(f)
            for nrec in nrecs:
                nrec.write(f)
        print(fullfname)
def exportcsv(self, fname):
sids = []
for nid in sorted(self.good):
neuron = self.neurons[nid]
sids.append(neuron.sids)
sids = np.hstack(sids)
spikes = self.spikes[sids]
tsecs = spikes['t'] / 1e6
nids = spikes['nid']
chans = spikes['chan']
data = np.column_stack([tsecs, nids, chans])
print('Exporting (tsec, nid, chan) of all spikes marked as "good" to %s' % fname)
np.savetxt(fname, data, fmt='%.6f, %d, %d')
    def exporttschid(self, basepath):
        """Export (timestamp, chan, nid) int64 triples of all sorted spikes.
        DISABLED: the raise on the first line makes everything below it
        unreachable; the dead code also references undefined names
        (srffnameroot, path) and predates multi-stream support.
        """
        raise NotImplementedError('Needs to be redone to work with multiple streams')
        spikes = self.spikes[self.spikes['nid'] > 0] # only sorted spikes (unreachable)
        dt = str(datetime.datetime.now()) # get an export timestamp
        dt = dt.split('.')[0] # ditch the us
        dt = dt.replace(' ', '_')
        dt = dt.replace(':', '.')
        srffnameroot = srffnameroot.replace(' ', '_') # NOTE(review): undefined name
        tschidfname = dt + '_' + srffnameroot + '.tschid'
        tschid = np.empty((len(spikes), 3), dtype=np.int64)
        tschid[:, 0] = spikes['t']
        tschid[:, 1] = spikes['chan']
        tschid[:, 2] = spikes['nid']
        tschid.tofile(os.path.join(path, tschidfname)) # save it; NOTE(review): path undefined
        print(tschidfname)
    def exportdin(self, basepath):
        """Export each stream's digital stimulus records to binary .din
        file(s) of (TimeStamp, SVal) int64 pairs under basepath, one file per
        displayrecord (experiment). Streams without stimuli are skipped."""
        if self.stream.is_multi(): # self.stream is a MultiStream
            streams = self.stream.streams
        else: # self.stream is a single Stream
            streams = [self.stream]
        dinfiledtype=[('TimeStamp', '<i8'), ('SVal', '<i8')] # pairs of int64s
        print('Exporting DIN(s) to:')
        for stream in streams:
            try: # neither of these attribs should exist for recordings with no stimuli:
                svrecs = stream.srff.digitalsvalrecords
                dsprecs = stream.srff.displayrecords
            except AttributeError:
                continue # no din to export for this stream
            if len(svrecs) == 0 or stream.srff.ndigitalsvalrecords == 0:
                raise ValueError("digitalsvalrecords are empty for stream %r. Attribute "
                                 "shouldn't exist" % stream.fname)
            path = os.path.join(basepath, stream.srcfnameroot)
            try: os.mkdir(path)
            except OSError: pass # already exists
            svrecs = svrecs.astype(dinfiledtype)
            # convert to normal n x 2 int64 array
            svrecs = svrecs.view(np.int64).reshape(-1, 2)
            # Some old recordings (<= ptc15) contain multiple experiments.
            # To deal with this, iterate over stream.srff.displayrecords, export one .din
            # per displayrecord. Append experiment ID to each .din filename, if necessary.
            svrects = svrecs[:, 0]
            dsprects = [ dsprec.TimeStamp for dsprec in dsprecs ]
            svalrecis = svrects.searchsorted(dsprects)
            assert svalrecis[0] == 0
            svalrecis = svalrecis[1:] # exclude the trivial 0 index
            # split sval records according to displayrecord timestamps:
            dins = np.split(svrecs, svalrecis)
            assert len(dins) == len(dsprecs)
            for eid, din in enumerate(dins):
                if eid == 0 and len(dins) == 1:
                    eidstr = ''
                elif len(dins) < 10:
                    eidstr = '.%d' % eid
                else: # include leading zero to maintain alphabetical fname order
                    eidstr = '.%02d' % eid
                dinfname = stream.srcfnameroot + eidstr + '.din'
                fullfname = os.path.join(path, dinfname)
                din.tofile(fullfname) # save it
                print(fullfname)
    def exporttextheader(self, basepath):
        """Export each stream's stimulus text header(s) to .textheader files
        under basepath, one file per displayrecord (experiment). Streams with
        no displayrecords attribute are skipped."""
        if self.stream.is_multi(): # self.stream is a MultiStream
            streams = self.stream.streams
        else: # self.stream is a single Stream
            streams = [self.stream]
        print('Exporting text header(s) to:')
        for stream in streams:
            try:
                dsprecs = stream.srff.displayrecords
            except AttributeError: # no textheader to export for this stream
                continue
            if len(dsprecs) == 0:
                raise ValueError("displayrecords are empty for stream %r. Attribute "
                                 "shouldn't exist" % stream.fname)
            path = os.path.join(basepath, stream.srcfnameroot)
            try: os.mkdir(path)
            except OSError: pass # already exists
            for eid, dsprec in enumerate(dsprecs):
                textheader = dsprec.Header.python_tbl
                if eid == 0 and len(dsprecs) == 1:
                    eidstr = ''
                elif len(dsprecs) < 10:
                    eidstr = '.%d' % eid
                else: # leading zero keeps alphabetical fname order
                    eidstr = '.%02d' % eid
                textheaderfname = stream.srcfnameroot + eidstr + '.textheader'
                fullfname = os.path.join(path, textheaderfname)
                with open(fullfname, 'w') as f:
                    f.write(textheader)
                print(fullfname)
    def exportall(self, basepath, sortpath):
        """Run all exports: spike data (.ptcs), stimulus din (.din) and
        stimulus text headers, for every stream in this sort, to basepath"""
        self.exportptcsfiles(basepath, sortpath)
        self.exportdin(basepath)
        self.exporttextheader(basepath)
    def exportspikewaves(self, sids, selchans, tis, fname, format):
        """Export waveform data of spikes sids, on the chans they share with
        selchans and timepoint range tis=(ti0, ti1), to fname.

        format : 'binary' writes a compressed .npz with metadata arrays;
        'text' writes CSV with each spike's chans flattened into one row.
        """
        nspikes = len(sids)
        chans, chanslist = self.get_common_chans(sids, selchans)
        nchans = len(chans)
        ti0, ti1 = tis
        nt = ti1 - ti0
        # gather the (nspikes, nchans, nt) data cube:
        dtype = self.wavedata.dtype
        data = np.zeros((nspikes, nchans, nt), dtype=dtype)
        for sii, sid in enumerate(sids):
            spikechans = chanslist[sii]
            spikechanis = spikechans.searchsorted(chans)
            data[sii] = self.wavedata[sid][spikechanis, ti0:ti1]
        if format == 'text': # flatten chans x timepoints into one row per spike
            data.shape = nspikes, nchans*nt
        stream = self.stream
        assert stream.kind == 'highpass' # spikes come from highpass data only
        if format == 'binary':
            nids = self.spikes['nid'][sids]
            spiketimes = self.spikes['t'][sids]
            chanpos = stream.probe.siteloc_arr()
            uVperAD = stream.converter.AD2uV(1) # uV per AD unit, for reconstruction
            with open(fname, 'wb') as f:
                np.savez_compressed(f, data=data, sids=sids, nids=nids,
                                    spiketimes=spiketimes, chans=chans, tis=tis,
                                    chanpos=chanpos, uVperAD=uVperAD)
        elif format == 'text':
            np.savetxt(fname, data, fmt='%d', delimiter=',') # data should be int
        else:
            raise ValueError('Unknown format: %r' % format)
        print('Exported %d spikes on chans=%r and tis=%r to %s'
              % (nspikes, list(chans), list(tis), fname))
    def get_param_matrix(self, kind=None, sids=None, tis=None, selchans=None, norm=False,
                         dims=None, scale=True):
        """Assemble a float32 data matrix of the requested dims for spikes
        sids: one column per dim. A dim is either a spikes struct array field,
        a component 'c0', 'c1', ... (from get_component_matrix with the given
        kind), or 'RMSerror'. When scale is True, each column is mean-centered
        and divided by its std — except x0/y0 on multicolumn probes, which
        are both divided by x0's std so spatial aspect ratio is preserved."""
        spikes = self.spikes
        dtypefields = list(spikes.dtype.fields)
        if sids is None:
            sids = spikes['id'] # all spikes
        # which dims are component dims ('c' followed by a digit)?
        comps = [ dim for dim in dims if dim.startswith('c') and dim[-1].isdigit() ]
        rmserror = np.any([ dim == 'RMSerror' for dim in dims ])
        ncomp = len(comps)
        hascomps = ncomp > 0
        if hascomps:
            X = self.get_component_matrix(kind, sids, tis=tis, chans=selchans,
                                          minncomp=ncomp, norm=norm)
        if rmserror:
            rms = self.get_rms_error(sids, tis=tis, chans=selchans)
        data = []
        for dim in dims:
            if dim in dtypefields:
                data.append( np.float32(spikes[dim][sids]) )
            elif dim.startswith('c') and dim[-1].isdigit():
                compid = int(lstrip(dim, 'c'))
                data.append( np.float32(X[:, compid]) )
            elif dim == 'RMSerror':
                data.append( np.float32(rms) )
            else:
                raise RuntimeError('Unknown dim %r' % dim)
        data = np.column_stack(data)
        if scale:
            for dim, d in zip(dims, data.T):
                d -= d.mean() # mean-center in place
                if dim in ['x0', 'y0'] and self.probe.ncols > 1:
                    # lazily compute x0's std once, shared by x0 and y0 so
                    # both spatial dims are scaled by the same factor:
                    try: x0std
                    except NameError: x0std = spikes['x0'].std()
                    if x0std != 0.0:
                        d /= x0std
                else:
                    dstd = d.std()
                    if dstd != 0.0:
                        d /= dstd
        return data
    def get_component_matrix(self, kind, sids, tis=None, chans=None, minncomp=None,
                             norm=False):
        """Compute a dimension-reduced component matrix of the waveforms of
        spikes sids, restricted to timepoint range tis=(ti0, ti1) and chans.
        kind selects the method: 'PCA', 'sPCA', 'mbsPCA', 'NMF', 'tSNE' or
        'ICA'. norm scales each spike by its max peak-to-peak amplitude.
        Results are cached in self.X keyed by get_Xhash(). Returns an
        (nspikes, ncomponents) array."""
        spikes = self.spikes
        nt = self.wavedata.shape[2]
        if tis is None: # default to the full waveform
            tis = np.asarray([0, nt])
        ti0, ti1 = tis
        assert ti0 < ti1 <= nt
        nt = ti1 - ti0
        chans, chanslist = self.get_common_chans(sids, chans)
        nchans = len(chans)
        nspikes = len(sids)
        if nspikes < 2:
            raise RuntimeError("Need at least 2 spikes for %s" % kind)
        if nchans == 0:
            raise RuntimeError("Spikes have no common chans for %s" % kind)
        # check the cache first:
        Xhash = self.get_Xhash(kind, sids, tis, chans, self.npcsperchan, norm)
        self.Xhash = Xhash
        try: self.X
        except AttributeError: self.X = {} # init cache attrib on first use
        if Xhash in self.X:
            print('Cache hit, using cached %ss from tis=%r, chans=%r of %d spikes' %
                  (kind[:-1], list(tis), list(chans), nspikes))
            return self.X[Xhash]
        print('Cache miss, (re)calculating %ss' % kind[:-1])
        print('Doing %s on tis=%r, chans=%r of %d spikes' %
              (kind, list(tis), list(chans), nspikes))
        # assemble the (nspikes, nchans, nt) input cube:
        data = np.zeros((nspikes, nchans, nt), dtype=np.float64)
        for sii, sid in enumerate(sids):
            spikechans = chanslist[sii]
            spikechanis = spikechans.searchsorted(chans)
            spikedata = self.wavedata[sid][spikechanis, ti0:ti1]
            if norm:
                # normalize by the spike's max peak-to-peak amplitude:
                maxptp = spikedata.ptp(axis=1).max()
                if maxptp != 0:
                    spikedata = spikedata / maxptp
            data[sii] = spikedata
        print('Input shape for %s: %r' % (kind, data.shape))
        t0 = time.time()
        data.shape = nspikes, nchans*nt # flatten for the decomposition libs
        print('Reshaped input for %s: %r' % (kind, data.shape))
        if kind == 'PCA':
            if PCALIB == 'mdp':
                import mdp
                X = mdp.pca(data, output_dim=5, svd=False)
            elif PCALIB == 'sklearn':
                # doesn't tap into scipy.linalg.eig compiled code. RandomizedPCA is faster
                from sklearn.decomposition import PCA
                pca = PCA(n_components=5)
                X = pca.fit_transform(data) # do both the fit and the transform
            else:
                raise ValueError('Invalid PCALIB %r' % PCALIB)
            if X.shape[1] < minncomp:
                raise RuntimeError("Can't satisfy minncomp=%d request" % minncomp)
        elif kind == 'sPCA':
            from sklearn.decomposition import SparsePCA
            n_components = 5
            alpha = 1
            n_jobs = mp.cpu_count()
            spca = SparsePCA(n_components=n_components, alpha=alpha, n_jobs=n_jobs)
            X = spca.fit_transform(data)
        elif kind == 'mbsPCA':
            from sklearn.decomposition import MiniBatchSparsePCA
            n_components = 5
            alpha = 1
            n_jobs = mp.cpu_count()
            mbspca = MiniBatchSparsePCA(n_components=n_components, alpha=alpha, n_jobs=n_jobs)
            X = mbspca.fit_transform(data)
        elif kind == 'NMF':
            from sklearn.decomposition import NMF
            n_components = 5
            init = None
            nmf = NMF(n_components=n_components, init=init)
            X = nmf.fit_transform(data)
        elif kind == 'tSNE':
            # PCA down to a reasonable dimensionality first:
            ncomp = min((self.npcsperchan*nchans, data.shape[1]))
            print('ncomp: %d' % ncomp)
            import mdp
            data = mdp.pca(data, output_dim=ncomp)
            from sklearn.manifold import TSNE
            n_components = 3
            tsne = TSNE(n_components=n_components)
            X = tsne.fit_transform(data)
        elif kind == 'ICA':
            maxncomp = intround(np.sqrt(nspikes))
            if maxncomp < minncomp:
                raise RuntimeError("Can't satisfy minncomp=%d request" % minncomp)
            if data.shape[0] <= data.shape[1]:
                raise RuntimeError('Need more observations than dimensions for ICA')
            # limit number of PCs to feed into ICA, keep up to npcsperchan components per
            # chan on average:
            ncomp = min((self.npcsperchan*nchans, maxncomp, data.shape[1]))
            if ICALIB == 'mdp':
                import mdp # delay as late as possible
                # do PCA first, to reduce dimensionality and speed up ICA:
                print('ncomp: %d' % ncomp)
                data = mdp.pca(data, output_dim=ncomp)
                # nonlinearity g='pow3', ie x**3. tanh seems to separate better,
                # but is a bit slower. gaus seems to be slower still, and no better
                # than tanh, but these are just vague impressions.
                # defaults to whitened=False, ie assumes data isn't whitened
                node = mdp.nodes.FastICANode(g='pow3')
                X = node(data)
                pm = node.get_projmatrix()
                X = X[:, np.any(pm, axis=0)] # drop all-zero columns
            elif ICALIB == 'sklearn':
                from sklearn.decomposition import FastICA
                alg = 'parallel'
                fun = 'logcosh'
                maxiter = 100
                tol = 0.5
                fastica = FastICA(n_components=ncomp, algorithm=alg,
                                  whiten=True, fun=fun, fun_args=None,
                                  max_iter=maxiter, tol=tol, w_init=None,
                                  random_state=None)
                X = fastica.fit_transform(data)
                print('fastica niters: %d' % (fastica.n_iter_))
            else:
                raise ValueError('Invalid ICALIB %r' % ICALIB)
            if X.shape[1] < 3:
                raise RuntimeError('Need at least 3 columns')
            '''
            # sort by abs(kurtosis) of each IC (column)
            k = scipy.stats.kurtosis(X, axis=0)
            ki = abs(k).argsort()[::-1] # decreasing order of abs(kurtosis)
            print('Sort by abs(kurtosis):')
            print(k[ki])
            X = X[:, ki] # sort the ICs
            '''
            # sort ICs by decreasing negentropy:
            ne = core.negentropy(X, axis=0)
            assert (ne > 0).all()
            nei = ne.argsort()[::-1]
            print('Sort by negentropy:')
            print(ne[nei])
            X = X[:, nei]
            '''
            import pylab as pl
            pl.figure()
            pl.imshow(pm)
            pl.colorbar()
            pl.title('original projmatrix')
            pl.figure()
            pl.imshow(pm[:, ki])
            pl.colorbar()
            pl.title('decreasing abs(kurtosis) projmatrix')
            pl.figure()
            pl.imshow(pm[:, nei])
            pl.colorbar()
            pl.title('decreasing negentropy projmatrix')
            '''
        else:
            raise ValueError('Unknown kind %r' % kind)
        print('Output shape for %s: %r' % (kind, X.shape))
        self.X[Xhash] = X # cache for next time
        print('%s took %.3f sec' % (kind, time.time()-t0))
        unids = list(np.unique(spikes['nid'][sids]))
        for nid in unids:
            # skip the junk cluster (nid 0): it has no Cluster object whose
            # component positions could be updated
            if nid != 0:
                self.clusters[nid].update_comppos(X, sids)
        return X
    def get_rms_error(self, sids, tis=None, chans=None):
        """Return per-spike RMS error of waveforms of sids relative to their
        cluster's mean waveform, over timepoint range tis and the chans the
        spikes share. All sids must belong to the same non-junk cluster."""
        spikes = self.spikes
        nids = np.unique(spikes['nid'][sids])
        nid = nids[0]
        if len(nids) > 1 or nid == 0:
            raise RuntimeError("Spikes must all belong to the same (non-junk) cluster for "
                               "RMS error calculation")
        nt = self.wavedata.shape[2]
        if tis is None: # default to the full waveform
            tis = np.asarray([0, nt])
        ti0, ti1 = tis
        assert ti0 < ti1 <= nt
        nt = ti1 - ti0
        chans, chanslist = self.get_common_chans(sids, chans)
        nchans = len(chans)
        nspikes = len(sids)
        if nchans == 0:
            raise RuntimeError("Spikes have no common chans for RMS error")
        print('Getting RMS error on tis=%r, chans=%r of %d spikes' %
              (list(tis), list(chans), nspikes))
        # collect each spike's waveform on the common chans:
        data = np.zeros((nspikes, nchans, nt), dtype=np.float64)
        for sii, sid in enumerate(sids):
            spikechans = chanslist[sii]
            spikechanis = spikechans.searchsorted(chans)
            data[sii] = self.wavedata[sid][spikechanis, ti0:ti1]
        # the cluster's mean waveform on the same chans and timepoints:
        wave = self.neurons[nid].get_wave()
        chanis = wave.chans.searchsorted(chans)
        meandata = np.float64(wave.data[chanis, ti0:ti1])
        # RMS over timepoints and chans, per spike:
        se = (data - meandata) ** 2
        mse = se.mean(axis=2).mean(axis=1)
        return np.sqrt(mse)
def get_common_chans(self, sids, chans=None):
spikes = self.spikes
chanss = spikes['chans'][sids]
nchanss = spikes['nchans'][sids]
chanslist = [ cs[:ncs] for cs, ncs in zip(chanss, nchanss) ]
commonchans = util.intersect1d_uint8(chanslist)
if chans is not None and len(chans) > 0:
diffchans = np.setdiff1d(chans, commonchans)
commonchans = np.intersect1d(chans, commonchans)
if len(diffchans) > 0:
print('WARNING: ignored chans %r not common to all spikes' % list(diffchans))
return commonchans, chanslist
def get_Xhash(self, kind, sids, tis, chans, npcsperchan, norm):
h = hashlib.md5()
h.update(kind.encode())
h.update(sids)
h.update(tis)
h.update(chans)
if kind == 'ICA':
h.update(str(npcsperchan).encode())
h.update(str(norm).encode())
return h.hexdigest()
def create_neuron(self, id=None, inserti=None):
if id == None:
id = self.nextnid
if id in self.neurons:
raise RuntimeError('Neuron %d already exists' % id)
id = int(id)
neuron = Neuron(self, id)
self.neurons[neuron.id] = neuron
if inserti == None:
self.norder.append(neuron.id)
else:
self.norder.insert(inserti, neuron.id)
return neuron
def remove_neuron(self, id):
try:
del self.neurons[id]
del self.clusters[id]
self.norder.remove(id)
except (KeyError, ValueError):
pass
def shift(self, sids, nt):
spikes = self.spikes
wd = self.wavedata
for sid in sids:
core.shiftpad(wd[sid], nt) # modifies wd in-place
# update spike parameters:
dt = intround(nt * self.tres) # amount of time to shift by, signed, in us
# so we can later reload the wavedata accurately, shifting the waveform right and
# padding it on its left requires decrementing the associated timepoints
# (and vice versa)
spikes['t'][sids] -= dt
spikes['t0'][sids] -= dt
spikes['t1'][sids] -= dt
# might result in some out of bounds tis because the original peaks
# have shifted off the ends. Opposite sign wrt timepoints above, referencing within
# wavedata:
spikes['tis'][sids] = spikes['tis'][sids] + nt
# this in-place operation raises a TypeError in numpy 1.11.2, something related to
# subtracting an int from an unsigned int:
#spikes['tis'][sid] += nt
# caller should treat all sids as dirty
def alignminmax(self, sids, to):
if not self.stream.is_open():
raise RuntimeError("No open stream to reload spikes from")
spikes = self.spikes
V0s = spikes['V0'][sids]
V1s = spikes['V1'][sids]
Vss = np.column_stack((V0s, V1s))
alignis = spikes['aligni'][sids]
b = np.column_stack((alignis==0, alignis==1)) # 2D boolean array
if to == 'min':
i = Vss[b] > 0 # indices into sids of spikes aligned to the max peak
elif to == 'max':
i = Vss[b] < 0 # indices into sids of spikes aligned to the min peak
else:
raise ValueError('Unknown to %r' % to)
sids = sids[i] # sids that need realigning
nspikes = len(sids)
print("Realigning %d spikes" % nspikes)
if nspikes == 0: # nothing to do
return [] # no sids to mark as dirty
multichantis = spikes['tis'][sids] # nspikes x nchans x 2 arr
chanis = spikes['chani'][sids] # nspikes arr of max chanis
# peak tis on max chan of each spike, convert from uint8 to int32 for safe math
tis = np.int32(multichantis[np.arange(nspikes), chanis]) # nspikes x 2 arr
# NOTE: tis aren't always in temporal order!
dpeaktis = tis[:, 1] - tis[:, 0]
dpeaks = spikes['dt'][sids]
ordered = dpeaktis > 0
reversed = dpeaktis < 0
alignis = spikes['aligni'][sids]
alignis0 = alignis == 0
alignis1 = alignis == 1
dpeaki = np.zeros(nspikes, dtype=int)
dpeaki[ordered & alignis0 | reversed & alignis1] = 1
dpeaki[ordered & alignis1 | reversed & alignis0] = -1
dts = dpeaki * dpeaks
dtis = -dpeaki * abs(dpeaktis)
spikes['t'][sids] += dts
spikes['t0'][sids] += dts
spikes['t1'][sids] += dts
spikes['tis'][sids] = spikes['tis'][sids] + dtis[:, None, None]
spikes['aligni'][sids[alignis0]] = 1
spikes['aligni'][sids[alignis1]] = 0
self.reload_spikes(sids)
return sids
def choose_new_meanchans(self, sids):
print('Choosing new channel set for all selected spikes')
det = self.detector
meanwave = self.get_mean_wave(sids)
maxchan = meanwave.chans[meanwave.data.ptp(axis=1).argmax()]
maxchani = det.chans.searchsorted(maxchan)
distances = det.dm.data[maxchani]
chanis = distances.argsort()[:det.maxnchansperspike]
meanchans = det.chans[chanis]
meanchans.sort()
print('meanchans: %r' % list(meanchans))
furthestchan = det.chans[chanis[-1]]
print('furthestchan: %d' % furthestchan)
furthestchani = meanchans.searchsorted(furthestchan)
assert len(meanchans) == det.maxnchansperspike
assert maxchan in meanchans
return meanchans, furthestchan, furthestchani
    def reload_spikes(self, sids, usemeanchans=False):
        """(Re)load the waveforms of sids from the open stream into
        self.wavedata, batching temporally-close spikes into groups so that each
        group needs only a single stream read. If usemeanchans, first choose a
        new channel set from the mean waveform of all sids and reload every
        spike on that set.

        NOTE(review): this block appears to have lost lines to extraction. The
        stray string fragment on the next line looks like the tail of a
        truncated statement (presumably a print reporting the number of sids),
        and `relts` in the grouping loop below is never assigned in the visible
        code (a line like `relts = ts - ts[0]` seems to be missing) — confirm
        against the original source before relying on this text.
        """
        ' % nsids)
        stream = self.stream
        if not stream.is_open():
            raise RuntimeError("No open stream to reload spikes from")
        spikes = self.spikes
        det = self.detector
        # .sort files <= version 0.3 may hold incorrect spike times that need
        # fixing as each spike is reloaded:
        ver_lte_03 = float(self.__version__) <= 0.3
        if ver_lte_03:
            print('Fixing potentially incorrect time values during spike reloading')
            nfixed = 0
        treload = time.time() # for reporting total reload duration
        if usemeanchans:
            if ver_lte_03:
                raise RuntimeError("Best not to choose new chans from mean until after "
                                   "converting to .sort >= 0.4")
            # choose a new channel set from the mean waveform of all sids:
            meanchans, furthestchan, furthestchani = self.choose_new_meanchans(sids)
            nmeanchans = len(meanchans)
        # the grouping below assumes sids are in temporal order:
        ts = spikes[sids]['t']
        if not (np.diff(ts) >= 0).all():
            print("Selected sids aren't in temporal order, sorting by time...")
            tsis = ts.argsort()
            sids = sids[tsis]
            print("Done sorting sids by time")
        # split sids into groups of temporally close spikes; every gap of at
        # least MAXGROUPISI starts a new group:
        splitis = np.where(np.diff(ts) >= MAXGROUPISI)[0] + 1
        groups = np.split(sids, splitis)
        # further split each group so that no single group spans too long a
        # stretch of the stream:
        groupi = 0
        while groupi < len(groups):
            group = groups[groupi]
            # NOTE(review): `relts` is undefined in the visible code — see
            # docstring above:
            splitis = np.where(np.diff(relts // MAXGROUPDT) > 0)[0] + 1
            nsubgroups = len(splitis) + 1
            if nsubgroups > 1:
                # replace this group with its subgroups, in place:
                del groups[groupi]
                subgroups = np.split(group, splitis)
                groups[groupi:groupi] = subgroups
                groupi += len(subgroups)
            else:
                groupi += 1
        print('ngroups: %d' % len(groups))
        sidi = 0 # running spike counter, for progress reporting
        for groupi, group in enumerate(groups):
            printflush('<%d>' % groupi, end='')
            assert len(group) > 0 # otherwise the grouping above went wrong
            t0 = spikes[group[0]]['t0']
            t1 = spikes[group[-1]]['t1']
            if ver_lte_03:
                # load some extra data, in case spike times need realigning:
                t0 -= 5000 # us
                t1 += 5000 # us
            # union of all chans used by spikes in this group, so one stream
            # call covers them all:
            unionchans = np.unique(spikes['chans'][group])
            if usemeanchans:
                spikes['nchans'][group] = nmeanchans
                # we're using the max num chans, so assign the full array:
                spikes['chans'][group] = meanchans
                unionchans = np.unique(np.hstack((unionchans, meanchans)))
            if 0 not in stream.chans: # chan 0 disabled in the stream
                unionchans = unionchans[unionchans != 0]
            # single stream read covering the whole group:
            tempwave = stream(t0, t1, unionchans)
            # slice out each spike's reloaded data from tempwave:
            for sid in group:
                # print out progress on a regular basis:
                if sidi % 10000 == 0:
                    printflush(sidi, end='')
                elif sidi % 1000 == 0:
                    printflush('.', end='')
                if usemeanchans:
                    # check that each spike's maxchan is in meanchans:
                    chan = spikes[sid]['chan']
                    if chan not in meanchans:
                        print("spike %d: replacing furthestchan %d with spike's maxchan %d"
                              % (sid, furthestchan, chan))
                        nchans = spikes[sid]['nchans']
                        chans = spikes[sid]['chans'][:nchans]
                        chans[furthestchani] = chan # swap in the maxchan
                        chans.sort() # keep chans in id order, in-place
                        #spikes['chans'][sid][:nchans] = chans
                spike = spikes[sid]
                nchans = spike['nchans']
                chans = spike['chans'][:nchans]
                rd = tempwave[spike['t0']:spike['t1']][chans].data # reloaded data
                if ver_lte_03: # fix potentially incorrect spike tis
                    result = self.reload_spike_ver_lte_03(sid, nchans, tempwave, rd)
                    if result == None:
                        sidi += 1 # inc status counter
                        continue # rollwin2D won't work, skip to next sid
                    else:
                        rd, fixed = result
                        if fixed:
                            nfixed += 1
                nt = rd.shape[1]
                self.wavedata[sid, :nchans, :nt] = rd # store the reloaded data
                sidi += 1
        print()
        if ver_lte_03:
            print('Fixed time values of %d spikes' % nfixed)
        print('(Re)loaded %d spikes, took %.3f sec' % (len(sids), time.time()-treload))
    def reload_spike_ver_lte_03(self, sid, nchans, tempwave, rd):
        """Fix potentially incorrect spike time values for .sort files <= 0.3
        while reloading spike sid: locate the old (stripped) waveform within the
        reloaded data rd, and if it's offset, correct the spike's time fields
        and re-slice the data. Return (newrd, fixed), or None if the old data is
        wider than rd and can't be searched for.

        NOTE(review): `spikes` and `chans` are referenced below but never
        defined in this method's visible body (no `spikes = self.spikes` or
        chans parameter) — lines appear to be missing from this copy of the
        source; as written, the dnt != 0 branch would raise NameError. Confirm
        against the original source.
        """
        # old data, stripped of any zero-padding on its left and right edges:
        od = self.wavedata[sid, :nchans]
        lefti, righti = lrrep2Darrstripis(od)
        od = od[:, lefti:righti]
        width = od.shape[1]
        if not width <= rd.shape[1]:
            print('')
            print("WARNING: od.shape[1]=%d > rd.shape[1]=%d for sid %d" %
                  (od.shape[1], rd.shape[1], sid))
            return
        # search for the old data within the reloaded data via a rolling window:
        odinndis = np.where((rollwin2D(rd, width) == od).all(axis=1).all(axis=1))[0]
        if len(odinndis) == 0: # no match; assume no time offset
            dnt = 0
        elif len(odinndis) == 1: # unique match; offset in timepoints
            odinndi = odinndis[0]
            dnt = odinndi - lefti
        else:
            raise RuntimeError("Multiple hits of old data in new, don't know "
                               "how to reload spike %d" % sid)
        newrd, fixed = rd, False
        if dnt != 0:
            dt = intround(dnt * self.tres) # time to correct by, signed, in us
            spikes['t'][sid] += dt # should remain halfway between t0 and t1
            spikes['t0'][sid] += dt
            spikes['t1'][sid] += dt
            # peak time indices shift with the opposite sign, since they index
            # within wavedata:
            spikes['phasetis'][sid] = spikes['phasetis'][sid] - dnt
            # re-slice the reloaded data at the corrected time range:
            spike = spikes[sid]
            newrd = tempwave[spike['t0']:spike['t1']][chans].data
            fixed = True
        return newrd, fixed
def reload_spikes_and_templates(self, sids, usemeanchans=False):
self.reload_spikes(sids, usemeanchans=usemeanchans)
unids = np.unique(self.spikes['nid'][sids])
unids = unids[unids != 0]
neurons = [ self.neurons[nid] for nid in unids ]
for neuron in neurons:
neuron.update_wave() # update affected mean waveforms
def init_spike_alignment(self):
print('Setting initial spike alignment points')
ntis, nalignis = {}, {} # tis and aligni derived from each neuron's mean waveform
for neuron in self.neurons.values():
nwave = neuron.get_wave()
mintis = nwave.data.argmin(axis=1)
maxtis = nwave.data.argmax(axis=1)
ntis[neuron.id] = np.column_stack([mintis, maxtis])
nalignis[neuron.id] = np.argmin([mintis.std(), maxtis.std()])
AD2uV = self.converter.AD2uV
for s, wd in zip(self.spikes, self.wavedata):
sid = s['id']
if sid % 100000 == 0:
printflush(sid, end='')
elif sid % 10000 == 0:
printflush('.', end='')
nid = s['nid']
nchans = s['nchans']
chans = s['chans'][:nchans]
neuronchans = self.neurons[nid].wave.chans
assert (chans == neuronchans).all()
s['tis'][:nchans] = ntis[nid]
s['aligni'] = nalignis[nid]
maxchani = s['chani']
t0i, t1i = int(s['tis'][maxchani, 0]), int(s['tis'][maxchani, 1])
s['dt'] = abs(t1i - t0i) / self.sampfreq * 1e6
s['V0'], s['V1'] = AD2uV(wd[maxchani, t0i]), wd[maxchani, t1i]
s['Vpp'] = abs(s['V1'] - s['V0'])
print()
    def spatially_localize_spikes(self, sortwin, method='fit'):
        """Estimate a spatial position (x0, y0) and spread (sx, sy) for every
        spike — either by fitting the extractor's model function ('fit') or by
        a Vpp-weighted spatial mean ('mean') — and derive each spike's "lockout"
        channel set for raster display. Spikes whose fit is rejected are moved
        out of their neuron into the unsorted list. Return the number rejected.

        NOTE(review): the loop header below is truncated in this copy of the
        source — the fragment `wavedata):` is presumably the tail of something
        like `for s, wd in zip(self.spikes, self.wavedata):`. Also note
        time.clock() was removed in Python 3.8; modern Python needs
        time.perf_counter(). Confirm against the original source.
        """
        det = self.detector
        weights2f = self.extractor.weights2spatial
        weights2spatialmean = self.extractor.weights2spatialmean
        f = self.extractor.f
        nreject = 0 # count of spikes rejected by the fit
        print('Running spatial localization on all %d spikes' % self.nspikes)
        tstart = time.clock()
        wavedata):
            # see core.rowtake() or util.rowtake_cy() for indexing explanation:
            sid = s['id']
            # print out progress on a regular basis:
            if sid % 10000 == 0:
                printflush(sid, end='')
            elif sid % 1000 == 0:
                printflush('.', end='')
            chan = s['chan']
            nchans = s['nchans']
            chans = s['chans'][:nchans]
            maxchani = s['chani']
            chanis = det.chans.searchsorted(chans)
            # per-chan weights: sample the waveform at both peak timepoints:
            w = np.float32(wd[np.arange(s['nchans'])[:, None], s['tis'][:nchans]]) # nchans x 2
            w = abs(w).sum(axis=1) # Vpp for each chan, measured at t0i and t1i
            x = det.siteloc[chanis, 0] # 1D array (row)
            y = det.siteloc[chanis, 1]
            if method == 'fit':
                # localize by fitting extractor.f function to wavedata
                params = weights2f(f, w, x, y, maxchani)
            elif method == 'mean':
                # set localization to Vpp-weighted spatial mean and 0 sigma:
                x0, y0 = weights2spatialmean(w, x, y)
                # a very ad-hoc guess for spatial sigma:
                sx = 2 * dist((x0, y0), self.probe.SiteLoc[chan])
                params = x0, y0, sx, sx
            else:
                # NOTE(review): unknown method only prints; params would then be
                # unbound (or stale) at the check below:
                print('Unknown method %r' % method)
            if params == None: # presumably a non-localizable many-channel noise event
                #printflush('X', end='') # to indicate a rejected spike
                if DEBUG:
                    spiket = intround(s['t']) # nearest us
                    det.log("Reject spike %d at t=%d based on fit params" % (sid, spiket))
                neuron = self.neurons[s['nid']]
                # remove from its neuron, add to unsorted list of spikes:
                sortwin.MoveSpikes2List(neuron, [sid], update=False)
                # manually set localization params to Vpp-weighted spatial mean and 0 sigma:
                x0, y0 = weights2spatialmean(w, x, y)
                # set sigma to 0 um, and then later round lockr up to 1 um so that only one
                # raster tick shows up for each rejected spike, reducing clutter
                params = x0, y0, 0, 0
                nreject += 1
            # Save spatial fit params, and "lockout" only the channels within lockrx*sx
            # of the fit spatial location of the spike, up to a max of inclr. "Lockout"
            # in this case only refers to which channels are highlighted with a raster tick
            # for each spike:
            s['x0'], s['y0'], s['sx'], s['sy'] = params
            x0, y0 = s['x0'], s['y0']
            # lockout radius for this spike:
            lockr = min(det.lockrx*s['sx'], det.inclr) # in um
            lockr = max(lockr, 1) # at least 1 um, so at least the maxchan gets a tick
            # test y coords of chans in y array, ylockchaniis can be used to index
            # into x, y and chans:
            ylockchaniis, = np.where(np.abs(y - y0) <= lockr) # convert bool arr to int
            # test Euclid distance from x0, y0 for each ylockchani:
            lockchaniis = ylockchaniis.copy()
            for ylockchanii in ylockchaniis:
                if dist((x[ylockchanii], y[ylockchanii]), (x0, y0)) > lockr:
                    # Euclidean distance is too great, remove ylockchanii from lockchaniis:
                    lockchaniis = lockchaniis[lockchaniis != ylockchanii]
            lockchans = chans[lockchaniis]
            nlockchans = len(lockchans)
            s['lockchans'][:nlockchans], s['nlockchans'] = lockchans, nlockchans
        print('Spatial localization of spikes took %.3f s' % (time.clock() - tstart))
        return nreject
class Neuron(object):
    """A single sorted unit: a set of member spike indices (sids) belonging to
    the parent Sort, plus the mean waveform they generate."""
    def __init__(self, sort, id=None):
        self.sort = sort
        self.id = id # neuron id
        self.wave = WaveForm() # starts out empty; filled in by update_wave()
        self.sids = np.array([], dtype=int) # indices of member spikes
        # relative reference timestamp, kept for symmetry with spike records
        # (obj.t comes up sometimes):
        self.t = 0
        self.plt = None # the Plot currently holding self, if any
        self.cluster = None
        self.good = False # user may flag this neuron as "good"

    def get_chans(self):
        """Channels of the mean waveform, computing the waveform if needed."""
        if self.wave.data is None:
            self.update_wave()
        return self.wave.chans

    chans = property(get_chans)

    def get_chan(self):
        """Channel with the largest peak-to-peak amplitude in the mean
        waveform, computing the waveform if needed."""
        if self.wave.data is None:
            self.update_wave()
        maxchani = self.wave.data.ptp(axis=1).argmax()
        return self.wave.chans[maxchani]

    chan = property(get_chan)

    def get_nspikes(self):
        """Number of member spikes."""
        return len(self.sids)

    nspikes = property(get_nspikes)

    def __getstate__(self):
        """Return the pickling state, excluding the plot binding — that will
        change anyway on unpickle."""
        state = self.__dict__.copy()
        state['plt'] = None
        return state

    def get_wave(self):
        """Return the mean waveform, recalculating it if missing or stale."""
        # many neuron waveforms saved in old .sort files won't have a wave.std field:
        try:
            self.wave.std
        except AttributeError:
            return self.update_wave()
        if self.wave == None or self.wave.data is None or self.wave.std is None:
            return self.update_wave()
        return self.wave

    def update_wave(self):
        """Recalculate and return the mean waveform over all member spikes."""
        sort = self.sort
        spikes = sort.spikes
        if len(self.sids) == 0:
            raise RuntimeError("n%d has no spikes and its waveform can't be updated" % self.id)
        meanwave = sort.get_mean_wave(self.sids, nid=self.id)
        # copy the result into self's WaveForm object:
        wave = self.wave
        wave.data = meanwave.data
        wave.std = meanwave.std
        wave.ts = sort.twts.copy()
        wave.chans = meanwave.chans
        wave.tres = sort.tres
        return wave

    def __sub__(self, other):
        """Difference of self's and other's mean waveforms on their common
        channels."""
        mine, theirs = self.getCommonWaveData(other.chan, other.chans,
                                              other.wave.data)
        return mine - theirs

    def getCommonWaveData(self, otherchan, otherchans, otherwavedata):
        """Return self's mean wavedata and otherwavedata, each restricted to
        the channels the two have in common. otherchan is the other unit's
        maxchan; both maxchans must be among the common channels."""
        chans = np.intersect1d(self.chans, otherchans, assume_unique=True)
        if len(chans) == 0:
            raise ValueError('No common chans')
        if self.chan not in chans or otherchan not in chans:
            raise ValueError("maxchans aren't part of common chans")
        selfchanis = self.chans.searchsorted(chans)
        otherchanis = otherchans.searchsorted(chans)
        return self.wave.data[selfchanis], otherwavedata[otherchanis]
class PTCSHeader(object):
    """Header of a .ptcs (polytrode clustered spikes) export file.

    Collects sort, stream and probe metadata at construction time, and
    serializes it with write() in a fixed binary field order (numeric fields
    written via numpy .tofile(); string fields pre-padded to 8-byte alignment).
    """
    FORMATVERSION = 3 # overall .ptcs file format version, not header format version
    def __init__(self, sort, sortpath, stream, nneurons, nspikes, nsamplebytes,
                 fullfname, exportdt, user='', notes=''):
        """Gather metadata for export; string fields are padded with pad()."""
        self.sort = sort
        self.stream = stream
        self.nneurons = nneurons
        self.nspikes = nspikes
        self.nsamplebytes = nsamplebytes
        # strip the home dir prefix from the exported file's full name:
        homelessfullfname = lstrip(fullfname, os.path.expanduser('~'))
        sortfname = sort.fname
        sortfullfname = os.path.join(sortpath, sortfname)
        # .sort file modification time, as a string:
        sortfmoddt = str(datetime.datetime.fromtimestamp(os.path.getmtime(sortfullfname)))
        sortfmoddt = sortfmoddt.split('.')[0] # ditch the us
        sortfsize = os.path.getsize(sortfullfname) # in bytes
        # free-form description dict, stored in the header as its str() repr:
        d = {'file_type': '.ptcs (polytrode clustered spikes) file',
             'original_fname': homelessfullfname, 'export_time': exportdt,
             'sort': {'fname': sortfname, 'path': sortpath,
                      'fmtime': sortfmoddt, 'fsize': sortfsize},
             'user': user, 'notes': notes}
        descr = str(d)
        self.descr = pad(descr, align=8)
        self.srcfname = pad(lstrip(stream.fname, '../'), align=8)
        self.pttype = pad(stream.probe.name, align=8)
        self.dt = stream.datetime
        self.dtstr = pad(self.dt.isoformat(), align=8)
    def write(self, f):
        """Write the header fields to open file f, in .ptcs field order."""
        s = self.sort
        np.int64(self.FORMATVERSION).tofile(f) # formatversion
        np.uint64(len(self.descr)).tofile(f) # ndescrbytes
        f.write(self.descr) # descr
        np.uint64(self.nneurons).tofile(f) # nneurons
        np.uint64(self.nspikes).tofile(f) # nspikes
        np.uint64(self.nsamplebytes).tofile(f) # nsamplebytes
        np.uint64(s.sampfreq).tofile(f) # samplerate
        np.uint64(len(self.pttype)).tofile(f) # npttypebytes
        f.write(self.pttype) # pttype
        np.uint64(s.stream.probe.nchans).tofile(f) # nptchans
        np.float64(s.stream.probe.siteloc_arr()).tofile(f) # chanpos
        np.uint64(len(self.srcfname)).tofile(f) # nsrcfnamebytes
        f.write(self.srcfname) # srcfname
        np.float64(td2days(self.dt - EPOCH)).tofile(f) # datetime (in days)
        np.uint64(len(self.dtstr)).tofile(f) # ndatetimestrbytes
        f.write(self.dtstr) # datetime string
class PTCSNeuronRecord(object):
    """A single neuron's record in a .ptcs file: id, cluster position, channel
    set, mean waveform and its std (both in uV), and member spike timestamps.
    Waveform sample dtype is selected by nsamplebytes (2, 4 or 8 bytes)."""
    def __init__(self, neuron, spikets=None, nsamplebytes=None, descr=''):
        n = neuron
        AD2uV = n.sort.converter.AD2uV # converts AD units to microvolts
        self.neuron = neuron
        self.spikets = spikets # constrained to stream range, may be < neuron.sids
        self.wavedtype = {2: np.float16, 4: np.float32, 8: np.float64}[nsamplebytes]
        if n.wave.data is None or n.wave.std is None: # some may have never been displayed
            n.update_wave()
        # wavedata and wavestd are nchans * nt * nsamplebytes long:
        self.wavedata = pad(self.wavedtype(AD2uV(n.wave.data)), align=8)
        self.wavestd = pad(self.wavedtype(AD2uV(n.wave.std)), align=8)
        self.descr = pad(descr, align=8)
    def write(self, f):
        """Write this neuron record to open file f, in .ptcs field order."""
        n = self.neuron
        np.int64(n.id).tofile(f) # nid
        np.uint64(len(self.descr)).tofile(f) # ndescrbytes
        f.write(self.descr) # descr, bytes
        np.float64(np.nan).tofile(f) # clusterscore
        np.float64(n.cluster.pos['x0']).tofile(f) # xpos (um)
        np.float64(n.cluster.pos['y0']).tofile(f) # ypos (um)
        np.float64(n.cluster.pos['sx']).tofile(f) # sigma (um)
        np.uint64(len(n.wave.chans)).tofile(f) # nchans
        np.uint64(n.wave.chans).tofile(f) # chanids
        np.uint64(n.chan).tofile(f) # maxchanid
        np.uint64(len(n.wave.ts)).tofile(f) # nt
        np.uint64(self.wavedata.nbytes).tofile(f) # nwavedatabytes
        self.wavedata.tofile(f) # wavedata
        np.uint64(self.wavestd.nbytes).tofile(f) # nwavestdbytes
        self.wavestd.tofile(f) # wavestd
        np.uint64(len(self.spikets)).tofile(f) # nspikes
        np.uint64(self.spikets).tofile(f) # spike timestamps (us)
class PanelScrollArea(QtGui.QScrollArea):
    """A QScrollArea that forwards ENTER/RETURN directly to the plot button.

    The ENTER key needs to be handled here to directly call plot, unlike in
    sortwin where the event is passed on to be handled by the list widgets.
    """
    def keyPressEvent(self, event):
        if event.key() not in (Qt.Key_Enter, Qt.Key_Return):
            QtGui.QScrollArea.keyPressEvent(self, event) # pass it on
            return
        sortwin = self.topLevelWidget()
        sortwin.parent().ui.plotButton.click()
class SortWindow(SpykeToolWindow):
    def __init__(self, parent, pos=None):
        """Build the Sort window: a vertical splitter of slider + neuron list +
        sorted/unsorted spike lists on the left, and a scrollable spike sort
        panel on the right, plus the toolbar. parent is the main spyke window;
        pos is the initial window position (x, y)."""
        SpykeToolWindow.__init__(self, parent, flags=QtCore.Qt.Tool)
        self.spykewindow = parent
        ncols = self.sort.probe.ncols
        nrows = self.sort.probe.nrows
        # try and allow the same amount of horizontal space per column for 2 and 3 col probes:
        if ncols <= 2:
            self.MAINSPLITTERPOS = 300
        else:
            self.MAINSPLITTERPOS = 265 # move it more to the left
        # make horizontal sort slider use as little vertical space as possible
        self.VSPLITTERPOS = 1
        # size the panel to the probe geometry:
        panelwidth = PANELWIDTHPERCOLUMN * ncols
        panelheight = PANELHEIGHTPERROW * nrows
        width = max(self.MAINSPLITTERPOS + panelwidth + VSCROLLBARWIDTH, MINSORTWINDOWWIDTH)
        size = (width, SORTWINDOWHEIGHT)
        self.setWindowTitle('Sort Window')
        self.move(*pos)
        self.resize(*size)
        self._source = None # source cluster for comparison
        # sliding spike-selection time window control:
        self.slider = SpikeSelectionSlider(Qt.Horizontal, self)
        self.slider.setInvertedControls(True)
        self.slider.setToolTip('Position of sliding spike selection time window')
        self.connect(self.slider, QtCore.SIGNAL('valueChanged(int)'),
                     self.on_slider_valueChanged)
        self.connect(self.slider, QtCore.SIGNAL('sliderPressed()'),
                     self.on_slider_sliderPressed)
        # the three list views: neurons, sorted spikes, unsorted spikes:
        self.nlist = NList(self)
        self.nlist.setToolTip('Neuron list')
        self.nslist = NSList(self)
        self.nslist.setToolTip('Sorted spike list')
        self.uslist = USList(self) # should really be multicolumn tableview
        self.uslist.setToolTip('Unsorted spike list')
        tw = self.spykewindow.sort.tw
        # waveform plotting panel, inside a scroll area so it can exceed the
        # visible region:
        self.panel = SpikeSortPanel(self, tw=tw)
        self.panel.setMinimumSize(QtCore.QSize(panelwidth, panelheight))
        self.panelscrollarea = PanelScrollArea(self)
        self.panelscrollarea.setWidget(self.panel)
        self.panelscrollarea.setMinimumWidth(panelwidth + VSCROLLBARWIDTH)
        self.panelscrollarea.setWidgetResizable(True) # allows panel to size bigger than min
        # assemble the splitters and main layout:
        self.vsplitter = QtGui.QSplitter(Qt.Vertical)
        self.vsplitter.addWidget(self.slider)
        self.vsplitter.addWidget(self.nlist)
        self.vsplitter.addWidget(self.nslist)
        self.vsplitter.addWidget(self.uslist)
        self.mainsplitter = QtGui.QSplitter(Qt.Horizontal)
        self.mainsplitter.addWidget(self.vsplitter)
        self.mainsplitter.addWidget(self.panelscrollarea)
        self.layout = QtGui.QVBoxLayout()
        self.layout.setContentsMargins(0, 0, 0, 0)
        self.layout.addWidget(self.mainsplitter)
        mainwidget = QtGui.QWidget(self)
        mainwidget.setLayout(self.layout)
        self.setCentralWidget(mainwidget)
        self.toolbar = self.setupToolbar()
        self.addToolBar(self.toolbar)
    def setupToolbar(self):
        """Create and return the Sort window toolbar: one QAction or widget per
        sorting operation (delete, merge, split, align, shift, reload, ...),
        each wired to its on_*_triggered handler.

        NOTE(review): the actionRenumber lines below are corrupted in this copy
        of the source — the string literals are truncated (most likely '#'
        characters inside them, for the cluster-renumber action, were stripped
        as comments). Confirm against the original source.
        """
        toolbar = QtGui.QToolBar(self)
        toolbar.setObjectName('toolbar')
        toolbar.setFloatable(True)
        toolbar.setIconSize(QtCore.QSize(16, 16)) # like in main spyke window
        actionDelete = QAction(QIcon('res/edit-delete.svg'), 'Del', self)
        tt = ('<nobr><b>Del</b> Delete selected spikes or clusters</nobr>\n'
              '<nobr><b>CTRL+Del</b> Delete selected spikes</nobr>')
        actionDelete.setToolTip(tt)
        self.connect(actionDelete, QtCore.SIGNAL('triggered()'),
                     self.on_actionDelete_triggered)
        toolbar.addAction(actionDelete)
        actionMergeClusters = QAction('M', self)
        tt = '<nobr><b>M</b> Merge clusters</nobr>'
        actionMergeClusters.setToolTip(tt)
        self.connect(actionMergeClusters, QtCore.SIGNAL('triggered()'),
                     self.on_actionMergeClusters_triggered)
        toolbar.addAction(actionMergeClusters)
        #actionToggleClustersGood = QAction(QIcon('res/dialog-apply.svg'), 'G', self)
        actionToggleClustersGood = QAction('G', self)
        tt = '<nobr><b>G</b> Toggle clusters as "good"</nobr>'
        actionToggleClustersGood.setToolTip(tt)
        self.connect(actionToggleClustersGood, QtCore.SIGNAL('triggered()'),
                     self.on_actionToggleClustersGood_triggered)
        toolbar.addAction(actionToggleClustersGood)
        actionSplit = QAction('+', self)
        tt = '<nobr><b>+</b> Split off selected spikes</nobr>'
        actionSplit.setToolTip(tt)
        self.connect(actionSplit, QtCore.SIGNAL('triggered()'),
                     self.on_actionSplit_triggered)
        toolbar.addAction(actionSplit)
        actionLabelMultiunit = QAction('-', self)
        tt = '<nobr><b>-</b> Label clusters as multiunit</nobr>'
        actionLabelMultiunit.setToolTip(tt)
        self.connect(actionLabelMultiunit, QtCore.SIGNAL('triggered()'),
                     self.on_actionLabelMultiunit_triggered)
        toolbar.addAction(actionLabelMultiunit)
        actionChanSplitClusters = QAction('/', self)
        tt = '<nobr><b>/</b> Split clusters by channels</nobr>'
        actionChanSplitClusters.setToolTip(tt)
        self.connect(actionChanSplitClusters, QtCore.SIGNAL('triggered()'),
                     self.on_actionChanSplitClusters_triggered)
        toolbar.addAction(actionChanSplitClusters)
        actionDensitySplit = QAction('P', self)
        tt = ('<nobr><b>P</b> Split cluster pair by density along line between '
              'their centers</nobr>')
        actionDensitySplit.setToolTip(tt)
        self.connect(actionDensitySplit, QtCore.SIGNAL('triggered()'),
                     self.on_actionDensitySplit_triggered)
        toolbar.addAction(actionDensitySplit)
        actionRandomSplit = QAction('\\', self)
        tt = ('<nobr><b>\\</b> Randomly split each selected cluster in half</nobr>')
        actionRandomSplit.setToolTip(tt)
        self.connect(actionRandomSplit, QtCore.SIGNAL('triggered()'),
                     self.on_actionRandomSplit_triggered)
        toolbar.addAction(actionRandomSplit)
        # NOTE(review): the next four lines are corrupted — see docstring above:
        #actionRenumber = QAction(QIcon('res/gtk-edit.svg'), '
        actionRenumber = QAction('
        tt = ('<nobr><b>
              '<nobr><b>CTRL+
        actionRenumber.setToolTip(tt)
        self.connect(actionRenumber, QtCore.SIGNAL('triggered()'),
                     self.on_actionRenumber_triggered)
        toolbar.addAction(actionRenumber)
        actionFind = QAction(QIcon('res/edit-find.svg'), 'Find', self)
        tt = ('<nobr><b>CTRL+F</b> Find spike in cluster plot</nobr>')
        actionFind.setToolTip(tt)
        self.connect(actionFind, QtCore.SIGNAL('triggered()'),
                     self.on_actionFind_triggered)
        toolbar.addAction(actionFind)
        actionSelectRandomSpikes = QAction('R', self)
        tt = '<nobr><b>R</b> Select random sample of spikes of current clusters</nobr>'
        actionSelectRandomSpikes.setToolTip(tt)
        self.connect(actionSelectRandomSpikes, QtCore.SIGNAL('triggered()'),
                     self.on_actionSelectRandomSpikes_triggered)
        toolbar.addAction(actionSelectRandomSpikes)
        actionToggleErrors = QAction('E', self)
        actionToggleErrors.setCheckable(True)
        actionToggleErrors.setChecked(self.panel.enable_fills)
        tt = '<nobr><b>CTRL+E</b> Toggle visibility of template error limits</nobr>'
        actionToggleErrors.setToolTip(tt)
        self.connect(actionToggleErrors, QtCore.SIGNAL('toggled(bool)'),
                     self.on_actionToggleErrors_toggled)
        toolbar.addAction(actionToggleErrors)
        self.actionToggleErrors = actionToggleErrors
        # combo box: number of spikes per cluster to randomly select:
        nsamplesComboBox = QtGui.QComboBox(self)
        nsamplesComboBox.setToolTip('Number of spikes per cluster to randomly select')
        nsamplesComboBox.setFocusPolicy(Qt.NoFocus)
        nsamplesComboBox.addItems(['100', '50', '20', '10', '5', '1'])
        nsamplesComboBox.setCurrentIndex(2)
        toolbar.addWidget(nsamplesComboBox)
        self.connect(nsamplesComboBox, QtCore.SIGNAL('activated(int)'),
                     self.on_actionSelectRandomSpikes_triggered)
        self.nsamplesComboBox = nsamplesComboBox
        # combo box: waveform display gain:
        gainComboBox = QtGui.QComboBox(self)
        gainComboBox.setToolTip('Waveform gain (default: 1.5)')
        gainComboBox.setFocusPolicy(Qt.NoFocus)
        gainComboBox.addItems(['4', '3.75', '3.5', '3.25', '3', '2.75', '2.5', '2.25', '2',
                               '1.75', '1.5', '1.25', '1', '0.75', '0.5', '0.25'])
        gainComboBox.setCurrentIndex(3)
        toolbar.addWidget(gainComboBox)
        self.connect(gainComboBox, QtCore.SIGNAL('activated(int)'),
                     self.on_gainComboBox_triggered)
        self.gainComboBox = gainComboBox
        #actionAlignMin = QAction(QIcon('res/go-bottom.svg'), 'Min', self)
        actionAlignMin = QAction('Min', self)
        actionAlignMin.setToolTip('Align selected spikes to min')
        self.connect(actionAlignMin, QtCore.SIGNAL('triggered()'),
                     self.on_actionAlignMin_triggered)
        toolbar.addAction(actionAlignMin)
        #actionAlignMax = QAction(QIcon('res/go-top.svg'), 'Max', self)
        actionAlignMax = QAction('Max', self)
        actionAlignMax.setToolTip('Align selected spikes to max')
        self.connect(actionAlignMax, QtCore.SIGNAL('triggered()'),
                     self.on_actionAlignMax_triggered)
        toolbar.addAction(actionAlignMax)
        #actionAlignBest = QAction(QIcon('res/emblem-OK.png'), 'Best', self)
        actionAlignBest = QAction('B', self)
        tt = '<nobr><b>B</b> Align selected spikes by best fit</nobr>'
        actionAlignBest.setToolTip(tt)
        self.connect(actionAlignBest, QtCore.SIGNAL('triggered()'),
                     self.on_actionAlignBest_triggered)
        toolbar.addAction(actionAlignBest)
        actionShiftLeft = QAction('[', self)
        tt = ('<nobr><b>[</b> Shift selected spikes 2 points left</nobr>\n'
              '<nobr><b>CTRL+[</b> Shift selected spikes 1 point left</nobr>')
        actionShiftLeft.setToolTip(tt)
        self.connect(actionShiftLeft, QtCore.SIGNAL('triggered()'),
                     self.on_actionShiftLeft_triggered)
        toolbar.addAction(actionShiftLeft)
        actionShiftRight = QAction(']', self)
        tt = ('<nobr><b>]</b> Shift selected spikes 2 points right</nobr>\n'
              '<nobr><b>CTRL+]</b> Shift selected spikes 1 point right</nobr>')
        actionShiftRight.setToolTip(tt)
        self.connect(actionShiftRight, QtCore.SIGNAL('triggered()'),
                     self.on_actionShiftRight_triggered)
        toolbar.addAction(actionShiftRight)
        # combo box: waveform duration to include for component analysis:
        incltComboBox = QtGui.QComboBox(self)
        incltComboBox.setToolTip("Waveform duration (us) to include for component "
                                 "analysis,\nasymmetric around spike time")
        incltComboBox.setFocusPolicy(Qt.NoFocus)
        dtw = self.sort.tw[1] - self.sort.tw[0] # spike time window width
        incltstep = intround(dtw / 10) # evenly spaced inclt values
        incltvals = np.arange(dtw, 0, -incltstep)
        incltComboBox.addItems([ str(incltval) for incltval in incltvals ])
        incltComboBox.setCurrentIndex(0)
        toolbar.addWidget(incltComboBox)
        self.connect(incltComboBox, QtCore.SIGNAL('activated(int)'),
                     self.on_incltComboBox_triggered)
        self.incltComboBox = incltComboBox
        #incltunitsLabel = QtGui.QLabel('us', self)
        #toolbar.addWidget(incltunitsLabel)
        # spin box: number of PCs per channel to feed into ICA:
        nPCsPerChanSpinBox = QtGui.QSpinBox(self)
        nPCsPerChanSpinBox.setToolTip("Number of PCs to use per channel to feed into ICA")
        nPCsPerChanSpinBox.setFocusPolicy(Qt.NoFocus)
        toolbar.addWidget(nPCsPerChanSpinBox)
        nPCsPerChanSpinBox.setMinimum(1)
        self.connect(nPCsPerChanSpinBox, QtCore.SIGNAL('valueChanged(int)'),
                     self.on_nPCsPerChanSpinBox_valueChanged)
        nPCsPerChanSpinBox.setValue(self.sort.npcsperchan)
        self.nPCsPerChanSpinBox = nPCsPerChanSpinBox
        #actionFindPrevMostSimilar = QAction(QIcon('res/go-previous.svg'), '<', self)
        actionFindPrevMostSimilar = QAction('<', self)
        tt = '<nobr><b><</b> Find previous most similar cluster</nobr>'
        actionFindPrevMostSimilar.setToolTip(tt)
        self.connect(actionFindPrevMostSimilar, QtCore.SIGNAL('triggered()'),
                     self.on_actionFindPrevMostSimilar_triggered)
        toolbar.addAction(actionFindPrevMostSimilar)
        #actionFindNextMostSimilar = QAction(QIcon('res/go-next.svg'), '>', self)
        actionFindNextMostSimilar = QAction('>', self)
        tt = '<nobr><b>></b> Find next most similar cluster</nobr>'
        actionFindNextMostSimilar.setToolTip(tt)
        self.connect(actionFindNextMostSimilar, QtCore.SIGNAL('triggered()'),
                     self.on_actionFindNextMostSimilar_triggered)
        toolbar.addAction(actionFindNextMostSimilar)
        actionReloadSpikes = QAction(QIcon('res/view-refresh.svg'), 'Reload', self)
        tt = ('<nobr><b>F5</b> Reload waveforms of selected spikes. '
              'If none selected, reload all</nobr>\n'
              '<nobr><b>CTRL+F5</b> Use mean waveform to choose chans to reload</nobr>')
        actionReloadSpikes.setToolTip(tt)
        self.connect(actionReloadSpikes, QtCore.SIGNAL('triggered()'),
                     self.on_actionReloadSpikes_triggered)
        toolbar.addAction(actionReloadSpikes)
        actionSave = QAction(QIcon('res/document-save.svg'), '&Save', self)
        actionSave.setToolTip('Save sort panel to file')
        self.connect(actionSave, QtCore.SIGNAL('triggered()'),
                     self.on_actionSave_triggered)
        toolbar.addAction(actionSave)
        return toolbar
    def get_sort(self):
        """Return the Sort object owned by the top-level spyke window."""
        return self.spykewindow.sort
    sort = property(get_sort) # make this a property for proper behaviour after unpickling
    def closeEvent(self, event):
        """Intercept window close: hide the Sort window via the main spyke
        window instead of destroying it, preserving its state."""
        self.spykewindow.HideWindow('Sort')
def mousePressEvent(self, event):
buttons = event.buttons()
if buttons == QtCore.Qt.MiddleButton:
#self.on_actionSelectRandomSpikes_triggered()
self.spykewindow.ui.plotButton.click() # same as hitting ENTER in nslist
elif buttons == QtCore.Qt.RightButton:
self.clear()
    def keyPressEvent(self, event):
        """Keyboard dispatch for the Sort window.

        Most single-letter shortcuts forward to the matching action handler or
        main-window button. Keys marked "ignored in SpykeListViews" are the
        ones the child list views deliberately let bubble up to this handler.
        Unhandled keys fall through to SpykeToolWindow.keyPressEvent.
        """
        key = event.key()
        modifiers = event.modifiers()
        ctrl = modifiers & Qt.ControlModifier # ctrl is down
        spw = self.spykewindow
        if key == Qt.Key_A: # ignored in SpykeListViews
            spw.ui.plotButton.click() # same as hitting ENTER in nslist
        elif key == Qt.Key_X: # ignored in SpykeListViews
            spw.ui.plotXcorrsButton.click()
        elif key == Qt.Key_N: # ignored in SpykeListViews
            spw.ui.normButton.click()
        elif key == Qt.Key_Escape: # deselect all spikes and all clusters
            self.clear()
        elif key == Qt.Key_Delete:
            self.on_actionDelete_triggered()
        elif key == Qt.Key_M: # ignored in SpykeListViews
            self.on_actionMergeClusters_triggered()
        elif key == Qt.Key_G: # ignored in SpykeListViews
            self.on_actionToggleClustersGood_triggered()
        elif key == Qt.Key_Equal: # ignored in SpykeListViews
            self.on_actionSplit_triggered()
        elif key == Qt.Key_Minus: # ignored in SpykeListViews
            self.on_actionLabelMultiunit_triggered()
        elif key == Qt.Key_Slash: # ignored in SpykeListViews
            self.on_actionChanSplitClusters_triggered()
        elif key == Qt.Key_P: # ignored in SpykeListViews
            self.on_actionDensitySplit_triggered()
        elif key == Qt.Key_Backslash: # ignored in SpykeListViews
            self.on_actionRandomSplit_triggered()
        elif key == Qt.Key_NumberSign: # ignored in SpykeListViews
            self.on_actionRenumber_triggered()
        elif key == Qt.Key_F: # ignored in SpykeListViews
            if ctrl:
                self.FindSpike()
            else:
                self.FindCluster()
        elif key == Qt.Key_R: # ignored in SpykeListViews
            self.on_actionSelectRandomSpikes_triggered()
        elif key == Qt.Key_Space: # ignored in SpykeListViews
            if ctrl:
                SpykeToolWindow.keyPressEvent(self, event) # pass it on
            else:
                spw.on_clusterButton_clicked()
        elif key == Qt.Key_B: # ignored in SpykeListViews
            self.on_actionAlignBest_triggered()
        elif key == Qt.Key_BracketLeft: # ignored in SpykeListViews
            self.on_actionShiftLeft_triggered()
        elif key == Qt.Key_BracketRight: # ignored in SpykeListViews
            self.on_actionShiftRight_triggered()
        elif key == Qt.Key_Comma: # ignored in SpykeListViews
            self.on_actionFindPrevMostSimilar_triggered()
        elif key == Qt.Key_Period: # ignored in SpykeListViews
            self.on_actionFindNextMostSimilar_triggered()
        elif key == Qt.Key_F5: # ignored in SpykeListViews
            self.on_actionReloadSpikes_triggered()
        elif key == Qt.Key_E: # ignored in SpykeListViews
            if ctrl:
                self.actionToggleErrors.toggle()
            else:
                self.clear() # E is synonymous with ESC
        elif key == Qt.Key_C: # toggle between PCA and ICA, ignored in SpykeListViews
            c = str(spw.ui.componentAnalysisComboBox.currentText())
            if c == 'PCA':
                index = spw.ui.componentAnalysisComboBox.findText('ICA')
                spw.ui.componentAnalysisComboBox.setCurrentIndex(index)
            elif c == 'ICA':
                index = spw.ui.componentAnalysisComboBox.findText('PCA')
                spw.ui.componentAnalysisComboBox.setCurrentIndex(index)
            spw.on_plotButton_clicked()
        elif key == Qt.Key_T: # toggle plotting against time, ignored in SpykeListViews
            z = str(spw.ui.zDimComboBox.currentText())
            if z == 't':
                spw.on_c0c1c2Button_clicked() # plot in pure component analysis space
            else:
                spw.on_c0c1tButton_clicked() # plot against time
        elif key == Qt.Key_W: # toggle plotting against RMSError, ignored in SpykeListViews
            z = str(spw.ui.zDimComboBox.currentText())
            if z == 'RMSerror':
                spw.on_c0c1c2Button_clicked() # plot in pure component analysis space
            else:
                spw.ui.zDimComboBox.setCurrentIndex(3)
                spw.on_plotButton_clicked() # plot against RMSError
        elif key in [Qt.Key_Enter, Qt.Key_Return]:
            # this is handled at a lower level by on_actionItem_triggered
            # in the various listview controls
            pass
        else:
            SpykeToolWindow.keyPressEvent(self, event) # pass it on
    def clear(self):
        """Staged deselection (ESC behaviour), one stage per call, in priority
        order: unsorted-spike selection, then neuron-spike selection, then the
        comparison destination cluster, then junk cluster 0, then all clusters.
        Finally the cluster plot colours are reset to reflect the selection.
        """
        spw = self.spykewindow
        clusters = spw.GetClusters()
        if len(self.uslist.selectedIndexes()) > 0:
            self.uslist.clearSelection()
        elif self.nslist.nrowsSelected > 0:
            self.nslist.clearSelection()
        elif len(clusters) == 2 and self._source in clusters:
            # a cluster-comparison pair is selected: drop only the destination
            clusters.remove(self._source)
            spw.SelectClusters(clusters, on=False)
        elif 0 in spw.GetClusterIDs():
            # deselect the junk cluster (id 0) if it's part of the selection
            for cluster in spw.GetClusters():
                if cluster.id == 0:
                    spw.SelectClusters([cluster], on=False)
                    break
        else:
            self.nlist.clearSelection()
        # reset colours in cluster plot:
        gw = spw.windows['Cluster'].glWidget
        gw.colour()
        gw.updateGL()
def on_actionDelete_triggered(self):
selsids = self.spykewindow.GetSpikes() # IDs of explicitly selected spikes
nselsids = len(selsids)
if (QApplication.instance().keyboardModifiers() & Qt.ControlModifier
or nselsids > 0):
self.delete_spikes()
else:
self.delete_clusters()
    def delete_clusters(self):
        """Delete all currently selected clusters, recording a ClusterChange
        on the undo/redo stack, then select the cluster that takes the place
        of the first deleted one in the cluster order (norder)."""
        spw = self.spykewindow
        clusters = spw.GetClusters()
        s = self.sort
        spikes = s.spikes
        sids = [] # all spike ids belonging to the clusters being deleted
        for cluster in clusters:
            sids.append(cluster.neuron.sids)
        sids = np.concatenate(sids)
        # save some undo/redo stuff
        message = 'delete clusters %r' % [ c.id for c in clusters ]
        cc = ClusterChange(sids, spikes, message)
        cc.save_old(clusters, s.norder, s.good)
        # deselect and delete clusters
        spw.DelClusters(clusters)
        if len(s.clusters) > 0:
            # select cluster that replaces the first of the deleted clusters in norder
            selrows = [ cc.oldnorder.index(oldunid) for oldunid in cc.oldunids ]
            if len(selrows) > 0:
                selrow = selrows[0]
                nlist = spw.windows['Sort'].nlist
                nlist.selectRows(selrow) # TODO: this sets selection, but not focus
            #else: # first of deleted clusters was last in norder, don't select anything
        newclusters = [] # deletion produces no new clusters
        cc.save_new(newclusters, s.norder, s.good)
        spw.AddClusterChangeToStack(cc)
        print(cc.message)
    def delete_spikes(self):
        """Remove the selected spikes outright (SplitSpikes with delete=True)."""
        self.spykewindow.SplitSpikes(delete=True)
    def on_actionSplit_triggered(self):
        """Split the selected spikes into their own cluster, keeping them."""
        self.spykewindow.SplitSpikes(delete=False)
    def on_actionMergeClusters_triggered(self):
        """Merge the selected clusters (plus any selected unsorted spikes) into
        one cluster, recording the change on the undo/redo stack.

        When several single units are selected, the merge keeps the nid of the
        one with the most spikes; otherwise a fresh id is assigned.
        """
        spw = self.spykewindow
        clusters = spw.GetClusters()
        s = self.sort
        spikes = s.spikes
        sids = [] # spike ids from all selected clusters + selected unsorted spikes
        for cluster in clusters:
            sids.append(cluster.neuron.sids)
        sids.append(spw.GetUnsortedSpikes())
        sids = np.concatenate(sids)
        if len(sids) == 0:
            return
        message = 'merge clusters %r' % [ c.id for c in clusters ]
        cc = ClusterChange(sids, spikes, message)
        cc.save_old(clusters, s.norder, s.good)
        newnid = None # None means CreateCluster picks the next free id
        inserti = None
        if len(clusters) == 1:
            # single-unit, multiunit, or junk
            inserti = s.norder.index(clusters[0].id)
        elif len(clusters) > 1:
            oldunids = np.asarray(cc.oldunids)
            suids = oldunids[oldunids > 0] # selected single unit nids
            if len(suids) > 0: # merge into largest selected single unit nid:
                spikecounts = np.asarray([ s.neurons[suid].nspikes for suid in suids ])
                newnid = suids[spikecounts.argmax()]
                inserti = s.norder.index(newnid)
                # correct for shift due to deletion of oldunids that precede newnid in norder:
                inserti -= sum([ s.norder.index(oldunid) < inserti for oldunid in oldunids])
        # delete selected clusters and deselect selected usids
        spw.DelClusters(clusters, update=False)
        self.uslist.clearSelection()
        # create new cluster
        #t0 = time.time()
        newcluster = spw.CreateCluster(update=False, id=newnid, inserti=inserti)
        neuron = newcluster.neuron
        self.MoveSpikes2Neuron(sids, neuron, update=False)
        plotdims = spw.GetClusterPlotDims() # NOTE(review): unused here — confirm before removing
        newcluster.update_pos()
        # save more undo/redo stuff
        cc.save_new([newcluster], s.norder, s.good)
        spw.AddClusterChangeToStack(cc)
        # now do some final updates
        spw.UpdateClustersGUI()
        spw.ColourPoints(newcluster)
        #print('applying clusters to plot took %.3f sec' % (time.time()-t0))
        # select newly created cluster
        spw.SelectClusters(newcluster)
        cc.message += ' into cluster %d' % newcluster.id
        print(cc.message)
def on_actionToggleClustersGood_triggered(self):
spw = self.spykewindow
clusters = spw.GetClusters()
cids = []
for cluster in clusters:
cluster.neuron.good = not cluster.neuron.good
cids.append(cluster.id)
self.nlist.updateAll() # nlist item colouring will change as a result
print("Toggled 'good' flag of clusters %r" % cids)
    def on_actionLabelMultiunit_triggered(self):
        """Relabel the selected single-unit clusters (id > 0) as multiunit
        clusters (negative ids), preserving each cluster's spikes and position
        in norder, and record the change on the undo/redo stack."""
        spw = self.spykewindow
        clusters = spw.GetClusters()
        s = self.sort
        spikes = s.spikes
        # only relabel single unit clusters:
        clusters = [ cluster for cluster in clusters if cluster.id > 0 ]
        if len(clusters) == 0:
            return
        sids = []
        for cluster in clusters:
            sids.append(cluster.neuron.sids)
        sids = np.concatenate(sids)
        # save some undo/redo stuff
        message = 'label as multiunit clusters %r' % [ c.id for c in clusters ]
        cc = ClusterChange(sids, spikes, message)
        cc.save_old(clusters, s.norder, s.good)
        # delete old clusters
        inserti = s.norder.index(clusters[0].id)
        # collect cluster sids before cluster deletion
        sidss = [ cluster.neuron.sids for cluster in clusters ]
        spw.DelClusters(clusters, update=False)
        # create new multiunit clusters, one per old cluster, in the same slot
        newclusters = []
        for sids in sidss:
            muid = s.get_nextmuid()
            newcluster = spw.CreateCluster(update=False, id=muid, inserti=inserti)
            neuron = newcluster.neuron
            self.MoveSpikes2Neuron(sids, neuron, update=False)
            newcluster.update_pos()
            newclusters.append(newcluster)
            inserti += 1
        # select newly labelled multiunit clusters
        spw.SelectClusters(newclusters)
        # save more undo/redo stuff
        cc.save_new(newclusters, s.norder, s.good)
        spw.AddClusterChangeToStack(cc)
        print(cc.message)
    def on_actionChanSplitClusters_triggered(self):
        """Split selected clusters by their max channel."""
        ## TODO: make sure this works on .srf files! Why was chancombosplit being used?
        self.spykewindow.maxchansplit()
        #self.spykewindow.chancombosplit()
    def on_actionDensitySplit_triggered(self):
        """Split selected clusters by point density."""
        self.spykewindow.densitysplit()
    def on_actionRandomSplit_triggered(self):
        """Randomly split the selected clusters."""
        self.spykewindow.randomsplit()
def on_actionRenumber_triggered(self):
if QApplication.instance().keyboardModifiers() & Qt.ControlModifier:
self.renumber_selected_cluster()
else:
self.renumber_all_clusters()
    def renumber_selected_cluster(self):
        """Prompt for a new, unused id for the single selected cluster and
        renumber it everywhere (cluster/neuron dicts, spikes array, norder).
        Clears the undo/redo stack — this operation is not undoable."""
        spw = self.spykewindow
        s = self.sort
        spikes = s.spikes
        cluster = spw.GetCluster() # exactly one selected cluster
        oldid = cluster.id
        newid = max(s.norder) + 1 # suggested default: next id past the current max
        newid, ok = QtGui.QInputDialog.getInt(self, "Renumber cluster",
                    "This will clear the undo/redo stack, and is not undoable.\n"
                    "Enter new ID:", value=newid)
        if not ok:
            return
        if newid in s.norder:
            print("Choose a non-existing nid to renumber to")
            return
        # deselect cluster
        spw.SelectClusters(cluster, on=False)
        # rename to newid
        cluster.id = newid # this indirectly updates neuron.id
        # update cluster and neuron dicts, and spikes array
        s.clusters[newid] = cluster
        s.neurons[newid] = cluster.neuron
        sids = cluster.neuron.sids
        spikes['nid'][sids] = newid
        # remove duplicate oldid dict entries
        del s.clusters[oldid]
        del s.neurons[oldid]
        # replace oldid with newid in norder
        s.norder[s.norder.index(oldid)] = newid
        # update colour of any relevant points in cluster plot
        spw.ColourPoints(cluster)
        # reselect cluster
        spw.SelectClusters(cluster)
        # some cluster changes in stack may no longer be applicable, reset cchanges
        del spw.cchanges[:]
        spw.cci = -1
        print('Renumbered neuron %d to %d' % (oldid, newid))
    def renumber_all_clusters(self):
        """Renumber every cluster so ids are contiguous and ordered by vertical
        position (y0): single units get 1..n, multiunits get -1..-m. Deletes
        junk cluster 0, restores the previous selection under the new ids, and
        clears the undo/redo stack — this operation is not undoable."""
        val = QtGui.QMessageBox.question(self.panel, "Renumber all clusters",
              "Are you sure? This will clear the undo/redo stack, and is not undoable.",
              QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
        if val == QtGui.QMessageBox.No:
            return
        spw = self.spykewindow
        s = self.sort
        spikes = s.spikes
        # get spatially and numerically ordered lists of new ids
        oldids = np.asarray(s.norder)
        oldsuids = oldids[oldids > 0]
        oldmuids = oldids[oldids < 0]
        # this is a bit confusing: find indices that would sort old ids by y pos, but then
        # what you really want is to find the y pos *rank* of each old id, so you need to
        # take argsort again:
        newsuids = np.asarray([ s.clusters[cid].pos['y0']
                                for cid in oldsuids ]).argsort().argsort() + 1
        newmuids = np.asarray([ s.clusters[cid].pos['y0']
                                for cid in oldmuids ]).argsort().argsort() + 1
        newmuids = -newmuids
        # Ordering: multiunit ids first, then single-unit ids, no junk cluster 0.
        # NOTE(review): the original comment here was garbled in this copy; it
        # described a list-widget spacing/redraw bug when multiunit entries come
        # last, making this ordering deliberate — confirm against upstream spyke
        # history before changing it.
        newids = np.concatenate([newmuids, newsuids])
        if np.all(oldids == newids):
            print('Nothing to renumber: cluster IDs already ordered in y0 and contiguous')
            return
        oldids = np.concatenate([oldmuids, oldsuids]) # reorder to match newids
        selclusters = spw.GetClusters()
        oldselids = [ cluster.id for cluster in selclusters ]
        spw.SelectClusters(selclusters, on=False)
        if 0 in s.clusters:
            s.remove_neuron(0)
            print('Deleted junk cluster 0')
        if 0 in oldselids:
            oldselids.remove(0)
        cw = spw.windows['Cluster'] # NOTE(review): cw and dims look unused here — confirm
        oldclusters = s.clusters.copy()
        dims = spw.GetClusterPlotDims()
        # rebind each cluster under its new id (old ids read from the snapshot)
        for oldid, newid in zip(oldids, newids):
            newid = int(newid)
            if oldid == newid:
                continue
            cluster = oldclusters[oldid]
            cluster.id = newid
            s.clusters[newid] = cluster
            s.neurons[newid] = cluster.neuron
            sids = cluster.neuron.sids
            spikes['nid'][sids] = newid
        # drop stale entries for old ids that no new cluster occupies
        for oldid in oldids:
            if oldid not in newids:
                del s.clusters[oldid]
                del s.neurons[oldid]
        # rebuild norder: multiunits descending (-1, -2, ...), then single units ascending
        s.norder = []
        s.norder.extend(sorted([ int(newid) for newid in newmuids ])[::-1])
        s.norder.extend(sorted([ int(newid) for newid in newsuids ]))
        spw.UpdateClustersGUI()
        spw.ColourPoints(s.clusters.values())
        # reselect the clusters that were selected before, under their new ids
        oldiis = [ list(oldids).index(oldselid) for oldselid in oldselids ]
        newselids = newids[oldiis]
        spw.SelectClusters([s.clusters[cid] for cid in newselids])
        del spw.cchanges[:]
        spw.cci = -1
        print('Renumbering complete')
def on_actionFind_triggered(self):
ctrl = QApplication.instance().keyboardModifiers() & Qt.ControlModifier
if ctrl:
self.FindSpike()
else:
self.FindCluster()
    def FindCluster(self):
        """Pan the cluster plot to the single selected cluster's position."""
        spw = self.spykewindow
        try:
            cluster = spw.GetCluster()
        except RuntimeError as err:
            # not exactly one cluster selected
            print(err)
            return
        gw = spw.windows['Cluster'].glWidget
        dims = spw.GetClusterPlotDims()
        gw.focus = np.float32([ cluster.normpos[dim] for dim in dims ])
        gw.panTo()
        gw.updateGL()
    def FindSpike(self):
        """Pan the cluster plot to the single selected spike's position."""
        spw = self.spykewindow
        try:
            sid = spw.GetSpike()
        except RuntimeError as err:
            # not exactly one spike selected
            print(err)
            return
        gw = spw.windows['Cluster'].glWidget
        # NOTE(review): searchsorted returns an insertion point; if sid is not
        # present in gw.sids this silently focuses a neighbouring point — confirm
        # callers guarantee the spike is plotted
        pointis = gw.sids.searchsorted(sid)
        gw.focus = gw.points[pointis]
        gw.panTo()
        gw.updateGL()
    def on_actionSelectRandomSpikes_triggered(self):
        """Select nsamples random spikes from nslist if neurons are shown there,
        otherwise from the unsorted-spike list."""
        nsamples = int(self.nsamplesComboBox.currentText())
        if len(self.nslist.neurons) > 0:
            slist = self.nslist
        else:
            slist = self.uslist
        slist.clearSelection() # drop the current selection first
        slist.selectRandom(nsamples)
    def on_gainComboBox_triggered(self):
        """Apply the newly chosen gain to the waveform panel and redraw it."""
        panel = self.panel
        panel.gain = float(self.gainComboBox.currentText())
        panel.do_layout() # resets axes lims and recalcs panel.pos
        panel._update_scale()
        panel.draw_refs()
        panel.updateAllItems()
    def on_actionAlignMin_triggered(self):
        """Align selected spikes on their minimum (trough)."""
        self.Align('min')
    def on_actionAlignMax_triggered(self):
        """Align selected spikes on their maximum (peak)."""
        self.Align('max')
    def on_actionAlignBest_triggered(self):
        """Best-fit align selected spikes (see Align)."""
        self.Align('best')
def on_actionShiftLeft_triggered(self):
if QApplication.instance().keyboardModifiers() & Qt.ControlModifier:
nt = -1
else:
nt = -2
self.Shift(nt)
def on_actionShiftRight_triggered(self):
if QApplication.instance().keyboardModifiers() & Qt.ControlModifier:
nt = 1
else:
nt = 2
self.Shift(nt)
    def on_incltComboBox_triggered(self):
        """Redraw voltage reference lines for the newly chosen include-time window."""
        self.panel.update_selvrefs()
        self.panel.draw_refs()
        #self.spykewindow.ui.plotButton.click()
    def get_inclt(self):
        """Return the include-time window chosen in the combobox, in us."""
        return float(self.incltComboBox.currentText()) # us
    inclt = property(get_inclt)
    def get_tis(self):
        """Return (start, end) indices into s.twts spanning `inclt` us,
        split around t=0 in the same proportion as the panel's time window."""
        s = self.sort
        inclt = self.inclt # duration to include, asymmetric around t=0 spike time (us)
        tw = self.panel.tw
        dtw = tw[1] - tw[0] # spike time window width
        left = intround(abs(tw[0]) / dtw * inclt) # left fraction wrt t=0 spike time
        right = inclt - left # right fraction wrt t=0 spike time
        tis = s.twts.searchsorted([-left, right])
        return tis
    tis = property(get_tis)
    def on_nPCsPerChanSpinBox_valueChanged(self, val):
        """Propagate the spinbox value to the sort's PCs-per-channel setting."""
        self.sort.npcsperchan = val
    def on_actionReloadSpikes_triggered(self):
        """F5: reload waveforms of the selected spikes (all spikes if none are
        selected). CTRL+F5 uses mean waveforms to choose chans to reload."""
        spw = self.spykewindow
        sids = spw.GetAllSpikes()
        sort = self.sort
        if len(sids) == 0:
            # if no spikes specified, reload all spikes
            sids = sort.spikes['id']
        usemeanchans = False
        if QApplication.instance().keyboardModifiers() & Qt.ControlModifier:
            usemeanchans = True
        sort.reload_spikes_and_templates(sids, usemeanchans=usemeanchans)
        # add sids to the set of dirtysids to be resaved to .wave file:
        spw.update_dirtysids(sids)
        # auto-refresh all plots:
        self.panel.updateAllItems()
    def on_actionFindPrevMostSimilar_triggered(self):
        """'<' toolbar action: step to the previous most-similar cluster."""
        self.findMostSimilarCluster('previous')
    def on_actionFindNextMostSimilar_triggered(self):
        """'>' toolbar action: step to the next most-similar cluster."""
        self.findMostSimilarCluster('next')
    def on_actionToggleErrors_toggled(self, checked):
        """Show or hide error fills in the waveform panel."""
        self.panel.showFills(checked)
    def on_slider_valueChanged(self, slideri):
        """Select a contiguous window of nsamples spikes starting at the
        slider position."""
        self.nslist.clearSelection() # emits selectionChanged signal, .reset() doesn't
        if self.nslist.model().sliding == False:
            # first slide: sort sids so the slider maps onto ordered rows
            self.nslist.model().sids.sort()
            self.nslist.updateAll()
            self.nslist.model().sliding = True
        nsamples = int(self.nsamplesComboBox.currentText())
        rows = np.arange(slideri, slideri+nsamples)
        self.nslist.selectRows(rows)
    def on_slider_sliderPressed(self):
        """When grabbed at the leftmost position, select the first nsamples
        spikes immediately (valueChanged won't fire until it moves)."""
        slideri = self.slider.value()
        if slideri == 0:
            nsamples = int(self.nsamplesComboBox.currentText())
            nsamples = min(nsamples, self.nslist.model().nspikes)
            rows = np.arange(nsamples)
            self.nslist.selectRows(rows)
    def update_slider(self):
        """Resize slider range and step sizes to match the current spike count
        and the nsamples combobox value."""
        nsamples = int(self.nsamplesComboBox.currentText())
        nsids = len(self.nslist.sids)
        ulim = max(nsids-nsamples, 1)
        self.slider.setRange(0, ulim)
        self.slider.setSingleStep(1)
        self.slider.setPageStep(nsamples)
    def findMostSimilarCluster(self, which='next'):
        """Rank all comparable clusters by RMS waveform error against the
        current comparison source cluster, and select the next/previous one
        in that ranking (stepping self._cmpid, clamped to valid range).

        Comparison is restricted to the chans selected in the panel (if any);
        clusters whose chans don't sufficiently overlap are skipped.
        """
        try:
            source = self.getClusterComparisonSource()
        except RuntimeError as err:
            print(err)
            return
        destinations = list(self.sort.clusters.values())
        destinations.remove(source)
        selchans = np.sort(self.panel.chans_selected)
        if len(selchans) > 0:
            srcchans = np.intersect1d(source.neuron.wave.chans, selchans)
            if len(srcchans) == 0:
                print("Source cluster doesn't overlap with selected chans")
                return
        else:
            srcchans = source.neuron.wave.chans
        if self.spykewindow.ui.normButton.isChecked():
            print("NOTE: findMostSimilarCluster() doesn't currently take spike amplitude "
                  "normalization into account. To see the true amplitudes used to compare "
                  "neuron pairs, turn off normalization")
        errors = []
        dests = []
        t0i, t1i = self.tis # time-index window to compare over
        for dest in destinations:
            if dest.neuron.wave.data is None:
                dest.neuron.update_wave()
            dstchans = dest.neuron.wave.chans
            if len(selchans) > 0:
                if not set(selchans).issubset(dstchans):
                    continue
                dstchans = selchans
            cmpchans = np.intersect1d(srcchans, dstchans)
            if len(cmpchans) == 0: # not comparable
                continue
            # ensure maxchan of both source and dest neuron are both in cmpchans
            if source.neuron.chan not in cmpchans or dest.neuron.chan not in cmpchans:
                continue
            srcwavedata = source.neuron.wave[cmpchans].data[:, t0i:t1i]
            dstwavedata = dest.neuron.wave[cmpchans].data[:, t0i:t1i]
            error = core.rms(srcwavedata - dstwavedata)
            errors.append(error)
            dests.append(dest)
        if len(errors) == 0:
            print("No sufficiently overlapping clusters on selected chans to compare to")
            return
        errors = np.asarray(errors)
        dests = np.asarray(dests)
        desterrsortis = errors.argsort() # destinations ranked by similarity
        if which == 'next':
            self._cmpid += 1
        elif which == 'previous':
            self._cmpid -= 1
        else: raise ValueError('Unknown which: %r' % which)
        self._cmpid = max(self._cmpid, 0)
        self._cmpid = min(self._cmpid, len(dests)-1)
        dest = dests[desterrsortis][self._cmpid]
        self.spykewindow.SelectClusters(dest)
        desterr = errors[desterrsortis][self._cmpid]
        print('n%d to n%d rmserror: %.2f uV' %
              (source.id, dest.id, self.sort.converter.AD2uV(desterr)))
    def getClusterComparisonSource(self):
        """Return the cluster to use as the comparison source.

        One selected cluster: it becomes the new source (comparison reset).
        Two selected: the previously stored source is kept and the other
        (destination) cluster is deselected. Anything else raises RuntimeError.
        """
        selclusters = self.spykewindow.GetClusters()
        errmsg = 'unclear which cluster to use as source for comparison'
        if len(selclusters) == 1:
            source = selclusters[0]
            self._source = source
            self._cmpid = -1 # init/reset
        elif len(selclusters) == 2:
            source = self._source
            if source not in selclusters:
                raise RuntimeError(errmsg)
            # deselect old destination cluster:
            selclusters.remove(source)
            self.spykewindow.SelectClusters(selclusters, on=False)
        else:
            self._source = None # reset for tidiness
            raise RuntimeError(errmsg)
        return source
    def Shift(self, nt):
        """Shift all selected (clustered + unsorted) spikes by nt timepoints,
        update affected mean waveforms, mark the spikes dirty for resave, and
        refresh the plots."""
        s = self.sort
        spikes = s.spikes
        spw = self.spykewindow
        sids = np.concatenate((spw.GetClusterSpikes(), spw.GetUnsortedSpikes()))
        self.sort.shift(sids, nt)
        print('Shifted %d spikes by %d timepoints' % (len(sids), nt))
        unids = np.unique(spikes['nid'][sids])
        neurons = [ s.neurons[nid] for nid in unids ]
        for neuron in neurons:
            neuron.update_wave() # update affected mean waveforms
        # add dirtysids to the set to be resaved to .wave file:
        spw.update_dirtysids(sids)
        # auto-refresh all plots
        self.panel.updateAllItems()
    def Align(self, to):
        """Align all selected (clustered + unsorted) spikes.

        to: 'min' or 'max' aligns on trough/peak; 'best' does a best-fit
        alignment over the current include-time window on the selected chans,
        which must be common to all selected spikes. Dirty spikes are marked
        for resave and their neurons' mean waveforms are updated.
        """
        s = self.sort
        spikes = s.spikes
        spw = self.spykewindow
        sids = np.concatenate((spw.GetClusterSpikes(), spw.GetUnsortedSpikes()))
        if to == 'best':
            tis = self.tis
            # find which chans are common to all sids:
            commonchans = s.get_common_chans(sids)[0]
            # check selected chans
            selchans = spw.get_selchans(sids)
            for selchan in selchans:
                if selchan not in commonchans:
                    print("Chan %d not common to all spikes, pick from %r"
                          % (selchan, list(commonchans)))
                    return
            print('Best fit aligning %d spikes between tis=%r on chans=%r' %
                  (len(sids), list(tis), selchans))
            # numpy implementation:
            #dirtysids = s.alignbest(sids, tis, selchans)
            # cython implementation:
            dirtysids = util.alignbest_cy(s, sids, tis, np.int64(selchans))
        else: # to in ['min', 'max']
            print('Aligning %d spikes to %s' % (len(sids), to))
            dirtysids = s.alignminmax(sids, to)
        paligned = len(dirtysids) / len(sids) * 100
        print('Aligned %d/%d (%.1f%%) spikes' % (len(dirtysids), len(sids), paligned))
        unids = np.unique(spikes['nid'][dirtysids])
        neurons = [ s.neurons[nid] for nid in unids ]
        for neuron in neurons:
            neuron.update_wave() # update affected mean waveforms
        # add dirtysids to the set to be resaved to .wave file:
        spw.update_dirtysids(dirtysids)
        # auto-refresh all plots:
        self.panel.updateAllItems()
    def RemoveNeuron(self, neuron, update=True):
        """Move all of `neuron`'s spikes back to the unsorted list, then delete
        the neuron from the sort; optionally refresh the cluster list."""
        self.MoveSpikes2List(neuron, neuron.sids, update=update)
        self.sort.remove_neuron(neuron.id)
        if update:
            self.nlist.updateAll()
def MoveSpikes2Neuron(self, sids, neuron=None, update=True):
sids = toiter(sids)
spikes = self.sort.spikes
if neuron == None:
neuron = self.sort.create_neuron()
neuron.sids = np.union1d(neuron.sids, sids) # update
spikes['nid'][sids] = neuron.id
if update:
self.sort.update_usids()
self.uslist.updateAll()
if neuron in self.nslist.neurons:
self.nslist.neurons = self.nslist.neurons # trigger nslist refresh
# TODO: selection doesn't seem to be working, always jumps to top of list
ron
    def MoveSpikes2List(self, neuron, sids, update=True):
        """Move spikes with ids `sids` out of `neuron` back into the unsorted
        list, unbinding their nid and invalidating the neuron's cached mean
        waveform; optionally refresh the spike lists."""
        sids = toiter(sids)
        if len(sids) == 0:
            return # nothing to do
        spikes = self.sort.spikes
        neuron.sids = np.setdiff1d(neuron.sids, sids)
        spikes['nid'][sids] = 0 # unbind neuron id of sids in spikes struct array
        if update:
            self.sort.update_usids()
            self.uslist.updateAll()
        # this only makes sense if the neuron is currently selected in the nlist:
        if neuron in self.nslist.neurons:
            self.nslist.neurons = self.nslist.neurons # this triggers a refresh
        neuron.wave.data = None # triggers an update when it's actually needed
def PlotClusterHistogram(self, X, nids):
spw = self.spykewindow
mplw = spw.OpenWindow('MPL')
unids = np.unique(nids)
nclusters = len(unids)
if nclusters == 0:
mplw.ax.clear()
mplw.figurecanvas.draw()
print("No spikes selected")
return
elif nclusters > 5:
mplw.ax.clear()
mplw.figurecanvas.draw()
print("Too many clusters selected for cluster histogram")
return
elif nclusters == 2:
calc_measures = True
else:
calc_measures = False
projdimi = 0
ndims = X.shape[1]
points = [] # list of projection of each cluster's points onto dimi
for unid in unids:
sidis, = np.where(nids == unid)
points.append(X[sidis])
#points.append(np.ascontiguousarray(X[sidis]))
if calc_measures:
t0 = time.time()
NDsep = util.NDsepmetric(*points, Nmax=20000)
print('NDsep calc took %.3f sec' % (time.time()-t0))
# centers of both clusters, use median:
c0 = np.median(points[0], axis=0) # ndims vector
c1 = np.median(points[1], axis=0)
# line connecting the centers of the two clusters, wrt c0
line = c1-c0
line /= np.linalg.norm(line) # make it unit length
#print('c0=%r, c1=%r, line=%r' % (c0, c1, line))
else:
line = np.zeros(ndims)
line[projdimi] = 1.0 # pick out just the one component
c0 = 0.0 # set origin at 0
# calculate projection of each cluster's points onto line
projs = []
for cpoints in points:
projs.append(np.dot(cpoints-c0, line))
if calc_measures:
d = np.median(projs[1]) - np.median(projs[0])
maxstd = max(projs[0].std(), projs[1].std())
if maxstd == 0:
oneDsep = 0
else:
oneDsep = d / (3 * maxstd)
proj = np.concatenate(projs)
nbins = max(intround(np.sqrt(len(proj))), 2)
edges = np.histogram(proj, bins=nbins)[1]
hists = []
for i in range(nclusters):
hists.append(np.histogram(projs[i], bins=edges)[0])
hist = np.concatenate([hists])
masses = np.asarray([ h.sum() for h in hist ])
sortedmassis = masses.argsort()
if calc_measures:
overlaparearatio = hist.min(axis=0).sum() / masses[sortedmassis[0]]
djs = core.DJS(hists[0], hists[1])
ledges = edges[:-1]
assert len(ledges) == nbins
binwidth = ledges[1] - ledges[0]
a = mplw.ax
a.clear()
windowtitle = "clusters %r" % list(unids)
print(windowtitle)
mplw.setWindowTitle(windowtitle)
if calc_measures:
title = ("%dDsep=%.3f, 1Dsep=%.3f, OAR=%.3f, DJS=%.3f"
% (ndims, NDsep, oneDsep, overlaparearatio, djs))
print(title)
a.set_title(title)
cs = [ CLUSTERCOLOURDICT[unid] for unid in unids ]
for i, c in enumerate(cs):
if c == WHITE:
cs[i] = 'black'
for i in sortedmassis[::-1]:
a.bar(ledges, hist[i], width=binwidth, color=cs[i], edgecolor=cs[i])
| true | true |
f71a7085403e8ce0a19e0672e598aeec15a4a023 | 899 | py | Python | examples/show_debug.py | Matuiss2/python-sc2 | dd93215d8b09b7ddacfd5c3cc4e9f43641d3f953 | [
"MIT"
] | 2 | 2019-01-23T19:11:53.000Z | 2019-04-05T17:45:49.000Z | examples/show_debug.py | Matuiss2/python-sc2 | dd93215d8b09b7ddacfd5c3cc4e9f43641d3f953 | [
"MIT"
] | null | null | null | examples/show_debug.py | Matuiss2/python-sc2 | dd93215d8b09b7ddacfd5c3cc4e9f43641d3f953 | [
"MIT"
] | 1 | 2019-04-24T13:31:20.000Z | 2019-04-24T13:31:20.000Z | import sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
class MyBot(sc2.BotAI):
    """Bot that draws per-structure debug text in the SC2 client every step."""
    async def on_step(self, iteration):
        # For each friendly structure, render its type id, position, build
        # progress and current orders as world-anchored green debug text.
        for structure in self.structures:
            self._client.debug_text_world(
                "\n".join([
                    f"{structure.type_id.name}:{structure.type_id.value}",
                    f"({structure.position.x:.2f},{structure.position.y:.2f})",
                    f"{structure.build_progress:.2f}",
                ] + [repr(x) for x in structure.orders]),
                structure.position3d,
                color=(0, 255, 0),
                size=12,
            )
        # flush all queued debug commands to the client
        await self._client.send_debug()
def main():
    """Run a realtime Terran (MyBot) vs Protoss (medium AI) game on Abyssal Reef LE."""
    run_game(maps.get("Abyssal Reef LE"), [
        Bot(Race.Terran, MyBot()),
        Computer(Race.Protoss, Difficulty.Medium)
    ], realtime=True)
if __name__ == '__main__':
    main()
| 31 | 79 | 0.558398 | import sc2
from sc2 import run_game, maps, Race, Difficulty
from sc2.player import Bot, Computer
class MyBot(sc2.BotAI):
async def on_step(self, iteration):
for structure in self.structures:
self._client.debug_text_world(
"\n".join([
f"{structure.type_id.name}:{structure.type_id.value}",
f"({structure.position.x:.2f},{structure.position.y:.2f})",
f"{structure.build_progress:.2f}",
] + [repr(x) for x in structure.orders]),
structure.position3d,
color=(0, 255, 0),
size=12,
)
await self._client.send_debug()
def main():
run_game(maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, MyBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=True)
if __name__ == '__main__':
main()
| true | true |
f71a7119f0a598c0a33db2eb55c1805b7e234b08 | 21,798 | py | Python | archive/reuUpdated.py | emmettmeinzer/hmwgen | cd47733b5a34a6a3a9b56026eb5e73069e398033 | [
"MIT"
] | null | null | null | archive/reuUpdated.py | emmettmeinzer/hmwgen | cd47733b5a34a6a3a9b56026eb5e73069e398033 | [
"MIT"
] | null | null | null | archive/reuUpdated.py | emmettmeinzer/hmwgen | cd47733b5a34a6a3a9b56026eb5e73069e398033 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Mon Nov 11 13:41:14 2019
@author: Emmett & Binyang
"""
from pprint import pprint
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from nltk.tokenize.punkt import PunktSentenceTokenizer, PunktTrainer
##Let’s first build a corpus to train our tokenizer on. We’ll use stuff available in NLTK:
from nltk.corpus import gutenberg
# print (dir(gutenberg))
# print (gutenberg.fileids())
# Concatenate the full Gutenberg corpus into one tokenizer-training string.
# str.join builds the string in one pass; the original `text +=` loop is
# quadratic in the total corpus size.
text = "".join(gutenberg.raw(file_id) for file_id in gutenberg.fileids())
print(len(text))
##a function that converts an iterable of strings to a single string
def listToString(s):
    """Concatenate an iterable of strings into one string.

    str.join is linear-time; the original element-by-element `+=` loop was
    quadratic in the total length. Interface unchanged.
    """
    return "".join(s)
##extract sentences from samples for following sentiment analysis
# Train the Punkt sentence tokenizer ONCE on the Gutenberg text. The original
# loop retrained it from scratch for every one of the 185 sample files, which
# repeated the same expensive training with identical results each time.
trainer = PunktTrainer()
trainer.INCLUDE_ALL_COLLOCS = True
trainer.train(text)
tokenizer = PunktSentenceTokenizer(trainer.get_params())
##Adding more abbreviations
tokenizer._params.abbrev_types.add('dr')  # private API — stable across nltk 3.x releases
sent_df = pd.DataFrame()  # one row per extracted sentence: columns 'sent', 'sample'
i = 0  # running row index into sent_df
for sampNum in range(1, 186):
    # `with` closes each sample file; the original leaked open file handles
    with open("sample" + str(sampNum) + ".txt", "r") as fileOpen:
        temp = listToString(fileOpen.readlines())
    for sent in tokenizer.tokenize(temp):
        sent_df.loc[i, 'sent'] = sent
        sent_df.loc[i, 'sample'] = sampNum
        i += 1
# NOTE: sampNum is 185 after this loop (the original while-loop left it at
# 186); nothing visible below reads it.
##NLTK’s built-in Vader Sentiment Analyzer will simply rank a piece of text as positive, negative or neutral
##using a lexicon of positive and negative words.
##We can utilize this tool by first creating a Sentiment Intensity Analyzer (SIA) to categorize our headlines,
##then we'll use the polarity_scores method to get the sentiment.
##We'll append each sentiment dictionary to a results list, which we'll transform into a dataframe:
from nltk.sentiment.vader import SentimentIntensityAnalyzer as SIA
sia = SIA()
# NOTE(review): `results` is never appended to in this loop; it is kept only
# because later commentary in this script refers to it — confirm before removing.
results = []
# Score every sentence with VADER and store the four polarity components
# (neg/neu/pos/compound) as new columns on sent_df.
for idx, row in sent_df.iterrows():
    line = row['sent']
    score = sia.polarity_scores(line)
    sent_df.loc[idx, 'neg'] = score.get('neg')
    sent_df.loc[idx, 'neu'] = score.get('neu')
    sent_df.loc[idx, 'pos'] = score.get('pos')
    sent_df.loc[idx, 'compound'] = score.get('compound')
# pprint(results[:10], width=100)
##We will consider posts with a compound value greater than 0.2 as positive and less than -0.2 as negative.
##There's some testing and experimentation that goes with choosing these ranges, and there is a trade-off to be
##made here. If you choose a higher value, you might get more compact results (less false positives and false
##negatives), but the size of the results will decrease significantly.
# Discretize the compound score into labels: 1 positive, -1 negative,
# 0 neutral. NOTE(review): the prose above says a 0.2 threshold but the code
# actually uses +/-0.3 — confirm which was intended.
sent_df['label'] = 0
sent_df.loc[sent_df['compound'] > 0.3, 'label'] = 1
sent_df.loc[sent_df['compound'] < -0.3, 'label'] = -1
# sent_df.head()
##We have all the data we need to save, so let's do that:
# NOTE(review): mode='a' appends on every run and rewrites the header row each
# time, so repeated runs produce duplicate headers and rows in the CSV.
sent_df.to_csv('sentiment analysis.csv', mode='a', encoding='utf-8', index=False)
#Let's first take a peek at a few positive and negative headlines:
print("Positive headlines:\n")
pprint(list(sent_df[sent_df['label'] == 1].sent)[:5], width=200)
print("\nNegative headlines:\n")
pprint(list(sent_df[sent_df['label'] == -1].sent)[:5], width=200)
##Now let's check how many total positives and negatives we have in this dataset:
print(sent_df.label.value_counts())
print(sent_df.label.value_counts(normalize=True) * 100)
##The first line gives us raw value counts of the labels, whereas the second line provides percentages
##with the normalize keyword.
##For fun, let's plot a bar chart:
"""
fig, ax = plt.subplots(figsize=(8, 8))
counts = sent_df.label.value_counts(normalize=True) * 100
sns.barplot(x=counts.index, y=counts, ax=ax)
ax.set_xticklabels(['Negative', 'Neutral', 'Positive'])
ax.set_ylabel("Percentage")
plt.show()
"""
##filter the sentences by number of words in it
# Vectorized word count: one pandas string-accessor pass instead of a
# Python-level iterrows loop. .astype(float) matches the float64 dtype the
# original row-by-row .loc assignment produced for the new column.
sent_df['len_sent'] = sent_df['sent'].str.split().str.len().astype(float)
##split positive and other sentences
pos = sent_df[sent_df['label'] == 1]  # VADER-positive sentences
neg = sent_df[sent_df['label'] != 1]  # neutral and negative sentences
import gensim
from gensim.parsing.preprocessing import strip_non_alphanum
from gensim.parsing.preprocessing import strip_punctuation
from gensim.parsing.preprocessing import strip_multiple_whitespaces
from gensim.parsing.preprocessing import stem_text
def _clean_sentence(sentence):
    """Strip non-alphanumerics, punctuation and extra whitespace, then stem.

    Factored out of three identical copy-pasted loops so the cleaning
    pipeline is defined exactly once.
    """
    cleaned = strip_non_alphanum(str(sentence))
    cleaned = strip_punctuation(cleaned)
    cleaned = strip_multiple_whitespaces(cleaned)
    return stem_text(cleaned)
# Build one cleaned corpus per sentence subset, preserving row order:
# all sentences, VADER-positive only, and everything else.
corpus_full = [_clean_sentence(row['sent']) for _, row in sent_df.iterrows()]
corpus_pos = [_clean_sentence(row['sent']) for _, row in pos.iterrows()]
corpus_neg = [_clean_sentence(row['sent']) for _, row in neg.iterrows()]
from nltk.corpus import stopwords
stop_words = stopwords.words('english')
stoplist = set('a about above after again against all am an and any are arent\
as also at be because been before being below between both but\
by cant cannot could couldnt did didnt do does doesnt doing dont\
down during each els few for from further had hadnt has have havent\
having he hed hes her here heres hers herself him himself his\
how hows i id ill im ive if in into is isnt it its itself lets\
me more most mustnt my myself no nor not of off on once only or\
other ought our ours ourselves out over own same shant she shes\
should shouldnt so some such than that thats the their theirs\
them themselves then there theres these they theyd theyll theyre\
theyve this those through to too under until up very was wasnt\
we wed were weve were werent what whats when whens which while\
who whos whom why whys with wont would wouldnt you youd youll\
youre youve your yours yourself yourselves ll ve s ar mayb ha re\
us thi isn a b c d e f g h i j k l m n o p q r s t u v w x y z\
hi will can get back go don wa let atc ok ani mi thei whenev make\
just take aw know sai good baltimor jetblu lol thank thanks like\
vari might less highest billion nice probabl lot fuck shit sure\
feel dure befor realli work veri chanc see awai onc onli dy aren\
100 someth thing even happen becaus wai everi much help want think\
fear flight plane fly mai time dai\
1 2 3 4 5 6 7 8 9 10'.split())
print (len(stoplist))
stoplist.update(stop_words)
print(len(stop_words))
print(len(stoplist))
#standardize text -- makes all characters lowercase and removes common stop words
# Tokenize each cleaned sentence (lowercase, whitespace split) and drop
# stop-list tokens; result is a list of token lists, one per document.
text_full = [[word for word in document.lower().split() if word not in stoplist]
             for document in corpus_full]
print(text_full)  # debug: dump the full tokenized corpus to stdout
text_pos = [[word for word in document.lower().split() if word not in stoplist]
            for document in corpus_pos]
text_neg = [[word for word in document.lower().split() if word not in stoplist]
            for document in corpus_neg]
#count number of times that word appears in corpus
#pair frequency with respective word in new array
from collections import defaultdict


def _drop_singletons(tokenized_docs):
    """Remove tokens whose corpus-wide frequency is 1.

    Parameters
    ----------
    tokenized_docs : list of list of str
        One token list per document.

    Returns
    -------
    list of list of str
        The same documents, in order, with hapax legomena removed.
    """
    frequency = defaultdict(int)
    for doc in tokenized_docs:
        for token in doc:
            frequency[token] += 1
    return [[token for token in doc if frequency[token] > 1]
            for doc in tokenized_docs]


# The count-then-filter logic was duplicated verbatim for each corpus;
# apply the single helper to all three instead.
corpus_removeOne_full = _drop_singletons(text_full)
corpus_removeOne_pos = _drop_singletons(text_pos)
corpus_removeOne_neg = _drop_singletons(text_neg)
from gensim import corpora
#add corpora to dictionary
# Build one token<->integer-id mapping (gensim Dictionary) per corpus.
dictionary_full = corpora.Dictionary(corpus_removeOne_full)
dictionary_pos = corpora.Dictionary(corpus_removeOne_pos)
dictionary_neg = corpora.Dictionary(corpus_removeOne_neg)
#save dictionary for future reference
# Persisted to the current working directory so later runs can reload them.
dictionary_full.save('redditTest_full.dict')
dictionary_pos.save('redditTest_pos.dict') #location of document in computer
dictionary_neg.save('redditTest_neg.dict')
#dict = gensim.corpora.Dictionary.load('redditTest.dict')
#assign numeric id to each token in dictionary
# token2id: plain dict mapping each token string to its integer id.
dictID_full = dictionary_full.token2id
dictID_pos = dictionary_pos.token2id
dictID_neg = dictionary_neg.token2id
#remove empty sentences
# Drop documents that lost all their tokens during filtering.
# BUG FIX: the original code called list.remove() on the very list it was
# iterating, which skips the element immediately after each removal -- so
# consecutive empty documents were only partially removed.  Rebuilding each
# list with a comprehension filters every empty document exactly once.
corpus_removeOne_full = [text for text in corpus_removeOne_full if text]
corpus_removeOne_pos = [text for text in corpus_removeOne_pos if text]
corpus_removeOne_neg = [text for text in corpus_removeOne_neg if text]
#converts each word into vector following same process as example
from gensim import models

# --- Bag-of-words corpus + TF-IDF weighting, full corpus --------------------
bow_corpus_full = [dictionary_full.doc2bow(text) for text in corpus_removeOne_full]
corpora.MmCorpus.serialize('redditTest_full.mm', bow_corpus_full)
corp_full = gensim.corpora.MmCorpus('redditTest_full.mm')
# BUG FIX: the full-corpus TF-IDF model used to be stored in `tfidf_pos` and
# was then silently overwritten by the positive-corpus model below; give it
# its own name so the model remains reachable.
tfidf_full = models.TfidfModel(bow_corpus_full)
corpus_tfidf_full = tfidf_full[bow_corpus_full]

# --- Bag-of-words corpus + TF-IDF weighting, positive corpus ----------------
bow_corpus_pos = [dictionary_pos.doc2bow(text) for text in corpus_removeOne_pos]
corpora.MmCorpus.serialize('redditTest_pos.mm', bow_corpus_pos)
corp_pos = gensim.corpora.MmCorpus('redditTest_pos.mm')
tfidf_pos = models.TfidfModel(bow_corpus_pos)
corpus_tfidf_pos = tfidf_pos[bow_corpus_pos]

# --- Bag-of-words corpus + TF-IDF weighting, negative corpus ----------------
bow_corpus_neg = [dictionary_neg.doc2bow(text) for text in corpus_removeOne_neg]
corpora.MmCorpus.serialize('redditTest_neg.mm', bow_corpus_neg)
corp_neg = gensim.corpora.MmCorpus('redditTest_neg.mm')
tfidf_neg = models.TfidfModel(bow_corpus_neg)
corpus_tfidf_neg = tfidf_neg[bow_corpus_neg]
# --- LDA (Mallet) topic models, one per corpus ------------------------------
# Path to the local Mallet binary used by gensim's wrapper; it was previously
# reassigned identically before each model -- set it once.
# NOTE(review): gensim.models.wrappers (incl. LdaMallet) was removed in
# gensim 4.0, so this section requires gensim < 4.
mallet_path = '/Users/emmet/.spyder-py3-dev/REU_Project/mallet-2.0.8/bin/mallet'

# 9 topics each; fixed random_seed keeps runs reproducible.
lda_full = gensim.models.wrappers.LdaMallet(mallet_path, corpus=bow_corpus_full, num_topics=9, id2word=dictionary_full, workers=1, alpha=110, random_seed=109, iterations=50)
corpus_LDA_full = lda_full[bow_corpus_full]
lda_full.print_topics(9)

lda_pos = gensim.models.wrappers.LdaMallet(mallet_path, corpus=bow_corpus_pos, num_topics=9, id2word=dictionary_pos, workers=1, alpha=110, random_seed=109, iterations=50)
corpus_LDA_pos = lda_pos[bow_corpus_pos]
lda_pos.print_topics(9)

lda_neg = gensim.models.wrappers.LdaMallet(mallet_path, corpus=bow_corpus_neg, num_topics=9, id2word=dictionary_neg, workers=1, alpha=110, random_seed=109, iterations=50)
corpus_LDA_neg = lda_neg[bow_corpus_neg]
lda_neg.print_topics(9)
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
from sklearn.manifold import TSNE

# One plotting colour per topic, shared by every figure below.
colors = np.array([color for name, color in mcolors.TABLEAU_COLORS.items()])

# --- t-SNE plot of per-document topic weights, full corpus ------------------
n_topics = 9
# Densify the sparse (topic_id, weight) pairs into one length-9 vector
# per document.
topic_weights_full = []
for row_list in lda_full[bow_corpus_full]:
    tmp = np.zeros(n_topics)
    for i, w in row_list:
        tmp[i] = w
    topic_weights_full.append(tmp)
# BUG FIX: missing weights were filled with 9 (an obvious typo); an absent
# topic weight means "not assigned", i.e. 0 -- matching the disabled
# pos/neg variants further down.
arr_full = pd.DataFrame(topic_weights_full).fillna(0).values
topic_num_full = np.argmax(arr_full, axis=1)  # dominant topic per document
tsne_model_full = TSNE(n_components=3, random_state=None, method='barnes_hut',
                       angle=0.5, init='pca')
tsne_lda_full = tsne_model_full.fit_transform(arr_full)
# Translate ASCII digits to Unicode subscripts for the axis labels.
sub = str.maketrans("0123456789", "₀₁₂₃₄₅₆₇₈₉")
plt.xlabel('t-SNE1'.translate(sub))
plt.ylabel('t-SNE2'.translate(sub))
# BUG FIX: the title said "Positive" but this figure shows the FULL corpus.
plt.title('t-SNE Plot of Topics within Full Sentiment Corpus')
tsne_full = plt.scatter(x=tsne_lda_full[:,0], y=tsne_lda_full[:,1])
# BUG FIX: plt.show() takes no artist argument; the scatter was being passed
# where the (legacy) `block` flag goes.
plt.show()
"""
#t-SNE plot for positive corpus
n_topics = 9
topic_weights_pos = []
for row_list in lda_pos[bow_corpus_pos]:
tmp = np.zeros(n_topics)
for i, w in row_list:
tmp[i] = w
topic_weights_pos.append(tmp)
arr_pos = pd.DataFrame(topic_weights_pos).fillna(0).values
topic_num_pos = np.argmax(arr_pos, axis=1)
tsne_model_pos = TSNE(n_components=3, random_state=None, method='barnes_hut',
angle=0.5, init='pca')
tsne_lda_pos = tsne_model_pos.fit_transform(arr_pos)
sub = str.maketrans("0123456789", "₀₁₂₃₄₅₆₇₈₉")
plt.xlabel('t-SNE1'.translate(sub))
plt.ylabel('t-SNE2'.translate(sub))
plt.title('t-SNE Plot of Topics within Positive Sentiment Corpus')
tsne_pos = plt.scatter(x=tsne_lda_pos[:,0], y=tsne_lda_pos[:,1])
#plt.show(tsne_pos)
#t-SNE plot for negative corpus
n_topics = 9
topic_weights_neg = []
for row_list in lda_neg[bow_corpus_neg]:
tmp = np.zeros(n_topics)
for i, w in row_list:
tmp[i] = w
topic_weights_neg.append(tmp)
arr_neg = pd.DataFrame(topic_weights_neg).fillna(0).values
topic_num_neg = np.argmax(arr_neg, axis=1)
tsne_model_neg = TSNE(n_components=3, random_state=None, method='barnes_hut',
angle=0.5, init='pca')
tsne_lda_neg = tsne_model_neg.fit_transform(arr_neg)
sub = str.maketrans("0123456789", "₀₁₂₃₄₅₆₇₈₉")
plt.xlabel('t-SNE1'.translate(sub))
plt.ylabel('t-SNE2'.translate(sub))
plt.title('t-SNE Plot of Topics within Negative Sentiment Corpus')
tsne_neg = plt.scatter(tsne_lda_neg[:,0], tsne_lda_neg[:,1])
#plt.show(tsne_neg)
"""
from collections import Counter

# --- Word count & keyword importance per topic, full corpus -----------------
topics_full = lda_full.show_topics(formatted=False)
# BUG FIX: the original flattened `bow_corpus_full`, whose elements are
# (token_id, count) tuples, so the Counter was keyed by tuples and every
# `counter_full[word]` lookup (keyed by a word string) returned 0 -- i.e. all
# word-count bars were empty.  Flatten the token lists instead.
flatten_full = [w for w_list in corpus_removeOne_full for w in w_list]
counter_full = Counter(flatten_full)
# Rows of [word, topic id, topic weight, corpus-wide word count].
topic_weight_full = []
for i, topic in topics_full:
    for word, weight in topic:
        topic_weight_full.append([word, i , weight, counter_full[word]])
data_frame_full = pd.DataFrame(topic_weight_full, columns=['word', 'topic_id', 'importance', 'word_count'])
# One subplot per topic: word-count bars (left axis) overlaid with
# importance-weight bars (twin right axis).
fig, axes = plt.subplots(3, 3, figsize=(10,6), sharey=True, dpi=160)
for i, ax in enumerate(axes.flatten()):
    ax.bar(x='word', height="word_count", data=data_frame_full.loc[data_frame_full.topic_id==i, :], color=colors[i], width=0.5, alpha=0.3, label='Word Count')
    ax_twin = ax.twinx()
    ax_twin.bar(x='word', height="importance", data=data_frame_full.loc[data_frame_full.topic_id==i, :], color=colors[i], width=0.2, label='Weights')
    ax.set_ylabel('Word Count', color=colors[i])
    ax_twin.set_ylim(0, 0.5); ax.set_ylim(0, 100)
    ax.set_title('Topic: ' + str(i+1), color=colors[i], fontsize=8)
    ax.tick_params(axis='y', left=False)
    ax.set_xticklabels(data_frame_full.loc[data_frame_full.topic_id==i, 'word'], rotation=90, horizontalalignment= 'center')
    ax.legend(loc='upper left'); ax_twin.legend(loc='upper right')
fig.tight_layout(w_pad=2)
plt.show()
"""
#Word Count & Keyword for Positive Corpus
topics_pos = lda_pos.show_topics(formatted=False)
flatten_pos = [w for w_list in bow_corpus_pos for w in w_list]
counter_pos = Counter(flatten_pos)
topic_weight_pos = []
for i, topic in topics_pos:
for word, weight in topic:
topic_weight_pos.append([word, i , weight, counter_pos[word]])
data_frame_pos = pd.DataFrame(topic_weight_pos, columns=['word', 'topic_id', 'importance', 'word_count'])
fig, axes = plt.subplots(3, 3, figsize=(10,6), sharey=True, dpi=160)
for i, ax in enumerate(axes.flatten()):
ax.bar(x='word', height="word_count", data=data_frame_pos.loc[data_frame_pos.topic_id==i, :], color=colors[i], width=0.5, alpha=0.3, label='Word Count')
ax_twin = ax.twinx()
ax_twin.bar(x='word', height="importance", data=data_frame_pos.loc[data_frame_pos.topic_id==i, :], color=colors[i], width=0.2, label='Weights')
ax.set_ylabel('Word Count', color=colors[i])
ax_twin.set_ylim(0, 0.5); ax.set_ylim(0, 100)
ax.set_title('Topic: ' + str(i+1), color=colors[i], fontsize=8)
ax.tick_params(axis='y', left=False)
ax.set_xticklabels(data_frame_pos.loc[data_frame_pos.topic_id==i, 'word'], rotation=90, horizontalalignment= 'center')
ax.legend(loc='upper left'); ax_twin.legend(loc='upper right')
fig.tight_layout(w_pad=2)
plt.show()
#Word Count & Keyword for Negative Corpus
topics_neg = lda_neg.show_topics(formatted=False)
flatten_neg = [w for w_list in bow_corpus_neg for w in w_list]
counter_neg = Counter(flatten_neg)
topic_weight_neg = []
for i, topic in topics_neg:
for word, weight in topic:
topic_weight_neg.append([word, i , weight, counter_neg[word]])
data_frame_neg = pd.DataFrame(topic_weight_neg, columns=['word', 'topic_id', 'importance', 'word_count'])
fig, axes = plt.subplots(3, 3, figsize=(10,6), sharey=True, dpi=160)
for i, ax in enumerate(axes.flatten()):
ax.bar(x='word', height="word_count", data=data_frame_neg.loc[data_frame_neg.topic_id==i, :], color=colors[i], width=0.5, alpha=0.3, label='Word Count')
ax_twin = ax.twinx()
ax_twin.bar(x='word', height="importance", data=data_frame_neg.loc[data_frame_neg.topic_id==i, :], color=colors[i], width=0.2, label='Weights')
ax.set_ylabel('Word Count', color=colors[i])
ax_twin.set_ylim(0, 0.5); ax.set_ylim(0, 100)
ax.set_title('Topic: ' + str(i+1), color=colors[i], fontsize=8)
ax.tick_params(axis='y', left=False)
ax.set_xticklabels(data_frame_neg.loc[data_frame_neg.topic_id==i, 'word'], rotation=90, horizontalalignment= 'center')
ax.legend(loc='upper left'); ax_twin.legend(loc='upper right')
fig.tight_layout(w_pad=2)
plt.show()
"""
from wordcloud import WordCloud
import matplotlib.colors as mcolors
#Word Cloud Display for Full Corpus
# One 3x3 grid of word clouds, one cloud per topic.
# NOTE: color_func closes over the loop variable `i` (late binding).  It only
# works because the lambda is invoked during generate/draw *inside* the loop
# below, so each topic is rendered with colors[i] of the current iteration --
# fragile, but correct as written.
cloud = WordCloud(stopwords=stoplist, background_color='white', width=2500, height=1800, max_words=7, colormap='tab10', color_func=lambda *args, **kwargs: colors[i], prefer_horizontal=1.0)
topics_full = lda_full.show_topics(formatted=False)
fig, axes = plt.subplots(3, 3, figsize=(10, 6))
for i, ax in enumerate(axes.flatten()):
    fig.add_subplot(ax)
    # topics_full[i][1] is the (word, weight) list for topic i.
    topic_words_full = dict(topics_full[i][1])
    cloud.generate_from_frequencies(topic_words_full, max_font_size=300)
    plt.gca().imshow(cloud)
    plt.gca().set_title('Topic ' + str(i+1), fontdict=dict(size=10))
    plt.gca().axis('off')
plt.axis('off')
plt.tight_layout()
plt.show()
"""
#Word Cloud Display for Positive Corpus
cloud = WordCloud(stopwords=stoplist, background_color='white', width=2500, height=1800, max_words=7, colormap='tab10', color_func=lambda *args, **kwargs: colors[i], prefer_horizontal=1.0)
topics_pos = lda_pos.show_topics(formatted=False)
fig, axes = plt.subplots(3, 3, figsize=(10, 6))
for i, ax in enumerate(axes.flatten()):
fig.add_subplot(ax)
topic_words_pos = dict(topics_pos[i][1])
cloud.generate_from_frequencies(topic_words_pos, max_font_size=300)
plt.gca().imshow(cloud)
plt.gca().set_title('Topic ' + str(i+1), fontdict=dict(size=10))
plt.gca().axis('off')
plt.axis('off')
plt.tight_layout()
plt.show()
#Word Cloud Display for Negative Corpus
cloud = WordCloud(stopwords=stoplist, background_color='white', width=2500, height=1800, max_words=7, colormap='tab10', color_func=lambda *args, **kwargs: colors[i], prefer_horizontal=1.0)
topics_neg = lda_neg.show_topics(formatted=False)
fig, axes = plt.subplots(3, 3, figsize=(10, 6))
for i, ax in enumerate(axes.flatten()):
fig.add_subplot(ax)
topic_words_neg = dict(topics_neg[i][1])
cloud.generate_from_frequencies(topic_words_neg, max_font_size=300)
plt.gca().imshow(cloud)
plt.gca().set_title('Topic ' + str(i+1), fontdict=dict(size=10))
plt.gca().axis('off')
plt.axis('off')
plt.tight_layout()
plt.show()
"""
import pyLDAvis.gensim
import pyLDAvis
import gensim

# --- Interactive pyLDAvis visualisation, full corpus ------------------------
# Convert the Mallet wrapper model into a native gensim LdaModel, which is
# what pyLDAvis.gensim.prepare expects.
mallet2lda_full = gensim.models.wrappers.ldamallet.malletmodel2ldamodel(lda_full)
visualizeLDA_full = pyLDAvis.gensim.prepare(mallet2lda_full, bow_corpus_full, dictionary_full)
# BUG FIX: pyLDAvis.show() requires the prepared visualisation data as its
# argument (the original bare call raised TypeError); this mirrors the
# disabled pos/neg variants below, which pass it correctly.
pyLDAvis.show(visualizeLDA_full)
"""
#LDA Mallet pyLDAvis for Postiive Corpus
mallet2lda_pos = gensim.models.wrappers.ldamallet.malletmodel2ldamodel(lda_pos)
visualizeLDA_pos = pyLDAvis.gensim.prepare(mallet2lda_pos, bow_corpus_pos, dictionary_pos)
pyLDAvis.show(visualizeLDA_pos)
#LDA Mallet pyLDAvis for Negative Corpus
mallet2lda_neg = gensim.models.wrappers.ldamallet.malletmodel2ldamodel(lda_neg)
visualizeLDA_neg = pyLDAvis.gensim.prepare(mallet2lda_neg, bow_corpus_neg, dictionary_neg)
pyLDAvis.show(visualizeLDA_neg)
""" | 38.376761 | 189 | 0.708551 |
from pprint import pprint
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from nltk.tokenize.punkt import PunktSentenceTokenizer, PunktTrainer
text += gutenberg.raw(file_id)
print (len(text))
= ""
for ele in s:
str1 += ele
return str1
186):
fileOpen = open("sample"+str(sampNum)+".txt","r")
temp = fileOpen.readlines()
temp = listToString(temp)
trainer = PunktTrainer()
trainer.INCLUDE_ALL_COLLOCS = True
trainer.train(text)
tokenizer = PunktSentenceTokenizer(trainer.get_params())
ev_types.add('dr')
sent = tokenizer.tokenize(temp)
for sent in sent:
sent_df.loc[i, 'sent'] = sent
sent_df.loc[i, 'sample'] = sampNum
i += 1
sampNum += 1
g'] = score.get('neg')
sent_df.loc[idx, 'neu'] = score.get('neu')
sent_df.loc[idx, 'pos'] = score.get('pos')
sent_df.loc[idx, 'compound'] = score.get('compound')
# pprint(results[:10], width=100)
##We will consider posts with a compound value greater than 0.2 as positive and less than -0.2 as negative.
##There's some testing and experimentation that goes with choosing these ranges, and there is a trade-off to be
keep appending to this csv, but just make sure that if you reassign the headlines set, you could get
##duplicates. Maybe add a more advanced saving function that reads and removes duplicates before saving.
#Let's first take a peak at a few positive and negative headlines:
print("Positive headlines:\n")
pprint(list(sent_df[sent_df['label'] == 1].sent)[:5], width=200)
print("\nNegative headlines:\n")
pprint(list(sent_df[sent_df['label'] == -1].sent)[:5], width=200)
e=True) * 100)
##The first line gives us raw value counts of the labels, whereas the second line provides percentages
##with the normalize keyword.
##For fun, let's plot a bar chart:
nce = row['sent']
sent_df.loc[idx, 'len_sent'] = len(sentence.split())
]
neg = sent_df[sent_df['label'] != 1]
import gensim
from gensim.parsing.preprocessing import strip_non_alphanum
from gensim.parsing.preprocessing import strip_punctuation
from gensim.parsing.preprocessing import strip_multiple_whitespaces
from gensim.parsing.preprocessing import stem_text
corpus_full = []
for idx, row in sent_df.iterrows():
temp = row['sent']
temp1 = strip_non_alphanum(str(temp))
temp2 = strip_punctuation(temp1)
temp3 = strip_multiple_whitespaces(temp2)
final = stem_text(temp3)
corpus_full.append(final)
corpus_pos = []
for idx, row in pos.iterrows():
temp = row['sent']
temp1 = strip_non_alphanum(str(temp))
temp2 = strip_punctuation(temp1)
temp3 = strip_multiple_whitespaces(temp2)
final = stem_text(temp3)
corpus_pos.append(final)
corpus_neg = []
for idx, row in neg.iterrows():
temp = row['sent']
temp1 = strip_non_alphanum(str(temp))
temp2 = strip_punctuation(temp1)
temp3 = strip_multiple_whitespaces(temp2)
final = stem_text(temp3)
corpus_neg.append(final)
from nltk.corpus import stopwords
stop_words = stopwords.words('english')
stoplist = set('a about above after again against all am an and any are arent\
as also at be because been before being below between both but\
by cant cannot could couldnt did didnt do does doesnt doing dont\
down during each els few for from further had hadnt has have havent\
having he hed hes her here heres hers herself him himself his\
how hows i id ill im ive if in into is isnt it its itself lets\
me more most mustnt my myself no nor not of off on once only or\
other ought our ours ourselves out over own same shant she shes\
should shouldnt so some such than that thats the their theirs\
them themselves then there theres these they theyd theyll theyre\
theyve this those through to too under until up very was wasnt\
we wed were weve were werent what whats when whens which while\
who whos whom why whys with wont would wouldnt you youd youll\
youre youve your yours yourself yourselves ll ve s ar mayb ha re\
us thi isn a b c d e f g h i j k l m n o p q r s t u v w x y z\
hi will can get back go don wa let atc ok ani mi thei whenev make\
just take aw know sai good baltimor jetblu lol thank thanks like\
vari might less highest billion nice probabl lot fuck shit sure\
feel dure befor realli work veri chanc see awai onc onli dy aren\
100 someth thing even happen becaus wai everi much help want think\
fear flight plane fly mai time dai\
1 2 3 4 5 6 7 8 9 10'.split())
print (len(stoplist))
stoplist.update(stop_words)
print(len(stop_words))
print(len(stoplist))
text_full = [[word for word in document.lower().split() if word not in stoplist]
for document in corpus_full]
print(text_full)
text_pos = [[word for word in document.lower().split() if word not in stoplist]
for document in corpus_pos]
text_neg = [[word for word in document.lower().split() if word not in stoplist]
for document in corpus_neg]
from collections import defaultdict
frequency = defaultdict(int)
for text in text_full:
for token in text:
frequency[token] += 1
corpus_removeOne_full = [[token for token in text if frequency[token]>1] for text in text_full]
frequency = defaultdict(int)
for text in text_pos:
for token in text:
frequency[token] += 1
corpus_removeOne_pos = [[token for token in text if frequency[token]>1] for text in text_pos]
frequency = defaultdict(int)
for text in text_neg:
for token in text:
frequency[token] += 1
corpus_removeOne_neg = [[token for token in text if frequency[token]>1] for text in text_neg]
from gensim import corpora
dictionary_full = corpora.Dictionary(corpus_removeOne_full)
dictionary_pos = corpora.Dictionary(corpus_removeOne_pos)
dictionary_neg = corpora.Dictionary(corpus_removeOne_neg)
dictionary_full.save('redditTest_full.dict')
dictionary_pos.save('redditTest_pos.dict')
dictionary_neg.save('redditTest_neg.dict')
dictID_full = dictionary_full.token2id
dictID_pos = dictionary_pos.token2id
dictID_neg = dictionary_neg.token2id
for text in corpus_removeOne_full:
if len(text) == 0:
corpus_removeOne_full.remove(text)
for text in corpus_removeOne_pos:
if len(text) == 0:
corpus_removeOne_pos.remove(text)
for text in corpus_removeOne_neg:
if len(text) == 0:
corpus_removeOne_neg.remove(text)
bow_corpus_full = [dictionary_full.doc2bow(text) for text in corpus_removeOne_full]
corpora.MmCorpus.serialize('redditTest_full.mm', bow_corpus_full)
corp_full = gensim.corpora.MmCorpus('redditTest_full.mm')
from gensim import models
tfidf_pos = models.TfidfModel(bow_corpus_full)
corpus_tfidf_full = tfidf_pos[bow_corpus_full]
bow_corpus_pos = [dictionary_pos.doc2bow(text) for text in corpus_removeOne_pos]
corpora.MmCorpus.serialize('redditTest_pos.mm', bow_corpus_pos)
corp_pos = gensim.corpora.MmCorpus('redditTest_pos.mm')
from gensim import models
tfidf_pos = models.TfidfModel(bow_corpus_pos)
corpus_tfidf_pos = tfidf_pos[bow_corpus_pos]
bow_corpus_neg = [dictionary_neg.doc2bow(text) for text in corpus_removeOne_neg]
corpora.MmCorpus.serialize('redditTest_neg.mm', bow_corpus_neg)
corp_neg = gensim.corpora.MmCorpus('redditTest_neg.mm')
from gensim import models
tfidf_neg = models.TfidfModel(bow_corpus_neg)
corpus_tfidf_neg = tfidf_neg[bow_corpus_neg]
mallet_path = '/Users/emmet/.spyder-py3-dev/REU_Project/mallet-2.0.8/bin/mallet'
lda_full = gensim.models.wrappers.LdaMallet(mallet_path, corpus=bow_corpus_full, num_topics=9, id2word=dictionary_full, workers=1, alpha=110, random_seed=109, iterations=50)
corpus_LDA_full = lda_full[bow_corpus_full]
lda_full.print_topics(9)
mallet_path = '/Users/emmet/.spyder-py3-dev/REU_Project/mallet-2.0.8/bin/mallet'
lda_pos = gensim.models.wrappers.LdaMallet(mallet_path, corpus=bow_corpus_pos, num_topics=9, id2word=dictionary_pos, workers=1, alpha=110, random_seed=109, iterations=50)
corpus_LDA_pos = lda_pos[bow_corpus_pos]
lda_pos.print_topics(9)
mallet_path = '/Users/emmet/.spyder-py3-dev/REU_Project/mallet-2.0.8/bin/mallet'
lda_neg = gensim.models.wrappers.LdaMallet(mallet_path, corpus=bow_corpus_neg, num_topics=9, id2word=dictionary_neg, workers=1, alpha=110, random_seed=109, iterations=50)
corpus_LDA_neg = lda_neg[bow_corpus_neg]
lda_neg.print_topics(9)
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
from sklearn.manifold import TSNE
colors = np.array([color for name, color in mcolors.TABLEAU_COLORS.items()])
n_topics = 9
topic_weights_full = []
for row_list in lda_full[bow_corpus_full]:
tmp = np.zeros(n_topics)
for i, w in row_list:
tmp[i] = w
topic_weights_full.append(tmp)
arr_full = pd.DataFrame(topic_weights_full).fillna(9).values
topic_num_full = np.argmax(arr_full, axis=1)
tsne_model_full = TSNE(n_components=3, random_state=None, method='barnes_hut',
angle=0.5, init='pca')
tsne_lda_full = tsne_model_full.fit_transform(arr_full)
sub = str.maketrans("0123456789", "₀₁₂₃₄₅₆₇₈₉")
plt.xlabel('t-SNE1'.translate(sub))
plt.ylabel('t-SNE2'.translate(sub))
plt.title('t-SNE Plot of Topics within Positive Sentiment Corpus')
tsne_full = plt.scatter(x=tsne_lda_full[:,0], y=tsne_lda_full[:,1])
plt.show(tsne_full)
from collections import Counter
topics_full = lda_full.show_topics(formatted=False)
flatten_full = [w for w_list in bow_corpus_full for w in w_list]
counter_full = Counter(flatten_full)
topic_weight_full = []
for i, topic in topics_full:
for word, weight in topic:
topic_weight_full.append([word, i , weight, counter_full[word]])
data_frame_full = pd.DataFrame(topic_weight_full, columns=['word', 'topic_id', 'importance', 'word_count'])
fig, axes = plt.subplots(3, 3, figsize=(10,6), sharey=True, dpi=160)
for i, ax in enumerate(axes.flatten()):
ax.bar(x='word', height="word_count", data=data_frame_full.loc[data_frame_full.topic_id==i, :], color=colors[i], width=0.5, alpha=0.3, label='Word Count')
ax_twin = ax.twinx()
ax_twin.bar(x='word', height="importance", data=data_frame_full.loc[data_frame_full.topic_id==i, :], color=colors[i], width=0.2, label='Weights')
ax.set_ylabel('Word Count', color=colors[i])
ax_twin.set_ylim(0, 0.5); ax.set_ylim(0, 100)
ax.set_title('Topic: ' + str(i+1), color=colors[i], fontsize=8)
ax.tick_params(axis='y', left=False)
ax.set_xticklabels(data_frame_full.loc[data_frame_full.topic_id==i, 'word'], rotation=90, horizontalalignment= 'center')
ax.legend(loc='upper left'); ax_twin.legend(loc='upper right')
fig.tight_layout(w_pad=2)
plt.show()
from wordcloud import WordCloud
import matplotlib.colors as mcolors
cloud = WordCloud(stopwords=stoplist, background_color='white', width=2500, height=1800, max_words=7, colormap='tab10', color_func=lambda *args, **kwargs: colors[i], prefer_horizontal=1.0)
topics_full = lda_full.show_topics(formatted=False)
fig, axes = plt.subplots(3, 3, figsize=(10, 6))
for i, ax in enumerate(axes.flatten()):
fig.add_subplot(ax)
topic_words_full = dict(topics_full[i][1])
cloud.generate_from_frequencies(topic_words_full, max_font_size=300)
plt.gca().imshow(cloud)
plt.gca().set_title('Topic ' + str(i+1), fontdict=dict(size=10))
plt.gca().axis('off')
plt.axis('off')
plt.tight_layout()
plt.show()
import pyLDAvis.gensim
import pyLDAvis
import gensim
mallet2lda_full = gensim.models.wrappers.ldamallet.malletmodel2ldamodel(lda_full)
visualizeLDA_full = pyLDAvis.gensim.prepare(mallet2lda_full, bow_corpus_full, dictionary_full)
pyLDAvis.show()
| true | true |
f71a71c02c39541a49fbe5ad95d204ca99999495 | 1,129 | py | Python | migrations/versions/0076_add_intl_flag_to_provider.py | cds-snc/notifier-api | 90b385ec49efbaee7e607516fc7d9f08991af813 | [
"MIT"
] | 41 | 2019-11-28T16:58:41.000Z | 2022-01-28T21:11:16.000Z | migrations/versions/0076_add_intl_flag_to_provider.py | cds-snc/notification-api | b1c1064f291eb860b494c3fa65ac256ad70bf47c | [
"MIT"
] | 1,083 | 2019-07-08T12:57:24.000Z | 2022-03-08T18:53:40.000Z | migrations/versions/0076_add_intl_flag_to_provider.py | cds-snc/notifier-api | 90b385ec49efbaee7e607516fc7d9f08991af813 | [
"MIT"
] | 9 | 2020-01-24T19:56:43.000Z | 2022-01-27T21:36:53.000Z | """empty message
Revision ID: 0076_add_intl_flag_to_provider
Revises: 0075_create_rates_table
Create Date: 2017-04-25 09:44:13.194164
"""
# revision identifiers, used by Alembic.
revision = "0076_add_intl_flag_to_provider"
down_revision = "0075_create_rates_table"
import sqlalchemy as sa
from alembic import op
def upgrade():
    """Add a non-nullable ``supports_international`` boolean (server default
    false) to ``provider_details`` and ``provider_details_history``, then
    enable the flag for the ``mmg`` provider in both tables."""
    for table_name in ("provider_details", "provider_details_history"):
        op.add_column(
            table_name,
            sa.Column(
                "supports_international",
                sa.Boolean(),
                nullable=False,
                server_default=sa.false(),
            ),
        )
    # Backfill: switch the flag on for the 'mmg' provider.
    op.execute("UPDATE provider_details SET supports_international=True WHERE identifier='mmg'")
    op.execute("UPDATE provider_details_history SET supports_international=True WHERE identifier='mmg'")
def downgrade():
    """Drop the ``supports_international`` column from both provider tables."""
    for table_name in ("provider_details_history", "provider_details"):
        op.drop_column(table_name, "supports_international")
| 25.659091 | 104 | 0.675819 |
revision = "0076_add_intl_flag_to_provider"
down_revision = "0075_create_rates_table"
import sqlalchemy as sa
from alembic import op
def upgrade():
op.add_column(
"provider_details",
sa.Column(
"supports_international",
sa.Boolean(),
nullable=False,
server_default=sa.false(),
),
)
op.add_column(
"provider_details_history",
sa.Column(
"supports_international",
sa.Boolean(),
nullable=False,
server_default=sa.false(),
),
)
op.execute("UPDATE provider_details SET supports_international=True WHERE identifier='mmg'")
op.execute("UPDATE provider_details_history SET supports_international=True WHERE identifier='mmg'")
def downgrade():
op.drop_column("provider_details_history", "supports_international")
op.drop_column("provider_details", "supports_international")
| true | true |
f71a721c1a9432964c02aa7cb35a51f05080d90d | 1,983 | py | Python | openbb_terminal/cryptocurrency/onchain/whale_alert_view.py | joshuabuildsthings/GamestonkTerminal | 385d12803ae1725a22b0a440c3b88bffa974edcd | [
"MIT"
] | 255 | 2022-03-29T16:43:51.000Z | 2022-03-31T23:57:08.000Z | openbb_terminal/cryptocurrency/onchain/whale_alert_view.py | joshuabuildsthings/GamestonkTerminal | 385d12803ae1725a22b0a440c3b88bffa974edcd | [
"MIT"
] | 14 | 2022-03-29T14:20:33.000Z | 2022-03-31T23:39:20.000Z | openbb_terminal/cryptocurrency/onchain/whale_alert_view.py | joshuabuildsthings/GamestonkTerminal | 385d12803ae1725a22b0a440c3b88bffa974edcd | [
"MIT"
] | 24 | 2022-03-29T15:28:56.000Z | 2022-03-31T23:54:15.000Z | """Whale Alert view"""
__docformat__ = "numpy"
import logging
import os
from openbb_terminal.cryptocurrency.onchain import whale_alert_model
from openbb_terminal.decorators import check_api_key
from openbb_terminal.decorators import log_start_end
from openbb_terminal.helper_funcs import (
export_data,
lambda_long_number_format,
print_rich_table,
)
from openbb_terminal.rich_config import console
logger = logging.getLogger(__name__)
@log_start_end(log=logger)
@check_api_key(["API_WHALE_ALERT_KEY"])
def display_whales_transactions(
    min_value: int = 800000,
    top: int = 100,
    sortby: str = "date",
    descend: bool = False,
    show_address: bool = False,
    export: str = "",
) -> None:
    """Display huge value transactions from major blockchains. [Source: https://docs.whale-alert.io/]

    Parameters
    ----------
    min_value: int
        Minimum value of trade to track.
    top: int
        Limit of transactions. Maximum 100
    sortby: str
        Key to sort by.
    descend: bool
        Sort in descending order.
        NOTE(review): this flag is passed straight through as
        ``ascending=descend`` below, so ``True`` actually sorts ascending and
        the default ``False`` sorts descending -- confirm intended semantics
        with callers before renaming or inverting it.
    show_address: bool
        Flag to show addresses of transactions.
    export : str
        Export dataframe data to csv,json,xlsx file
    """
    df = whale_alert_model.get_whales_transactions(min_value)
    if df.empty:
        console.print("Failed to retrieve data.")
        return
    # Keep an unmodified copy for export before columns are dropped/formatted.
    df_data = df.copy()
    df = df.sort_values(by=sortby, ascending=descend)
    # Show either raw blockchain addresses or the human-readable owner columns,
    # never both.
    if not show_address:
        df = df.drop(["from_address", "to_address"], axis=1)
    else:
        df = df.drop(["from", "to", "blockchain"], axis=1)
    # Humanize the large numeric columns for display (e.g. 1200000 -> "1.2 M").
    for col in ["amount_usd", "amount"]:
        df[col] = df[col].apply(lambda x: lambda_long_number_format(x))
    print_rich_table(
        df.head(top),
        headers=list(df.columns),
        show_index=False,
        title="Large Value Transactions",
    )
    export_data(
        export,
        os.path.dirname(os.path.abspath(__file__)),
        "whales",
        df_data,
    )
| 25.101266 | 101 | 0.660111 | __docformat__ = "numpy"
import logging
import os
from openbb_terminal.cryptocurrency.onchain import whale_alert_model
from openbb_terminal.decorators import check_api_key
from openbb_terminal.decorators import log_start_end
from openbb_terminal.helper_funcs import (
export_data,
lambda_long_number_format,
print_rich_table,
)
from openbb_terminal.rich_config import console
logger = logging.getLogger(__name__)
@log_start_end(log=logger)
@check_api_key(["API_WHALE_ALERT_KEY"])
def display_whales_transactions(
    min_value: int = 800000,
    top: int = 100,
    sortby: str = "date",
    descend: bool = False,
    show_address: bool = False,
    export: str = "",
) -> None:
    """Display huge value transactions from major blockchains. [Source: https://docs.whale-alert.io/]

    Parameters
    ----------
    min_value: int
        Minimum value of trade to track.
    top: int
        Limit of transactions displayed (``df.head(top)``). Maximum 100.
    descend: bool
        Requested descending sort. NOTE(review): ``ascending=descend`` below
        looks inverted (descend=True yields an ascending sort) -- confirm intent.
    show_address: bool
        If True, keep from/to addresses and drop owner/blockchain columns.
    export : str
        Export dataframe data to csv,json,xlsx file.
    """
    df = whale_alert_model.get_whales_transactions(min_value)
    if df.empty:
        console.print("Failed to retrieve data.")
        return
    df_data = df.copy()
    df = df.sort_values(by=sortby, ascending=descend)
    if not show_address:
        df = df.drop(["from_address", "to_address"], axis=1)
    else:
        df = df.drop(["from", "to", "blockchain"], axis=1)
    # Humanize the numeric columns for display.
    for col in ["amount_usd", "amount"]:
        df[col] = df[col].apply(lambda x: lambda_long_number_format(x))
    print_rich_table(
        df.head(top),
        headers=list(df.columns),
        show_index=False,
        title="Large Value Transactions",
    )
    export_data(
        export,
        os.path.dirname(os.path.abspath(__file__)),
        "whales",
        df_data,
    )
| true | true |
f71a7324585ada53dbc92d0b00bc1d9b2653e2ad | 78,121 | py | Python | deepspeed/runtime/engine.py | LatencyTDH/DeepSpeed | eecef309cb12528cfa78d932a6f073afb43847e5 | [
"MIT"
] | 1 | 2021-04-21T01:14:32.000Z | 2021-04-21T01:14:32.000Z | deepspeed/runtime/engine.py | LatencyTDH/DeepSpeed | eecef309cb12528cfa78d932a6f073afb43847e5 | [
"MIT"
] | null | null | null | deepspeed/runtime/engine.py | LatencyTDH/DeepSpeed | eecef309cb12528cfa78d932a6f073afb43847e5 | [
"MIT"
] | null | null | null | '''
Copyright 2019 The Microsoft DeepSpeed Team
'''
import os
import stat
import torch
import warnings
import hashlib
import torch.distributed as dist
from collections import OrderedDict
from shutil import copyfile
from torch.nn.modules import Module
from torch.distributed.distributed_c10d import _get_global_rank
from tensorboardX import SummaryWriter
from deepspeed.runtime.utils import see_memory_usage
from deepspeed.runtime.zero.stage2 import FP16_DeepSpeedZeroOptimizer
from deepspeed.runtime.zero.stage1 import FP16_DeepSpeedZeroOptimizer_Stage1
from deepspeed.runtime.zero.partition_parameters import ZeroParamStatus
from deepspeed.runtime.zero.utils import is_zero_supported_optimizer
from deepspeed.runtime.activation_checkpointing import checkpointing as activation_checkpointing
from deepspeed.runtime.fp16.fused_optimizer import FP16_Optimizer
from deepspeed.runtime.fp16.unfused_optimizer import FP16_UnfusedOptimizer
from deepspeed.runtime.config import DeepSpeedConfig, DEEPSPEED_OPTIMIZERS, \
ADAM_OPTIMIZER, ADAMW_OPTIMIZER, LAMB_OPTIMIZER, ONEBIT_ADAM_OPTIMIZER, \
TORCH_ADAM_PARAM, ADAM_W_MODE, ADAM_W_MODE_DEFAULT
from deepspeed.runtime.dataloader import DeepSpeedDataLoader
from deepspeed.runtime.constants import \
ROUTE_TRAIN, ROUTE_PREDICT, ROUTE_EVAL, \
PLD_THETA, PLD_GAMMA
from deepspeed.runtime.zero.constants import \
ZERO_OPTIMIZATION_OPTIMIZER_STATES, ZERO_OPTIMIZATION_GRADIENTS, ZERO_OPTIMIZATION_WEIGHTS
from deepspeed.runtime.csr_tensor import CSRTensor
import deepspeed.runtime.lr_schedules as lr_schedules
from deepspeed.utils import logger, log_dist, init_distributed
from deepspeed.utils.timer import ThroughputTimer, SynchronizedWallClockTimer
from deepspeed.runtime.progressive_layer_drop import ProgressiveLayerDrop
from .pipe.module import PipelineModule
from .utils import ensure_directory_exists
from ..ops.op_builder import UtilsBuilder
from ..ops.adam import DeepSpeedCPUAdam
from ..ops.adam import FusedAdam
from deepspeed.profiling.flops_profiler.profiler import FlopsProfiler
# Default bucket size (in elements) for the buffered-allreduce fallback path
# (see allreduce_gradients / buffered_allreduce_fallback).
MEMORY_OPT_ALLREDUCE_SIZE = 500000000

try:
    from apex import amp
except ImportError:
    # Fail silently so we don't spam logs unnecessarily if user isn't using amp
    pass
def split_half_float_double_csr(tensors):
    """Bucket *tensors* by their dtype string.

    Returns a list of ``(type_name, tensors_of_that_type)`` pairs, covering
    CUDA half/float/double tensors plus the CSR sparse tensor type; empty
    buckets are omitted.
    """
    type_order = (
        "torch.cuda.HalfTensor",
        "torch.cuda.FloatTensor",
        "torch.cuda.DoubleTensor",
        CSRTensor.type(),
    )
    grouped = []
    for type_name in type_order:
        matching = [tensor for tensor in tensors if tensor.type() == type_name]
        if matching:
            grouped.append((type_name, matching))
    return grouped
def _initialize_parameter_parallel_groups(parameter_parallel_size=None):
    """Create the parameter-parallel process groups and return this rank's.

    Every rank must create all groups (a torch.distributed requirement for
    ``new_group``); only the group containing the caller's rank is returned.
    Defaults to one group spanning the whole world.
    """
    world_size = int(dist.get_world_size())
    if parameter_parallel_size is None:
        parameter_parallel_size = int(world_size)
    logger.info("data_parallel_size: %s, parameter_parallel_size: %s",
                world_size,
                parameter_parallel_size)
    assert world_size % parameter_parallel_size == 0, \
        'world size should be divisible by parameter parallel size'
    my_rank = dist.get_rank()
    my_group = None
    for group_id in range(dist.get_world_size() // parameter_parallel_size):
        members = range(group_id * parameter_parallel_size,
                        (group_id + 1) * parameter_parallel_size)
        group = torch.distributed.new_group(members)
        if my_rank in members:
            my_group = group
    return my_group
def print_configuration(args, name):
    """Log every attribute of *args*, one per line, dot-padded for alignment."""
    logger.info('{}:'.format(name))
    for attr_name in sorted(vars(args)):
        padding = '.' * (29 - len(attr_name))
        value = getattr(args, attr_name)
        logger.info(' {} {} {}'.format(attr_name, padding, value))
class DeepSpeedEngine(Module):
r"""DeepSpeed engine for training.
"""
    def __init__(self,
                 args,
                 model,
                 optimizer=None,
                 model_parameters=None,
                 training_data=None,
                 lr_scheduler=None,
                 mpu=None,
                 dist_init_required=None,
                 collate_fn=None,
                 config_params=None,
                 dont_change_device=False):
        """Wrap *model* for DeepSpeed training.

        Setup order matters: distributed init -> argument/config validation ->
        distributed model placement -> optimizer/scheduler -> checkpointing.

        Args:
            args: launcher namespace; may carry ``deepspeed_config``/``local_rank``.
            model: the torch module to train.
            optimizer: caller-supplied optimizer (otherwise built from config).
            model_parameters: parameters to optimize when DeepSpeed builds the
                optimizer.
            training_data: optional dataset wrapped by ``deepspeed_io``.
            lr_scheduler: caller-supplied scheduler (config scheduler wins).
            mpu: model-parallel unit providing topology accessors.
            dist_init_required: force/skip torch.distributed init; ``None``
                means "init only if not already initialized".
            collate_fn: collate function for the training dataloader.
            config_params: config dict used instead of a JSON file.
            dont_change_device: if True, do not move the module to this
                rank's device.
        """
        super(DeepSpeedEngine, self).__init__()
        self.dont_change_device = dont_change_device
        self.client_optimizer = optimizer
        self.client_model_parameters = model_parameters
        self.client_lr_scheduler = lr_scheduler
        self.training_data = training_data
        self.collate_fn = collate_fn
        self.mpu = mpu
        self.data_parallel_group = None
        self.global_steps = 0
        self.global_samples = 0
        self.micro_steps = 0
        self.skipped_steps = 0
        self.gradient_average = True
        self.warn_unscaled_loss = True
        self.config_params = config_params
        self.loaded_checkpoint_mp_world_size = None
        self.loaded_checkpoint_dp_world_size = None
        self.enable_backward_allreduce = True
        self.progressive_layer_drop = None
        self.dist_backend = "nccl"
        # Default: initialize torch.distributed only if nobody did it already.
        if dist_init_required is None:
            dist_init_required = not dist.is_initialized()
        if dist_init_required is False:
            assert dist.is_initialized() is True, "Torch distributed not initialized. Please set dist_init_required to True or initialize before calling deepspeed.initialize()"
        else:
            # Initialize torch distributed if needed
            init_distributed(dist_backend=self.dist_backend)
        see_memory_usage(f"DeepSpeed Engine: Before args sanity test")
        self._do_args_sanity_check(args)
        self._configure_with_arguments(args, mpu)
        self._do_sanity_check()
        if mpu is not None:
            assert not self.elasticity_enabled(), "Elasticity is not currently supported" \
                " with model parallelism."
        self._set_distributed_vars()
        # Only the global rank-0 process writes TensorBoard summaries.
        if self.tensorboard_enabled() and self.global_rank == 0:
            self.summary_writer = self.get_summary_writer()
        see_memory_usage(f"DeepSpeed Engine: Before configure distributed model")
        # Configure distributed model
        self._configure_distributed_model(model)
        see_memory_usage(f"DeepSpeed Engine: After configure distributed model")
        # Configure wall clock timer
        self.timers = SynchronizedWallClockTimer()
        # Throughput timer
        self.tput_timer = ThroughputTimer(
            batch_size=self.train_micro_batch_size_per_gpu(),
            num_workers=self.dp_world_size,
            steps_per_output=self.steps_per_print(),
            monitor_memory=False)
        if training_data:
            self.training_dataloader = self.deepspeed_io(training_data)
        else:
            self.training_dataloader = None
        # Configure optimizer and scheduler (only when there is something to
        # optimize: caller passed parameters or an optimizer instance).
        self.optimizer = None
        self.lr_scheduler = None
        if model_parameters or optimizer:
            self._configure_optimizer(optimizer, model_parameters)
            self._configure_lr_scheduler(lr_scheduler)
            self._report_progress(0)
        # Bookkeeping for csr support: embedding weights are converted to
        # sparse (csr) gradients when sparse_gradients is enabled.
        self.csr_tensor_module_names = set()
        if self.sparse_gradients_enabled():
            for name, module in self.module.named_modules():
                if isinstance(module, torch.nn.Embedding):
                    self.csr_tensor_module_names.add(name + ".weight")
                    logger.info("Will convert {} to sparse (csr) "
                                "tensor during training".format(name))
        self.save_non_zero_checkpoint = False
        self.save_zero_checkpoint = False
        self._configure_checkpointing(dist_init_required)
        if self.pld_enabled():
            self.progressive_layer_drop = self._configure_progressive_layer_drop()
        if self.global_rank == 0:
            self._config.print('DeepSpeedEngine configuration')
            if self.dump_state():
                print_configuration(self, 'DeepSpeedEngine')
        # Load pre-installed or JIT compile (un)flatten ops
        util_ops = UtilsBuilder().load()
        self.flatten = util_ops.flatten
        self.unflatten = util_ops.unflatten
def get_batch_info(self):
""" Get all training batch related settings.
Returns:
train_batch_size (int): The effective training batch size. This is the amount of data
samples that leads to one step of model update.
train_micro_batch_size_per_gpu (int): Batch size to be processed by one GPU in one
step (without gradient accumulation).
gradient_accumulation_steps (int): Number of training steps to accumulate gradients
before averaging and applying them.
"""
return self.train_batch_size, self.train_micro_batch_size_per_gpu, self.gradient_accumulation_steps
    # --- Read-only views over the parsed DeepSpeedConfig (self._config) -----

    def checkpoint_tag_validation_enabled(self):
        return self._config.checkpoint_tag_validation_enabled

    def checkpoint_tag_validation_fail(self):
        return self._config.checkpoint_tag_validation_fail

    def elasticity_enabled(self):
        return self._config.elasticity_enabled

    # Progressive layer drop (PLD) settings.
    def pld_enabled(self):
        return self._config.pld_enabled

    def pld_params(self):
        return self._config.pld_params

    def pld_theta(self):
        return self.pld_params()[PLD_THETA]

    def pld_gamma(self):
        return self.pld_params()[PLD_GAMMA]

    # TensorBoard settings.
    def tensorboard_enabled(self):
        return self._config.tensorboard_enabled

    def tensorboard_output_path(self):
        return self._config.tensorboard_output_path

    def tensorboard_job_name(self):
        return self._config.tensorboard_job_name
def get_summary_writer(self,
name="DeepSpeedJobName",
base=os.path.join(os.path.expanduser("~"),
"tensorboard")):
if self.tensorboard_output_path():
base_dir = self.tensorboard_output_path()
job_name = self.tensorboard_job_name()
log_dir = os.path.join(base_dir, job_name)
else:
if self.tensorboard_job_name():
name = self.tensorboard_job_name()
# Infrastructure-specific job-id
if 'DLWS_JOB_ID' in os.environ:
infra_job_id = os.environ['DLWS_JOB_ID']
elif 'DLTS_JOB_ID' in os.environ:
infra_job_id = os.environ['DLTS_JOB_ID']
else:
infra_job_id = 'unknown-job-id'
summary_writer_dir_name = os.path.join(infra_job_id, "logs")
log_dir = os.path.join(base, summary_writer_dir_name, name)
os.makedirs(log_dir, exist_ok=True)
return SummaryWriter(log_dir=log_dir)
    def wall_clock_breakdown(self):
        return self._config.wall_clock_breakdown

    # --- FLOPs profiler settings -------------------------------------------
    def flops_profiler_enabled(self):
        return self._config.flops_profiler_config.enabled

    def flops_profiler_profile_step(self):
        return self._config.flops_profiler_config.profile_step

    def flops_profiler_module_depth(self):
        return self._config.flops_profiler_config.module_depth

    def flops_profiler_top_modules(self):
        return self._config.flops_profiler_config.top_modules

    def flops_profiler_detailed(self):
        return self._config.flops_profiler_config.detailed

    def memory_breakdown(self):
        return self._config.memory_breakdown

    def sparse_gradients_enabled(self):
        return self._config.sparse_gradients_enabled

    # --- Batch / optimizer / scheduler settings ----------------------------
    def train_batch_size(self):
        return self._config.train_batch_size

    def train_micro_batch_size_per_gpu(self):
        return self._config.train_micro_batch_size_per_gpu

    # A caller-supplied optimizer takes precedence over the config name.
    def optimizer_name(self):
        return self.client_optimizer.__class__.__name__ if self.client_optimizer else self._config.optimizer_name

    def optimizer_params(self):
        return self._config.optimizer_params

    def optimizer_legacy_fusion(self):
        return self._config.optimizer_legacy_fusion

    def scheduler_name(self):
        return self._config.scheduler_name

    def scheduler_params(self):
        return self._config.scheduler_params

    # --- ZeRO settings -----------------------------------------------------
    def zero_optimization(self):
        return self._config.zero_enabled

    def zero_allow_untested_optimizer(self):
        return self._config.zero_allow_untested_optimizer

    def zero_reduce_scatter(self):
        return self._config.zero_config.reduce_scatter

    def zero_overlap_comm(self):
        return self._config.zero_config.overlap_comm

    def zero_offload_optimizer(self):
        return self._config.zero_config.offload_optimizer

    def zero_offload_param(self):
        return self._config.zero_config.offload_param

    # CPU offload is "on" whenever an optimizer offload config is present.
    def zero_cpu_offload(self):
        return self._config.zero_config.offload_optimizer is not None

    def zero_sub_group_size(self):
        return self._config.zero_config.sub_group_size

    def zero_optimization_stage(self):
        return self._config.zero_optimization_stage

    def zero_reduce_bucket_size(self):
        return self._config.zero_config.reduce_bucket_size

    def zero_allgather_bucket_size(self):
        return self._config.zero_config.allgather_bucket_size

    # Stage >= 2 partitions gradients; stage >= 3 also partitions weights.
    def zero_optimization_partition_gradients(self):
        return self.zero_optimization_stage() >= ZERO_OPTIMIZATION_GRADIENTS

    def zero_optimization_partition_weights(self):
        return self.zero_optimization_stage() >= ZERO_OPTIMIZATION_WEIGHTS

    def zero_contiguous_gradients(self):
        return self._config.zero_config.contiguous_gradients

    def zero_load_from_fp32_weights(self):
        return self._config.zero_config.load_from_fp32_weights

    def zero_elastic_checkpoint(self):
        return self._config.zero_config.elastic_checkpoint

    def zero_max_live_parameters(self):
        return self._config.zero_config.max_live_parameters

    def zero_max_reuse_distance(self):
        return self._config.zero_config.max_reuse_distance

    def zero_prefetch_bucket_size(self):
        return self._config.zero_config.prefetch_bucket_size

    def zero_param_persistence_threshold(self):
        return self._config.zero_config.param_persistence_threshold

    def zero_gather_fp16_weights_on_model_save(self):
        return self._config.zero_config.gather_fp16_weights_on_model_save

    # --- Precision / gradient handling settings ----------------------------
    def fp16_enabled(self):
        return self._config.fp16_enabled

    def amp_enabled(self):
        return self._config.amp_enabled

    def amp_params(self):
        return self._config.amp_params

    def loss_scale(self):
        return self._config.loss_scale

    def gradient_accumulation_steps(self):
        return self._config.gradient_accumulation_steps

    def allreduce_always_fp32(self):
        return self._config.allreduce_always_fp32

    def postscale_gradients(self):
        return not self._config.prescale_gradients

    def gradient_predivide_factor(self):
        return self._config.gradient_predivide_factor

    def steps_per_print(self):
        return self._config.steps_per_print

    def zero_allgather_partitions(self):
        return self._config.zero_allgather_partitions

    def dump_state(self):
        return self._config.dump_state

    def gradient_clipping(self):
        return self._config.gradient_clipping

    # A configured loss scale of 0 means "use dynamic loss scaling".
    def dynamic_loss_scale(self):
        return self._config.loss_scale == 0

    def initial_dynamic_scale(self):
        return self._config.initial_dynamic_scale

    def dynamic_loss_scale_args(self):
        return self._config.dynamic_loss_scale_args

    def swap_tensor_config(self):
        return self._config.swap_tensor_config

    def aio_config(self):
        return self._config.aio_config
def _configure_lr_scheduler(self, client_lr_scheduler):
# First check for scheduler in json configuration
lr_scheduler = self._scheduler_from_config(self.optimizer)
if lr_scheduler:
if self.global_rank == 0:
logger.info(
f'DeepSpeed using configured LR scheduler = {self.scheduler_name()}')
self.lr_scheduler = lr_scheduler
else:
if self.global_rank == 0:
logger.info('DeepSpeed using client LR scheduler')
self.lr_scheduler = client_lr_scheduler
log_dist(f'DeepSpeed LR Scheduler = {self.lr_scheduler}', ranks=[0])
def _configure_checkpointing(self, dist_init_required):
dp_rank = self.global_rank
if self.mpu:
dp_rank = self.mpu.get_data_parallel_rank()
# only the first data parallel process needs to store the model checkpoint
self.save_non_zero_checkpoint = (
dp_rank == 0) or self.zero_optimization_partition_weights()
if self.zero_optimization():
param_rank = torch.distributed.get_rank(
group=self.optimizer.dp_process_group)
# Only the first parameter parallel process needs to store the
# optimizer state checkpoints for zero
self.save_zero_checkpoint = (param_rank == dp_rank)
def _scheduler_from_config(self, optimizer):
scheduler_name = self.scheduler_name()
if scheduler_name is not None:
if hasattr(lr_schedules, scheduler_name):
scheduler = getattr(lr_schedules, scheduler_name)
else:
assert hasattr(torch.optim.lr_scheduler, scheduler_name), \
f"DeepSpeed does not recognize LR scheduler {scheduler_name}"
scheduler = getattr(torch.optim.lr_scheduler, scheduler_name)
scheduler_params = self.scheduler_params()
instantiated_scheduler = scheduler(optimizer, **scheduler_params)
return instantiated_scheduler
else:
return None
def _set_distributed_vars(self):
if self.local_rank >= 0:
torch.cuda.set_device(self.local_rank)
self.device = torch.device("cuda", self.local_rank)
self.world_size = dist.get_world_size()
self.global_rank = dist.get_rank()
else:
self.world_size = 1
self.global_rank = 0
self.device = torch.device("cuda")
# Configure based on command line arguments
def _configure_with_arguments(self, args, mpu):
# After the distributed backend is initialized we are guaranteed the LOCAL_RANK
# environment variable is set. We must align args.local_rank to this value for
# backwards compatability with scripts relying on [args|self].local_rank containing
# the correct local rank info. _do_args_sanity_check will ensure this is the case.
self.local_rank = int(os.environ['LOCAL_RANK'])
if hasattr(args, 'local_rank'):
args.local_rank = self.local_rank
config_file = args.deepspeed_config if hasattr(args,
'deepspeed_config') else None
self._config = DeepSpeedConfig(config_file, mpu, param_dict=self.config_params)
# Validate command line arguments
def _do_args_sanity_check(self, args):
if hasattr(args, 'deepscale_config') and args.deepscale_config is not None:
logger.warning(
"************ --deepscale_config is deprecated, please use --deepspeed_config ************"
)
if hasattr(args, 'deepspeed_config'):
assert args.deepspeed_config is None, "Not sure how to proceed, we were given both a deepscale_config and deepspeed_config"
args.deepspeed_config = args.deepscale_config
assert "LOCAL_RANK" in os.environ, "DeepSpeed requires the LOCAL_RANK environment variable, it is set by the deepspeed launcher, " \
"deepspeed.init_distributed, or the torch.distributed launcher. If using a different launcher please ensure LOCAL_RANK is set prior to initializing deepspeed."
if hasattr(args, 'local_rank') and args.local_rank != None:
assert isinstance(args.local_rank, int), f"args.local_rank of {args.local_rank} is an unknown type {type(args.local_rank)}"
if args.local_rank >= 0:
env_local_rank = int(os.environ.get("LOCAL_RANK"))
assert env_local_rank == args.local_rank, \
f"Mismatch in local rank setting, args.local_rank={args.local_rank} but env['LOCAL_RANK']={env_local_rank}."
if self.config_params is None:
assert hasattr(args, 'deepspeed_config') and args.deepspeed_config is not None, \
'DeepSpeed requires --deepspeed_config to specify configuration file'
assert os.path.isfile(args.deepspeed_config), \
'DeepSpeed configuration file: {} is not an existing file'.format(args.deepspeed_config)
def _is_supported_optimizer(self, optimizer_name):
return optimizer_name in DEEPSPEED_OPTIMIZERS or \
getattr(torch.optim, optimizer_name, None) is not None
# Validate configuration based on command line arguments
def _do_sanity_check(self):
if not self.client_optimizer:
if self.optimizer_name() is not None:
assert self._is_supported_optimizer(self.optimizer_name()), \
'{} is not a supported DeepSpeed Optimizer'.format(self.optimizer_name())
if self.optimizer_name() == LAMB_OPTIMIZER:
assert self.dynamic_loss_scale(), \
'DeepSpeed {} optimizer requires dynamic loss scaling'.format(self.optimizer_name())
def _broadcast_model(self):
def is_replicated(p):
if hasattr(p, 'ds_status') and p.ds_status is not ZeroParamStatus.AVAILABLE:
return False
return True
for p in self.module.parameters():
if torch.is_tensor(p) and is_replicated(p):
dist.broadcast(p,
self.broadcast_src_rank,
group=self.data_parallel_group)
    def _configure_distributed_model(self, model):
        """Place *model* on this rank's device and record data-parallel state.

        Sets ``self.module``, the data-parallel group/world sizes and the
        broadcast source rank, then broadcasts the initial parameters so all
        data-parallel replicas match. The broadcast is skipped under apex amp,
        which instead broadcasts after ``amp.initialize`` (see
        ``_configure_optimizer``).
        """
        self.module = model
        if self.fp16_enabled():
            # The fp16 cast happens before the device move below.
            self.module.half()
        if not self.dont_change_device:
            self.module.to(self.device)
        if self.mpu is None:
            # Pure data parallelism: one group spanning the whole world.
            self.data_parallel_group = _initialize_parameter_parallel_groups()
            self.dp_world_size = dist.get_world_size()
            self.mp_world_size = 1
            self.broadcast_src_rank = 0
        else:
            # Model parallelism: topology comes from the caller-provided mpu.
            self.data_parallel_group = self.mpu.get_data_parallel_group()
            self.dp_world_size = self.mpu.get_data_parallel_world_size()
            self.mp_world_size = self.mpu.get_model_parallel_world_size()
            self.broadcast_src_rank = _get_global_rank(
                self.mpu.get_data_parallel_group(),
                0)
        if not self.amp_enabled():
            self._broadcast_model()
# Configure optimizer
    def _configure_optimizer(self, client_optimizer, model_parameters):
        """Build the final training optimizer onto ``self.optimizer``.

        Starts from the caller-supplied ``client_optimizer`` (if any) or a
        DeepSpeed-constructed basic optimizer, then wraps it for ZeRO, apex
        amp, or (legacy) fp16 according to the configuration. For amp,
        ``self.module`` may also be replaced by ``amp.initialize``.
        """
        if client_optimizer is not None:
            # Drop param groups that carry no parameters; downstream wrappers
            # cannot handle empty groups.
            client_optimizer.param_groups[:] = [
                pg for pg in client_optimizer.param_groups if len(pg["params"]) != 0
            ]
            if self.global_rank == 0:
                logger.info(
                    "Removing param_group that has no 'params' in the client Optimizer")
            basic_optimizer = client_optimizer
            if self.global_rank == 0:
                logger.info('Using client Optimizer as basic optimizer')
        else:
            basic_optimizer = self._configure_basic_optimizer(model_parameters)
            if self.global_rank == 0:
                logger.info(
                    'Using DeepSpeed Optimizer param name {} as basic optimizer'.format(
                        self.optimizer_name()))
        if self.global_rank == 0:
            logger.info('DeepSpeed Basic Optimizer = {}'.format(
                basic_optimizer.__class__.__name__))
        if self.zero_optimization():
            assert not self.amp_enabled(), "Amp and ZeRO are not currently compatible, please use (legacy) fp16 mode which performs similar to amp opt_mode=O2"
            if not is_zero_supported_optimizer(basic_optimizer):
                assert self.zero_allow_untested_optimizer(), \
                    'You are using an untested ZeRO Optimizer. Please add <"zero_allow_untested_optimizer": true> in the configuration file to use it.'
                if self.global_rank == 0:
                    logger.warning(
                        "**** You are using ZeRO with an untested optimizer, proceed with caution *****"
                    )
            self.optimizer = self._configure_zero_optimizer(basic_optimizer)
        elif self.amp_enabled():
            assert not self.fp16_enabled(), "Cannot enable both amp with (legacy) fp16 mode"
            amp_params = self.amp_params()
            if self.global_rank == 0:
                logger.info(f"Initializing AMP with these params: {amp_params}")
            try:
                logger.info("Initializing Apex amp from: {}".format(amp.__path__))
            except NameError:
                # If apex/amp is available it will be imported above
                raise RuntimeError(
                    "Unable to import apex/amp, please make sure it is installed")
            self.module, self.optimizer = amp.initialize(self.module, basic_optimizer, **amp_params)
            # _broadcast_model was skipped in _configure_distributed_model when
            # amp is enabled; do it now that amp.initialize has run.
            self._broadcast_model()
        elif self.fp16_enabled():
            self.optimizer = self._configure_fp16_optimizer(basic_optimizer)
        else:
            self.optimizer = basic_optimizer
        log_dist('DeepSpeed Final Optimizer = {}'.format(self.optimizer_name()),
                 ranks=[0])
    def _configure_basic_optimizer(self, model_parameters):
        """Instantiate the underlying optimizer named in the JSON config.

        Dispatch:
          * Adam/AdamW -> torch.optim.{Adam,AdamW} when ``torch_adam`` is set,
            DeepSpeedCPUAdam when optimizer state is offloaded to CPU,
            otherwise FusedAdam.
          * Lamb -> FusedLamb;  OneBitAdam -> OnebitAdam.
          * anything else -> the same-named class from ``torch.optim``.

        Raises:
            ValueError: if the config carries 'max_grad_norm' (superseded by
                the 'gradient_clipping' setting).
        """
        optimizer_parameters = self.optimizer_params()
        if 'max_grad_norm' in optimizer_parameters.keys():
            raise ValueError(
                "'max_grad_norm' is not supported as an optimizer parameter, please switch to using the deepspeed parameter 'gradient_clipping' see: https://www.deepspeed.ai/docs/config-json/#gradient-clipping for more details"
            )
        if self.optimizer_name() in [ADAM_OPTIMIZER, ADAMW_OPTIMIZER]:
            # These two keys steer dispatch only; pop them so they are not
            # forwarded as optimizer kwargs.
            torch_adam = optimizer_parameters.pop(TORCH_ADAM_PARAM, False)
            adam_w_mode = optimizer_parameters.pop(ADAM_W_MODE, ADAM_W_MODE_DEFAULT)
            # Optimizer name of Adam forces AdamW logic unless adam_w_mode is explicitly set
            effective_adam_w_mode = self.optimizer_name(
            ) == ADAMW_OPTIMIZER or adam_w_mode
            if torch_adam:
                if not effective_adam_w_mode:
                    optimizer = torch.optim.Adam(model_parameters,
                                                 **optimizer_parameters)
                else:
                    optimizer = torch.optim.AdamW(model_parameters,
                                                  **optimizer_parameters)
            else:
                if self.zero_cpu_offload():
                    from deepspeed.ops.adam import DeepSpeedCPUAdam
                    optimizer = DeepSpeedCPUAdam(model_parameters,
                                                 **optimizer_parameters,
                                                 adamw_mode=effective_adam_w_mode)
                else:
                    from deepspeed.ops.adam import FusedAdam
                    optimizer = FusedAdam(model_parameters,
                                          **optimizer_parameters,
                                          adam_w_mode=effective_adam_w_mode)
        elif self.optimizer_name() == LAMB_OPTIMIZER:
            from deepspeed.ops.lamb import FusedLamb
            optimizer = FusedLamb(model_parameters, **optimizer_parameters)
        elif self.optimizer_name() == ONEBIT_ADAM_OPTIMIZER:
            from deepspeed.runtime.fp16.onebit.adam import OnebitAdam
            optimizer = OnebitAdam(model_parameters, self, **optimizer_parameters)
            if not self.fp16_enabled():
                logger.warning(
                    f'Currently the convergence of 1-bit Adam is only verified under FP16'
                )
        else:
            torch_optimizer = getattr(torch.optim, self.optimizer_name())
            optimizer = torch_optimizer(model_parameters, **optimizer_parameters)
        return optimizer
    def _configure_fp16_optimizer(self, optimizer):
        """Wrap *optimizer* for (legacy) fp16 training.

        FusedAdam and 1-bit Adam get the fused ``FP16_Optimizer`` wrapper
        (dynamic or static loss scaling per the config); every other
        optimizer gets ``FP16_UnfusedOptimizer``.
        """
        initial_dynamic_scale = self.initial_dynamic_scale()
        dynamic_loss_args = self.dynamic_loss_scale_args()
        clip_grad = self.gradient_clipping()
        if isinstance(optimizer,
                      FusedAdam) or self.optimizer_name() == ONEBIT_ADAM_OPTIMIZER:
            if self.dynamic_loss_scale():
                log_dist('Creating fp16 optimizer with dynamic loss scale', ranks=[0])
                timers = self.timers if self.wall_clock_breakdown() else None
                optimizer = FP16_Optimizer(
                    optimizer,
                    dynamic_loss_scale=True,
                    initial_dynamic_scale=initial_dynamic_scale,
                    dynamic_loss_args=dynamic_loss_args,
                    mpu=self.mpu,
                    clip_grad=clip_grad,
                    fused_adam_legacy=self.optimizer_legacy_fusion(),
                    timers=timers)
            else:
                log_dist('Creating fp16 optimizer with static loss scale: {}'.format(
                    self.loss_scale()),
                         ranks=[0])
                optimizer = FP16_Optimizer(
                    optimizer,
                    static_loss_scale=self.loss_scale(),
                    mpu=self.mpu,
                    clip_grad=clip_grad,
                    fused_adam_legacy=self.optimizer_legacy_fusion())
        else:
            # NOTE(review): this log line claims dynamic loss scale even when
            # the config requests a static scale -- the wrapper below receives
            # both settings, so only the message may be misleading.
            log_dist('Creating fp16 unfused optimizer with dynamic loss scale',
                     ranks=[0])
            optimizer = FP16_UnfusedOptimizer(
                optimizer,
                static_loss_scale=self.loss_scale(),
                dynamic_loss_scale=self.dynamic_loss_scale(),
                dynamic_loss_args=dynamic_loss_args,
                mpu=self.mpu,
                clip_grad=clip_grad,
                fused_lamb_legacy=self.optimizer_name() == LAMB_OPTIMIZER)
        return optimizer
    def _configure_zero_optimizer(self, optimizer):
        """Wrap *optimizer* in the ZeRO optimizer for the configured stage.

        Stage 1 partitions optimizer states, stage 2 also partitions
        gradients, stage 3 also partitions weights (and supports offload).

        Raises:
            NotImplementedError: for any other stage value.
        """
        zero_stage = self.zero_optimization_stage()
        log_dist('Creating fp16 ZeRO stage {} optimizer'.format(zero_stage), ranks=[0])
        assert not self.allreduce_always_fp32(), "ZeRO does not support 'fp32_allreduce': true"
        timers = self.timers if self.wall_clock_breakdown() else None
        if zero_stage == ZERO_OPTIMIZATION_OPTIMIZER_STATES:
            assert self.zero_reduce_scatter(), 'Stage 1 only supports reduce scatter mode'
            optimizer = FP16_DeepSpeedZeroOptimizer_Stage1(
                optimizer,
                static_loss_scale=self.loss_scale(),
                dynamic_loss_scale=self.dynamic_loss_scale(),
                dynamic_loss_args=self.dynamic_loss_scale_args(),
                clip_grad=self.gradient_clipping(),
                all_gather_partitions=self.zero_allgather_partitions(),
                allgather_size=self.zero_allgather_bucket_size(),
                max_elements_per_comm=self.zero_reduce_bucket_size(),
                dp_process_group=self.data_parallel_group,
                elastic_checkpoint=self.zero_elastic_checkpoint(),
                mpu=self.mpu)
        elif zero_stage == ZERO_OPTIMIZATION_GRADIENTS:
            optimizer = FP16_DeepSpeedZeroOptimizer(
                optimizer,
                timers=timers,
                static_loss_scale=self.loss_scale(),
                dynamic_loss_scale=self.dynamic_loss_scale(),
                dynamic_loss_args=self.dynamic_loss_scale_args(),
                clip_grad=self.gradient_clipping(),
                contiguous_gradients=self.zero_contiguous_gradients(),
                reduce_bucket_size=self.zero_reduce_bucket_size(),
                allgather_bucket_size=self.zero_allgather_bucket_size(),
                dp_process_group=self.data_parallel_group,
                reduce_scatter=self.zero_reduce_scatter(),
                overlap_comm=self.zero_overlap_comm(),
                cpu_offload=self.zero_cpu_offload(),
                mpu=self.mpu,
                postscale_gradients=self.postscale_gradients(),
                gradient_predivide_factor=self.gradient_predivide_factor(),
                gradient_accumulation_steps=self.gradient_accumulation_steps())
        elif zero_stage == ZERO_OPTIMIZATION_WEIGHTS:
            print("Initializing ZeRO Stage 3") if dist.get_rank() == 0 else None
            # Imported lazily: stage 3 is only loaded when actually requested.
            from deepspeed.runtime.zero.stage3 import FP16_DeepSpeedZeroOptimizer_Stage3
            optimizer = FP16_DeepSpeedZeroOptimizer_Stage3(
                self.module,
                optimizer,
                timers=timers,
                static_loss_scale=self.loss_scale(),
                dynamic_loss_scale=self.dynamic_loss_scale(),
                dynamic_loss_args=self.dynamic_loss_scale_args(),
                clip_grad=self.gradient_clipping(),
                contiguous_gradients=self.zero_contiguous_gradients(),
                reduce_bucket_size=self.zero_reduce_bucket_size(),
                prefetch_bucket_size=self.zero_prefetch_bucket_size(),
                max_reuse_distance=self.zero_max_reuse_distance(),
                max_live_parameters=self.zero_max_live_parameters(),
                param_persistence_threshold=self.zero_param_persistence_threshold(),
                dp_process_group=self.data_parallel_group,
                reduce_scatter=self.zero_reduce_scatter(),
                overlap_comm=self.zero_overlap_comm(),
                offload_optimizer_config=self.zero_offload_optimizer(),
                offload_param_config=self.zero_offload_param(),
                sub_group_size=self.zero_sub_group_size(),
                mpu=self.mpu,
                postscale_gradients=self.postscale_gradients(),
                gradient_predivide_factor=self.gradient_predivide_factor(),
                gradient_accumulation_steps=self.gradient_accumulation_steps(),
                aio_config=self.aio_config())
        else:
            raise NotImplementedError("ZeRO stage {} not implemented".format(zero_stage))
        return optimizer
def _configure_progressive_layer_drop(self):
pld = ProgressiveLayerDrop(theta=self.pld_theta(), gamma=self.pld_gamma())
return pld
def deepspeed_io(self,
dataset,
batch_size=None,
route=ROUTE_TRAIN,
pin_memory=True,
data_sampler=None,
collate_fn=None,
num_local_io_workers=None):
if not isinstance(dataset, torch.utils.data.Dataset):
raise ValueError("Training data must be a torch Dataset")
if data_sampler is None and (route == ROUTE_PREDICT or route == ROUTE_EVAL):
data_sampler = torch.utils.data.SequentialSampler(dataset)
if batch_size is None:
batch_size = self.train_micro_batch_size_per_gpu()
if collate_fn is None:
collate_fn = self.collate_fn
# Currently we only use timer in train route
deepspeed_io_timer = None
if route == ROUTE_TRAIN:
deepspeed_io_timer = self.tput_timer
# If mpu is provied, forward world size and parallel rank to sampler.
data_parallel_world_size = None
data_parallel_rank = None
if self.mpu is not None:
data_parallel_world_size = self.mpu.get_data_parallel_world_size()
data_parallel_rank = self.mpu.get_data_parallel_rank()
return DeepSpeedDataLoader(dataset=dataset,
batch_size=batch_size,
pin_memory=pin_memory,
collate_fn=collate_fn,
local_rank=self.local_rank,
tput_timer=deepspeed_io_timer,
num_local_io_workers=num_local_io_workers,
data_sampler=data_sampler,
data_parallel_world_size=data_parallel_world_size,
data_parallel_rank=data_parallel_rank)
    def train(self, mode=True):
        r"""Put the wrapped module into training mode (eval mode if *mode* is
        False) and re-arm the one-shot unscaled-loss warning used by
        ``_scale_loss``.
        """
        self.warn_unscaled_loss = True
        self.module.train(mode)
    def eval(self):
        r"""Put the wrapped module into evaluation mode and re-arm the
        one-shot unscaled-loss warning used by ``_scale_loss``.
        """
        self.warn_unscaled_loss = True
        self.module.train(False)
def _scale_loss(self, prescaled_loss):
if isinstance(prescaled_loss, torch.Tensor):
scaled_loss = prescaled_loss / self.gradient_accumulation_steps()
elif isinstance(prescaled_loss, tuple) or isinstance(prescaled_loss, list):
scaled_loss = []
for l in prescaled_loss:
if isinstance(l, torch.Tensor):
scaled_loss.append(l / self.gradient_accumulation_steps())
else:
scaled_loss.append(l)
else:
scaled_loss = prescaled_loss
if self.warn_unscaled_loss:
logger.warning(
f'DeepSpeed unable to scale loss because of type: {type(prescaled_loss)}'
)
self.warn_unscaled_loss = False
return scaled_loss
    def forward(self, *inputs, **kwargs):
        r"""Execute forward propagation

        Runs the wrapped module, with optional hooks: FLOPs profiling on the
        configured step (rank 0 only), progressive-layer-drop state injection
        during training, ZeRO-3 external-parameter discovery, and wall-clock
        timing.

        Arguments:
            *inputs: Variable length input list
            **kwargs: variable length keyword arguments
        """
        if self.flops_profiler_enabled(
        ) and self.global_steps == self.flops_profiler_profile_step(
        ) and self.global_rank == 0:
            self.flops_profiler = FlopsProfiler(self.module)
            self.flops_profiler.start_profile(ignore_list=None)
        if self.module.training and self.progressive_layer_drop:
            # PLD passes its current keep-probability state through kwargs.
            kwargs.update(self.progressive_layer_drop.get_state())
        if self.zero_optimization_partition_weights():
            # Enable automated discovery of external parameters by indicating that
            # we are in a forward pass.
            for module in self.module.modules():
                module._parameters._in_forward = True
                pass
        if self.wall_clock_breakdown():
            self.timers('forward_microstep').start()
            self.timers('forward').start()
        if self.training_dataloader is None:
            self.tput_timer.start()
        loss = self.module(*inputs, **kwargs)
        if self.zero_optimization_partition_weights():
            # Reset the ZeRO-3 state if we are only doing forward-passes (ie evaluation).
            if not torch._C.is_grad_enabled():
                self.optimizer.param_coordinator.reset_step()
            # Disable automated discovery of external parameters
            for module in self.module.modules():
                module._parameters._in_forward = False
        if self.wall_clock_breakdown():
            self.timers('forward').stop()
            self.timers('forward_microstep').stop()
        if self.flops_profiler_enabled(
        ) and self.global_steps == self.flops_profiler_profile_step(
        ) and self.global_rank == 0:
            self.flops_profiler.print_model_profile(
                profile_step=self.global_steps,
                module_depth=self.flops_profiler_module_depth(),
                top_modules=self.flops_profiler_top_modules(),
                detailed=self.flops_profiler_detailed())
            self.flops_profiler.end_profile()
        return loss
def allreduce_gradients(self, bucket_size=MEMORY_OPT_ALLREDUCE_SIZE):
    """Reduce gradients across data-parallel ranks.

    The strategy depends on the ZeRO configuration: stage >= 2 runs the
    overlapped partition-reduce epilogue; stage 1 reduce-scatters at
    accumulation boundaries; otherwise a bucketed all-reduce fallback is used.

    Arguments:
        bucket_size: element count per fallback all-reduce bucket.
    """
    #Zero stage 2 communicates during non gradient accumulation boundaries as well
    if self.zero_optimization_partition_gradients():
        self.optimizer.overlapping_partition_gradients_reduce_epilogue()

    #Communicate only at gradient accumulation boundaries
    elif self.is_gradient_accumulation_boundary():
        if self.zero_optimization_stage() == ZERO_OPTIMIZATION_OPTIMIZER_STATES:
            # ZeRO-1 keeps only a partition of optimizer states per rank, so
            # gradients are reduce-scattered rather than fully all-reduced.
            assert self.zero_reduce_scatter()
            self.optimizer.reduce_scatter_gradients(
                postscale_gradients=self.postscale_gradients(),
                gradient_predivide_factor=self.gradient_predivide_factor(),
                gradient_average=self.gradient_average)
        else:
            self.buffered_allreduce_fallback(elements_per_buffer=bucket_size)
def backward(self, loss, allreduce_gradients=True, release_loss=False):
    r"""Execute backward pass on the loss

    Arguments:
        loss: Torch tensor on which to execute backward propagation
        allreduce_gradients: is deprecated, ignored, and will soon be removed'
        release_loss: currently a no-op — the release branch contains only
            commented-out code.

    Returns:
        the (possibly accumulation-scaled) loss tensor.
    """
    if not allreduce_gradients:
        logger.warning(
            f'Argument `allreduce_gradients` is deprecated, ignored, and will soon be removed'
        )

    # scale loss w.r.t. gradient accumulation if needed
    if self.gradient_accumulation_steps() > 1:
        loss = self._scale_loss(loss.float())

    # Log training Loss
    if self.tensorboard_enabled():
        if self.is_gradient_accumulation_boundary():
            if self.global_rank == 0:
                # Multiply back by the accumulation steps so the logged value
                # is the unscaled loss.
                self.summary_events = [
                    (f'Train/Samples/train_loss',
                     loss.mean().item() * self.gradient_accumulation_steps(),
                     self.global_samples)
                ]
                for event in self.summary_events:  # write_summary_events
                    self.summary_writer.add_scalar(event[0], event[1], event[2])
                self.summary_writer.flush()

    if self.wall_clock_breakdown():
        self.timers('backward_microstep').start()
        self.timers('backward').start()

    assert self.optimizer is not None, "must provide optimizer during " \
        "init in order to use backward"

    if self.wall_clock_breakdown():
        self.timers('backward_inner_microstep').start()
        self.timers('backward_inner').start()

    # Dispatch the backward pass to whichever wrapper owns loss scaling.
    if self.zero_optimization():
        self.optimizer.is_gradient_accumulation_boundary = self.is_gradient_accumulation_boundary(
        )
        self.optimizer.backward(loss)
    elif self.amp_enabled():
        # AMP requires delaying unscale when inside gradient accumulation boundaries
        # https://nvidia.github.io/apex/advanced.html#gradient-accumulation-across-iterations
        delay_unscale = not self.is_gradient_accumulation_boundary()
        with amp.scale_loss(loss,
                            self.optimizer,
                            delay_unscale=delay_unscale) as scaled_loss:
            scaled_loss.backward()
    elif self.fp16_enabled():
        self.optimizer.backward(loss)
    else:
        loss.backward()

    if self.wall_clock_breakdown():
        self.timers('backward_inner').stop()
        self.timers('backward_inner_microstep').stop()

    if self.wall_clock_breakdown():
        self.timers('backward_allreduce_microstep').start()
        self.timers('backward_allreduce').start()

    # Gradient reduction across data-parallel ranks (unless disabled, e.g.
    # by pipeline-parallel schedules that reduce elsewhere).
    if self.enable_backward_allreduce:
        self.allreduce_gradients()

    if self.wall_clock_breakdown():
        self.timers('backward_allreduce').stop()
        self.timers('backward_allreduce_microstep').stop()
        self.timers('backward').stop()
        self.timers('backward_microstep').stop()

    if release_loss:
        # loss.data = None
        pass

    return loss
def is_gradient_accumulation_boundary(self):
    """Query whether the current micro-batch is at the boundary of
    gradient accumulation, and thus will trigger gradient reductions and
    an optimizer step.

    Returns:
        bool: if the current step is a gradient accumulation boundary.
    """
    steps_per_update = self.gradient_accumulation_steps()
    return (self.micro_steps + 1) % steps_per_update == 0
def zero_grad(self):
    """
    Zero parameter grads.

    Gradients are released by setting ``.grad`` to ``None`` (rather than
    zeroing in place), which frees the gradient buffers immediately.
    """
    # Iterate parameters directly; the original bound an unused
    # ``param_name`` via named_parameters().
    for param in self.module.parameters():
        param.grad = None
def clip_fp32_gradients(self):
    """Clip the wrapped module's fp32 gradients to the configured max norm."""
    max_norm = self.gradient_clipping()
    torch.nn.utils.clip_grad_norm_(parameters=self.module.parameters(),
                                   max_norm=max_norm)
def _take_model_step(self, lr_kwargs):
    """Apply one optimizer step: clip gradients, step, zero grads, and
    advance the LR schedule and step counters.

    Arguments:
        lr_kwargs: optional dict of keyword args forwarded to lr_scheduler.step().
    """
    if self.gradient_clipping() > 0.0:
        if not self.fp16_enabled() and not self.amp_enabled():
            self.clip_fp32_gradients()
        elif self.amp_enabled():
            # AMP's recommended way of doing clipping
            # https://nvidia.github.io/apex/advanced.html#gradient-clipping
            master_params = amp.master_params(self.optimizer)
            torch.nn.utils.clip_grad_norm_(parameters=master_params,
                                           max_norm=self.gradient_clipping())
        # NOTE(review): fp16 (non-amp) clipping is presumably handled inside
        # the fp16 optimizer wrapper — no clipping happens here for it.
    self.optimizer.step()

    #zero grad in basic optimizer could be unreliable and may not exhibit
    #the behaviour that we want
    if not self.zero_optimization() and not self.fp16_enabled(
    ) and not self.amp_enabled():
        self.zero_grad()
    else:
        self.optimizer.zero_grad()

    # Evaluates to (global_rank == 0): rank 0 is falsy so the `else True`
    # branch fires there; non-zero ranks compare themselves against 0.
    report_progress = self.global_rank == 0 if self.global_rank else True

    # Check overlow here since in DS fp16 optimizer, the overflow is updated in above step() function.
    overflow = False
    if hasattr(self.optimizer, 'overflow'):
        overflow = self.optimizer.overflow

    if overflow:
        # Skipped step: do not advance the LR schedule on overflow.
        self.skipped_steps += 1
    else:
        if self.lr_scheduler is not None:
            self.lr_scheduler.step(**(lr_kwargs or {}))

    if report_progress and (self.global_steps + 1) % self.steps_per_print() == 0:
        self._report_progress(self.global_steps + 1)

    self.global_steps += 1
    self.global_samples += self.train_batch_size()
def step(self, lr_kwargs=None):
    r"""Execute the weight update step after forward and backward propagation
    on effective_train_batch.

    Arguments:
        lr_kwargs: optional keyword args forwarded to the LR scheduler.
    """
    if self.wall_clock_breakdown():
        self.timers('step_microstep').start()
        self.timers('step').start()

    assert self.optimizer is not None, "must provide optimizer during " \
        "init in order to use step"
    # Evaluates to (global_rank == 0); see _take_model_step for the same idiom.
    report_progress = self.global_rank == 0 if self.global_rank else True

    # Update the model when we reach gradient accumulation boundaries
    if self.is_gradient_accumulation_boundary():
        if self.progressive_layer_drop:
            self.progressive_layer_drop.update_state(self.global_steps)
        self._take_model_step(lr_kwargs)

    self.tput_timer.stop(report_progress)

    # Log learning rate
    if self.tensorboard_enabled():
        if self.is_gradient_accumulation_boundary():
            if self.global_rank == 0:
                self.summary_events = [(f'Train/Samples/lr',
                                        self.get_lr()[0],
                                        self.global_samples)]
                for event in self.summary_events:  # write_summary_events
                    self.summary_writer.add_scalar(event[0], event[1], event[2])
                if self.fp16_enabled() and hasattr(self.optimizer, 'cur_scale'):
                    self.summary_events.append((f'Train/Samples/loss_scale',
                                                self.optimizer.cur_scale,
                                                self.global_samples))
                # NOTE(review): this second loop re-iterates the full list,
                # so the lr event above is written twice when fp16 is active.
                for event in self.summary_events:  # write_summary_events
                    self.summary_writer.add_scalar(event[0], event[1], event[2])
                self.summary_writer.flush()

    if self.wall_clock_breakdown():
        self.timers('step').stop()
        self.timers('step_microstep').stop()
        timer_names = [
            'forward_microstep',
            'backward_microstep',
            'backward_inner_microstep',
            'backward_allreduce_microstep',
            'step_microstep'
        ]
        self.timers.log(names=timer_names, memory_breakdown=self.memory_breakdown())

    # Log timing
    if self.is_gradient_accumulation_boundary():
        if self.tensorboard_enabled():
            if self.global_rank == 0:
                self.summary_events = [
                    (f'Train/Samples/elapsed_time_ms_forward',
                     self.timers('forward').elapsed(reset=False) * 1000.0,
                     self.global_samples),
                    (f'Train/Samples/elapsed_time_ms_backward',
                     self.timers('backward').elapsed(reset=False) * 1000.0,
                     self.global_samples),
                    (f'Train/Samples/elapsed_time_ms_backward_inner',
                     self.timers('backward_inner').elapsed(reset=False) * 1000.0,
                     self.global_samples),
                    (f'Train/Samples/elapsed_time_ms_backward_allreduce',
                     self.timers('backward_allreduce').elapsed(reset=False) *
                     1000.0,
                     self.global_samples),
                    (f'Train/Samples/elapsed_time_ms_step',
                     self.timers('step').elapsed(reset=False) * 1000.0,
                     self.global_samples)
                ]
                for event in self.summary_events:  # write_summary_events
                    self.summary_writer.add_scalar(event[0], event[1], event[2])
                self.summary_writer.flush()

        if self.wall_clock_breakdown():
            self.timers.log([
                'forward',
                'backward',
                'backward_inner',
                'backward_allreduce',
                'step'
            ])

    self.micro_steps += 1
def _get_optimizer_param(self, param_name):
result = []
if not self.optimizer:
return result
for group in self.optimizer.param_groups:
if param_name in group:
result.append(group[param_name])
else:
result.append(0.0)
return result
def get_lr(self):
    """Return the learning rate of each optimizer param group."""
    lr_values = self._get_optimizer_param('lr')
    return lr_values
def get_type(self):
    """Return the 'type' entry of each optimizer param group."""
    type_values = self._get_optimizer_param('type')
    return type_values
def get_mom(self):
    """Return each param group's momentum setting.

    SGD/RMSprop expose ``momentum``; Adam-style optimizers expose ``betas``.
    """
    key = 'momentum' if self.optimizer_name() in ['SGD', 'RMSprop'] else 'betas'
    return self._get_optimizer_param(key)
def get_pld_theta(self):
    """Return the current Progressive Layer Drop theta, or None when PLD is disabled."""
    if not self.progressive_layer_drop:
        return None
    return self.progressive_layer_drop.get_theta()
def _report_progress(self, step):
    """Log step, skipped-step count, lr and momentum (rank 0 only via log_dist)."""
    message = (f'step={step}, skipped={self.skipped_steps}, '
               f'lr={self.get_lr()}, mom={self.get_mom()}')
    log_dist(message, ranks=[0])
def allreduce_bucket(self, bucket):
    """All-reduce (and average) a list of gradient tensors as one flat tensor.

    Returns the flattened, reduced tensor; callers unflatten it and copy the
    results back into the original buffers.
    """
    tensor = self.flatten(bucket)

    tensor_to_allreduce = tensor

    # Optionally upcast so the reduction itself happens in fp32.
    if self.allreduce_always_fp32():
        tensor_to_allreduce = tensor.float()

    if self.postscale_gradients():
        # Pre-divide by a fixed factor, reduce, then rescale so the final
        # result is the mean over the data-parallel group.
        if self.gradient_predivide_factor() != 1.0:
            tensor_to_allreduce.mul_(1. / self.gradient_predivide_factor())

        dist.all_reduce(tensor_to_allreduce, group=self.data_parallel_group)

        if self.gradient_average:
            if self.gradient_predivide_factor() != self.dp_world_size:
                tensor_to_allreduce.mul_(self.gradient_predivide_factor() /
                                         self.dp_world_size)
    else:
        # Pre-scale: divide by the world size before reducing.
        tensor_to_allreduce.div_(self.dp_world_size)
        dist.all_reduce(tensor_to_allreduce, group=self.data_parallel_group)

    # Copy the fp32 result back into the original-dtype flat buffer.
    if self.allreduce_always_fp32() and tensor is not tensor_to_allreduce:
        tensor.copy_(tensor_to_allreduce)

    return tensor
def allreduce_and_copy(self, small_bucket):
    """All-reduce ``small_bucket`` as one flat tensor and copy the averaged
    values back into the original gradient buffers."""
    reduced_flat = self.allreduce_bucket(small_bucket)
    unflattened = self.unflatten(reduced_flat, small_bucket)
    for original, reduced in zip(small_bucket, unflattened):
        original.copy_(reduced)
def allreduce_no_retain(self, bucket, numel_per_bucket=500000000):
    """Reduce ``bucket`` in chunks, flushing whenever the accumulated
    element count exceeds ``numel_per_bucket``."""
    pending = []
    pending_numel = 0
    for tensor in bucket:
        pending.append(tensor)
        pending_numel += tensor.numel()
        if pending_numel > numel_per_bucket:
            self.allreduce_and_copy(pending)
            pending = []
            pending_numel = 0
    # Flush whatever is left over.
    if pending:
        self.allreduce_and_copy(pending)
def buffered_allreduce_fallback(self, grads=None, elements_per_buffer=500000000):
    """Bucketed gradient all-reduce used when no ZeRO reduction applies.

    NOTE(review): the ``grads`` argument is ignored — the list is always
    rebuilt from ``self.module`` below; confirm before relying on it.
    """
    grads = []
    for param_name, param in self.module.named_parameters():
        if param.grad is None:
            # In cases where there is an imbalance of empty grads across
            # ranks we must create empty grads, this will ensure that every
            # rank is reducing the same size. In some cases it may make
            # sense in the future to support the ability to average not
            # w.r.t. world size but with a different value.
            param.grad = torch.zeros(param.size(),
                                     dtype=param.dtype,
                                     device=param.device)
            grads.append(param.grad.data)
        else:
            grad_data = param.grad.data
            # Modules registered for sparse gradients communicate as CSR tensors.
            if self.sparse_gradients_enabled(
            ) and param_name in self.csr_tensor_module_names:
                grads.append(CSRTensor(grad_data))
            else:
                grads.append(grad_data)

    # Reduce each dtype bucket separately (collectives need uniform dtypes).
    split_buckets = split_half_float_double_csr(grads)

    for i, bucket_tuple in enumerate(split_buckets):
        bucket_type, bucket = bucket_tuple
        if bucket_type == CSRTensor.type():
            self.csr_allreduce_no_retain(bucket)
        else:
            self.allreduce_no_retain(bucket, numel_per_bucket=elements_per_buffer)
def csr_allreduce_no_retain(self, bucket):
    """All-reduce a bucket of CSR tensors and write the dense results back
    into each tensor's original dense storage."""
    for reduced_csr in self.csr_allreduce_bucket(bucket):
        # Densify csr tensor and copy back to original location
        reduced_csr.orig_dense_tensor.copy_(reduced_csr.to_dense())
def csr_allreduce_bucket(self, bucket):
    """All-reduce every CSR tensor in ``bucket`` individually."""
    return [self.csr_allreduce(csr) for csr in bucket]
def csr_allreduce(self, csr):
    """Average a CSR tensor across data-parallel ranks by gathering every
    rank's indices and values."""
    # Pre-divide for fp16 stability
    csr.values.div_(self.dp_world_size)

    gathered_indices = self.csr_all_gather(csr.indices)
    gathered_values = self.csr_all_gather(csr.values)

    csr.indices = torch.cat(gathered_indices)
    csr.values = torch.cat(gathered_values)
    return csr
def csr_all_gather(self, value):
    """All-gather a 1-D or 2-D tensor whose first dimension varies per rank.

    Each rank pads its tensor up to the group's maximum length, gathers,
    then trims each gathered tensor back to its sender's true length.

    Returns:
        list of tensors, one per data-parallel rank, each at its true size.
    """
    my_size = torch.LongTensor([value.size()[0]]).to(self.device)
    all_sizes = self.all_gather_scalar(my_size)
    max_size = torch.cat(all_sizes).max()
    fill_size = (max_size - my_size)

    assert value.dim() in [1, 2]
    if value.dim() == 1:
        # Zero-pad up to the largest first dimension in the group.
        if fill_size > 0:
            value = torch.cat([value, value.new_zeros(fill_size)])
        tensor_list = [value.new_zeros(max_size) for _ in range(self.dp_world_size)]
    else:
        if fill_size > 0:
            value = torch.cat([value, value.new_zeros(fill_size, value.size()[1])])
        tensor_list = [
            value.new_zeros(max_size,
                            value.size()[1]) for _ in range(self.dp_world_size)
        ]

    dist.all_gather(tensor_list, value, group=self.data_parallel_group)
    tensors = []
    for dev_idx, t in enumerate(tensor_list):
        # Trim the padding using the sender's advertised size.
        size = all_sizes[dev_idx][0]
        tensors.append(
            t.index_select(0,
                           torch.LongTensor(range(size)).to(self.device)))

    return tensors
def all_gather_scalar(self, value):
    """All-gather a small (scalar-like) tensor from every data-parallel rank.

    Returns:
        list with one tensor per rank, in rank order.
    """
    tensor_list = [value.new_zeros(value.size()) for _ in range(self.dp_world_size)]
    dist.all_gather(tensor_list, value, group=self.data_parallel_group)
    return tensor_list
def module_state_dict(self, destination=None, prefix='', keep_vars=False):
    """Return the wrapped module's state_dict (see torch.nn.Module.state_dict)."""
    return self.module.state_dict(destination, prefix, keep_vars)
def load_module_state_dict(self, state_dict, strict=True):
    """Load ``state_dict`` into the wrapped module (see torch.nn.Module.load_state_dict)."""
    self.module.load_state_dict(state_dict, strict=strict)
def _get_rank_zero_ckpt_name(self, checkpoints_path, tag, mp_rank, dp_rank):
filename = 'zero_pp_rank_{}'.format(dp_rank)
zero_ckpt_name = os.path.join(
checkpoints_path,
str(tag),
filename + '_mp_rank_{:02d}'.format(mp_rank) + '_optim_states.pt')
return zero_ckpt_name
def _get_zero_ckpt_name(self, checkpoints_path, tag):
    """ZeRO checkpoint path for this rank: model-parallel rank plus the rank
    within the optimizer's data-parallel process group."""
    if self.mpu is None:
        mp_rank = 0
    else:
        mp_rank = self.mpu.get_model_parallel_rank()
    pp_rank = torch.distributed.get_rank(group=self.optimizer.dp_process_group)
    return self._get_rank_zero_ckpt_name(checkpoints_path, tag, mp_rank, pp_rank)
def _get_ckpt_name(self, checkpoints_path, tag):
mp_rank = 0 if self.mpu is None else self.mpu.get_model_parallel_rank()
if self.zero_optimization_partition_weights():
filename = 'zero_pp_rank_{}'.format(
torch.distributed.get_rank(group=self.optimizer.dp_process_group))
ckpt_name = os.path.join(
checkpoints_path,
str(tag),
filename + '_mp_rank_{:02d}'.format(mp_rank) + '_model_states.pt')
else:
ckpt_name = os.path.join(
checkpoints_path,
str(tag),
'mp_rank_{:02d}'.format(mp_rank) + '_model_states.pt')
return ckpt_name
def load_checkpoint(self,
                    load_dir,
                    tag=None,
                    load_module_strict=True,
                    load_optimizer_states=True,
                    load_lr_scheduler_states=True):
    """Load training checkpoint

    Arguments:
        load_dir: Required. Directory to load the checkpoint from
        tag: Checkpoint tag used as a unique identifier for checkpoint, if not provided will attempt to load tag in 'latest' file
        load_module_strict: Optional. Boolean to strictly enforce that the keys in state_dict of module and checkpoint match.
        load_optimizer_states: Optional. Boolean to load the training optimizer states from Checkpoint. Ex. ADAM's momentum and variance
        load_lr_scheduler_states: Optional. Boolean to add the learning rate scheduler states from Checkpoint.

    Returns:
        A tuple of ``load_path`` and ``client_state``.

        *``load_path``: Path of the loaded checkpoint. ``None`` if loading the checkpoint failed.

        *``client_state``: State dictionary used for loading required training states in the client code.
    """
    # Resolve the tag from the 'latest' marker file when none is given.
    if tag is None:
        latest_path = os.path.join(load_dir, 'latest')
        if os.path.isfile(latest_path):
            with open(latest_path, 'r') as fd:
                tag = fd.read().strip()
        else:
            logger.warning(f"Unable to find latest file at {latest_path}, if trying to load latest " \
                "checkpoint please ensure this file exists or pass an explicit checkpoint tag when loading a checkpoint.")
            return None, None

    load_path, client_states = self._load_checkpoint(load_dir,
                                                     tag,
                                                     load_module_strict=load_module_strict,
                                                     load_optimizer_states=load_optimizer_states,
                                                     load_lr_scheduler_states=load_lr_scheduler_states)

    # ZeRO optimizer partitions are stored in separate per-rank files and
    # are only loaded when the model-state load succeeded.
    if self.zero_optimization() and load_path is not None:
        self._load_zero_checkpoint(load_dir,
                                   tag,
                                   load_optimizer_states=load_optimizer_states)

    return load_path, client_states
def _load_checkpoint(self,
                     load_dir,
                     tag,
                     load_module_strict=True,
                     load_optimizer_states=True,
                     load_lr_scheduler_states=True):
    """Load this rank's model-state checkpoint and restore engine bookkeeping.

    Returns:
        (load_path, client_state), or (None, None) when the file is missing.
    """
    load_path = self._get_ckpt_name(load_dir, tag)

    if not os.path.exists(load_path):
        logger.warn(
            'Client provided checkpoint load path: {} does not exist ... skip checkpoint load'
            .format(load_path))
        return None, None

    logger.info(f'rank: {self.global_rank} loading checkpoint: {load_path}')
    # map_location keeps tensors on CPU; the engine moves them afterwards.
    checkpoint = torch.load(load_path, map_location=lambda storage, loc: storage)

    if isinstance(self.module, PipelineModule):
        # Pipeline parallelism uses this to load its own checkpoint files.
        self._curr_ckpt_path = os.path.join(load_dir, tag)

    self.load_module_state_dict(state_dict=checkpoint['module'],
                                strict=load_module_strict)
    if self.optimizer is not None and not self.zero_optimization():
        if self.fp16_enabled():
            self.optimizer.load_state_dict(
                checkpoint['optimizer'],
                load_optimizer_states=load_optimizer_states)
        elif load_optimizer_states:
            self.optimizer.load_state_dict(checkpoint['optimizer'])

    if load_lr_scheduler_states and self.lr_scheduler is not None:
        self.lr_scheduler.load_state_dict(checkpoint['lr_scheduler'])

    self.csr_tensor_module_names = checkpoint['csr_tensor_module_names']
    self.global_steps = checkpoint['global_steps']
    # Older checkpoints may lack 'global_samples'; approximate it from steps.
    self.global_samples = checkpoint.get('global_samples',
                                         self.global_steps * self.train_batch_size())
    self.skipped_steps = checkpoint['skipped_steps']
    self.loaded_checkpoint_mp_world_size = checkpoint['mp_world_size']
    self.loaded_checkpoint_dp_world_size = checkpoint['dp_world_size']
    deepspeed_states = [
        'module',
        'optimizer',
        'lr_scheduler',
        'csr_tensor_module_names',
        'skipped_steps',
        'global_steps',
        'dp_world_size',
        'mp_world_size'
    ]
    # Everything that is not engine bookkeeping is handed back to the client.
    client_state = {
        key: value
        for key,
        value in checkpoint.items() if not key in deepspeed_states
    }

    return load_path, client_state
def _load_zero_checkpoint(self, load_dir, tag, load_optimizer_states=True):
    """Restore the ZeRO optimizer partitions for this rank.

    Silently returns when any partition file is missing (the helper below
    returns None in that case).
    """
    zero_sd_list = self._get_all_zero_checkpoints(load_dir, tag)
    if zero_sd_list is None:
        return

    self.optimizer.load_state_dict(
        state_dict_list=zero_sd_list,
        load_optimizer_states=load_optimizer_states,
        load_from_fp32_weights=self.zero_load_from_fp32_weights())
    print(
        f'loading {len(zero_sd_list)} zero partition checkpoints for rank {self.global_rank}'
    )
def _get_mp_rank_zero_checkpoint_names(self, load_dir, tag, mp_rank, dp_world_size):
zero_ckpt_names = []
for dp_rank in range(dp_world_size):
ckpt_name = self._get_rank_zero_ckpt_name(checkpoints_path=load_dir,
tag=tag,
mp_rank=mp_rank,
dp_rank=dp_rank)
zero_ckpt_names.append(ckpt_name)
return zero_ckpt_names
def _get_all_zero_checkpoint_names(self,
load_dir,
tag,
mp_world_size,
dp_world_size):
zero_ckpt_names = []
for mp_rank in range(mp_world_size):
mp_rank_ckpt_names = self._get_mp_rank_zero_checkpoint_names(
load_dir=load_dir,
tag=tag,
mp_rank=mp_rank,
dp_world_size=dp_world_size)
zero_ckpt_names += mp_rank_ckpt_names
return zero_ckpt_names
def _get_all_zero_checkpoints(self, load_dir, tag):
    """Load every dp-rank's ZeRO optimizer state dict for this mp rank.

    Returns:
        list of optimizer state dicts in dp-rank order, or None when any
        partition file is missing.
    """
    mp_rank = 0 if self.mpu is None else self.mpu.get_model_parallel_rank()
    zero_ckpt_names = self._get_mp_rank_zero_checkpoint_names(
        load_dir=load_dir,
        tag=tag,
        mp_rank=mp_rank,
        dp_world_size=self.loaded_checkpoint_dp_world_size)
    invalid_zero_ckpt_paths = []
    for i, ckpt_name in enumerate(zero_ckpt_names):
        if not os.path.exists(ckpt_name):
            # transparently handle the old file pattern for optim_states
            # (older checkpoints omitted the underscore before the suffix)
            if 'optim_states.pt' in ckpt_name:
                ckpt_name_try = ckpt_name.replace("_optim_states.pt",
                                                  "optim_states.pt")
                if os.path.exists(ckpt_name_try):
                    zero_ckpt_names[i] = ckpt_name_try
                    continue
            invalid_zero_ckpt_paths.append(ckpt_name)

    if len(invalid_zero_ckpt_paths) > 0:
        logger.warn(
            f"The following zero checkpoints paths are missing: {invalid_zero_ckpt_paths}"
        )
        return None

    zero_sd_list = []
    for ckpt_name in zero_ckpt_names:
        zero_sd_list.append(torch.load(ckpt_name, map_location='cpu'))

    zero_optimizer_sd = [sd['optimizer_state_dict'] for sd in zero_sd_list]
    print(
        f"successfully loaded {len(zero_optimizer_sd)} ZeRO state_dicts for rank {self.global_rank}"
    )
    return zero_optimizer_sd
def _checkpoint_tag_validation(self, tag):
    """Verify every rank is using the same checkpoint tag.

    The max and min of the tag's SHA-1 digest bytes are reduced across the
    group; they can only both equal the local digest when all ranks hold an
    identical tag. Depending on config this asserts or just warns.
    """
    if self.checkpoint_tag_validation_enabled():
        s_hash = hashlib.sha1(tag.encode())
        bhash = torch.ByteTensor([s_hash.digest()]).flatten().to(self.device)
        max_bhash = bhash.clone()
        min_bhash = bhash.clone()
        dist.all_reduce(max_bhash, op=torch.distributed.ReduceOp.MAX)
        dist.all_reduce(min_bhash, op=torch.distributed.ReduceOp.MIN)
        valid = all(min_bhash == bhash) and all(max_bhash == bhash)
        msg = f"[rank={dist.get_rank()}] The checkpoint tag name '{tag}' is not consistent across " \
            "all ranks. Including rank unique information in checkpoint tag could cause issues when " \
            "restoring with different world sizes."
        if self.checkpoint_tag_validation_fail():
            assert valid, msg
        elif not valid:
            logger.warning(msg)
def save_checkpoint(self, save_dir, tag=None, client_state={}, save_latest=True):
    r"""Save training checkpoint

    Arguments:
        save_dir: Required. Directory for saving the checkpoint
        tag: Optional. Checkpoint tag used as a unique identifier for the checkpoint, global step is
            used if not provided. Tag name must be the same across all ranks.
        client_state: Optional. State dictionary used for saving required training states in the client code.
        save_latest: Optional. Save a file 'latest' pointing to the latest saved checkpoint.

    Important: all processes must call this method and not just the process with rank 0. It is
    because each process needs to save its master weights and scheduler+optimizer states. This
    method will hang waiting to synchronize with other processes if it's called just for the
    process with rank 0.
    """
    if self.zero_optimization_partition_weights():
        # Prepare for state_dict() by ensuring all parameters are partitioned
        self.optimizer.save_checkpoint_prologue()

    # This is to make sure the checkpoint names are created without collision
    # There seems to be issue creating them in parallel

    # Ensure save_dir directory exists
    os.makedirs(save_dir, exist_ok=True)

    if tag is None:
        tag = f"global_step{self.global_steps}"

    # Ensure tag is a string
    tag = str(tag)

    # Ensure checkpoint tag is consistent across ranks
    self._checkpoint_tag_validation(tag)

    if self.save_non_zero_checkpoint:
        self._create_checkpoint_file(save_dir, tag, False)
        self._save_checkpoint(save_dir, tag, client_state=client_state)

    if self.save_zero_checkpoint:
        self._create_zero_checkpoint_files(save_dir, tag)
        self._save_zero_checkpoint(save_dir, tag)

    # Save latest checkpoint tag
    if save_latest:
        with open(os.path.join(save_dir, 'latest'), 'w') as fd:
            fd.write(tag)

    if self.zero_optimization_partition_weights():
        # Undo the prologue's partitioning bookkeeping now that state is saved.
        self.optimizer.save_checkpoint_epilogue()

    return True
def _create_checkpoint_file(self, save_dir, tag, zero_checkpoint):
    """Ensure the directory for a checkpoint file exists.

    Arguments:
        save_dir: checkpoint root directory.
        tag: checkpoint tag (subdirectory name).
        zero_checkpoint: True to use the ZeRO checkpoint naming scheme,
            False for the model-state scheme.

    Returns:
        True on success, False when the directory could not be created.
    """
    name_function = self._get_zero_ckpt_name if zero_checkpoint else self._get_ckpt_name
    try:
        checkpoint_name = name_function(save_dir, tag)
        ensure_directory_exists(checkpoint_name)
    except Exception:
        # Was a bare ``except:``, which would also swallow SystemExit and
        # KeyboardInterrupt; catch only real errors.
        logger.error(f'Failed saving model checkpoint to {save_dir} with tag {tag}')
        return False
    return True
def _create_zero_checkpoint_files(self, save_dir, tag):
    """Create ZeRO checkpoint directories one rank at a time.

    Returns:
        this rank's success flag (directory creation can race when done in
        parallel, hence the rank-by-rank barrier loop).
    """
    success = True
    # zero checkpoint files are created sequentially
    for rank in range(self.world_size):
        if rank == self.global_rank:
            success = self._create_checkpoint_file(save_dir, tag, True)

        # Everyone waits so at most one rank is creating at a time.
        dist.barrier()

    return success
def _save_checkpoint(self, save_dir, tag, client_state={}):
    """Write this rank's model/optimizer/scheduler state plus bookkeeping.

    NOTE: the mutable default for ``client_state`` is only read (merged into
    ``state``), never mutated, so the shared-default pitfall does not bite.
    """
    save_path = self._get_ckpt_name(save_dir, tag)
    # A hack to save the checkpointing directory. Pipeline parallelism overrides
    # module_state_dict() and uses this path to save the model. module_state_dict()
    # then instead just returns None.
    self._curr_ckpt_path = os.path.join(save_dir, tag)

    # Optimizer state is omitted under ZeRO: it lives in the separate
    # per-rank zero checkpoint files.
    state = dict(
        module=self.module_state_dict(),
        optimizer=self.optimizer.state_dict()
        if self.optimizer and not self.zero_optimization() else None,
        lr_scheduler=self.lr_scheduler.state_dict()
        if self.lr_scheduler is not None else None,
        csr_tensor_module_names=self.csr_tensor_module_names,
        skipped_steps=self.skipped_steps,
        global_steps=self.global_steps,
        global_samples=self.global_samples,
        dp_world_size=self.dp_world_size,
        mp_world_size=self.mp_world_size,
    )
    state.update(client_state)

    log_dist(message=f'Saving model checkpoint: {save_path}', ranks=[0])
    #logger.info('Saving model checkpoint: {}'.format(save_path))
    torch.save(state, save_path)
    self._curr_save_path = None
def _get_param_shapes(self):
param_shapes = OrderedDict()
for name, param in self.module.named_parameters():
param_shapes[name] = param.ds_shape if hasattr(param,
"ds_shape") else param.shape
# print(f"saving param {name} {param_shapes[name]}")
return param_shapes
def _copy_recovery_script(self, save_path):
    """Copy zero_to_fp32.py next to the checkpoint and mark it executable."""
    script = "zero_to_fp32.py"
    # The script ships in the package's utils/ directory, two levels up.
    base_dir = os.path.dirname(os.path.dirname(__file__))
    src = os.path.join(base_dir, "utils", script)
    dst = os.path.join(save_path, script)
    logger.info(f"creating recovery script {dst}")
    copyfile(src, dst)
    # make executable
    os.chmod(dst, os.stat(dst).st_mode | stat.S_IEXEC)
def _save_zero_checkpoint(self, save_path, tag):
    """Persist this rank's ZeRO optimizer partition plus param-shape metadata,
    and drop the fp32-recovery script beside it."""
    zero_checkpoint_name = self._get_zero_ckpt_name(save_path, tag)
    partition_state = dict(
        optimizer_state_dict=self.optimizer.state_dict(),
        param_shapes=self._get_param_shapes(),
    )
    torch.save(partition_state, zero_checkpoint_name)
    self._copy_recovery_script(save_path)
    logger.info('zero checkpoint saved {}'.format(zero_checkpoint_name))
def _zero3_consolidated_fp16_state_dict(self):
    """
    Get a full non-partitioned state_dict with fp16 weights on cpu.

    Important: this function must be called on all ranks and not just rank 0.

    This is similar to nn.Module.state_dict (modelled after _save_to_state_dict), but:

    1. consolidates the weights from different partitions on gpu0
    2. works on one layer at a time to require as little gpu0 memory as possible, by
    moving the already consolidated weights to cpu
    3. takes care to keep the shared params shared when gradually copying the params to cpu

    Returns:
        a consolidated fp16 ``state_dict`` on cpu on rank 0, ``None`` on other ranks
    """
    import deepspeed

    if not self.zero_optimization_partition_weights():
        raise ValueError("this function requires ZeRO-3 mode")

    state_dict = OrderedDict() if torch.distributed.get_rank() == 0 else None
    # Maps a parameter storage pointer -> the first state_dict key that used
    # it, so shared (tied) weights stay shared after the copy to cpu.
    shared_weights = {}

    def get_layer_state_dict(module, prefix=""):
        # gather one layer at a time to be memory-efficient
        with deepspeed.zero.GatheredParameters(list(
                module.parameters(recurse=False))):
            if torch.distributed.get_rank() == 0:
                for name, param in module.named_parameters(recurse=False):
                    if param is None:
                        continue
                    key = prefix + name
                    # for shared weights we want to make sure not to unshare them when copying to cpu
                    data_ptr_id = param.storage().data_ptr()
                    if data_ptr_id in shared_weights:
                        # shared weights
                        # print(f"`{key}` is shared with `{shared_weights[data_ptr_id]}`")
                        state_dict[key] = state_dict[shared_weights[data_ptr_id]]
                    else:
                        state_dict[key] = param.detach().cpu()
                        shared_weights[data_ptr_id] = key
                    #print(f"param {name} {param.shape}")
                    #print(f"param {key} {param.shape} {state_dict[key].storage().data_ptr()}")

                # now buffers - not sure if need to take care of potentially shared weights here
                for name, buf in module.named_buffers(recurse=False):
                    if buf is not None and name not in module._non_persistent_buffers_set:
                        state_dict[prefix + name] = buf.detach().cpu()

        # Recurse depth-first; GatheredParameters above only covered this
        # layer's own (non-recursive) parameters.
        for name, child in module.named_children():
            if child is not None:
                get_layer_state_dict(child, prefix + name + ".")

    see_memory_usage("before get_layer_state_dict", force=False)
    get_layer_state_dict(self.module, prefix="")
    see_memory_usage("after get_layer_state_dict", force=False)

    return state_dict
def save_fp16_model(self, save_dir, save_filename="pytorch_model.bin"):
    r"""Save fp16 model weights

    This method saves the fp16 model weights at the desired destination.

    Arguments:
        save_dir: Required. Directory for saving the model
        save_filename: Optional. Filename to save to. Defaults to ``pytorch_model.bin``

    Important: all processes must call this method and not just the process with rank 0. It is
    because the processes need to work in sync to gather the weights. This method will hang
    waiting to synchronize with other processes if it's called just for the process with rank 0.
    """
    path = os.path.join(save_dir, save_filename)

    if self.zero_optimization_partition_weights():
        if self.zero_gather_fp16_weights_on_model_save():
            # consolidation is expensive in time and memory and therefore isn't a default
            state_dict = self._zero3_consolidated_fp16_state_dict()
        else:
            # the model will be bogus if not consolidated so don't confuse the user by saving it
            logger.info(
                f"Did not save the model {path} because `stage3_gather_fp16_weights_on_model_save` is False"
            )
            return
    else:
        state_dict = self.module.state_dict()

    # Only global rank 0 writes the consolidated file.
    if torch.distributed.get_rank() == 0:
        os.makedirs(save_dir, exist_ok=True)
        logger.info(f"Saving model weights to {path}")
        torch.save(state_dict, path)
| 42.735777 | 227 | 0.624493 |
import os
import stat
import torch
import warnings
import hashlib
import torch.distributed as dist
from collections import OrderedDict
from shutil import copyfile
from torch.nn.modules import Module
from torch.distributed.distributed_c10d import _get_global_rank
from tensorboardX import SummaryWriter
from deepspeed.runtime.utils import see_memory_usage
from deepspeed.runtime.zero.stage2 import FP16_DeepSpeedZeroOptimizer
from deepspeed.runtime.zero.stage1 import FP16_DeepSpeedZeroOptimizer_Stage1
from deepspeed.runtime.zero.partition_parameters import ZeroParamStatus
from deepspeed.runtime.zero.utils import is_zero_supported_optimizer
from deepspeed.runtime.activation_checkpointing import checkpointing as activation_checkpointing
from deepspeed.runtime.fp16.fused_optimizer import FP16_Optimizer
from deepspeed.runtime.fp16.unfused_optimizer import FP16_UnfusedOptimizer
from deepspeed.runtime.config import DeepSpeedConfig, DEEPSPEED_OPTIMIZERS, \
ADAM_OPTIMIZER, ADAMW_OPTIMIZER, LAMB_OPTIMIZER, ONEBIT_ADAM_OPTIMIZER, \
TORCH_ADAM_PARAM, ADAM_W_MODE, ADAM_W_MODE_DEFAULT
from deepspeed.runtime.dataloader import DeepSpeedDataLoader
from deepspeed.runtime.constants import \
ROUTE_TRAIN, ROUTE_PREDICT, ROUTE_EVAL, \
PLD_THETA, PLD_GAMMA
from deepspeed.runtime.zero.constants import \
ZERO_OPTIMIZATION_OPTIMIZER_STATES, ZERO_OPTIMIZATION_GRADIENTS, ZERO_OPTIMIZATION_WEIGHTS
from deepspeed.runtime.csr_tensor import CSRTensor
import deepspeed.runtime.lr_schedules as lr_schedules
from deepspeed.utils import logger, log_dist, init_distributed
from deepspeed.utils.timer import ThroughputTimer, SynchronizedWallClockTimer
from deepspeed.runtime.progressive_layer_drop import ProgressiveLayerDrop
from .pipe.module import PipelineModule
from .utils import ensure_directory_exists
from ..ops.op_builder import UtilsBuilder
from ..ops.adam import DeepSpeedCPUAdam
from ..ops.adam import FusedAdam
from deepspeed.profiling.flops_profiler.profiler import FlopsProfiler
# Default bucket size (in elements) for the fallback gradient all-reduce.
MEMORY_OPT_ALLREDUCE_SIZE = 500000000

# NVIDIA Apex is optional; `amp` is only needed when amp-style mixed
# precision is enabled, so an import failure is tolerated here.
try:
    from apex import amp
except ImportError:
    pass
def split_half_float_double_csr(tensors):
    """Group ``tensors`` into buckets by type (half/float/double/CSR).

    Arguments:
        tensors: iterable of objects exposing ``.type()`` (torch tensors or
            CSRTensor wrappers).

    Returns:
        list of (type_name, tensors_of_that_type) pairs, keeping only
        non-empty buckets, in the fixed half/float/double/CSR order.
    """
    supported_types = [
        "torch.cuda.HalfTensor",
        "torch.cuda.FloatTensor",
        "torch.cuda.DoubleTensor",
        CSRTensor.type()
    ]
    buckets = []
    # The original iterated with enumerate() and never used the index.
    for dtype in supported_types:
        matching = [t for t in tensors if t.type() == dtype]
        if matching:
            buckets.append((dtype, matching))
    return buckets
def _initialize_parameter_parallel_groups(parameter_parallel_size=None):
    """Create parameter-parallel process groups and return this rank's group.

    World ranks are partitioned into consecutive groups of
    ``parameter_parallel_size`` (defaulting to the full world size). Every
    rank must call this — new_group() is a collective — and the group
    containing the caller is returned.
    """
    data_parallel_size = int(dist.get_world_size())
    if parameter_parallel_size is None:
        parameter_parallel_size = int(data_parallel_size)
    logger.info("data_parallel_size: %s, parameter_parallel_size: %s",
                data_parallel_size,
                parameter_parallel_size)
    assert data_parallel_size % parameter_parallel_size == 0, \
        'world size should be divisible by parameter parallel size'
    rank = dist.get_rank()
    my_group = None
    for i in range(dist.get_world_size() // parameter_parallel_size):
        ranks = range(i * parameter_parallel_size, (i + 1) * parameter_parallel_size)
        # new_group must be entered by all ranks, even those not in `ranks`.
        group = torch.distributed.new_group(ranks)
        if rank in ranks:
            my_group = group
    return my_group
def print_configuration(args, name):
    """Log every attribute of ``args`` under the heading ``name``,
    alphabetically sorted and dot-padded for alignment."""
    logger.info('{}:'.format(name))
    for arg in sorted(vars(args)):
        dots = '.' * (29 - len(arg))
        logger.info(' {} {} {}'.format(arg, dots, getattr(args, arg)))
class DeepSpeedEngine(Module):
    def __init__(self,
                 args,
                 model,
                 optimizer=None,
                 model_parameters=None,
                 training_data=None,
                 lr_scheduler=None,
                 mpu=None,
                 dist_init_required=None,
                 collate_fn=None,
                 config_params=None,
                 dont_change_device=False):
        """Wrap *model* in the DeepSpeed training engine.

        Args:
            args: launcher argument namespace; must be consistent with the
                LOCAL_RANK env var and (unless config_params is given) carry
                an existing ``deepspeed_config`` file path.
            model: the torch module to train.
            optimizer: optional client optimizer; otherwise one is built from
                the config.
            model_parameters: parameters for a config-built optimizer.
            training_data: optional dataset, wrapped via ``deepspeed_io``.
            lr_scheduler: client LR scheduler (a config-defined one wins).
            mpu: optional model-parallel unit providing process-group topology.
            dist_init_required: force/skip torch.distributed init; when None,
                initialization happens only if not already done.
            collate_fn: collate function for the training dataloader.
            config_params: dict alternative to a config file.
            dont_change_device: if True, do not move the model to self.device.
        """
        super(DeepSpeedEngine, self).__init__()
        # --- client-provided objects and default engine state -------------
        self.dont_change_device = dont_change_device
        self.client_optimizer = optimizer
        self.client_model_parameters = model_parameters
        self.client_lr_scheduler = lr_scheduler
        self.training_data = training_data
        self.collate_fn = collate_fn
        self.mpu = mpu
        self.data_parallel_group = None
        self.global_steps = 0
        self.global_samples = 0
        self.micro_steps = 0
        self.skipped_steps = 0
        self.gradient_average = True
        # One-shot flag: warn only once when a loss type cannot be scaled.
        self.warn_unscaled_loss = True
        self.config_params = config_params
        self.loaded_checkpoint_mp_world_size = None
        self.loaded_checkpoint_dp_world_size = None
        self.enable_backward_allreduce = True
        self.progressive_layer_drop = None
        self.dist_backend = "nccl"
        # --- distributed init (before config, which may query ranks) ------
        if dist_init_required is None:
            dist_init_required = not dist.is_initialized()
        if dist_init_required is False:
            assert dist.is_initialized() is True, "Torch distributed not initialized. Please set dist_init_required to True or initialize before calling deepspeed.initialize()"
        else:
            init_distributed(dist_backend=self.dist_backend)
        # --- argument validation and config parsing -----------------------
        see_memory_usage(f"DeepSpeed Engine: Before args sanity test")
        self._do_args_sanity_check(args)
        self._configure_with_arguments(args, mpu)
        self._do_sanity_check()
        if mpu is not None:
            assert not self.elasticity_enabled(), "Elasticity is not currently supported" \
                " with model parallelism."
        self._set_distributed_vars()
        if self.tensorboard_enabled() and self.global_rank == 0:
            self.summary_writer = self.get_summary_writer()
        # --- model placement and data-parallel group setup ----------------
        see_memory_usage(f"DeepSpeed Engine: Before configure distributed model")
        self._configure_distributed_model(model)
        see_memory_usage(f"DeepSpeed Engine: After configure distributed model")
        # --- timers / throughput reporting --------------------------------
        self.timers = SynchronizedWallClockTimer()
        self.tput_timer = ThroughputTimer(
            batch_size=self.train_micro_batch_size_per_gpu(),
            num_workers=self.dp_world_size,
            steps_per_output=self.steps_per_print(),
            monitor_memory=False)
        if training_data:
            self.training_dataloader = self.deepspeed_io(training_data)
        else:
            self.training_dataloader = None
        # --- optimizer / LR scheduler (ZeRO, AMP, fp16 wrapping) ----------
        self.optimizer = None
        self.lr_scheduler = None
        if model_parameters or optimizer:
            self._configure_optimizer(optimizer, model_parameters)
            self._configure_lr_scheduler(lr_scheduler)
            self._report_progress(0)
        # Embedding gradients may be communicated as sparse (CSR) tensors.
        self.csr_tensor_module_names = set()
        if self.sparse_gradients_enabled():
            for name, module in self.module.named_modules():
                if isinstance(module, torch.nn.Embedding):
                    self.csr_tensor_module_names.add(name + ".weight")
                    logger.info("Will convert {} to sparse (csr) "
                                "tensor during training".format(name))
        # --- checkpointing roles and optional progressive layer drop ------
        self.save_non_zero_checkpoint = False
        self.save_zero_checkpoint = False
        self._configure_checkpointing(dist_init_required)
        if self.pld_enabled():
            self.progressive_layer_drop = self._configure_progressive_layer_drop()
        if self.global_rank == 0:
            self._config.print('DeepSpeedEngine configuration')
            if self.dump_state():
                print_configuration(self, 'DeepSpeedEngine')
        # C++ flatten/unflatten kernels used by the bucketed allreduce path.
        util_ops = UtilsBuilder().load()
        self.flatten = util_ops.flatten
        self.unflatten = util_ops.unflatten
def get_batch_info(self):
return self.train_batch_size, self.train_micro_batch_size_per_gpu, self.gradient_accumulation_steps
    # --- thin accessors over the parsed DeepSpeedConfig ---------------------
    def checkpoint_tag_validation_enabled(self):
        # Whether checkpoint tags are checked for consistency across ranks.
        return self._config.checkpoint_tag_validation_enabled
    def checkpoint_tag_validation_fail(self):
        # Whether a tag-validation mismatch is fatal (vs. warning only).
        return self._config.checkpoint_tag_validation_fail
    def elasticity_enabled(self):
        return self._config.elasticity_enabled
    def pld_enabled(self):
        # Progressive layer drop on/off.
        return self._config.pld_enabled
    def pld_params(self):
        return self._config.pld_params
    def pld_theta(self):
        return self.pld_params()[PLD_THETA]
    def pld_gamma(self):
        return self.pld_params()[PLD_GAMMA]
    def tensorboard_enabled(self):
        return self._config.tensorboard_enabled
    def tensorboard_output_path(self):
        return self._config.tensorboard_output_path
    def tensorboard_job_name(self):
        return self._config.tensorboard_job_name
def get_summary_writer(self,
name="DeepSpeedJobName",
base=os.path.join(os.path.expanduser("~"),
"tensorboard")):
if self.tensorboard_output_path():
base_dir = self.tensorboard_output_path()
job_name = self.tensorboard_job_name()
log_dir = os.path.join(base_dir, job_name)
else:
if self.tensorboard_job_name():
name = self.tensorboard_job_name()
if 'DLWS_JOB_ID' in os.environ:
infra_job_id = os.environ['DLWS_JOB_ID']
elif 'DLTS_JOB_ID' in os.environ:
infra_job_id = os.environ['DLTS_JOB_ID']
else:
infra_job_id = 'unknown-job-id'
summary_writer_dir_name = os.path.join(infra_job_id, "logs")
log_dir = os.path.join(base, summary_writer_dir_name, name)
os.makedirs(log_dir, exist_ok=True)
return SummaryWriter(log_dir=log_dir)
    # --- timing / profiling accessors ---------------------------------------
    def wall_clock_breakdown(self):
        return self._config.wall_clock_breakdown
    def flops_profiler_enabled(self):
        return self._config.flops_profiler_config.enabled
    def flops_profiler_profile_step(self):
        # Global step at which the one-shot FLOPS profile is taken.
        return self._config.flops_profiler_config.profile_step
    def flops_profiler_module_depth(self):
        return self._config.flops_profiler_config.module_depth
    def flops_profiler_top_modules(self):
        return self._config.flops_profiler_config.top_modules
    def flops_profiler_detailed(self):
        return self._config.flops_profiler_config.detailed
    def memory_breakdown(self):
        return self._config.memory_breakdown
    def sparse_gradients_enabled(self):
        return self._config.sparse_gradients_enabled
    # --- batch size / optimizer / scheduler accessors -----------------------
    def train_batch_size(self):
        return self._config.train_batch_size
    def train_micro_batch_size_per_gpu(self):
        return self._config.train_micro_batch_size_per_gpu
    def optimizer_name(self):
        # Class name of a client optimizer, else the config-declared name.
        return self.client_optimizer.__class__.__name__ if self.client_optimizer else self._config.optimizer_name
    def optimizer_params(self):
        return self._config.optimizer_params
    def optimizer_legacy_fusion(self):
        return self._config.optimizer_legacy_fusion
    def scheduler_name(self):
        return self._config.scheduler_name
    def scheduler_params(self):
        return self._config.scheduler_params
    # --- ZeRO accessors ------------------------------------------------------
    def zero_optimization(self):
        return self._config.zero_enabled
    def zero_allow_untested_optimizer(self):
        return self._config.zero_allow_untested_optimizer
    def zero_reduce_scatter(self):
        return self._config.zero_config.reduce_scatter
    def zero_overlap_comm(self):
        return self._config.zero_config.overlap_comm
    def zero_offload_optimizer(self):
        return self._config.zero_config.offload_optimizer
    def zero_offload_param(self):
        return self._config.zero_config.offload_param
    def zero_cpu_offload(self):
        # CPU offload is implied by any optimizer-offload config being set.
        return self._config.zero_config.offload_optimizer is not None
    def zero_sub_group_size(self):
        return self._config.zero_config.sub_group_size
    def zero_optimization_stage(self):
        return self._config.zero_optimization_stage
    def zero_reduce_bucket_size(self):
        return self._config.zero_config.reduce_bucket_size
    def zero_allgather_bucket_size(self):
        return self._config.zero_config.allgather_bucket_size
    def zero_optimization_partition_gradients(self):
        # True for stage >= 2 (gradients partitioned across ranks).
        return self.zero_optimization_stage() >= ZERO_OPTIMIZATION_GRADIENTS
    def zero_optimization_partition_weights(self):
        # True for stage >= 3 (weights partitioned across ranks).
        return self.zero_optimization_stage() >= ZERO_OPTIMIZATION_WEIGHTS
    def zero_contiguous_gradients(self):
        return self._config.zero_config.contiguous_gradients
    def zero_load_from_fp32_weights(self):
        return self._config.zero_config.load_from_fp32_weights
    def zero_elastic_checkpoint(self):
        return self._config.zero_config.elastic_checkpoint
    def zero_max_live_parameters(self):
        return self._config.zero_config.max_live_parameters
    def zero_max_reuse_distance(self):
        return self._config.zero_config.max_reuse_distance
    def zero_prefetch_bucket_size(self):
        return self._config.zero_config.prefetch_bucket_size
    def zero_param_persistence_threshold(self):
        return self._config.zero_config.param_persistence_threshold
    def zero_gather_fp16_weights_on_model_save(self):
        return self._config.zero_config.gather_fp16_weights_on_model_save
    # --- precision / gradient handling accessors -----------------------------
    def fp16_enabled(self):
        return self._config.fp16_enabled
    def amp_enabled(self):
        return self._config.amp_enabled
    def amp_params(self):
        return self._config.amp_params
    def loss_scale(self):
        return self._config.loss_scale
    def gradient_accumulation_steps(self):
        return self._config.gradient_accumulation_steps
    def allreduce_always_fp32(self):
        return self._config.allreduce_always_fp32
    def postscale_gradients(self):
        return not self._config.prescale_gradients
    def gradient_predivide_factor(self):
        return self._config.gradient_predivide_factor
    def steps_per_print(self):
        return self._config.steps_per_print
    def zero_allgather_partitions(self):
        return self._config.zero_config.allgather_partitions
    def dump_state(self):
        return self._config.dump_state
    def gradient_clipping(self):
        return self._config.gradient_clipping
    def dynamic_loss_scale(self):
        # loss_scale == 0 is the sentinel for dynamic loss scaling.
        return self._config.loss_scale == 0
    def initial_dynamic_scale(self):
        return self._config.initial_dynamic_scale
    def dynamic_loss_scale_args(self):
        return self._config.dynamic_loss_scale_args
    def swap_tensor_config(self):
        return self._config.swap_tensor_config
    def aio_config(self):
        return self._config.aio_config
def _configure_lr_scheduler(self, client_lr_scheduler):
lr_scheduler = self._scheduler_from_config(self.optimizer)
if lr_scheduler:
if self.global_rank == 0:
logger.info(
f'DeepSpeed using configured LR scheduler = {self.scheduler_name()}')
self.lr_scheduler = lr_scheduler
else:
if self.global_rank == 0:
logger.info('DeepSpeed using client LR scheduler')
self.lr_scheduler = client_lr_scheduler
log_dist(f'DeepSpeed LR Scheduler = {self.lr_scheduler}', ranks=[0])
def _configure_checkpointing(self, dist_init_required):
dp_rank = self.global_rank
if self.mpu:
dp_rank = self.mpu.get_data_parallel_rank()
self.save_non_zero_checkpoint = (
dp_rank == 0) or self.zero_optimization_partition_weights()
if self.zero_optimization():
param_rank = torch.distributed.get_rank(
group=self.optimizer.dp_process_group)
self.save_zero_checkpoint = (param_rank == dp_rank)
def _scheduler_from_config(self, optimizer):
scheduler_name = self.scheduler_name()
if scheduler_name is not None:
if hasattr(lr_schedules, scheduler_name):
scheduler = getattr(lr_schedules, scheduler_name)
else:
assert hasattr(torch.optim.lr_scheduler, scheduler_name), \
f"DeepSpeed does not recognize LR scheduler {scheduler_name}"
scheduler = getattr(torch.optim.lr_scheduler, scheduler_name)
scheduler_params = self.scheduler_params()
instantiated_scheduler = scheduler(optimizer, **scheduler_params)
return instantiated_scheduler
else:
return None
def _set_distributed_vars(self):
if self.local_rank >= 0:
torch.cuda.set_device(self.local_rank)
self.device = torch.device("cuda", self.local_rank)
self.world_size = dist.get_world_size()
self.global_rank = dist.get_rank()
else:
self.world_size = 1
self.global_rank = 0
self.device = torch.device("cuda")
def _configure_with_arguments(self, args, mpu):
self.local_rank = int(os.environ['LOCAL_RANK'])
if hasattr(args, 'local_rank'):
args.local_rank = self.local_rank
config_file = args.deepspeed_config if hasattr(args,
'deepspeed_config') else None
self._config = DeepSpeedConfig(config_file, mpu, param_dict=self.config_params)
def _do_args_sanity_check(self, args):
if hasattr(args, 'deepscale_config') and args.deepscale_config is not None:
logger.warning(
"************ --deepscale_config is deprecated, please use --deepspeed_config ************"
)
if hasattr(args, 'deepspeed_config'):
assert args.deepspeed_config is None, "Not sure how to proceed, we were given both a deepscale_config and deepspeed_config"
args.deepspeed_config = args.deepscale_config
assert "LOCAL_RANK" in os.environ, "DeepSpeed requires the LOCAL_RANK environment variable, it is set by the deepspeed launcher, " \
"deepspeed.init_distributed, or the torch.distributed launcher. If using a different launcher please ensure LOCAL_RANK is set prior to initializing deepspeed."
if hasattr(args, 'local_rank') and args.local_rank != None:
assert isinstance(args.local_rank, int), f"args.local_rank of {args.local_rank} is an unknown type {type(args.local_rank)}"
if args.local_rank >= 0:
env_local_rank = int(os.environ.get("LOCAL_RANK"))
assert env_local_rank == args.local_rank, \
f"Mismatch in local rank setting, args.local_rank={args.local_rank} but env['LOCAL_RANK']={env_local_rank}."
if self.config_params is None:
assert hasattr(args, 'deepspeed_config') and args.deepspeed_config is not None, \
'DeepSpeed requires --deepspeed_config to specify configuration file'
assert os.path.isfile(args.deepspeed_config), \
'DeepSpeed configuration file: {} is not an existing file'.format(args.deepspeed_config)
def _is_supported_optimizer(self, optimizer_name):
return optimizer_name in DEEPSPEED_OPTIMIZERS or \
getattr(torch.optim, optimizer_name, None) is not None
def _do_sanity_check(self):
if not self.client_optimizer:
if self.optimizer_name() is not None:
assert self._is_supported_optimizer(self.optimizer_name()), \
'{} is not a supported DeepSpeed Optimizer'.format(self.optimizer_name())
if self.optimizer_name() == LAMB_OPTIMIZER:
assert self.dynamic_loss_scale(), \
'DeepSpeed {} optimizer requires dynamic loss scaling'.format(self.optimizer_name())
def _broadcast_model(self):
def is_replicated(p):
if hasattr(p, 'ds_status') and p.ds_status is not ZeroParamStatus.AVAILABLE:
return False
return True
for p in self.module.parameters():
if torch.is_tensor(p) and is_replicated(p):
dist.broadcast(p,
self.broadcast_src_rank,
group=self.data_parallel_group)
    def _configure_distributed_model(self, model):
        """Place the model on its device and set up data-parallel topology."""
        self.module = model
        if self.fp16_enabled():
            # Cast before the device move so the copied tensors are half-size.
            self.module.half()
        if not self.dont_change_device:
            self.module.to(self.device)
        if self.mpu is None:
            # Pure data parallelism: the whole world is one group, rank 0 is
            # the broadcast source.
            self.data_parallel_group = _initialize_parameter_parallel_groups()
            self.dp_world_size = dist.get_world_size()
            self.mp_world_size = 1
            self.broadcast_src_rank = 0
        else:
            # Model parallelism: group topology comes from the client's mpu.
            self.data_parallel_group = self.mpu.get_data_parallel_group()
            self.dp_world_size = self.mpu.get_data_parallel_world_size()
            self.mp_world_size = self.mpu.get_model_parallel_world_size()
            self.broadcast_src_rank = _get_global_rank(
                self.mpu.get_data_parallel_group(),
                0)
        if not self.amp_enabled():
            # With AMP the broadcast is deferred until after amp.initialize()
            # (see _configure_optimizer), since amp re-wraps the model.
            self._broadcast_model()
    def _configure_optimizer(self, client_optimizer, model_parameters):
        """Build the final optimizer: pick the basic optimizer (client- or
        config-defined), then wrap it for ZeRO, Apex AMP, or legacy fp16."""
        if client_optimizer is not None:
            # Prune empty param groups; downstream wrappers assume non-empty.
            client_optimizer.param_groups[:] = [
                pg for pg in client_optimizer.param_groups if len(pg["params"]) != 0
            ]
            if self.global_rank == 0:
                logger.info(
                    "Removing param_group that has no 'params' in the client Optimizer")
            basic_optimizer = client_optimizer
            if self.global_rank == 0:
                logger.info('Using client Optimizer as basic optimizer')
        else:
            basic_optimizer = self._configure_basic_optimizer(model_parameters)
            if self.global_rank == 0:
                logger.info(
                    'Using DeepSpeed Optimizer param name {} as basic optimizer'.format(
                        self.optimizer_name()))
        if self.global_rank == 0:
            logger.info('DeepSpeed Basic Optimizer = {}'.format(
                basic_optimizer.__class__.__name__))
        if self.zero_optimization():
            # ZeRO path: mutually exclusive with Apex AMP; untested basic
            # optimizers require an explicit config opt-in.
            assert not self.amp_enabled(), "Amp and ZeRO are not currently compatible, please use (legacy) fp16 mode which performs similar to amp opt_mode=O2"
            if not is_zero_supported_optimizer(basic_optimizer):
                assert self.zero_allow_untested_optimizer(), \
                    'You are using an untested ZeRO Optimizer. Please add <"zero_allow_untested_optimizer": true> in the configuration file to use it.'
                if self.global_rank == 0:
                    logger.warning(
                        "**** You are using ZeRO with an untested optimizer, proceed with caution *****"
                    )
            self.optimizer = self._configure_zero_optimizer(basic_optimizer)
        elif self.amp_enabled():
            # Apex AMP path: amp.initialize patches and returns both objects.
            assert not self.fp16_enabled(), "Cannot enable both amp with (legacy) fp16 mode"
            amp_params = self.amp_params()
            if self.global_rank == 0:
                logger.info(f"Initializing AMP with these params: {amp_params}")
            try:
                # `amp` only exists if the optional apex import succeeded.
                logger.info("Initializing Apex amp from: {}".format(amp.__path__))
            except NameError:
                raise RuntimeError(
                    "Unable to import apex/amp, please make sure it is installed")
            self.module, self.optimizer = amp.initialize(self.module, basic_optimizer, **amp_params)
            # The model was re-wrapped: re-sync replicas across DP ranks.
            self._broadcast_model()
        elif self.fp16_enabled():
            self.optimizer = self._configure_fp16_optimizer(basic_optimizer)
        else:
            self.optimizer = basic_optimizer
        log_dist('DeepSpeed Final Optimizer = {}'.format(self.optimizer_name()),
                 ranks=[0])
    def _configure_basic_optimizer(self, model_parameters):
        """Instantiate the optimizer named in the config.

        Adam/AdamW may come from torch (when torch_adam is requested), from
        DeepSpeedCPUAdam (when ZeRO offloads optimizer state to CPU), or from
        FusedAdam; LAMB and 1-bit Adam use DeepSpeed implementations; any
        other name is resolved against ``torch.optim``.
        """
        optimizer_parameters = self.optimizer_params()
        if 'max_grad_norm' in optimizer_parameters.keys():
            # Gradient clipping is an engine-level setting, not an optimizer kwarg.
            raise ValueError(
                "'max_grad_norm' is not supported as an optimizer parameter, please switch to using the deepspeed parameter 'gradient_clipping' see: https://www.deepspeed.ai/docs/config-json/#gradient-clipping for more details"
            )
        if self.optimizer_name() in [ADAM_OPTIMIZER, ADAMW_OPTIMIZER]:
            torch_adam = optimizer_parameters.pop(TORCH_ADAM_PARAM, False)
            adam_w_mode = optimizer_parameters.pop(ADAM_W_MODE, ADAM_W_MODE_DEFAULT)
            # "AdamW" by name, or "Adam" with adam_w_mode, both mean
            # decoupled weight decay.
            effective_adam_w_mode = self.optimizer_name(
            ) == ADAMW_OPTIMIZER or adam_w_mode
            if torch_adam:
                if not effective_adam_w_mode:
                    optimizer = torch.optim.Adam(model_parameters,
                                                 **optimizer_parameters)
                else:
                    optimizer = torch.optim.AdamW(model_parameters,
                                                  **optimizer_parameters)
            else:
                if self.zero_cpu_offload():
                    from deepspeed.ops.adam import DeepSpeedCPUAdam
                    optimizer = DeepSpeedCPUAdam(model_parameters,
                                                 **optimizer_parameters,
                                                 adamw_mode=effective_adam_w_mode)
                else:
                    from deepspeed.ops.adam import FusedAdam
                    optimizer = FusedAdam(model_parameters,
                                          **optimizer_parameters,
                                          adam_w_mode=effective_adam_w_mode)
        elif self.optimizer_name() == LAMB_OPTIMIZER:
            from deepspeed.ops.lamb import FusedLamb
            optimizer = FusedLamb(model_parameters, **optimizer_parameters)
        elif self.optimizer_name() == ONEBIT_ADAM_OPTIMIZER:
            from deepspeed.runtime.fp16.onebit.adam import OnebitAdam
            # OnebitAdam needs the engine handle for its communication setup.
            optimizer = OnebitAdam(model_parameters, self, **optimizer_parameters)
            if not self.fp16_enabled():
                logger.warning(
                    f'Currently the convergence of 1-bit Adam is only verified under FP16'
                )
        else:
            # Any torch.optim optimizer can be named directly in the config.
            torch_optimizer = getattr(torch.optim, self.optimizer_name())
            optimizer = torch_optimizer(model_parameters, **optimizer_parameters)
        return optimizer
    def _configure_fp16_optimizer(self, optimizer):
        """Wrap *optimizer* for fp16 training.

        FusedAdam / 1-bit Adam use the fused FP16_Optimizer wrapper (dynamic
        or static loss scale); every other optimizer gets the unfused wrapper.
        """
        initial_dynamic_scale = self.initial_dynamic_scale()
        dynamic_loss_args = self.dynamic_loss_scale_args()
        clip_grad = self.gradient_clipping()
        if isinstance(optimizer,
                      FusedAdam) or self.optimizer_name() == ONEBIT_ADAM_OPTIMIZER:
            if self.dynamic_loss_scale():
                log_dist('Creating fp16 optimizer with dynamic loss scale', ranks=[0])
                # Timers only wired in when wall-clock breakdown is on.
                timers = self.timers if self.wall_clock_breakdown() else None
                optimizer = FP16_Optimizer(
                    optimizer,
                    dynamic_loss_scale=True,
                    initial_dynamic_scale=initial_dynamic_scale,
                    dynamic_loss_args=dynamic_loss_args,
                    mpu=self.mpu,
                    clip_grad=clip_grad,
                    fused_adam_legacy=self.optimizer_legacy_fusion(),
                    timers=timers)
            else:
                log_dist('Creating fp16 optimizer with static loss scale: {}'.format(
                    self.loss_scale()),
                         ranks=[0])
                optimizer = FP16_Optimizer(
                    optimizer,
                    static_loss_scale=self.loss_scale(),
                    mpu=self.mpu,
                    clip_grad=clip_grad,
                    fused_adam_legacy=self.optimizer_legacy_fusion())
        else:
            log_dist('Creating fp16 unfused optimizer with dynamic loss scale',
                     ranks=[0])
            optimizer = FP16_UnfusedOptimizer(
                optimizer,
                static_loss_scale=self.loss_scale(),
                dynamic_loss_scale=self.dynamic_loss_scale(),
                dynamic_loss_args=dynamic_loss_args,
                mpu=self.mpu,
                clip_grad=clip_grad,
                fused_lamb_legacy=self.optimizer_name() == LAMB_OPTIMIZER)
        return optimizer
    def _configure_zero_optimizer(self, optimizer):
        """Wrap *optimizer* in the ZeRO optimizer for the configured stage.

        Stage 1 partitions optimizer states, stage 2 adds gradient
        partitioning, stage 3 adds weight partitioning (with optional CPU/NVMe
        offload). Any other stage value is an error.
        """
        zero_stage = self.zero_optimization_stage()
        log_dist('Creating fp16 ZeRO stage {} optimizer'.format(zero_stage), ranks=[0])
        assert not self.allreduce_always_fp32(), "ZeRO does not support 'fp32_allreduce': true"
        timers = self.timers if self.wall_clock_breakdown() else None
        if zero_stage == ZERO_OPTIMIZATION_OPTIMIZER_STATES:
            # Stage 1: optimizer-state partitioning; requires reduce-scatter.
            assert self.zero_reduce_scatter(), 'Stage 1 only supports reduce scatter mode'
            optimizer = FP16_DeepSpeedZeroOptimizer_Stage1(
                optimizer,
                static_loss_scale=self.loss_scale(),
                dynamic_loss_scale=self.dynamic_loss_scale(),
                dynamic_loss_args=self.dynamic_loss_scale_args(),
                clip_grad=self.gradient_clipping(),
                all_gather_partitions=self.zero_allgather_partitions(),
                allgather_size=self.zero_allgather_bucket_size(),
                max_elements_per_comm=self.zero_reduce_bucket_size(),
                dp_process_group=self.data_parallel_group,
                elastic_checkpoint=self.zero_elastic_checkpoint(),
                mpu=self.mpu)
        elif zero_stage == ZERO_OPTIMIZATION_GRADIENTS:
            # Stage 2: adds gradient partitioning and optional CPU offload.
            optimizer = FP16_DeepSpeedZeroOptimizer(
                optimizer,
                timers=timers,
                static_loss_scale=self.loss_scale(),
                dynamic_loss_scale=self.dynamic_loss_scale(),
                dynamic_loss_args=self.dynamic_loss_scale_args(),
                clip_grad=self.gradient_clipping(),
                contiguous_gradients=self.zero_contiguous_gradients(),
                reduce_bucket_size=self.zero_reduce_bucket_size(),
                allgather_bucket_size=self.zero_allgather_bucket_size(),
                dp_process_group=self.data_parallel_group,
                reduce_scatter=self.zero_reduce_scatter(),
                overlap_comm=self.zero_overlap_comm(),
                cpu_offload=self.zero_cpu_offload(),
                mpu=self.mpu,
                postscale_gradients=self.postscale_gradients(),
                gradient_predivide_factor=self.gradient_predivide_factor(),
                gradient_accumulation_steps=self.gradient_accumulation_steps())
        elif zero_stage == ZERO_OPTIMIZATION_WEIGHTS:
            # Stage 3: adds weight partitioning with param prefetch/offload.
            print("Initializing ZeRO Stage 3") if dist.get_rank() == 0 else None
            from deepspeed.runtime.zero.stage3 import FP16_DeepSpeedZeroOptimizer_Stage3
            optimizer = FP16_DeepSpeedZeroOptimizer_Stage3(
                self.module,
                optimizer,
                timers=timers,
                static_loss_scale=self.loss_scale(),
                dynamic_loss_scale=self.dynamic_loss_scale(),
                dynamic_loss_args=self.dynamic_loss_scale_args(),
                clip_grad=self.gradient_clipping(),
                contiguous_gradients=self.zero_contiguous_gradients(),
                reduce_bucket_size=self.zero_reduce_bucket_size(),
                prefetch_bucket_size=self.zero_prefetch_bucket_size(),
                max_reuse_distance=self.zero_max_reuse_distance(),
                max_live_parameters=self.zero_max_live_parameters(),
                param_persistence_threshold=self.zero_param_persistence_threshold(),
                dp_process_group=self.data_parallel_group,
                reduce_scatter=self.zero_reduce_scatter(),
                overlap_comm=self.zero_overlap_comm(),
                offload_optimizer_config=self.zero_offload_optimizer(),
                offload_param_config=self.zero_offload_param(),
                sub_group_size=self.zero_sub_group_size(),
                mpu=self.mpu,
                postscale_gradients=self.postscale_gradients(),
                gradient_predivide_factor=self.gradient_predivide_factor(),
                gradient_accumulation_steps=self.gradient_accumulation_steps(),
                aio_config=self.aio_config())
        else:
            raise NotImplementedError("ZeRO stage {} not implemented".format(zero_stage))
        return optimizer
def _configure_progressive_layer_drop(self):
pld = ProgressiveLayerDrop(theta=self.pld_theta(), gamma=self.pld_gamma())
return pld
def deepspeed_io(self,
dataset,
batch_size=None,
route=ROUTE_TRAIN,
pin_memory=True,
data_sampler=None,
collate_fn=None,
num_local_io_workers=None):
if not isinstance(dataset, torch.utils.data.Dataset):
raise ValueError("Training data must be a torch Dataset")
if data_sampler is None and (route == ROUTE_PREDICT or route == ROUTE_EVAL):
data_sampler = torch.utils.data.SequentialSampler(dataset)
if batch_size is None:
batch_size = self.train_micro_batch_size_per_gpu()
if collate_fn is None:
collate_fn = self.collate_fn
deepspeed_io_timer = None
if route == ROUTE_TRAIN:
deepspeed_io_timer = self.tput_timer
data_parallel_world_size = None
data_parallel_rank = None
if self.mpu is not None:
data_parallel_world_size = self.mpu.get_data_parallel_world_size()
data_parallel_rank = self.mpu.get_data_parallel_rank()
return DeepSpeedDataLoader(dataset=dataset,
batch_size=batch_size,
pin_memory=pin_memory,
collate_fn=collate_fn,
local_rank=self.local_rank,
tput_timer=deepspeed_io_timer,
num_local_io_workers=num_local_io_workers,
data_sampler=data_sampler,
data_parallel_world_size=data_parallel_world_size,
data_parallel_rank=data_parallel_rank)
def train(self, mode=True):
self.warn_unscaled_loss = True
self.module.train(mode)
def eval(self):
self.warn_unscaled_loss = True
self.module.train(False)
def _scale_loss(self, prescaled_loss):
if isinstance(prescaled_loss, torch.Tensor):
scaled_loss = prescaled_loss / self.gradient_accumulation_steps()
elif isinstance(prescaled_loss, tuple) or isinstance(prescaled_loss, list):
scaled_loss = []
for l in prescaled_loss:
if isinstance(l, torch.Tensor):
scaled_loss.append(l / self.gradient_accumulation_steps())
else:
scaled_loss.append(l)
else:
scaled_loss = prescaled_loss
if self.warn_unscaled_loss:
logger.warning(
f'DeepSpeed unable to scale loss because of type: {type(prescaled_loss)}'
)
self.warn_unscaled_loss = False
return scaled_loss
    def forward(self, *inputs, **kwargs):
        """Run the wrapped module's forward pass with engine bookkeeping:
        optional one-shot FLOPS profiling, progressive-layer-drop state
        injection, ZeRO-3 parameter hooks, and wall-clock timing."""
        # One-shot FLOPS profile at the configured global step, rank 0 only.
        if self.flops_profiler_enabled(
        ) and self.global_steps == self.flops_profiler_profile_step(
        ) and self.global_rank == 0:
            self.flops_profiler = FlopsProfiler(self.module)
            self.flops_profiler.start_profile(ignore_list=None)
        if self.module.training and self.progressive_layer_drop:
            # PLD passes its keep-probability state through kwargs.
            kwargs.update(self.progressive_layer_drop.get_state())
        if self.zero_optimization_partition_weights():
            # Mark every submodule so ZeRO-3 param hooks know a forward is
            # in flight.
            for module in self.module.modules():
                module._parameters._in_forward = True
                pass
        if self.wall_clock_breakdown():
            self.timers('forward_microstep').start()
            self.timers('forward').start()
        if self.training_dataloader is None:
            # The dataloader normally starts this timer; do it here otherwise.
            self.tput_timer.start()
        loss = self.module(*inputs, **kwargs)
        if self.zero_optimization_partition_weights():
            # In no-grad (inference) mode there is no backward to reset the
            # ZeRO-3 parameter coordinator, so reset it here.
            if not torch._C.is_grad_enabled():
                self.optimizer.param_coordinator.reset_step()
            for module in self.module.modules():
                module._parameters._in_forward = False
        if self.wall_clock_breakdown():
            self.timers('forward').stop()
            self.timers('forward_microstep').stop()
        if self.flops_profiler_enabled(
        ) and self.global_steps == self.flops_profiler_profile_step(
        ) and self.global_rank == 0:
            self.flops_profiler.print_model_profile(
                profile_step=self.global_steps,
                module_depth=self.flops_profiler_module_depth(),
                top_modules=self.flops_profiler_top_modules(),
                detailed=self.flops_profiler_detailed())
            self.flops_profiler.end_profile()
        return loss
def allreduce_gradients(self, bucket_size=MEMORY_OPT_ALLREDUCE_SIZE):
if self.zero_optimization_partition_gradients():
self.optimizer.overlapping_partition_gradients_reduce_epilogue()
elif self.is_gradient_accumulation_boundary():
if self.zero_optimization_stage() == ZERO_OPTIMIZATION_OPTIMIZER_STATES:
assert self.zero_reduce_scatter()
self.optimizer.reduce_scatter_gradients(
postscale_gradients=self.postscale_gradients(),
gradient_predivide_factor=self.gradient_predivide_factor(),
gradient_average=self.gradient_average)
else:
self.buffered_allreduce_fallback(elements_per_buffer=bucket_size)
def backward(self, loss, allreduce_gradients=True, release_loss=False):
if not allreduce_gradients:
logger.warning(
f'Argument `allreduce_gradients` is deprecated, ignored, and will soon be removed'
)
if self.gradient_accumulation_steps() > 1:
loss = self._scale_loss(loss.float())
if self.tensorboard_enabled():
if self.is_gradient_accumulation_boundary():
if self.global_rank == 0:
self.summary_events = [
(f'Train/Samples/train_loss',
loss.mean().item() * self.gradient_accumulation_steps(),
self.global_samples)
]
for event in self.summary_events:
self.summary_writer.add_scalar(event[0], event[1], event[2])
self.summary_writer.flush()
if self.wall_clock_breakdown():
self.timers('backward_microstep').start()
self.timers('backward').start()
assert self.optimizer is not None, "must provide optimizer during " \
"init in order to use backward"
if self.wall_clock_breakdown():
self.timers('backward_inner_microstep').start()
self.timers('backward_inner').start()
if self.zero_optimization():
self.optimizer.is_gradient_accumulation_boundary = self.is_gradient_accumulation_boundary(
)
self.optimizer.backward(loss)
elif self.amp_enabled():
_gradient_accumulation_boundary()
with amp.scale_loss(loss,
self.optimizer,
delay_unscale=delay_unscale) as scaled_loss:
scaled_loss.backward()
elif self.fp16_enabled():
self.optimizer.backward(loss)
else:
loss.backward()
if self.wall_clock_breakdown():
self.timers('backward_inner').stop()
self.timers('backward_inner_microstep').stop()
if self.wall_clock_breakdown():
self.timers('backward_allreduce_microstep').start()
self.timers('backward_allreduce').start()
if self.enable_backward_allreduce:
self.allreduce_gradients()
if self.wall_clock_breakdown():
self.timers('backward_allreduce').stop()
self.timers('backward_allreduce_microstep').stop()
self.timers('backward').stop()
self.timers('backward_microstep').stop()
if release_loss:
pass
return loss
def is_gradient_accumulation_boundary(self):
return (self.micro_steps + 1) % \
self.gradient_accumulation_steps() == 0
def zero_grad(self):
for param_name, param in self.module.named_parameters():
param.grad = None
def clip_fp32_gradients(self):
torch.nn.utils.clip_grad_norm_(parameters=self.module.parameters(),
max_norm=self.gradient_clipping())
    def _take_model_step(self, lr_kwargs):
        """Apply one optimizer step: clip, step, zero grads, advance the LR
        scheduler (unless fp16 overflowed), and bump global counters."""
        if self.gradient_clipping() > 0.0:
            if not self.fp16_enabled() and not self.amp_enabled():
                self.clip_fp32_gradients()
            elif self.amp_enabled():
                # AMP's optimizer/scheduler interop:
                # https://nvidia.github.io/apex/advanced.html#gradient-clipping
                master_params = amp.master_params(self.optimizer)
                torch.nn.utils.clip_grad_norm_(parameters=master_params,
                                               max_norm=self.gradient_clipping())
            # fp16 wrappers clip internally, so no explicit clipping here.
        self.optimizer.step()
        #zero grad in basic optimizer could be unreliable and may not exhibit
        #the behaviour that we want
        if not self.zero_optimization() and not self.fp16_enabled(
        ) and not self.amp_enabled():
            self.zero_grad()
        else:
            self.optimizer.zero_grad()
        # NOTE(review): this evaluates to True only on rank 0 — equivalent to
        # `not self.global_rank`; the conditional-expression form is
        # convoluted but behaviorally the same for integer ranks.
        report_progress = self.global_rank == 0 if self.global_rank else True
        # Check overlow here since in DS fp16 optimizer, the overflow is updated in above step() function.
        overflow = False
        if hasattr(self.optimizer, 'overflow'):
            overflow = self.optimizer.overflow
        if overflow:
            # Skipped step: loss scale shrank; do not advance the scheduler.
            self.skipped_steps += 1
        else:
            if self.lr_scheduler is not None:
                self.lr_scheduler.step(**(lr_kwargs or {}))
        if report_progress and (self.global_steps + 1) % self.steps_per_print() == 0:
            self._report_progress(self.global_steps + 1)
        self.global_steps += 1
        self.global_samples += self.train_batch_size()
    def step(self, lr_kwargs=None):
        """Advance one micro step; at accumulation boundaries, take a real
        optimizer step and emit TensorBoard / timing telemetry."""
        if self.wall_clock_breakdown():
            self.timers('step_microstep').start()
            self.timers('step').start()
        assert self.optimizer is not None, "must provide optimizer during " \
            "init in order to use step"
        report_progress = self.global_rank == 0 if self.global_rank else True
        # Update the model when we reach gradient accumulation boundaries
        if self.is_gradient_accumulation_boundary():
            if self.progressive_layer_drop:
                self.progressive_layer_drop.update_state(self.global_steps)
            self._take_model_step(lr_kwargs)
        self.tput_timer.stop(report_progress)
        # Log learning rate
        if self.tensorboard_enabled():
            if self.is_gradient_accumulation_boundary():
                if self.global_rank == 0:
                    self.summary_events = [(f'Train/Samples/lr',
                                            self.get_lr()[0],
                                            self.global_samples)]
                    for event in self.summary_events:  # write_summary_events
                        self.summary_writer.add_scalar(event[0], event[1], event[2])
                    if self.fp16_enabled() and hasattr(self.optimizer, 'cur_scale'):
                        self.summary_events.append((f'Train/Samples/loss_scale',
                                                    self.optimizer.cur_scale,
                                                    self.global_samples))
                    for event in self.summary_events:  # write_summary_events
                        self.summary_writer.add_scalar(event[0], event[1], event[2])
                    self.summary_writer.flush()
        if self.wall_clock_breakdown():
            self.timers('step').stop()
            self.timers('step_microstep').stop()
            timer_names = [
                'forward_microstep',
                'backward_microstep',
                'backward_inner_microstep',
                'backward_allreduce_microstep',
                'step_microstep'
            ]
            self.timers.log(names=timer_names, memory_breakdown=self.memory_breakdown())
        # Log timing
        if self.is_gradient_accumulation_boundary():
            if self.tensorboard_enabled():
                if self.global_rank == 0:
                    # Per-phase elapsed times; reset=False so the values can
                    # also be printed by the timers.log call below.
                    self.summary_events = [
                        (f'Train/Samples/elapsed_time_ms_forward',
                         self.timers('forward').elapsed(reset=False) * 1000.0,
                         self.global_samples),
                        (f'Train/Samples/elapsed_time_ms_backward',
                         self.timers('backward').elapsed(reset=False) * 1000.0,
                         self.global_samples),
                        (f'Train/Samples/elapsed_time_ms_backward_inner',
                         self.timers('backward_inner').elapsed(reset=False) * 1000.0,
                         self.global_samples),
                        (f'Train/Samples/elapsed_time_ms_backward_allreduce',
                         self.timers('backward_allreduce').elapsed(reset=False) *
                         1000.0,
                         self.global_samples),
                        (f'Train/Samples/elapsed_time_ms_step',
                         self.timers('step').elapsed(reset=False) * 1000.0,
                         self.global_samples)
                    ]
                    for event in self.summary_events:  # write_summary_events
                        self.summary_writer.add_scalar(event[0], event[1], event[2])
                    self.summary_writer.flush()
            if self.wall_clock_breakdown():
                self.timers.log([
                    'forward',
                    'backward',
                    'backward_inner',
                    'backward_allreduce',
                    'step'
                ])
        self.micro_steps += 1
def _get_optimizer_param(self, param_name):
    """Collect *param_name* from every optimizer param group.

    Groups that do not define the key contribute ``0.0``; when no optimizer
    is attached an empty list is returned.
    """
    if not self.optimizer:
        return []
    # Param groups behave like dicts, so a defaulted lookup replaces the
    # explicit membership test of the original implementation.
    return [group.get(param_name, 0.0) for group in self.optimizer.param_groups]
def get_lr(self):
    """Return the learning rate of each optimizer param group as a list."""
    return self._get_optimizer_param('lr')
def get_type(self):
    """Return the 'type' entry of each optimizer param group (0.0 when absent)."""
    return self._get_optimizer_param('type')
def get_mom(self):
    """Return per-group momentum values.

    SGD and RMSprop expose 'momentum'; every other optimizer type is assumed
    to carry Adam-style 'betas'.
    """
    key = 'momentum' if self.optimizer_name() in ('SGD', 'RMSprop') else 'betas'
    return self._get_optimizer_param(key)
def get_pld_theta(self):
    """Return the current progressive-layer-drop theta, or None if PLD is off."""
    pld = self.progressive_layer_drop
    return pld.get_theta() if pld else None
def _report_progress(self, step):
    """Log step number, skipped-step count, lr and momentum on rank 0."""
    lr = self.get_lr()
    mom = self.get_mom()
    log_dist(f'step={step}, skipped={self.skipped_steps}, lr={lr}, mom={mom}',
             ranks=[0])
def allreduce_bucket(self, bucket):
    """Flatten *bucket* into one tensor and all-reduce it across the
    data-parallel group, returning the (reduced) flattened tensor.

    Scaling depends on configuration: with postscale_gradients() the tensor
    is optionally pre-divided by gradient_predivide_factor() and re-scaled
    after the reduce; otherwise it is divided by the world size up front.
    """
    tensor = self.flatten(bucket)

    tensor_to_allreduce = tensor

    if self.allreduce_always_fp32():
        # Reduce in fp32 for accuracy; copied back into the original
        # (possibly fp16) buffer at the end.
        tensor_to_allreduce = tensor.float()

    if self.postscale_gradients():
        if self.gradient_predivide_factor() != 1.0:
            tensor_to_allreduce.mul_(1. / self.gradient_predivide_factor())

        dist.all_reduce(tensor_to_allreduce, group=self.data_parallel_group)

        if self.gradient_average:
            if self.gradient_predivide_factor() != self.dp_world_size:
                tensor_to_allreduce.mul_(self.gradient_predivide_factor() /
                                         self.dp_world_size)
    else:
        # Pre-scale path: average before the reduction.
        tensor_to_allreduce.div_(self.dp_world_size)
        dist.all_reduce(tensor_to_allreduce, group=self.data_parallel_group)

    if self.allreduce_always_fp32() and tensor is not tensor_to_allreduce:
        tensor.copy_(tensor_to_allreduce)

    return tensor
def allreduce_and_copy(self, small_bucket):
    """All-reduce *small_bucket* and copy the reduced values back in place."""
    allreduced = self.allreduce_bucket(small_bucket)
    for buf, synced in zip(small_bucket, self.unflatten(allreduced, small_bucket)):
        buf.copy_(synced)
def allreduce_no_retain(self, bucket, numel_per_bucket=500000000):
    """All-reduce *bucket* in chunks of roughly *numel_per_bucket* elements.

    Tensors are accumulated until the running element count exceeds the
    threshold, then each chunk is reduced and copied back immediately so no
    large flattened buffer is retained across the whole bucket.
    """
    pending = []
    pending_numel = 0
    for tensor in bucket:
        pending.append(tensor)
        pending_numel += tensor.numel()
        if pending_numel > numel_per_bucket:
            self.allreduce_and_copy(pending)
            pending = []
            pending_numel = 0
    # Flush whatever is left below the threshold.
    if pending:
        self.allreduce_and_copy(pending)
def buffered_allreduce_fallback(self, grads=None, elements_per_buffer=500000000):
    """All-reduce every module gradient in buckets of ~*elements_per_buffer*.

    Dense grads are reduced via allreduce_no_retain; grads of modules listed
    in csr_tensor_module_names are reduced as sparse CSR tensors when sparse
    gradients are enabled.

    NOTE(review): the *grads* parameter is overwritten on the first line and
    therefore ignored — gradients are always gathered from the module.
    """
    grads = []
    for param_name, param in self.module.named_parameters():
        if param.grad is None:
            # In cases where there is an imbalance of empty grads across
            # ranks we must create empty grads, this will ensure that every
            # rank is reducing the same size. In some cases it may make
            # sense in the future to support the ability to average not
            # w.r.t. world size but with a different value.
            param.grad = torch.zeros(param.size(),
                                     dtype=param.dtype,
                                     device=param.device)
            grads.append(param.grad.data)
        else:
            grad_data = param.grad.data
            if self.sparse_gradients_enabled(
            ) and param_name in self.csr_tensor_module_names:
                # Wrap as CSR so it travels down the sparse reduction path.
                grads.append(CSRTensor(grad_data))
            else:
                grads.append(grad_data)

    # Partition grads by dtype/sparsity so each bucket is homogeneous.
    split_buckets = split_half_float_double_csr(grads)

    for i, bucket_tuple in enumerate(split_buckets):
        bucket_type, bucket = bucket_tuple
        if bucket_type == CSRTensor.type():
            self.csr_allreduce_no_retain(bucket)
        else:
            self.allreduce_no_retain(bucket, numel_per_bucket=elements_per_buffer)
def csr_allreduce_no_retain(self, bucket):
    """All-reduce a bucket of CSR tensors and write dense results back."""
    allreduced_csrs = self.csr_allreduce_bucket(bucket)
    # Densify csr tensor and copy back to original location
    for csr in allreduced_csrs:
        dense_tensor = csr.to_dense()
        csr.orig_dense_tensor.copy_(dense_tensor)
def csr_allreduce_bucket(self, bucket):
    """All-reduce each CSR tensor in *bucket*; return the reduced tensors."""
    return [self.csr_allreduce(csr) for csr in bucket]
def csr_allreduce(self, csr):
    """Average a CSR tensor across ranks by all-gathering indices/values.

    Mutates *csr* in place: values are pre-divided by the world size, then
    every rank's indices and values are concatenated.
    """
    # Pre-divide for fp16 stability
    csr.values.div_(self.dp_world_size)

    indices_device_list = self.csr_all_gather(csr.indices)
    values_device_list = self.csr_all_gather(csr.values)

    csr.indices = torch.cat(indices_device_list)
    csr.values = torch.cat(values_device_list)
    return csr
def csr_all_gather(self, value):
    """All-gather variable-length 1-D/2-D tensors across data-parallel ranks.

    dist.all_gather requires equal shapes, so each tensor is zero-padded to
    the largest first-dimension size, gathered, and then trimmed back to the
    length reported by its source rank.
    """
    my_size = torch.LongTensor([value.size()[0]]).to(self.device)
    all_sizes = self.all_gather_scalar(my_size)
    max_size = torch.cat(all_sizes).max()
    fill_size = (max_size - my_size)

    assert value.dim() in [1, 2]
    if value.dim() == 1:
        if fill_size > 0:
            value = torch.cat([value, value.new_zeros(fill_size)])
        tensor_list = [value.new_zeros(max_size) for _ in range(self.dp_world_size)]
    else:
        if fill_size > 0:
            value = torch.cat([value, value.new_zeros(fill_size, value.size()[1])])
        tensor_list = [
            value.new_zeros(max_size,
                            value.size()[1]) for _ in range(self.dp_world_size)
        ]

    dist.all_gather(tensor_list, value, group=self.data_parallel_group)
    tensors = []
    for dev_idx, t in enumerate(tensor_list):
        size = all_sizes[dev_idx][0]
        # Trim the zero-padding introduced above.
        tensors.append(
            t.index_select(0,
                           torch.LongTensor(range(size)).to(self.device)))

    return tensors
def all_gather_scalar(self, value):
    """All-gather a same-shaped tensor from every data-parallel rank."""
    tensor_list = [value.new_zeros(value.size()) for _ in range(self.dp_world_size)]
    dist.all_gather(tensor_list, value, group=self.data_parallel_group)
    return tensor_list
def module_state_dict(self, destination=None, prefix='', keep_vars=False):
    """Return the wrapped module's state_dict (pipeline engine overrides this)."""
    sd = self.module.state_dict(destination, prefix, keep_vars)
    return sd
def load_module_state_dict(self, state_dict, strict=True):
    """Load *state_dict* into the wrapped module."""
    self.module.load_state_dict(state_dict, strict=strict)
def _get_rank_zero_ckpt_name(self, checkpoints_path, tag, mp_rank, dp_rank):
    """Build the ZeRO optimizer-state checkpoint path for one (mp, dp) rank."""
    basename = f'zero_pp_rank_{dp_rank}_mp_rank_{mp_rank:02d}_optim_states.pt'
    return os.path.join(checkpoints_path, str(tag), basename)
def _get_zero_ckpt_name(self, checkpoints_path, tag):
    """Return this rank's ZeRO optimizer-state checkpoint path."""
    mp_rank = 0 if self.mpu is None else self.mpu.get_model_parallel_rank()
    pp_rank = torch.distributed.get_rank(group=self.optimizer.dp_process_group)
    return self._get_rank_zero_ckpt_name(checkpoints_path, tag, mp_rank, pp_rank)
def _get_ckpt_name(self, checkpoints_path, tag):
    """Build the model-state checkpoint path for this rank."""
    mp_rank = 0 if self.mpu is None else self.mpu.get_model_parallel_rank()
    if self.zero_optimization_partition_weights():
        # ZeRO-3 shards the weights, so every data-parallel rank owns a file.
        dp_rank = torch.distributed.get_rank(group=self.optimizer.dp_process_group)
        basename = f'zero_pp_rank_{dp_rank}_mp_rank_{mp_rank:02d}_model_states.pt'
    else:
        basename = f'mp_rank_{mp_rank:02d}_model_states.pt'
    return os.path.join(checkpoints_path, str(tag), basename)
def load_checkpoint(self,
                    load_dir,
                    tag=None,
                    load_module_strict=True,
                    load_optimizer_states=True,
                    load_lr_scheduler_states=True):
    """Load a checkpoint previously written by save_checkpoint().

    When *tag* is None the tag is read from the 'latest' file in *load_dir*.
    Returns (load_path, client_states); (None, None) when nothing was loaded.
    """
    if tag is None:
        latest_path = os.path.join(load_dir, 'latest')
        if os.path.isfile(latest_path):
            with open(latest_path, 'r') as fd:
                tag = fd.read().strip()
        else:
            logger.warning(f"Unable to find latest file at {latest_path}, if trying to load latest " \
                "checkpoint please ensure this file exists or pass an explicit checkpoint tag when loading a checkpoint.")
            return None, None

    load_path, client_states = self._load_checkpoint(load_dir,
                                                     tag,
                                                     load_module_strict=load_module_strict,
                                                     load_optimizer_states=load_optimizer_states,
                                                     load_lr_scheduler_states=load_lr_scheduler_states)

    if self.zero_optimization() and load_path is not None:
        # ZeRO keeps the optimizer state partitioned; restore each rank's shard.
        self._load_zero_checkpoint(load_dir,
                                   tag,
                                   load_optimizer_states=load_optimizer_states)

    return load_path, client_states
def _load_checkpoint(self,
                     load_dir,
                     tag,
                     load_module_strict=True,
                     load_optimizer_states=True,
                     load_lr_scheduler_states=True):
    """Restore module/optimizer/scheduler state for *tag* from *load_dir*.

    Returns (load_path, client_state) on success and (None, None) when the
    checkpoint file does not exist. ``client_state`` holds every checkpoint
    entry that is not an internal deepspeed key.
    """
    load_path = self._get_ckpt_name(load_dir, tag)

    if not os.path.exists(load_path):
        # Fix: logger.warn is a deprecated alias of logger.warning.
        logger.warning(
            'Client provided checkpoint load path: {} does not exist ... skip checkpoint load'
            .format(load_path))
        return None, None

    logger.info(f'rank: {self.global_rank} loading checkpoint: {load_path}')
    checkpoint = torch.load(load_path, map_location=lambda storage, loc: storage)

    if isinstance(self.module, PipelineModule):
        # Pipeline parallelism uses this to load its own checkpoint files.
        self._curr_ckpt_path = os.path.join(load_dir, tag)

    self.load_module_state_dict(state_dict=checkpoint['module'],
                                strict=load_module_strict)
    if self.optimizer is not None and not self.zero_optimization():
        if self.fp16_enabled():
            # The fp16 wrapper decides internally what to restore when
            # optimizer states are not requested.
            self.optimizer.load_state_dict(
                checkpoint['optimizer'],
                load_optimizer_states=load_optimizer_states)
        elif load_optimizer_states:
            self.optimizer.load_state_dict(checkpoint['optimizer'])

    if load_lr_scheduler_states and self.lr_scheduler is not None:
        self.lr_scheduler.load_state_dict(checkpoint['lr_scheduler'])

    self.csr_tensor_module_names = checkpoint['csr_tensor_module_names']
    self.global_steps = checkpoint['global_steps']
    # Older checkpoints may lack 'global_samples'; derive it from the steps.
    self.global_samples = checkpoint.get('global_samples',
                                         self.global_steps * self.train_batch_size())
    self.skipped_steps = checkpoint['skipped_steps']
    self.loaded_checkpoint_mp_world_size = checkpoint['mp_world_size']
    self.loaded_checkpoint_dp_world_size = checkpoint['dp_world_size']
    deepspeed_states = [
        'module',
        'optimizer',
        'lr_scheduler',
        'csr_tensor_module_names',
        'skipped_steps',
        'global_steps',
        'dp_world_size',
        'mp_world_size'
    ]
    client_state = {
        key: value
        for key, value in checkpoint.items() if key not in deepspeed_states
    }

    return load_path, client_state
def _load_zero_checkpoint(self, load_dir, tag, load_optimizer_states=True):
    """Restore the ZeRO optimizer state from the per-rank partition files."""
    zero_sd_list = self._get_all_zero_checkpoints(load_dir, tag)
    if zero_sd_list is None:
        # Missing partition files were already reported; nothing to restore.
        return

    self.optimizer.load_state_dict(
        state_dict_list=zero_sd_list,
        load_optimizer_states=load_optimizer_states,
        load_from_fp32_weights=self.zero_load_from_fp32_weights())
    print(
        f'loading {len(zero_sd_list)} zero partition checkpoints for rank {self.global_rank}'
    )
def _get_mp_rank_zero_checkpoint_names(self, load_dir, tag, mp_rank, dp_world_size):
    """List ZeRO checkpoint names for every dp rank at a fixed mp rank."""
    return [
        self._get_rank_zero_ckpt_name(checkpoints_path=load_dir,
                                      tag=tag,
                                      mp_rank=mp_rank,
                                      dp_rank=dp_rank)
        for dp_rank in range(dp_world_size)
    ]
def _get_all_zero_checkpoint_names(self,
                                   load_dir,
                                   tag,
                                   mp_world_size,
                                   dp_world_size):
    """List ZeRO checkpoint names across all mp and dp ranks."""
    names = []
    for mp_rank in range(mp_world_size):
        names.extend(
            self._get_mp_rank_zero_checkpoint_names(load_dir=load_dir,
                                                    tag=tag,
                                                    mp_rank=mp_rank,
                                                    dp_world_size=dp_world_size))
    return names
def _get_all_zero_checkpoints(self, load_dir, tag):
    """Load the ZeRO optimizer state dict of every data-parallel rank.

    Returns the list of optimizer state dicts, or None when any expected
    checkpoint file is missing (after trying the legacy file name pattern).
    """
    mp_rank = 0 if self.mpu is None else self.mpu.get_model_parallel_rank()
    zero_ckpt_names = self._get_mp_rank_zero_checkpoint_names(
        load_dir=load_dir,
        tag=tag,
        mp_rank=mp_rank,
        dp_world_size=self.loaded_checkpoint_dp_world_size)
    invalid_zero_ckpt_paths = []
    for i, ckpt_name in enumerate(zero_ckpt_names):
        if not os.path.exists(ckpt_name):
            # transparently handle the old file pattern for optim_states
            if 'optim_states.pt' in ckpt_name:
                ckpt_name_try = ckpt_name.replace("_optim_states.pt",
                                                  "optim_states.pt")
                if os.path.exists(ckpt_name_try):
                    zero_ckpt_names[i] = ckpt_name_try
                    continue
            invalid_zero_ckpt_paths.append(ckpt_name)

    if len(invalid_zero_ckpt_paths) > 0:
        # Fix: logger.warn is a deprecated alias of logger.warning.
        logger.warning(
            f"The following zero checkpoints paths are missing: {invalid_zero_ckpt_paths}"
        )
        return None

    zero_sd_list = []
    for ckpt_name in zero_ckpt_names:
        zero_sd_list.append(torch.load(ckpt_name, map_location='cpu'))

    zero_optimizer_sd = [sd['optimizer_state_dict'] for sd in zero_sd_list]
    print(
        f"successfully loaded {len(zero_optimizer_sd)} ZeRO state_dicts for rank {self.global_rank}"
    )
    return zero_optimizer_sd
def _checkpoint_tag_validation(self, tag):
    """Verify the checkpoint *tag* is identical on every rank.

    The tag's SHA-1 digest is min/max all-reduced across ranks; the tag is
    consistent iff both reductions equal the local hash. Depending on
    configuration a mismatch either raises or logs a warning.
    """
    if self.checkpoint_tag_validation_enabled():
        s_hash = hashlib.sha1(tag.encode())
        bhash = torch.ByteTensor([s_hash.digest()]).flatten().to(self.device)
        max_bhash = bhash.clone()
        min_bhash = bhash.clone()
        dist.all_reduce(max_bhash, op=torch.distributed.ReduceOp.MAX)
        dist.all_reduce(min_bhash, op=torch.distributed.ReduceOp.MIN)
        valid = all(min_bhash == bhash) and all(max_bhash == bhash)
        msg = f"[rank={dist.get_rank()}] The checkpoint tag name '{tag}' is not consistent across " \
              "all ranks. Including rank unique information in checkpoint tag could cause issues when " \
              "restoring with different world sizes."
        if self.checkpoint_tag_validation_fail():
            assert valid, msg
        elif not valid:
            logger.warning(msg)
def save_checkpoint(self, save_dir, tag=None, client_state=None, save_latest=True):
    """Save a training checkpoint under *save_dir*/*tag*.

    :param save_dir: directory the checkpoint is written to
    :param tag: unique checkpoint identifier; defaults to 'global_step<N>'
    :param client_state: optional extra state supplied by the client code
    :param save_latest: also update the 'latest' file to point at this tag
    :return: True on completion
    """
    # Fix: avoid a shared mutable default argument for client_state.
    if client_state is None:
        client_state = {}

    if self.zero_optimization_partition_weights():
        # Prepare for state_dict() by ensuring all parameters are partitioned
        self.optimizer.save_checkpoint_prologue()

    # This is to make sure the checkpoint names are created without collision
    # There seems to be issue creating them in parallel

    # Ensure save_dir directory exists
    os.makedirs(save_dir, exist_ok=True)

    if tag is None:
        tag = f"global_step{self.global_steps}"

    # Ensure tag is a string
    tag = str(tag)

    # Ensure checkpoint tag is consistent across ranks
    self._checkpoint_tag_validation(tag)

    if self.save_non_zero_checkpoint:
        self._create_checkpoint_file(save_dir, tag, False)
        self._save_checkpoint(save_dir, tag, client_state=client_state)

    if self.save_zero_checkpoint:
        self._create_zero_checkpoint_files(save_dir, tag)
        self._save_zero_checkpoint(save_dir, tag)

    # Save latest checkpoint tag
    if save_latest:
        with open(os.path.join(save_dir, 'latest'), 'w') as fd:
            fd.write(tag)

    if self.zero_optimization_partition_weights():
        self.optimizer.save_checkpoint_epilogue()

    return True
def _create_checkpoint_file(self, save_dir, tag, zero_checkpoint):
    """Ensure the directory for the checkpoint file exists.

    :param zero_checkpoint: True selects the ZeRO ckpt naming scheme
    :return: True on success, False if the directory could not be created
    """
    name_function = self._get_zero_ckpt_name if zero_checkpoint else self._get_ckpt_name
    try:
        checkpoint_name = name_function(save_dir, tag)
        ensure_directory_exists(checkpoint_name)
    except Exception:
        # Fix: a bare 'except:' would also swallow KeyboardInterrupt/SystemExit.
        logger.error(f'Failed saving model checkpoint to {save_dir} with tag {tag}')
        return False

    return True
def _create_zero_checkpoint_files(self, save_dir, tag):
    """Create ZeRO checkpoint directories rank-by-rank, separated by barriers
    so that ranks never race on directory creation."""
    success = True
    # zero checkpoint files are created sequentially
    for rank in range(self.world_size):
        if rank == self.global_rank:
            success = self._create_checkpoint_file(save_dir, tag, True)

        dist.barrier()

    return success
def _save_checkpoint(self, save_dir, tag, client_state=None):
    """Write module/optimizer/scheduler state plus *client_state* to disk."""
    # Fix: avoid a shared mutable default argument for client_state.
    if client_state is None:
        client_state = {}

    save_path = self._get_ckpt_name(save_dir, tag)
    # A hack to save the checkpointing directory. Pipeline parallelism overrides
    # module_state_dict() and uses this path to save the model. module_state_dict()
    # then instead just returns None.
    self._curr_ckpt_path = os.path.join(save_dir, tag)

    state = dict(
        module=self.module_state_dict(),
        optimizer=self.optimizer.state_dict()
        if self.optimizer and not self.zero_optimization() else None,
        lr_scheduler=self.lr_scheduler.state_dict()
        if self.lr_scheduler is not None else None,
        csr_tensor_module_names=self.csr_tensor_module_names,
        skipped_steps=self.skipped_steps,
        global_steps=self.global_steps,
        global_samples=self.global_samples,
        dp_world_size=self.dp_world_size,
        mp_world_size=self.mp_world_size,
    )
    # Client entries are merged last so they may shadow internal keys.
    state.update(client_state)

    log_dist(message=f'Saving model checkpoint: {save_path}', ranks=[0])
    torch.save(state, save_path)
    self._curr_save_path = None
def _get_param_shapes(self):
    """Map parameter names to their full shapes.

    ZeRO-partitioned parameters expose their unpartitioned shape as
    ``ds_shape``; plain parameters fall back to ``shape``.
    """
    return OrderedDict(
        (name, getattr(param, 'ds_shape', param.shape))
        for name, param in self.module.named_parameters())
def _copy_recovery_script(self, save_path):
    """Copy zero_to_fp32.py next to the checkpoint and mark it executable."""
    base_dir = os.path.dirname(os.path.dirname(__file__))
    script = "zero_to_fp32.py"
    src = os.path.join(base_dir, "utils", script)
    dst = os.path.join(save_path, script)
    logger.info(f"creating recovery script {dst}")
    copyfile(src, dst)
    # make executable
    os.chmod(dst, os.stat(dst).st_mode | stat.S_IEXEC)
def _save_zero_checkpoint(self, save_path, tag):
    """Save this rank's ZeRO optimizer partition plus the parameter shapes
    needed by the zero_to_fp32.py recovery script."""
    zero_checkpoint_name = self._get_zero_ckpt_name(save_path, tag)
    zero_sd = dict(
        optimizer_state_dict=self.optimizer.state_dict(),
        param_shapes=self._get_param_shapes(),
    )
    torch.save(zero_sd, zero_checkpoint_name)
    self._copy_recovery_script(save_path)
    logger.info('zero checkpoint saved {}'.format(zero_checkpoint_name))
def _zero3_consolidated_fp16_state_dict(self):
    """Gather the ZeRO-3 partitioned fp16 weights into one full state_dict.

    The consolidated dict is materialized on rank 0 only (other ranks get
    None). Layers are gathered one at a time to bound peak memory, and
    weight sharing (tied weights) is preserved via storage-pointer tracking.
    """
    import deepspeed

    if not self.zero_optimization_partition_weights():
        raise ValueError("this function requires ZeRO-3 mode")

    state_dict = OrderedDict() if torch.distributed.get_rank() == 0 else None
    shared_weights = {}

    def get_layer_state_dict(module, prefix=""):
        # gather one layer at a time to be memory-efficient
        with deepspeed.zero.GatheredParameters(list(
                module.parameters(recurse=False))):
            if torch.distributed.get_rank() == 0:
                for name, param in module.named_parameters(recurse=False):
                    if param is None:
                        continue
                    key = prefix + name
                    # for shared weights we want to make sure not to unshare them when copying to cpu
                    data_ptr_id = param.storage().data_ptr()
                    if data_ptr_id in shared_weights:
                        # shared weights
                        # print(f"`{key}` is shared with `{shared_weights[data_ptr_id]}`")
                        state_dict[key] = state_dict[shared_weights[data_ptr_id]]
                    else:
                        state_dict[key] = param.detach().cpu()
                        shared_weights[data_ptr_id] = key
                    #print(f"param {name} {param.shape}")
                    #print(f"param {key} {param.shape} {state_dict[key].storage().data_ptr()}")

                # now buffers - not sure if need to take care of potentially shared weights here
                for name, buf in module.named_buffers(recurse=False):
                    if buf is not None and name not in module._non_persistent_buffers_set:
                        state_dict[prefix + name] = buf.detach().cpu()

        for name, child in module.named_children():
            if child is not None:
                get_layer_state_dict(child, prefix + name + ".")

    see_memory_usage("before get_layer_state_dict", force=False)
    get_layer_state_dict(self.module, prefix="")
    see_memory_usage("after get_layer_state_dict", force=False)

    return state_dict
def save_fp16_model(self, save_dir, save_filename="pytorch_model.bin"):
    """Save the fp16 model weights to *save_dir*/*save_filename* (rank 0 only).

    Under ZeRO-3 the partitioned weights are consolidated first, which is
    gated by the stage3_gather_fp16_weights_on_model_save config flag.
    """
    path = os.path.join(save_dir, save_filename)

    if self.zero_optimization_partition_weights():
        if self.zero_gather_fp16_weights_on_model_save():
            # consolidation is expensive in time and memory and therefore isn't a default
            state_dict = self._zero3_consolidated_fp16_state_dict()
        else:
            logger.info(
                f"Did not save the model {path} because `stage3_gather_fp16_weights_on_model_save` is False"
            )
            return
    else:
        state_dict = self.module.state_dict()

    if torch.distributed.get_rank() == 0:
        os.makedirs(save_dir, exist_ok=True)
        logger.info(f"Saving model weights to {path}")
        torch.save(state_dict, path)
| true | true |
f71a744b58bcf58f5653e87192017fca4a93e074 | 580 | py | Python | code/py/test_statsrecorder.py | notmatthancock/notmatthancock.github.io | abcd91cc7c2653c5243fe96ba2fd681ec03930bb | [
"MIT"
] | null | null | null | code/py/test_statsrecorder.py | notmatthancock/notmatthancock.github.io | abcd91cc7c2653c5243fe96ba2fd681ec03930bb | [
"MIT"
] | null | null | null | code/py/test_statsrecorder.py | notmatthancock/notmatthancock.github.io | abcd91cc7c2653c5243fe96ba2fd681ec03930bb | [
"MIT"
] | null | null | null | import numpy as np
import statsrecorder as sr
# Sanity-check StatsRecorder's streaming mean/std against numpy's batch
# computation over the same data.
rs = np.random.RandomState(323)  # fixed seed for reproducibility

mystats = sr.StatsRecorder()

# Hold all observations in "data" to check for correctness.
ndims = 42
data = np.empty((0, ndims))

for i in range(1000):
    # Feed batches of random size (10-100 rows) into the recorder.
    nobserv = rs.randint(10, 101)
    newdata = rs.randn(nobserv, ndims)
    data = np.vstack((data, newdata))

    # Update stats recorder object
    mystats.update(newdata)

# Check stats recorder object is doing its business right.
assert np.allclose(mystats.mean, data.mean(axis=0))
assert np.allclose(mystats.std, data.std(axis=0))
| 25.217391 | 62 | 0.705172 | import numpy as np
import statsrecorder as sr
rs = np.random.RandomState(323)
mystats = sr.StatsRecorder()
ndims = 42
data = np.empty((0, ndims))
for i in range(1000):
nobserv = rs.randint(10,101)
newdata = rs.randn(nobserv, ndims)
data = np.vstack((data, newdata))
mystats.update(newdata)
assert np.allclose(mystats.mean, data.mean(axis=0))
assert np.allclose(mystats.std, data.std(axis=0))
| true | true |
f71a746bad402ab1d91d173ac40a919ce1f67c52 | 40,695 | py | Python | sscanss/ui/dialogs/insert.py | samtygier-stfc/SScanSS-2 | 0df2160c32fdc533f7d391735bd55d524e253f4d | [
"BSD-3-Clause"
] | null | null | null | sscanss/ui/dialogs/insert.py | samtygier-stfc/SScanSS-2 | 0df2160c32fdc533f7d391735bd55d524e253f4d | [
"BSD-3-Clause"
] | null | null | null | sscanss/ui/dialogs/insert.py | samtygier-stfc/SScanSS-2 | 0df2160c32fdc533f7d391735bd55d524e253f4d | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
from PyQt5 import QtCore, QtGui, QtWidgets
from sscanss.config import path_for, settings
from sscanss.core.math import Plane, Matrix33, Vector3, clamp, map_range, trunc, VECTOR_EPS
from sscanss.core.geometry import mesh_plane_intersection
from sscanss.core.util import Primitives, DockFlag, StrainComponents, PointType, PlaneOptions, Attributes
from sscanss.ui.widgets import (FormGroup, FormControl, GraphicsView, GraphicsScene, create_tool_button, FormTitle,
create_scroll_area, CompareValidator, GraphicsPointItem, Grid, create_icon)
from .managers import PointManager
class InsertPrimitiveDialog(QtWidgets.QWidget):
    """Provides UI for creating a primitive (tube, sphere, cylinder, cuboid)
    sample model with the given dimensions.

    :param primitive: primitive type
    :type primitive: Primitives
    :param parent: Main window
    :type parent: MainWindow
    """
    dock_flag = DockFlag.Upper

    def __init__(self, primitive, parent):
        super().__init__(parent)
        self.parent = parent
        self.parent_model = self.parent.presenter.model
        self.parent.scenes.switchToSampleScene()
        self.primitive = primitive

        self.main_layout = QtWidgets.QVBoxLayout()

        self.textboxes = {}
        name = self.parent_model.uniqueKey(self.primitive.value)
        # Default dimensions (mm) depend on the primitive type; 'name' is
        # always the first form entry.
        self.mesh_args = {'name': name}
        if self.primitive == Primitives.Tube:
            self.mesh_args.update({'outer_radius': 100.000, 'inner_radius': 50.000, 'height': 200.000})
        elif self.primitive == Primitives.Sphere:
            self.mesh_args.update({'radius': 100.000})
        elif self.primitive == Primitives.Cylinder:
            self.mesh_args.update({'radius': 100.000, 'height': 200.000})
        else:
            self.mesh_args.update({'width': 50.000, 'height': 100.000, 'depth': 200.000})

        self.createPrimitiveSwitcher()
        self.createFormInputs()

        button_layout = QtWidgets.QHBoxLayout()
        self.create_primitive_button = QtWidgets.QPushButton('Create')
        self.create_primitive_button.clicked.connect(self.createPrimiviteButtonClicked)
        button_layout.addWidget(self.create_primitive_button)
        button_layout.addStretch(1)

        self.main_layout.addLayout(button_layout)
        self.main_layout.addStretch(1)
        self.setLayout(self.main_layout)

        self.title = 'Insert {}'.format(self.primitive.value)
        self.setMinimumWidth(450)
        self.textboxes['name'].setFocus()

    def createPrimitiveSwitcher(self):
        """Adds a drop-down tool button for switching to another primitive's dialog."""
        switcher_layout = QtWidgets.QHBoxLayout()
        switcher = create_tool_button(style_name='MenuButton', status_tip='Open dialog for a different primitive')
        switcher.setArrowType(QtCore.Qt.DownArrow)
        switcher.setPopupMode(QtWidgets.QToolButton.InstantPopup)
        switcher.setMenu(self.parent.primitives_menu)
        switcher_layout.addStretch(1)
        switcher_layout.addWidget(switcher)
        self.main_layout.addLayout(switcher_layout)

    def createFormInputs(self):
        """Builds one validated form control per entry in mesh_args."""
        self.form_group = FormGroup()
        for key, value in self.mesh_args.items():
            pretty_label = key.replace('_', ' ').title()

            if key == 'name':
                control = FormControl(pretty_label, value, required=True)
                control.form_lineedit.textChanged.connect(self.nameCheck)
            else:
                # Dimensions must be strictly positive numbers.
                control = FormControl(pretty_label, value, desc='mm', required=True, number=True)
                control.range(0, None, min_exclusive=True)

            self.textboxes[key] = control
            self.form_group.addControl(control)

        if self.primitive == Primitives.Tube:
            # A tube is valid only when outer radius strictly exceeds inner radius.
            outer_radius = self.textboxes['outer_radius']
            inner_radius = self.textboxes['inner_radius']

            outer_radius.compareWith(inner_radius, CompareValidator.Operator.Greater)
            inner_radius.compareWith(outer_radius, CompareValidator.Operator.Less)

        self.main_layout.addWidget(self.form_group)
        self.form_group.groupValidation.connect(self.formValidation)

    def nameCheck(self, value):
        """Marks the name control invalid when *value* is the reserved sample key.

        :param value: current text of the name control
        :type value: str
        """
        if self.parent_model.all_sample_key == value:
            self.textboxes['name'].isInvalid(f'"{self.parent_model.all_sample_key}" is a reserved name')

    def formValidation(self, is_valid):
        """Enables the create button only while the whole form is valid.

        :param is_valid: indicates the form inputs are valid
        :type is_valid: bool
        """
        if is_valid:
            self.create_primitive_button.setEnabled(True)
        else:
            self.create_primitive_button.setDisabled(True)

    def createPrimiviteButtonClicked(self):
        """Reads the form values and asks the presenter to add the primitive."""
        for key, textbox in self.textboxes.items():
            value = textbox.value
            self.mesh_args[key] = value

        self.parent.presenter.addPrimitive(self.primitive, self.mesh_args)
        # Suggest a fresh unique name for the next primitive.
        new_name = self.parent_model.uniqueKey(self.primitive.value)
        self.textboxes['name'].value = new_name
class InsertPointDialog(QtWidgets.QWidget):
    """Provides UI for typing in measurement/fiducial points

    :param point_type: point type
    :type point_type: PointType
    :param parent: Main window
    :type parent: MainWindow
    """
    dock_flag = DockFlag.Upper

    def __init__(self, point_type, parent):
        super().__init__(parent)
        self.parent = parent
        self.parent_model = parent.presenter.model
        self.parent.scenes.switchToSampleScene()
        self.point_type = point_type
        self.title = 'Add {} Point'.format(point_type.value)
        self.main_layout = QtWidgets.QVBoxLayout()
        unit = 'mm'
        self.form_group = FormGroup()
        # One numeric control per coordinate, all required.
        self.x_axis = FormControl('X', 0.0, required=True, desc=unit, number=True)
        self.y_axis = FormControl('Y', 0.0, required=True, desc=unit, number=True)
        self.z_axis = FormControl('Z', 0.0, required=True, desc=unit, number=True)
        self.form_group.addControl(self.x_axis)
        self.form_group.addControl(self.y_axis)
        self.form_group.addControl(self.z_axis)
        self.form_group.groupValidation.connect(self.formValidation)
        button_layout = QtWidgets.QHBoxLayout()
        self.execute_button = QtWidgets.QPushButton(self.title)
        self.execute_button.clicked.connect(self.executeButtonClicked)
        button_layout.addWidget(self.execute_button)
        button_layout.addStretch(1)

        self.main_layout.addWidget(self.form_group)
        self.main_layout.addLayout(button_layout)
        self.main_layout.addStretch(1)
        self.setLayout(self.main_layout)

        self.setMinimumWidth(450)

    def formValidation(self, is_valid):
        """Enables the execute button only while all coordinates are valid.

        :param is_valid: indicates the form inputs are valid
        :type is_valid: bool
        """
        if is_valid:
            self.execute_button.setEnabled(True)
        else:
            self.execute_button.setDisabled(True)

    def executeButtonClicked(self):
        """Adds the typed-in point (enabled=True) via the presenter."""
        point = [self.x_axis.value, self.y_axis.value, self.z_axis.value]
        self.parent.presenter.addPoints([(point, True)], self.point_type)
class InsertVectorDialog(QtWidgets.QWidget):
"""Provides UI for adding measurement vectors using a variety of methods
:param parent: Main window
:type parent: MainWindow
"""
dock_flag = DockFlag.Upper
def __init__(self, parent):
super().__init__(parent)
self.parent = parent
self.parent_model = parent.presenter.model
self.parent.scenes.switchToSampleScene()
self.title = 'Add Measurement Vectors'
self.main_layout = QtWidgets.QVBoxLayout()
spacing = 10
self.main_layout.addSpacing(spacing)
self.main_layout.addWidget(QtWidgets.QLabel('Measurement Point:'))
self.points_combobox = QtWidgets.QComboBox()
self.points_combobox.setView(QtWidgets.QListView())
self.main_layout.addWidget(self.points_combobox)
self.updatePointList()
self.main_layout.addSpacing(spacing)
layout = QtWidgets.QHBoxLayout()
alignment_layout = QtWidgets.QVBoxLayout()
alignment_layout.addWidget(QtWidgets.QLabel('Alignment:'))
self.alignment_combobox = QtWidgets.QComboBox()
self.alignment_combobox.setView(QtWidgets.QListView())
self.alignment_combobox.setInsertPolicy(QtWidgets.QComboBox.InsertAtCurrent)
self.updateAlignment()
self.alignment_combobox.activated.connect(self.addNewAlignment)
self.alignment_combobox.currentIndexChanged.connect(self.changeRenderedAlignment)
alignment_layout.addWidget(self.alignment_combobox)
alignment_layout.addSpacing(spacing)
layout.addLayout(alignment_layout)
self.detector_combobox = QtWidgets.QComboBox()
self.detector_combobox.setView(QtWidgets.QListView())
self.detector_combobox.addItems(list(self.parent_model.instrument.detectors.keys()))
if len(self.parent_model.instrument.detectors) > 1:
detector_layout = QtWidgets.QVBoxLayout()
detector_layout.addWidget(QtWidgets.QLabel('Detector:'))
detector_layout.addWidget(self.detector_combobox)
size = self.detector_combobox.iconSize()
self.detector_combobox.setItemIcon(0, create_icon(settings.value(settings.Key.Vector_1_Colour), size))
self.detector_combobox.setItemIcon(1, create_icon(settings.value(settings.Key.Vector_2_Colour), size))
detector_layout.addSpacing(spacing)
layout.addSpacing(spacing)
layout.addLayout(detector_layout)
self.main_layout.addLayout(layout)
self.main_layout.addWidget(QtWidgets.QLabel('Strain Component:'))
self.component_combobox = QtWidgets.QComboBox()
self.component_combobox.setView(QtWidgets.QListView())
strain_components = [s.value for s in StrainComponents]
self.component_combobox.addItems(strain_components)
self.component_combobox.currentTextChanged.connect(self.toggleKeyInBox)
self.main_layout.addWidget(self.component_combobox)
self.main_layout.addSpacing(spacing)
button_layout = QtWidgets.QHBoxLayout()
self.execute_button = QtWidgets.QPushButton(self.title)
self.execute_button.clicked.connect(self.executeButtonClicked)
button_layout.addWidget(self.execute_button)
button_layout.addStretch(1)
self.createKeyInBox()
self.reverse_checkbox = QtWidgets.QCheckBox('Reverse Direction of Vector')
self.main_layout.addWidget(self.reverse_checkbox)
self.main_layout.addSpacing(spacing)
self.main_layout.addLayout(button_layout)
self.main_layout.addStretch(1)
self.setLayout(self.main_layout)
self.parent_model.measurement_points_changed.connect(self.updatePointList)
self.parent_model.measurement_vectors_changed.connect(self.updateAlignment)
self.parent.scenes.rendered_alignment_changed.connect(self.alignment_combobox.setCurrentIndex)
self.setMinimumWidth(450)
def updatePointList(self):
self.points_combobox.clear()
point_list = ['All Points']
point_list.extend(['{}'.format(i+1) for i in range(self.parent_model.measurement_points.size)])
self.points_combobox.addItems(point_list)
def updateAlignment(self):
align_count = self.parent_model.measurement_vectors.shape[2]
if align_count != self.alignment_combobox.count() - 1:
self.alignment_combobox.clear()
alignment_list = ['{}'.format(i + 1) for i in range(align_count)]
alignment_list.append('Add New...')
self.alignment_combobox.addItems(alignment_list)
self.alignment_combobox.setCurrentIndex(self.parent.scenes.rendered_alignment)
def addNewAlignment(self, index):
if index == self.alignment_combobox.count() - 1:
self.alignment_combobox.insertItem(index, '{}'.format(index + 1))
self.alignment_combobox.setCurrentIndex(index)
def changeRenderedAlignment(self, index):
align_count = self.parent_model.measurement_vectors.shape[2]
if 0 <= index < align_count:
self.parent.scenes.changeRenderedAlignment(index)
elif index >= align_count:
self.parent.scenes.changeVisibility(Attributes.Vectors, False)
def toggleKeyInBox(self, selected_text):
strain_component = StrainComponents(selected_text)
if strain_component == StrainComponents.custom:
self.key_in_box.setVisible(True)
self.form_group.validateGroup()
else:
self.key_in_box.setVisible(False)
self.execute_button.setEnabled(True)
def createKeyInBox(self):
self.key_in_box = QtWidgets.QWidget(self)
layout = QtWidgets.QVBoxLayout()
self.form_group = FormGroup(FormGroup.Layout.Horizontal)
self.x_axis = FormControl('X', 1.0, required=True, number=True, decimals=7)
self.x_axis.range(-1.0, 1.0)
self.y_axis = FormControl('Y', 0.0, required=True, number=True, decimals=7)
self.y_axis.range(-1.0, 1.0)
self.z_axis = FormControl('Z', 0.0, required=True, number=True, decimals=7)
self.z_axis.range(-1.0, 1.0)
self.form_group.addControl(self.x_axis)
self.form_group.addControl(self.y_axis)
self.form_group.addControl(self.z_axis)
self.form_group.groupValidation.connect(self.formValidation)
layout.addWidget(self.form_group)
self.key_in_box.setLayout(layout)
self.main_layout.addWidget(self.key_in_box)
self.toggleKeyInBox(self.component_combobox.currentText())
def formValidation(self, is_valid):
self.execute_button.setDisabled(True)
if is_valid:
if np.linalg.norm([self.x_axis.value, self.y_axis.value, self.z_axis.value]) > VECTOR_EPS:
self.x_axis.validation_label.setText('')
self.execute_button.setEnabled(True)
else:
self.x_axis.validation_label.setText('Bad Normal')
def executeButtonClicked(self):
points = self.points_combobox.currentIndex() - 1
selected_text = self.component_combobox.currentText()
strain_component = StrainComponents(selected_text)
alignment = self.alignment_combobox.currentIndex()
detector = self.detector_combobox.currentIndex()
check_state = self.reverse_checkbox.checkState()
reverse = True if check_state == QtCore.Qt.Checked else False
if strain_component == StrainComponents.custom:
vector = [self.x_axis.value, self.y_axis.value, self.z_axis.value]
else:
vector = None
self.parent.presenter.addVectors(points, strain_component, alignment, detector,
key_in=vector, reverse=reverse)
# New vectors are drawn by the scene manager after function ends
self.parent.scenes._rendered_alignment = alignment
def closeEvent(self, event):
    """Reset the rendered alignment to the first one when the dialog closes."""
    self.parent.scenes.changeRenderedAlignment(0)
    event.accept()
class PickPointDialog(QtWidgets.QWidget):
    """Provides UI for selecting measurement points on a cross section of the sample

    :param parent: Main window
    :type parent: MainWindow
    """
    dock_flag = DockFlag.Full

    def __init__(self, parent):
        super().__init__(parent)
        self.parent = parent
        self.parent_model = parent.presenter.model
        self.parent.scenes.switchToSampleScene()
        self.title = 'Add Measurement Points Graphically'
        self.setMinimumWidth(500)

        self.plane_offset_range = (-1., 1.)
        self.slider_range = (-10000000, 10000000)
        # scale factor from sample coordinates (mm) to 2D scene coordinates
        self.sample_scale = 20
        self.path_pen = QtGui.QPen(QtGui.QColor(255, 0, 0), 0)
        self.point_pen = QtGui.QPen(QtGui.QColor(200, 0, 0), 0)

        self.main_layout = QtWidgets.QVBoxLayout()
        self.setLayout(self.main_layout)
        button_layout = QtWidgets.QHBoxLayout()
        self.help_button = create_tool_button(tooltip='Help', style_name='ToolButton',
                                              status_tip='Display shortcuts for the cross-section view',
                                              icon_path=path_for('question.png'))
        self.help_button.clicked.connect(self.showHelp)
        self.reset_button = create_tool_button(tooltip='Reset View', style_name='ToolButton',
                                               status_tip='Reset camera transformation of the cross-section view',
                                               icon_path=path_for('refresh.png'))
        self.execute_button = QtWidgets.QPushButton('Add Points')
        self.execute_button.clicked.connect(self.addPoints)
        button_layout.addWidget(self.help_button)
        button_layout.addWidget(self.reset_button)
        button_layout.addStretch(1)
        button_layout.addWidget(self.execute_button)
        self.main_layout.addLayout(button_layout)

        self.splitter = QtWidgets.QSplitter(QtCore.Qt.Vertical)
        self.splitter.setChildrenCollapsible(False)
        self.main_layout.addWidget(self.splitter)
        self.createGraphicsView()
        self.reset_button.clicked.connect(self.view.reset)
        self.createControlPanel()

        self.prepareMesh()
        self.parent_model.sample_changed.connect(self.prepareMesh)
        self.parent_model.measurement_points_changed.connect(self.updateCrossSection)
        self.initializing = True

    def showEvent(self, event):
        """Fit the cross-section into the view the first time the dialog is shown."""
        if self.initializing:
            self.view.fitInView(self.view.anchor, QtCore.Qt.KeepAspectRatio)
            self.initializing = False
        super().showEvent(event)

    def closeEvent(self, event):
        """Remove the cross-section plane from the 3D scene when the dialog closes."""
        self.parent.scenes.removePlane()
        event.accept()

    def prepareMesh(self):
        """Merge all sample meshes into a single mesh used for the cross-section."""
        self.mesh = None
        samples = self.parent_model.sample
        for _, sample in samples.items():
            if self.mesh is None:
                self.mesh = sample.copy()
            else:
                self.mesh.append(sample)

        self.scene.clear()
        self.tabs.setEnabled(self.mesh is not None)
        if self.mesh is not None:
            self.setPlane(self.plane_combobox.currentText())
        else:
            self.parent.scenes.removePlane()
            self.view.reset()

    def updateStatusBar(self, point):
        """Show the 3D world coordinates of the cursor position in the status bar.

        :param point: cursor position in view coordinates
        :type point: QtCore.QPoint
        """
        if self.view.rect().contains(point):
            transform = self.view.scene_transform.inverted()[0]
            scene_pt = transform.map(self.view.mapToScene(point)) / self.sample_scale
            # distance is negated because the plane normal was inverted when building the matrix
            world_pt = [scene_pt.x(), scene_pt.y(), -self.old_distance] @ self.matrix.transpose()
            cursor_text = f'X: {world_pt[0]:.3f} Y: {world_pt[1]:.3f} Z: {world_pt[2]:.3f}'
            self.parent.cursor_label.setText(cursor_text)
        else:
            self.parent.cursor_label.clear()

    def createGraphicsView(self):
        """Create the 2D graphics scene and view for the cross-section."""
        self.scene = GraphicsScene(self.sample_scale, self)
        self.view = GraphicsView(self.scene)
        self.view.mouse_moved.connect(self.updateStatusBar)
        self.view.setMinimumHeight(350)
        self.splitter.addWidget(self.view)

    def createControlPanel(self):
        """Create the tabbed control panel below the cross-section view."""
        self.tabs = QtWidgets.QTabWidget()
        self.tabs.setMinimumHeight(250)
        self.tabs.setTabPosition(QtWidgets.QTabWidget.South)
        self.splitter.addWidget(self.tabs)

        self.createPlaneTab()
        self.createSelectionToolsTab()
        self.createGridOptionsTab()
        point_manager = PointManager(PointType.Measurement, self.parent)
        self.tabs.addTab(create_scroll_area(point_manager), 'Point Manager')

    def createPlaneTab(self):
        """Create the tab for choosing and positioning the cross-section plane."""
        layout = QtWidgets.QVBoxLayout()
        layout.addWidget(QtWidgets.QLabel('Specify Plane:'))
        self.plane_combobox = QtWidgets.QComboBox()
        self.plane_combobox.setView(QtWidgets.QListView())
        self.plane_combobox.addItems([p.value for p in PlaneOptions])
        self.plane_combobox.currentTextChanged.connect(self.setPlane)
        self.createCustomPlaneBox()
        layout.addWidget(self.plane_combobox)
        layout.addWidget(self.custom_plane_widget)
        layout.addSpacing(20)

        slider_layout = QtWidgets.QHBoxLayout()
        slider_layout.addWidget(QtWidgets.QLabel('Plane Distance from Origin (mm):'))
        self.plane_lineedit = QtWidgets.QLineEdit()
        validator = QtGui.QDoubleValidator(self.plane_lineedit)
        validator.setNotation(QtGui.QDoubleValidator.StandardNotation)
        validator.setDecimals(3)
        self.plane_lineedit.setValidator(validator)
        self.plane_lineedit.textEdited.connect(self.updateSlider)
        self.plane_lineedit.editingFinished.connect(self.movePlane)
        slider_layout.addStretch(1)
        slider_layout.addWidget(self.plane_lineedit)
        layout.addLayout(slider_layout)

        self.plane_slider = QtWidgets.QSlider(QtCore.Qt.Horizontal)
        self.plane_slider.setMinimum(self.slider_range[0])
        self.plane_slider.setMaximum(self.slider_range[1])
        self.plane_slider.setFocusPolicy(QtCore.Qt.StrongFocus)
        self.plane_slider.setSingleStep(1)
        self.plane_slider.sliderMoved.connect(self.updateLineEdit)
        self.plane_slider.sliderReleased.connect(self.movePlane)
        layout.addWidget(self.plane_slider)
        layout.addStretch(1)

        plane_tab = QtWidgets.QWidget()
        plane_tab.setLayout(layout)
        self.tabs.addTab(create_scroll_area(plane_tab), 'Define Plane')

    def createSelectionToolsTab(self):
        """Create the tab with the point selection and drawing tools."""
        layout = QtWidgets.QVBoxLayout()
        selector_layout = QtWidgets.QHBoxLayout()
        selector_layout.addWidget(QtWidgets.QLabel('Select Geometry of Points: '))
        self.button_group = QtWidgets.QButtonGroup()
        self.button_group.buttonClicked[int].connect(self.changeSceneMode)

        self.object_selector = create_tool_button(checkable=True, checked=True, tooltip='Select Points',
                                                  status_tip='Select movable points from the cross-section view',
                                                  style_name='MidToolButton', icon_path=path_for('select.png'))
        self.point_selector = create_tool_button(checkable=True, tooltip='Draw a Point',
                                                 status_tip='Draw a single point at the selected position',
                                                 style_name='MidToolButton', icon_path=path_for('point.png'))
        self.line_selector = create_tool_button(checkable=True, tooltip='Draw Points on Line',
                                                status_tip='Draw equally spaced points on the selected line',
                                                style_name='MidToolButton', icon_path=path_for('line_tool.png'))
        self.area_selector = create_tool_button(checkable=True, tooltip='Draw Points on Area',
                                                status_tip='Draw a grid of points on the selected area',
                                                style_name='MidToolButton', icon_path=path_for('area_tool.png'))

        # button ids map directly onto GraphicsScene.Mode values (see changeSceneMode)
        self.button_group.addButton(self.object_selector, GraphicsScene.Mode.Select.value)
        self.button_group.addButton(self.point_selector, GraphicsScene.Mode.Draw_point.value)
        self.button_group.addButton(self.line_selector, GraphicsScene.Mode.Draw_line.value)
        self.button_group.addButton(self.area_selector, GraphicsScene.Mode.Draw_area.value)
        selector_layout.addWidget(self.object_selector)
        selector_layout.addWidget(self.point_selector)
        selector_layout.addWidget(self.line_selector)
        selector_layout.addWidget(self.area_selector)
        selector_layout.addStretch(1)

        self.createLineToolWidget()
        self.createAreaToolWidget()

        layout.addLayout(selector_layout)
        layout.addWidget(self.line_tool_widget)
        layout.addWidget(self.area_tool_widget)
        layout.addStretch(1)

        select_tab = QtWidgets.QWidget()
        select_tab.setLayout(layout)
        self.tabs.addTab(create_scroll_area(select_tab), 'Selection Tools')

    def createGridOptionsTab(self):
        """Create the tab with the grid display and snapping options."""
        layout = QtWidgets.QVBoxLayout()
        self.show_grid_checkbox = QtWidgets.QCheckBox('Show Grid')
        self.show_grid_checkbox.stateChanged.connect(self.showGrid)
        self.snap_to_grid_checkbox = QtWidgets.QCheckBox('Snap Selection to Grid')
        self.snap_to_grid_checkbox.stateChanged.connect(self.snapToGrid)
        # snapping only makes sense when the grid is visible
        self.snap_to_grid_checkbox.setEnabled(self.view.show_grid)
        layout.addWidget(self.show_grid_checkbox)
        layout.addWidget(self.snap_to_grid_checkbox)
        self.createGridWidget()
        layout.addWidget(self.grid_widget)
        layout.addStretch(1)

        grid_tab = QtWidgets.QWidget()
        grid_tab.setLayout(layout)
        self.tabs.addTab(create_scroll_area(grid_tab), 'Grid Options')

    def createCustomPlaneBox(self):
        """Create the widget for keying in a custom plane normal."""
        self.custom_plane_widget = QtWidgets.QWidget(self)
        layout = QtWidgets.QVBoxLayout()

        self.form_group = FormGroup(FormGroup.Layout.Horizontal)
        self.x_axis = FormControl('X', 1.0, required=True, number=True)
        self.x_axis.range(-1.0, 1.0)
        self.y_axis = FormControl('Y', 0.0, required=True, number=True)
        self.y_axis.range(-1.0, 1.0)
        self.z_axis = FormControl('Z', 0.0, required=True, number=True)
        self.z_axis.range(-1.0, 1.0)
        self.form_group.addControl(self.x_axis)
        self.form_group.addControl(self.y_axis)
        self.form_group.addControl(self.z_axis)
        self.form_group.groupValidation.connect(self.setCustomPlane)
        layout.addWidget(self.form_group)
        self.custom_plane_widget.setLayout(layout)

    def createLineToolWidget(self):
        """Create the options widget for the line drawing tool."""
        self.line_tool_widget = QtWidgets.QWidget(self)
        layout = QtWidgets.QHBoxLayout()
        layout.setContentsMargins(0, 20, 0, 0)
        layout.addWidget(QtWidgets.QLabel('Number of Points: '))
        self.line_point_count_spinbox = QtWidgets.QSpinBox()
        self.line_point_count_spinbox.setValue(self.scene.line_tool_size)
        self.line_point_count_spinbox.setRange(2, 100)
        self.line_point_count_spinbox.valueChanged.connect(self.scene.setLineToolSize)

        layout.addWidget(self.line_point_count_spinbox)
        self.line_tool_widget.setVisible(False)
        self.line_tool_widget.setLayout(layout)

    def createAreaToolWidget(self):
        """Create the options widget for the area drawing tool."""
        self.area_tool_widget = QtWidgets.QWidget(self)
        layout = QtWidgets.QHBoxLayout()
        layout.setContentsMargins(0, 20, 0, 0)
        layout.addWidget(QtWidgets.QLabel('Number of Points: '))
        self.area_x_spinbox = QtWidgets.QSpinBox()
        self.area_x_spinbox.setValue(self.scene.area_tool_size[0])
        self.area_x_spinbox.setRange(2, 100)
        self.area_y_spinbox = QtWidgets.QSpinBox()
        self.area_y_spinbox.setValue(self.scene.area_tool_size[1])
        self.area_y_spinbox.setRange(2, 100)

        stretch_factor = 3
        layout.addStretch(1)
        layout.addWidget(QtWidgets.QLabel('X: '))
        self.area_x_spinbox.valueChanged.connect(lambda: self.scene.setAreaToolSize(self.area_x_spinbox.value(),
                                                                                    self.area_y_spinbox.value()))
        layout.addWidget(self.area_x_spinbox, stretch_factor)
        layout.addStretch(1)
        layout.addWidget(QtWidgets.QLabel('Y: '))
        self.area_y_spinbox.valueChanged.connect(lambda: self.scene.setAreaToolSize(self.area_x_spinbox.value(),
                                                                                    self.area_y_spinbox.value()))
        layout.addWidget(self.area_y_spinbox, stretch_factor)
        self.area_tool_widget.setVisible(False)
        self.area_tool_widget.setLayout(layout)

    def createGridWidget(self):
        """Create the widget for configuring the grid type and size."""
        self.grid_widget = QtWidgets.QWidget(self)
        main_layout = QtWidgets.QVBoxLayout()
        main_layout.setContentsMargins(0, 20, 0, 0)
        layout = QtWidgets.QHBoxLayout()
        layout.addWidget(QtWidgets.QLabel('Grid Type: '))
        grid_combobox = QtWidgets.QComboBox()
        grid_combobox.setView(QtWidgets.QListView())
        grid_combobox.addItems([g.value for g in Grid.Type])
        grid_combobox.currentTextChanged.connect(lambda value: self.setGridType(Grid.Type(value)))
        layout.addWidget(grid_combobox)
        main_layout.addLayout(layout)
        main_layout.addSpacing(20)

        layout = QtWidgets.QHBoxLayout()
        layout.addWidget(QtWidgets.QLabel('Grid Size: '))
        self.grid_x_label = QtWidgets.QLabel('')
        self.grid_x_spinbox = QtWidgets.QDoubleSpinBox()
        self.grid_x_spinbox.setDecimals(1)
        self.grid_x_spinbox.setSingleStep(0.1)
        self.grid_x_spinbox.valueChanged.connect(self.changeGridSize)
        self.grid_y_label = QtWidgets.QLabel('')
        self.grid_y_spinbox = QtWidgets.QDoubleSpinBox()
        self.grid_y_spinbox.setDecimals(1)
        self.grid_y_spinbox.setSingleStep(0.1)
        self.grid_y_spinbox.valueChanged.connect(self.changeGridSize)
        stretch_factor = 3
        layout.addStretch(1)
        layout.addWidget(self.grid_x_label)
        layout.addWidget(self.grid_x_spinbox, stretch_factor)
        layout.addStretch(1)
        layout.addWidget(self.grid_y_label)
        layout.addWidget(self.grid_y_spinbox, stretch_factor)
        main_layout.addLayout(layout)
        self.setGridType(self.view.grid.type)

        self.grid_widget.setVisible(False)
        self.grid_widget.setLayout(main_layout)

    def changeGridSize(self):
        """Push the spinbox values into the view's grid.

        X is always scaled to scene units; Y is scaled for a box grid but kept as
        degrees for a polar grid.
        """
        grid_x = int(self.grid_x_spinbox.value() * self.sample_scale)
        if self.view.grid.type == Grid.Type.Box:
            grid_y = int(self.grid_y_spinbox.value() * self.sample_scale)
        else:
            grid_y = self.grid_y_spinbox.value()
        self.view.setGridSize((grid_x, grid_y))

    def setGridType(self, grid_type):
        """Change the grid type and update the size controls' labels and ranges.

        :param grid_type: type of grid
        :type grid_type: Grid.Type
        """
        self.view.setGridType(grid_type)
        size = self.view.grid.size
        if grid_type == Grid.Type.Box:
            self.grid_x_label.setText('X (mm): ')
            self.grid_y_label.setText('Y (mm): ')
            self.grid_x_spinbox.setValue(size[0])
            self.grid_y_spinbox.setValue(size[1])
            self.grid_x_spinbox.setRange(0.1, 1000)
            self.grid_y_spinbox.setRange(0.1, 1000)
        else:
            self.grid_x_label.setText('Radius (mm): ')
            self.grid_y_label.setText('Angle (degree): ')
            self.grid_x_spinbox.setValue(size[0])
            self.grid_y_spinbox.setValue(size[1])
            self.grid_x_spinbox.setRange(0.1, 1000)
            self.grid_y_spinbox.setRange(0.1, 360)

    def changeSceneMode(self, button_id):
        """Change the scene's interaction mode and show the matching tool options.

        :param button_id: button id, equal to a GraphicsScene.Mode value
        :type button_id: int
        """
        self.scene.mode = GraphicsScene.Mode(button_id)
        self.line_tool_widget.setVisible(self.scene.mode == GraphicsScene.Mode.Draw_line)
        self.area_tool_widget.setVisible(self.scene.mode == GraphicsScene.Mode.Draw_area)

    def showHelp(self):
        """Toggle the shortcut help overlay in the cross-section view."""
        self.view.show_help = not self.view.has_foreground
        self.scene.update()

    def showGrid(self, state):
        """Toggle grid visibility; snapping and size options follow visibility.

        :param state: checked state of the 'Show Grid' checkbox
        :type state: QtCore.Qt.CheckState
        """
        self.view.show_grid = state == QtCore.Qt.Checked
        self.snap_to_grid_checkbox.setEnabled(self.view.show_grid)
        self.grid_widget.setVisible(self.view.show_grid)
        self.scene.update()

    def snapToGrid(self, state):
        """Toggle snapping of selection to the grid.

        :param state: checked state of the 'Snap Selection to Grid' checkbox
        :type state: QtCore.Qt.CheckState
        """
        self.view.snap_to_grid = state == QtCore.Qt.Checked

    def updateSlider(self, value):
        """Move the slider (and the plane) to match the typed distance.

        :param value: text from the distance line edit
        :type value: str
        """
        if not self.plane_lineedit.hasAcceptableInput():
            return

        new_distance = clamp(float(value), *self.plane_offset_range)
        slider_value = int(map_range(*self.plane_offset_range, *self.slider_range, new_distance))
        self.plane_slider.setValue(slider_value)

        offset = new_distance - self.old_distance
        self.parent.scenes.movePlane(offset * self.plane.normal)
        self.old_distance = new_distance

    def updateLineEdit(self, value):
        """Update the distance line edit (and the plane) to match the slider.

        :param value: slider position
        :type value: int
        """
        new_distance = trunc(map_range(*self.slider_range, *self.plane_offset_range, value), 3)
        self.plane_lineedit.setText('{:.3f}'.format(new_distance))

        offset = new_distance - self.old_distance
        self.parent.scenes.movePlane(offset * self.plane.normal)
        self.old_distance = new_distance

    def movePlane(self):
        """Commit the typed distance: rebuild the plane and redraw the cross-section."""
        distance = clamp(float(self.plane_lineedit.text()), *self.plane_offset_range)
        self.plane_lineedit.setText('{:.3f}'.format(distance))
        point = distance * self.plane.normal
        self.plane = Plane(self.plane.normal, point)
        self.updateCrossSection()

    def setCustomPlane(self, is_valid):
        """Set the cross-section plane from the keyed-in normal.

        :param is_valid: indicates if the custom plane form group is valid
        :type is_valid: bool
        """
        if is_valid:
            normal = np.array([self.x_axis.value, self.y_axis.value, self.z_axis.value])
            try:
                self.initializePlane(normal, self.mesh.bounding_box.center)
            except ValueError:
                # Plane construction rejects a zero-length normal
                self.x_axis.validation_label.setText('Bad Normal')

    def setPlane(self, selected_text):
        """Set the cross-section plane from the combobox selection.

        :param selected_text: value of a PlaneOptions member
        :type selected_text: str
        """
        if selected_text == PlaneOptions.Custom.value:
            self.custom_plane_widget.setVisible(True)
            self.form_group.validateGroup()
            return
        else:
            self.custom_plane_widget.setVisible(False)

        if selected_text == PlaneOptions.XY.value:
            plane_normal = np.array([0., 0., 1.])
        elif selected_text == PlaneOptions.XZ.value:
            plane_normal = np.array([0., 1., 0.])
        else:
            plane_normal = np.array([1., 0., 0.])

        self.initializePlane(plane_normal, self.mesh.bounding_box.center)

    def initializePlane(self, plane_normal, plane_point):
        """Create the cross-section plane, draw it in the 3D scene and reset the 2D view.

        :param plane_normal: normal of the plane
        :type plane_normal: numpy.ndarray
        :param plane_point: a point on the plane
        :type plane_point: numpy.ndarray
        """
        self.plane = Plane(plane_normal, plane_point)
        plane_size = self.mesh.bounding_box.radius

        self.parent.scenes.drawPlane(self.plane, 2 * plane_size, 2 * plane_size)
        distance = self.plane.distanceFromOrigin()
        self.plane_offset_range = (distance - plane_size, distance + plane_size)
        slider_value = int(map_range(*self.plane_offset_range, *self.slider_range, distance))
        self.plane_slider.setValue(slider_value)
        self.plane_lineedit.setText('{:.3f}'.format(distance))
        self.old_distance = distance
        # inverted the normal so that the y-axis is flipped
        self.matrix = self.__lookAt(-Vector3(self.plane.normal))
        self.view.resetTransform()
        self.updateCrossSection()

    def updateCrossSection(self):
        """Redraw the cross-section path and the measurement points that lie on the plane."""
        self.scene.clear()
        segments = mesh_plane_intersection(self.mesh, self.plane)
        if len(segments) == 0:
            return
        segments = np.array(segments)

        item = QtWidgets.QGraphicsPathItem()
        cross_section_path = QtGui.QPainterPath()
        rotated_segments = self.sample_scale * (segments @ self.matrix)
        # segments are pairs of endpoints, hence the stride of 2
        for i in range(0, rotated_segments.shape[0], 2):
            start = rotated_segments[i, :]
            cross_section_path.moveTo(start[0], start[1])
            end = rotated_segments[i + 1, :]
            cross_section_path.lineTo(end[0], end[1])
        item.setPath(cross_section_path)
        item.setPen(self.path_pen)
        item.setTransform(self.view.scene_transform)
        self.scene.addItem(item)
        rect = item.boundingRect()
        anchor = rect.center()

        # pick measurement points whose distance to the plane is (almost) zero
        ab = self.plane.point - self.parent_model.measurement_points.points
        d = np.einsum('ij,ij->i', np.expand_dims(self.plane.normal, axis=0), ab)
        index = np.where(np.abs(d) < VECTOR_EPS)[0]
        rotated_points = self.parent_model.measurement_points.points[index, :]
        rotated_points = rotated_points @ self.matrix

        for i, p in zip(index, rotated_points):
            point = QtCore.QPointF(p[0], p[1]) * self.sample_scale
            point = self.view.scene_transform.map(point)
            item = GraphicsPointItem(point, size=self.scene.point_size)
            item.setToolTip(f'Point {i + 1}')
            item.fixed = True
            item.makeControllable(self.scene.mode == GraphicsScene.Mode.Select)
            item.setPen(self.point_pen)
            self.scene.addItem(item)
            rect = rect.united(item.boundingRect().translated(point))

        # calculate new rectangle that encloses original rect with a different anchor.
        # FIX: QRectF.united returns a new rectangle instead of modifying in place;
        # the original code discarded the result so the re-anchored rect was never used.
        rect = rect.united(rect.translated(anchor - rect.center()))
        self.view.setSceneRect(rect)
        self.view.fitInView(rect, QtCore.Qt.KeepAspectRatio)
        self.view.anchor = rect

    @staticmethod
    def __lookAt(forward):
        """Build a rotation matrix whose third column is the given forward vector.

        :param forward: forward direction
        :type forward: Vector3
        :return: rotation matrix
        :rtype: Matrix33
        """
        rot_matrix = Matrix33.identity()
        # choose an up vector that is not parallel to forward
        up = Vector3([0., -1., 0.]) if -VECTOR_EPS < forward[1] < VECTOR_EPS else Vector3([0., 0., 1.])
        left = up ^ forward
        left.normalize()
        up = forward ^ left

        rot_matrix.c1[:3] = left
        rot_matrix.c2[:3] = up
        rot_matrix.c3[:3] = forward

        return rot_matrix

    def addPoints(self):
        """Convert the drawn 2D points to 3D world coordinates and add them to the project."""
        if len(self.scene.items()) < 2:
            return

        points_2d = []
        transform = self.view.scene_transform.inverted()[0]
        for item in self.scene.items():
            if isinstance(item, GraphicsPointItem) and not item.fixed:
                pos = transform.map(item.pos()) / self.sample_scale
                # negate distance due to inverted normal when creating matrix
                points_2d.append([pos.x(), pos.y(), -self.old_distance])
                self.scene.removeItem(item)

        if not points_2d:
            return

        # scene items are returned newest-first, so reverse to preserve drawing order
        points = points_2d[::-1] @ self.matrix.transpose()
        enabled = [True] * points.shape[0]
        self.parent.presenter.addPoints(list(zip(points, enabled)), PointType.Measurement, False)
class AlignSample(QtWidgets.QWidget):
    """Provides UI for aligning sample on instrument with 6D pose

    :param parent: Main window
    :type parent: MainWindow
    """
    dock_flag = DockFlag.Upper

    def __init__(self, parent):
        super().__init__(parent)
        self.parent = parent
        self.parent.scenes.switchToInstrumentScene()
        self.title = 'Align Sample with 6D pose'
        self.setMinimumWidth(450)

        self.main_layout = QtWidgets.QVBoxLayout()
        self.setLayout(self.main_layout)
        self.main_layout.addSpacing(20)
        self.main_layout.addWidget(FormTitle('Create Transformation for Alignment'))
        self.main_layout.addSpacing(10)

        self.main_layout.addWidget(QtWidgets.QLabel('Translation along the X, Y, and Z axis (mm):'))
        self.position_form_group = FormGroup(FormGroup.Layout.Horizontal)
        self.x_position = FormControl('X', 0.0, required=True, number=True)
        self.y_position = FormControl('Y', 0.0, required=True, number=True)
        self.z_position = FormControl('Z', 0.0, required=True, number=True)
        for control in (self.x_position, self.y_position, self.z_position):
            self.position_form_group.addControl(control)
        self.position_form_group.groupValidation.connect(self.formValidation)
        self.main_layout.addWidget(self.position_form_group)

        self.main_layout.addWidget(QtWidgets.QLabel('Rotation around the X, Y, and Z axis (degrees):'))
        self.orientation_form_group = FormGroup(FormGroup.Layout.Horizontal)
        self.x_rotation = FormControl('X', 0.0, required=True, number=True)
        self.y_rotation = FormControl('Y', 0.0, required=True, number=True)
        self.z_rotation = FormControl('Z', 0.0, required=True, number=True)
        # rotation angles are restricted to a single full turn in either direction
        for control in (self.x_rotation, self.y_rotation, self.z_rotation):
            control.range(-360.0, 360.0)
            self.orientation_form_group.addControl(control)
        self.orientation_form_group.groupValidation.connect(self.formValidation)
        self.main_layout.addWidget(self.orientation_form_group)

        button_layout = QtWidgets.QHBoxLayout()
        self.execute_button = QtWidgets.QPushButton('Align Sample')
        self.execute_button.clicked.connect(self.executeButtonClicked)
        button_layout.addWidget(self.execute_button)
        button_layout.addStretch(1)
        self.main_layout.addLayout(button_layout)
        self.main_layout.addStretch(1)

    def formValidation(self):
        """Enable the execute button only when both form groups are valid."""
        both_valid = self.position_form_group.valid and self.orientation_form_group.valid
        self.execute_button.setEnabled(both_valid)

    def executeButtonClicked(self):
        """Align the sample using the keyed-in pose (XYZ translation then ZYX rotation)."""
        pose = [self.x_position.value, self.y_position.value, self.z_position.value,
                self.z_rotation.value, self.y_rotation.value, self.x_rotation.value]

        self.parent.presenter.alignSampleWithPose(pose)
| 44.966851 | 115 | 0.672589 | import numpy as np
from PyQt5 import QtCore, QtGui, QtWidgets
from sscanss.config import path_for, settings
from sscanss.core.math import Plane, Matrix33, Vector3, clamp, map_range, trunc, VECTOR_EPS
from sscanss.core.geometry import mesh_plane_intersection
from sscanss.core.util import Primitives, DockFlag, StrainComponents, PointType, PlaneOptions, Attributes
from sscanss.ui.widgets import (FormGroup, FormControl, GraphicsView, GraphicsScene, create_tool_button, FormTitle,
create_scroll_area, CompareValidator, GraphicsPointItem, Grid, create_icon)
from .managers import PointManager
class InsertPrimitiveDialog(QtWidgets.QWidget):
    """Provides UI for creating a primitive sample mesh (tube, sphere, cylinder or cuboid).

    :param primitive: primitive type to create
    :type primitive: Primitives
    :param parent: Main window
    :type parent: MainWindow
    """
    dock_flag = DockFlag.Upper

    def __init__(self, primitive, parent):
        super().__init__(parent)
        self.parent = parent
        self.parent_model = self.parent.presenter.model
        self.parent.scenes.switchToSampleScene()
        self.primitive = primitive

        self.main_layout = QtWidgets.QVBoxLayout()
        self.textboxes = {}
        name = self.parent_model.uniqueKey(self.primitive.value)
        # default dimensions (mm) per primitive type; cuboid is the fallback
        defaults = {Primitives.Tube: {'outer_radius': 100.000, 'inner_radius': 50.000, 'height': 200.000},
                    Primitives.Sphere: {'radius': 100.000},
                    Primitives.Cylinder: {'radius': 100.000, 'height': 200.000}}
        self.mesh_args = {'name': name}
        self.mesh_args.update(defaults.get(self.primitive,
                                           {'width': 50.000, 'height': 100.000, 'depth': 200.000}))

        self.createPrimitiveSwitcher()
        self.createFormInputs()

        button_layout = QtWidgets.QHBoxLayout()
        self.create_primitive_button = QtWidgets.QPushButton('Create')
        self.create_primitive_button.clicked.connect(self.createPrimiviteButtonClicked)
        button_layout.addWidget(self.create_primitive_button)
        button_layout.addStretch(1)
        self.main_layout.addLayout(button_layout)
        self.main_layout.addStretch(1)
        self.setLayout(self.main_layout)

        self.title = 'Insert {}'.format(self.primitive.value)
        self.setMinimumWidth(450)
        self.textboxes['name'].setFocus()

    def createPrimitiveSwitcher(self):
        """Add a drop-down button that switches to the dialog for another primitive."""
        switcher_layout = QtWidgets.QHBoxLayout()
        switcher = create_tool_button(style_name='MenuButton', status_tip='Open dialog for a different primitive')
        switcher.setArrowType(QtCore.Qt.DownArrow)
        switcher.setPopupMode(QtWidgets.QToolButton.InstantPopup)
        switcher.setMenu(self.parent.primitives_menu)
        switcher_layout.addStretch(1)
        switcher_layout.addWidget(switcher)
        self.main_layout.addLayout(switcher_layout)

    def createFormInputs(self):
        """Build a validated form control for each mesh argument."""
        self.form_group = FormGroup()
        for key, value in self.mesh_args.items():
            label = key.replace('_', ' ').title()
            if key == 'name':
                control = FormControl(label, value, required=True)
                control.form_lineedit.textChanged.connect(self.nameCheck)
            else:
                control = FormControl(label, value, desc='mm', required=True, number=True)
                control.range(0, None, min_exclusive=True)
            self.textboxes[key] = control
            self.form_group.addControl(control)

        if self.primitive == Primitives.Tube:
            # a tube only makes sense when the outer radius exceeds the inner one
            outer_radius = self.textboxes['outer_radius']
            inner_radius = self.textboxes['inner_radius']
            outer_radius.compareWith(inner_radius, CompareValidator.Operator.Greater)
            inner_radius.compareWith(outer_radius, CompareValidator.Operator.Less)

        self.main_layout.addWidget(self.form_group)
        self.form_group.groupValidation.connect(self.formValidation)

    def nameCheck(self, value):
        """Reject the reserved sample key as a mesh name.

        :param value: current text of the name control
        :type value: str
        """
        if self.parent_model.all_sample_key == value:
            self.textboxes['name'].isInvalid(f'"{self.parent_model.all_sample_key}" is a reserved name')

    def formValidation(self, is_valid):
        """Toggle the create button to match the form's validity.

        :param is_valid: indicates if the form group is valid
        :type is_valid: bool
        """
        if is_valid:
            self.create_primitive_button.setEnabled(True)
        else:
            self.create_primitive_button.setDisabled(True)

    def createPrimiviteButtonClicked(self):
        """Create the primitive mesh and prime the dialog for another insertion."""
        for key, textbox in self.textboxes.items():
            self.mesh_args[key] = textbox.value

        self.parent.presenter.addPrimitive(self.primitive, self.mesh_args)
        # suggest a fresh unique name for the next primitive
        self.textboxes['name'].value = self.parent_model.uniqueKey(self.primitive.value)
class InsertPointDialog(QtWidgets.QWidget):
    """Provides UI for keying in a single fiducial or measurement point.

    :param point_type: type of point to add
    :type point_type: PointType
    :param parent: Main window
    :type parent: MainWindow
    """
    dock_flag = DockFlag.Upper

    def __init__(self, point_type, parent):
        super().__init__(parent)
        self.parent = parent
        self.parent_model = parent.presenter.model
        self.parent.scenes.switchToSampleScene()
        self.point_type = point_type
        self.title = 'Add {} Point'.format(point_type.value)

        self.main_layout = QtWidgets.QVBoxLayout()
        unit = 'mm'
        self.form_group = FormGroup()
        self.x_axis = FormControl('X', 0.0, required=True, desc=unit, number=True)
        self.y_axis = FormControl('Y', 0.0, required=True, desc=unit, number=True)
        self.z_axis = FormControl('Z', 0.0, required=True, desc=unit, number=True)
        for control in (self.x_axis, self.y_axis, self.z_axis):
            self.form_group.addControl(control)
        self.form_group.groupValidation.connect(self.formValidation)

        button_layout = QtWidgets.QHBoxLayout()
        self.execute_button = QtWidgets.QPushButton(self.title)
        self.execute_button.clicked.connect(self.executeButtonClicked)
        button_layout.addWidget(self.execute_button)
        button_layout.addStretch(1)

        self.main_layout.addWidget(self.form_group)
        self.main_layout.addLayout(button_layout)
        self.main_layout.addStretch(1)
        self.setLayout(self.main_layout)
        self.setMinimumWidth(450)

    def formValidation(self, is_valid):
        """Toggle the execute button to match the form's validity.

        :param is_valid: indicates if the form group is valid
        :type is_valid: bool
        """
        self.execute_button.setEnabled(is_valid)

    def executeButtonClicked(self):
        """Ask the presenter to add the keyed-in point (enabled by default)."""
        point = [self.x_axis.value, self.y_axis.value, self.z_axis.value]
        self.parent.presenter.addPoints([(point, True)], self.point_type)
class InsertVectorDialog(QtWidgets.QWidget):
dock_flag = DockFlag.Upper
def __init__(self, parent):
super().__init__(parent)
self.parent = parent
self.parent_model = parent.presenter.model
self.parent.scenes.switchToSampleScene()
self.title = 'Add Measurement Vectors'
self.main_layout = QtWidgets.QVBoxLayout()
spacing = 10
self.main_layout.addSpacing(spacing)
self.main_layout.addWidget(QtWidgets.QLabel('Measurement Point:'))
self.points_combobox = QtWidgets.QComboBox()
self.points_combobox.setView(QtWidgets.QListView())
self.main_layout.addWidget(self.points_combobox)
self.updatePointList()
self.main_layout.addSpacing(spacing)
layout = QtWidgets.QHBoxLayout()
alignment_layout = QtWidgets.QVBoxLayout()
alignment_layout.addWidget(QtWidgets.QLabel('Alignment:'))
self.alignment_combobox = QtWidgets.QComboBox()
self.alignment_combobox.setView(QtWidgets.QListView())
self.alignment_combobox.setInsertPolicy(QtWidgets.QComboBox.InsertAtCurrent)
self.updateAlignment()
self.alignment_combobox.activated.connect(self.addNewAlignment)
self.alignment_combobox.currentIndexChanged.connect(self.changeRenderedAlignment)
alignment_layout.addWidget(self.alignment_combobox)
alignment_layout.addSpacing(spacing)
layout.addLayout(alignment_layout)
self.detector_combobox = QtWidgets.QComboBox()
self.detector_combobox.setView(QtWidgets.QListView())
self.detector_combobox.addItems(list(self.parent_model.instrument.detectors.keys()))
if len(self.parent_model.instrument.detectors) > 1:
detector_layout = QtWidgets.QVBoxLayout()
detector_layout.addWidget(QtWidgets.QLabel('Detector:'))
detector_layout.addWidget(self.detector_combobox)
size = self.detector_combobox.iconSize()
self.detector_combobox.setItemIcon(0, create_icon(settings.value(settings.Key.Vector_1_Colour), size))
self.detector_combobox.setItemIcon(1, create_icon(settings.value(settings.Key.Vector_2_Colour), size))
detector_layout.addSpacing(spacing)
layout.addSpacing(spacing)
layout.addLayout(detector_layout)
self.main_layout.addLayout(layout)
self.main_layout.addWidget(QtWidgets.QLabel('Strain Component:'))
self.component_combobox = QtWidgets.QComboBox()
self.component_combobox.setView(QtWidgets.QListView())
strain_components = [s.value for s in StrainComponents]
self.component_combobox.addItems(strain_components)
self.component_combobox.currentTextChanged.connect(self.toggleKeyInBox)
self.main_layout.addWidget(self.component_combobox)
self.main_layout.addSpacing(spacing)
button_layout = QtWidgets.QHBoxLayout()
self.execute_button = QtWidgets.QPushButton(self.title)
self.execute_button.clicked.connect(self.executeButtonClicked)
button_layout.addWidget(self.execute_button)
button_layout.addStretch(1)
self.createKeyInBox()
self.reverse_checkbox = QtWidgets.QCheckBox('Reverse Direction of Vector')
self.main_layout.addWidget(self.reverse_checkbox)
self.main_layout.addSpacing(spacing)
self.main_layout.addLayout(button_layout)
self.main_layout.addStretch(1)
self.setLayout(self.main_layout)
self.parent_model.measurement_points_changed.connect(self.updatePointList)
self.parent_model.measurement_vectors_changed.connect(self.updateAlignment)
self.parent.scenes.rendered_alignment_changed.connect(self.alignment_combobox.setCurrentIndex)
self.setMinimumWidth(450)
def updatePointList(self):
self.points_combobox.clear()
point_list = ['All Points']
point_list.extend(['{}'.format(i+1) for i in range(self.parent_model.measurement_points.size)])
self.points_combobox.addItems(point_list)
def updateAlignment(self):
align_count = self.parent_model.measurement_vectors.shape[2]
if align_count != self.alignment_combobox.count() - 1:
self.alignment_combobox.clear()
alignment_list = ['{}'.format(i + 1) for i in range(align_count)]
alignment_list.append('Add New...')
self.alignment_combobox.addItems(alignment_list)
self.alignment_combobox.setCurrentIndex(self.parent.scenes.rendered_alignment)
def addNewAlignment(self, index):
if index == self.alignment_combobox.count() - 1:
self.alignment_combobox.insertItem(index, '{}'.format(index + 1))
self.alignment_combobox.setCurrentIndex(index)
def changeRenderedAlignment(self, index):
align_count = self.parent_model.measurement_vectors.shape[2]
if 0 <= index < align_count:
self.parent.scenes.changeRenderedAlignment(index)
elif index >= align_count:
self.parent.scenes.changeVisibility(Attributes.Vectors, False)
def toggleKeyInBox(self, selected_text):
strain_component = StrainComponents(selected_text)
if strain_component == StrainComponents.custom:
self.key_in_box.setVisible(True)
self.form_group.validateGroup()
else:
self.key_in_box.setVisible(False)
self.execute_button.setEnabled(True)
def createKeyInBox(self):
self.key_in_box = QtWidgets.QWidget(self)
layout = QtWidgets.QVBoxLayout()
self.form_group = FormGroup(FormGroup.Layout.Horizontal)
self.x_axis = FormControl('X', 1.0, required=True, number=True, decimals=7)
self.x_axis.range(-1.0, 1.0)
self.y_axis = FormControl('Y', 0.0, required=True, number=True, decimals=7)
self.y_axis.range(-1.0, 1.0)
self.z_axis = FormControl('Z', 0.0, required=True, number=True, decimals=7)
self.z_axis.range(-1.0, 1.0)
self.form_group.addControl(self.x_axis)
self.form_group.addControl(self.y_axis)
self.form_group.addControl(self.z_axis)
self.form_group.groupValidation.connect(self.formValidation)
layout.addWidget(self.form_group)
self.key_in_box.setLayout(layout)
self.main_layout.addWidget(self.key_in_box)
self.toggleKeyInBox(self.component_combobox.currentText())
def formValidation(self, is_valid):
    """Enables the execute button only for a valid, non-degenerate custom
    vector; flags near-zero vectors as a bad normal."""
    self.execute_button.setDisabled(True)
    if not is_valid:
        return

    components = [self.x_axis.value, self.y_axis.value, self.z_axis.value]
    if np.linalg.norm(components) > VECTOR_EPS:
        self.x_axis.validation_label.setText('')
        self.execute_button.setEnabled(True)
    else:
        self.x_axis.validation_label.setText('Bad Normal')
def executeButtonClicked(self):
    """Collects the dialog inputs and asks the presenter to add measurement
    vectors, then renders the chosen alignment."""
    points = self.points_combobox.currentIndex() - 1
    strain_component = StrainComponents(self.component_combobox.currentText())
    alignment = self.alignment_combobox.currentIndex()
    detector = self.detector_combobox.currentIndex()
    reverse = self.reverse_checkbox.checkState() == QtCore.Qt.Checked

    if strain_component == StrainComponents.custom:
        vector = [self.x_axis.value, self.y_axis.value, self.z_axis.value]
    else:
        vector = None

    self.parent.presenter.addVectors(points, strain_component, alignment, detector,
                                     key_in=vector, reverse=reverse)
    # NOTE(review): writes a private attribute of the scene manager — presumably
    # to avoid re-triggering a render; confirm a public setter is not intended.
    self.parent.scenes._rendered_alignment = alignment
def closeEvent(self, event):
    """Resets the rendered alignment to the first one when the dialog closes."""
    self.parent.scenes.changeRenderedAlignment(0)
    event.accept()
class PickPointDialog(QtWidgets.QWidget):
    """Dock widget for graphically adding measurement points on a 2D
    cross-section of the sample cut by a user-defined plane.

    :param parent: main window instance
    """
    dock_flag = DockFlag.Full

    def __init__(self, parent):
        super().__init__(parent)
        self.parent = parent
        self.parent_model = parent.presenter.model
        self.parent.scenes.switchToSampleScene()
        self.title = 'Add Measurement Points Graphically'
        self.setMinimumWidth(500)

        # Offset range is recomputed from the mesh bounds in initializePlane.
        self.plane_offset_range = (-1., 1.)
        self.slider_range = (-10000000, 10000000)
        # Scale factor between world (mm) and 2D scene coordinates.
        self.sample_scale = 20
        self.path_pen = QtGui.QPen(QtGui.QColor(255, 0, 0), 0)
        self.point_pen = QtGui.QPen(QtGui.QColor(200, 0, 0), 0)

        self.main_layout = QtWidgets.QVBoxLayout()
        self.setLayout(self.main_layout)
        button_layout = QtWidgets.QHBoxLayout()
        self.help_button = create_tool_button(tooltip='Help', style_name='ToolButton',
                                              status_tip='Display shortcuts for the cross-section view',
                                              icon_path=path_for('question.png'))
        self.help_button.clicked.connect(self.showHelp)
        self.reset_button = create_tool_button(tooltip='Reset View', style_name='ToolButton',
                                               status_tip='Reset camera transformation of the cross-section view',
                                               icon_path=path_for('refresh.png'))
        self.execute_button = QtWidgets.QPushButton('Add Points')
        self.execute_button.clicked.connect(self.addPoints)
        button_layout.addWidget(self.help_button)
        button_layout.addWidget(self.reset_button)
        button_layout.addStretch(1)
        button_layout.addWidget(self.execute_button)
        self.main_layout.addLayout(button_layout)

        self.splitter = QtWidgets.QSplitter(QtCore.Qt.Vertical)
        self.splitter.setChildrenCollapsible(False)
        self.main_layout.addWidget(self.splitter)
        self.createGraphicsView()
        self.reset_button.clicked.connect(self.view.reset)
        self.createControlPanel()

        self.prepareMesh()
        # Keep the cross-section in sync with model changes.
        self.parent_model.sample_changed.connect(self.prepareMesh)
        self.parent_model.measurement_points_changed.connect(self.updateCrossSection)
        self.initializing = True

    def showEvent(self, event):
        """Fits the cross-section into view the first time the widget is shown."""
        if self.initializing:
            self.view.fitInView(self.view.anchor, QtCore.Qt.KeepAspectRatio)
            self.initializing = False
        super().showEvent(event)

    def closeEvent(self, event):
        """Removes the cutting plane from the 3D scene when the dialog closes."""
        self.parent.scenes.removePlane()
        event.accept()

    def prepareMesh(self):
        """Merges all sample meshes into a single mesh used for cross-sectioning."""
        self.mesh = None
        samples = self.parent_model.sample
        for _, sample in samples.items():
            if self.mesh is None:
                self.mesh = sample.copy()
            else:
                self.mesh.append(sample)

        self.scene.clear()
        self.tabs.setEnabled(self.mesh is not None)
        if self.mesh is not None:
            self.setPlane(self.plane_combobox.currentText())
        else:
            self.parent.scenes.removePlane()
            self.view.reset()

    def updateStatusBar(self, point):
        """Shows the 3D world coordinates under the cursor in the status bar."""
        if self.view.rect().contains(point):
            transform = self.view.scene_transform.inverted()[0]
            scene_pt = transform.map(self.view.mapToScene(point)) / self.sample_scale
            # Rotate the in-plane point back into world coordinates.
            world_pt = [scene_pt.x(), scene_pt.y(), -self.old_distance] @ self.matrix.transpose()
            cursor_text = f'X: {world_pt[0]:.3f} Y: {world_pt[1]:.3f} Z: {world_pt[2]:.3f}'
            self.parent.cursor_label.setText(cursor_text)
        else:
            self.parent.cursor_label.clear()

    def createGraphicsView(self):
        """Creates the 2D graphics scene and view for the cross-section."""
        self.scene = GraphicsScene(self.sample_scale, self)
        self.view = GraphicsView(self.scene)
        self.view.mouse_moved.connect(self.updateStatusBar)
        self.view.setMinimumHeight(350)
        self.splitter.addWidget(self.view)

    def createControlPanel(self):
        """Creates the tabbed control panel below the cross-section view."""
        self.tabs = QtWidgets.QTabWidget()
        self.tabs.setMinimumHeight(250)
        self.tabs.setTabPosition(QtWidgets.QTabWidget.South)
        self.splitter.addWidget(self.tabs)

        self.createPlaneTab()
        self.createSelectionToolsTab()
        self.createGridOptionsTab()
        point_manager = PointManager(PointType.Measurement, self.parent)
        self.tabs.addTab(create_scroll_area(point_manager), 'Point Manager')

    def createPlaneTab(self):
        """Creates the tab for selecting the cutting plane and its offset."""
        layout = QtWidgets.QVBoxLayout()
        layout.addWidget(QtWidgets.QLabel('Specify Plane:'))
        self.plane_combobox = QtWidgets.QComboBox()
        self.plane_combobox.setView(QtWidgets.QListView())
        self.plane_combobox.addItems([p.value for p in PlaneOptions])
        self.plane_combobox.currentTextChanged.connect(self.setPlane)
        self.createCustomPlaneBox()
        layout.addWidget(self.plane_combobox)
        layout.addWidget(self.custom_plane_widget)
        layout.addSpacing(20)

        slider_layout = QtWidgets.QHBoxLayout()
        slider_layout.addWidget(QtWidgets.QLabel('Plane Distance from Origin (mm):'))
        self.plane_lineedit = QtWidgets.QLineEdit()
        validator = QtGui.QDoubleValidator(self.plane_lineedit)
        validator.setNotation(QtGui.QDoubleValidator.StandardNotation)
        validator.setDecimals(3)
        self.plane_lineedit.setValidator(validator)
        self.plane_lineedit.textEdited.connect(self.updateSlider)
        self.plane_lineedit.editingFinished.connect(self.movePlane)
        slider_layout.addStretch(1)
        slider_layout.addWidget(self.plane_lineedit)
        layout.addLayout(slider_layout)

        # The slider works on a large integer range which is mapped to the
        # plane offset range so fine movement is possible.
        self.plane_slider = QtWidgets.QSlider(QtCore.Qt.Horizontal)
        self.plane_slider.setMinimum(self.slider_range[0])
        self.plane_slider.setMaximum(self.slider_range[1])
        self.plane_slider.setFocusPolicy(QtCore.Qt.StrongFocus)
        self.plane_slider.setSingleStep(1)
        self.plane_slider.sliderMoved.connect(self.updateLineEdit)
        self.plane_slider.sliderReleased.connect(self.movePlane)
        layout.addWidget(self.plane_slider)
        layout.addStretch(1)

        plane_tab = QtWidgets.QWidget()
        plane_tab.setLayout(layout)
        self.tabs.addTab(create_scroll_area(plane_tab), 'Define Plane')

    def createSelectionToolsTab(self):
        """Creates the tab with the point/line/area drawing tools."""
        layout = QtWidgets.QVBoxLayout()
        selector_layout = QtWidgets.QHBoxLayout()
        selector_layout.addWidget(QtWidgets.QLabel('Select Geometry of Points: '))
        self.button_group = QtWidgets.QButtonGroup()
        self.button_group.buttonClicked[int].connect(self.changeSceneMode)

        self.object_selector = create_tool_button(checkable=True, checked=True, tooltip='Select Points',
                                                  status_tip='Select movable points from the cross-section view',
                                                  style_name='MidToolButton', icon_path=path_for('select.png'))
        self.point_selector = create_tool_button(checkable=True, tooltip='Draw a Point',
                                                 status_tip='Draw a single point at the selected position',
                                                 style_name='MidToolButton', icon_path=path_for('point.png'))
        self.line_selector = create_tool_button(checkable=True, tooltip='Draw Points on Line',
                                                status_tip='Draw equally spaced points on the selected line',
                                                style_name='MidToolButton', icon_path=path_for('line_tool.png'))
        self.area_selector = create_tool_button(checkable=True, tooltip='Draw Points on Area',
                                                status_tip='Draw a grid of points on the selected area',
                                                style_name='MidToolButton', icon_path=path_for('area_tool.png'))

        # Button ids map directly onto the scene interaction modes.
        self.button_group.addButton(self.object_selector, GraphicsScene.Mode.Select.value)
        self.button_group.addButton(self.point_selector, GraphicsScene.Mode.Draw_point.value)
        self.button_group.addButton(self.line_selector, GraphicsScene.Mode.Draw_line.value)
        self.button_group.addButton(self.area_selector, GraphicsScene.Mode.Draw_area.value)
        selector_layout.addWidget(self.object_selector)
        selector_layout.addWidget(self.point_selector)
        selector_layout.addWidget(self.line_selector)
        selector_layout.addWidget(self.area_selector)
        selector_layout.addStretch(1)

        self.createLineToolWidget()
        self.createAreaToolWidget()

        layout.addLayout(selector_layout)
        layout.addWidget(self.line_tool_widget)
        layout.addWidget(self.area_tool_widget)
        layout.addStretch(1)

        select_tab = QtWidgets.QWidget()
        select_tab.setLayout(layout)
        self.tabs.addTab(create_scroll_area(select_tab), 'Selection Tools')

    def createGridOptionsTab(self):
        """Creates the tab with grid visibility, snapping and size options."""
        layout = QtWidgets.QVBoxLayout()
        self.show_grid_checkbox = QtWidgets.QCheckBox('Show Grid')
        self.show_grid_checkbox.stateChanged.connect(self.showGrid)
        self.snap_to_grid_checkbox = QtWidgets.QCheckBox('Snap Selection to Grid')
        self.snap_to_grid_checkbox.stateChanged.connect(self.snapToGrid)
        # Snapping only makes sense while the grid is shown.
        self.snap_to_grid_checkbox.setEnabled(self.view.show_grid)
        layout.addWidget(self.show_grid_checkbox)
        layout.addWidget(self.snap_to_grid_checkbox)
        self.createGridWidget()
        layout.addWidget(self.grid_widget)
        layout.addStretch(1)

        grid_tab = QtWidgets.QWidget()
        grid_tab.setLayout(layout)
        self.tabs.addTab(create_scroll_area(grid_tab), 'Grid Options')

    def createCustomPlaneBox(self):
        """Creates the widget for keying in a custom plane normal (X, Y, Z)."""
        self.custom_plane_widget = QtWidgets.QWidget(self)
        layout = QtWidgets.QVBoxLayout()

        self.form_group = FormGroup(FormGroup.Layout.Horizontal)
        self.x_axis = FormControl('X', 1.0, required=True, number=True)
        self.x_axis.range(-1.0, 1.0)
        self.y_axis = FormControl('Y', 0.0, required=True, number=True)
        self.y_axis.range(-1.0, 1.0)
        self.z_axis = FormControl('Z', 0.0, required=True, number=True)
        self.z_axis.range(-1.0, 1.0)
        self.form_group.addControl(self.x_axis)
        self.form_group.addControl(self.y_axis)
        self.form_group.addControl(self.z_axis)
        self.form_group.groupValidation.connect(self.setCustomPlane)

        layout.addWidget(self.form_group)
        self.custom_plane_widget.setLayout(layout)

    def createLineToolWidget(self):
        """Creates the option widget for the line tool (point count)."""
        self.line_tool_widget = QtWidgets.QWidget(self)
        layout = QtWidgets.QHBoxLayout()
        layout.setContentsMargins(0, 20, 0, 0)
        layout.addWidget(QtWidgets.QLabel('Number of Points: '))
        self.line_point_count_spinbox = QtWidgets.QSpinBox()
        self.line_point_count_spinbox.setValue(self.scene.line_tool_size)
        self.line_point_count_spinbox.setRange(2, 100)
        self.line_point_count_spinbox.valueChanged.connect(self.scene.setLineToolSize)

        layout.addWidget(self.line_point_count_spinbox)
        self.line_tool_widget.setVisible(False)
        self.line_tool_widget.setLayout(layout)

    def createAreaToolWidget(self):
        """Creates the option widget for the area tool (X and Y point counts)."""
        self.area_tool_widget = QtWidgets.QWidget(self)
        layout = QtWidgets.QHBoxLayout()
        layout.setContentsMargins(0, 20, 0, 0)
        layout.addWidget(QtWidgets.QLabel('Number of Points: '))
        self.area_x_spinbox = QtWidgets.QSpinBox()
        self.area_x_spinbox.setValue(self.scene.area_tool_size[0])
        self.area_x_spinbox.setRange(2, 100)
        self.area_y_spinbox = QtWidgets.QSpinBox()
        self.area_y_spinbox.setValue(self.scene.area_tool_size[1])
        self.area_y_spinbox.setRange(2, 100)

        stretch_factor = 3
        layout.addStretch(1)
        layout.addWidget(QtWidgets.QLabel('X: '))
        self.area_x_spinbox.valueChanged.connect(lambda: self.scene.setAreaToolSize(self.area_x_spinbox.value(),
                                                                                    self.area_y_spinbox.value()))
        layout.addWidget(self.area_x_spinbox, stretch_factor)
        layout.addStretch(1)
        layout.addWidget(QtWidgets.QLabel('Y: '))
        self.area_y_spinbox.valueChanged.connect(lambda: self.scene.setAreaToolSize(self.area_x_spinbox.value(),
                                                                                    self.area_y_spinbox.value()))
        layout.addWidget(self.area_y_spinbox, stretch_factor)
        self.area_tool_widget.setVisible(False)
        self.area_tool_widget.setLayout(layout)

    def createGridWidget(self):
        """Creates the widget for selecting grid type and size."""
        self.grid_widget = QtWidgets.QWidget(self)
        main_layout = QtWidgets.QVBoxLayout()
        main_layout.setContentsMargins(0, 20, 0, 0)
        layout = QtWidgets.QHBoxLayout()
        layout.addWidget(QtWidgets.QLabel('Grid Type: '))
        grid_combobox = QtWidgets.QComboBox()
        grid_combobox.setView(QtWidgets.QListView())
        grid_combobox.addItems([g.value for g in Grid.Type])
        grid_combobox.currentTextChanged.connect(lambda value: self.setGridType(Grid.Type(value)))
        layout.addWidget(grid_combobox)
        main_layout.addLayout(layout)
        main_layout.addSpacing(20)

        layout = QtWidgets.QHBoxLayout()
        layout.addWidget(QtWidgets.QLabel('Grid Size: '))
        self.grid_x_label = QtWidgets.QLabel('')
        self.grid_x_spinbox = QtWidgets.QDoubleSpinBox()
        self.grid_x_spinbox.setDecimals(1)
        self.grid_x_spinbox.setSingleStep(0.1)
        self.grid_x_spinbox.valueChanged.connect(self.changeGridSize)
        self.grid_y_label = QtWidgets.QLabel('')
        self.grid_y_spinbox = QtWidgets.QDoubleSpinBox()
        self.grid_y_spinbox.setDecimals(1)
        self.grid_y_spinbox.setSingleStep(0.1)
        self.grid_y_spinbox.valueChanged.connect(self.changeGridSize)

        stretch_factor = 3
        layout.addStretch(1)
        layout.addWidget(self.grid_x_label)
        layout.addWidget(self.grid_x_spinbox, stretch_factor)
        layout.addStretch(1)
        layout.addWidget(self.grid_y_label)
        layout.addWidget(self.grid_y_spinbox, stretch_factor)
        main_layout.addLayout(layout)
        self.setGridType(self.view.grid.type)
        self.grid_widget.setVisible(False)
        self.grid_widget.setLayout(main_layout)

    def changeGridSize(self):
        """Applies the spinbox values to the view's grid (scaled to scene units).

        For a polar grid only the radius is scaled; the angle stays in degrees.
        """
        if self.view.grid.type == Grid.Type.Box:
            grid_x = int(self.grid_x_spinbox.value() * self.sample_scale)
            grid_y = int(self.grid_y_spinbox.value() * self.sample_scale)
        else:
            grid_x = int(self.grid_x_spinbox.value() * self.sample_scale)
            grid_y = self.grid_y_spinbox.value()
        self.view.setGridSize((grid_x, grid_y))

    def setGridType(self, grid_type):
        """Switches the grid type and updates size labels and valid ranges."""
        self.view.setGridType(grid_type)
        size = self.view.grid.size
        if grid_type == Grid.Type.Box:
            self.grid_x_label.setText('X (mm): ')
            self.grid_y_label.setText('Y (mm): ')
            self.grid_x_spinbox.setValue(size[0])
            self.grid_y_spinbox.setValue(size[1])
            self.grid_x_spinbox.setRange(0.1, 1000)
            self.grid_y_spinbox.setRange(0.1, 1000)
        else:
            self.grid_x_label.setText('Radius (mm): ')
            self.grid_y_label.setText('Angle (degree): ')
            self.grid_x_spinbox.setValue(size[0])
            self.grid_y_spinbox.setValue(size[1])
            self.grid_x_spinbox.setRange(0.1, 1000)
            self.grid_y_spinbox.setRange(0.1, 360)

    def changeSceneMode(self, button_id):
        """Sets the scene interaction mode and shows the matching tool options."""
        self.scene.mode = GraphicsScene.Mode(button_id)
        self.line_tool_widget.setVisible(self.scene.mode == GraphicsScene.Mode.Draw_line)
        self.area_tool_widget.setVisible(self.scene.mode == GraphicsScene.Mode.Draw_area)

    def showHelp(self):
        """Toggles the shortcut help overlay in the cross-section view."""
        self.view.show_help = False if self.view.has_foreground else True
        self.scene.update()

    def showGrid(self, state):
        """Shows/hides the grid and the dependent snapping and size options."""
        self.view.show_grid = True if state == QtCore.Qt.Checked else False
        self.snap_to_grid_checkbox.setEnabled(self.view.show_grid)
        self.grid_widget.setVisible(self.view.show_grid)
        self.scene.update()

    def snapToGrid(self, state):
        """Enables/disables snapping of selections to the grid."""
        self.view.snap_to_grid = True if state == QtCore.Qt.Checked else False

    def updateSlider(self, value):
        """Moves the plane and syncs the slider as the line edit is typed into."""
        if not self.plane_lineedit.hasAcceptableInput():
            return

        new_distance = clamp(float(value), *self.plane_offset_range)
        slider_value = int(map_range(*self.plane_offset_range, *self.slider_range, new_distance))
        self.plane_slider.setValue(slider_value)

        offset = new_distance - self.old_distance
        self.parent.scenes.movePlane(offset * self.plane.normal)
        self.old_distance = new_distance

    def updateLineEdit(self, value):
        """Moves the plane and syncs the line edit as the slider is dragged."""
        new_distance = trunc(map_range(*self.slider_range, *self.plane_offset_range, value), 3)
        self.plane_lineedit.setText('{:.3f}'.format(new_distance))

        offset = new_distance - self.old_distance
        self.parent.scenes.movePlane(offset * self.plane.normal)
        self.old_distance = new_distance

    def movePlane(self):
        """Commits the plane offset from the line edit and redraws the cross-section."""
        distance = clamp(float(self.plane_lineedit.text()), *self.plane_offset_range)
        self.plane_lineedit.setText('{:.3f}'.format(distance))
        point = distance * self.plane.normal
        self.plane = Plane(self.plane.normal, point)
        self.updateCrossSection()

    def setCustomPlane(self, is_valid):
        """Initializes the plane from the keyed-in normal, if it is valid."""
        if is_valid:
            normal = np.array([self.x_axis.value, self.y_axis.value, self.z_axis.value])
            try:
                self.initializePlane(normal, self.mesh.bounding_box.center)
            except ValueError:
                # A zero (or near-zero) vector cannot be normalized into a plane normal.
                self.x_axis.validation_label.setText('Bad Normal')

    def setPlane(self, selected_text):
        """Sets the cutting plane from the plane combobox selection."""
        if selected_text == PlaneOptions.Custom.value:
            self.custom_plane_widget.setVisible(True)
            self.form_group.validateGroup()
            return
        else:
            self.custom_plane_widget.setVisible(False)

        if selected_text == PlaneOptions.XY.value:
            plane_normal = np.array([0., 0., 1.])
        elif selected_text == PlaneOptions.XZ.value:
            plane_normal = np.array([0., 1., 0.])
        else:
            plane_normal = np.array([1., 0., 0.])

        self.initializePlane(plane_normal, self.mesh.bounding_box.center)

    def initializePlane(self, plane_normal, plane_point):
        """Creates the cutting plane, draws it in the 3D scene, and derives the
        offset range and 2D rotation matrix from the mesh bounds."""
        self.plane = Plane(plane_normal, plane_point)
        plane_size = self.mesh.bounding_box.radius

        self.parent.scenes.drawPlane(self.plane, 2 * plane_size, 2 * plane_size)
        distance = self.plane.distanceFromOrigin()
        self.plane_offset_range = (distance - plane_size, distance + plane_size)
        slider_value = int(map_range(*self.plane_offset_range, *self.slider_range, distance))
        self.plane_slider.setValue(slider_value)
        self.plane_lineedit.setText('{:.3f}'.format(distance))
        self.old_distance = distance
        # Rotation matrix that maps world coordinates into the plane's 2D frame.
        self.matrix = self.__lookAt(-Vector3(self.plane.normal))
        self.view.resetTransform()
        self.updateCrossSection()

    def updateCrossSection(self):
        """Redraws the cross-section outline and the measurement points that
        lie on the cutting plane, then refits the view."""
        self.scene.clear()
        segments = mesh_plane_intersection(self.mesh, self.plane)
        if len(segments) == 0:
            return
        segments = np.array(segments)

        item = QtWidgets.QGraphicsPathItem()
        cross_section_path = QtGui.QPainterPath()
        # Segments come in start/end pairs; draw each as a separate sub-path.
        rotated_segments = self.sample_scale * (segments @ self.matrix)
        for i in range(0, rotated_segments.shape[0], 2):
            start = rotated_segments[i, :]
            cross_section_path.moveTo(start[0], start[1])
            end = rotated_segments[i + 1, :]
            cross_section_path.lineTo(end[0], end[1])
        item.setPath(cross_section_path)
        item.setPen(self.path_pen)
        item.setTransform(self.view.scene_transform)
        self.scene.addItem(item)
        rect = item.boundingRect()
        anchor = rect.center()

        # Select measurement points whose distance to the plane is ~zero.
        ab = self.plane.point - self.parent_model.measurement_points.points
        d = np.einsum('ij,ij->i', np.expand_dims(self.plane.normal, axis=0), ab)
        index = np.where(np.abs(d) < VECTOR_EPS)[0]
        rotated_points = self.parent_model.measurement_points.points[index, :]
        rotated_points = rotated_points @ self.matrix

        for i, p in zip(index, rotated_points):
            point = QtCore.QPointF(p[0], p[1]) * self.sample_scale
            point = self.view.scene_transform.map(point)
            item = GraphicsPointItem(point, size=self.scene.point_size)
            item.setToolTip(f'Point {i + 1}')
            item.fixed = True
            item.makeControllable(self.scene.mode == GraphicsScene.Mode.Select)
            item.setPen(self.point_pen)
            self.scene.addItem(item)
            rect = rect.united(item.boundingRect().translated(point))

        # Fix: QRectF.united() returns a new rectangle without modifying the
        # receiver, so the original statement discarded its result (a no-op).
        # Assign it so the rect grows symmetrically about the original anchor
        # and the fitted view stays centred on the cross-section.
        rect = rect.united(rect.translated(anchor - rect.center()))
        self.view.setSceneRect(rect)
        self.view.fitInView(rect, QtCore.Qt.KeepAspectRatio)
        self.view.anchor = rect

    @staticmethod
    def __lookAt(forward):
        """Builds a rotation matrix whose third column is the given forward
        vector, choosing an up vector that avoids degeneracy."""
        rot_matrix = Matrix33.identity()
        # Use -Y as up when forward is (near) parallel to the Y axis.
        up = Vector3([0., -1., 0.]) if -VECTOR_EPS < forward[1] < VECTOR_EPS else Vector3([0., 0., 1.])
        left = up ^ forward
        left.normalize()
        up = forward ^ left

        rot_matrix.c1[:3] = left
        rot_matrix.c2[:3] = up
        rot_matrix.c3[:3] = forward

        return rot_matrix

    def addPoints(self):
        """Converts the drawn (non-fixed) 2D points back into 3D world
        coordinates and adds them as measurement points via the presenter."""
        if len(self.scene.items()) < 2:
            return

        points_2d = []
        transform = self.view.scene_transform.inverted()[0]
        for item in self.scene.items():
            if isinstance(item, GraphicsPointItem) and not item.fixed:
                pos = transform.map(item.pos()) / self.sample_scale
                points_2d.append([pos.x(), pos.y(), -self.old_distance])
                self.scene.removeItem(item)

        if not points_2d:
            return

        # Scene items are returned in reverse insertion order; flip to preserve
        # the order the user drew the points in.
        points = points_2d[::-1] @ self.matrix.transpose()
        enabled = [True] * points.shape[0]
        self.parent.presenter.addPoints(list(zip(points, enabled)), PointType.Measurement, False)
class AlignSample(QtWidgets.QWidget):
    """Dock widget for aligning the sample on the instrument with a 6D pose
    (XYZ translation in mm plus XYZ rotation in degrees).

    :param parent: main window instance
    """
    dock_flag = DockFlag.Upper

    def __init__(self, parent):
        super().__init__(parent)
        self.parent = parent
        self.parent.scenes.switchToInstrumentScene()
        self.title = 'Align Sample with 6D pose'
        self.setMinimumWidth(450)

        self.main_layout = QtWidgets.QVBoxLayout()
        self.setLayout(self.main_layout)
        self.main_layout.addSpacing(20)
        self.main_layout.addWidget(FormTitle('Create Transformation for Alignment'))
        self.main_layout.addSpacing(10)

        # Translation inputs (unbounded, validated as numbers).
        self.main_layout.addWidget(QtWidgets.QLabel('Translation along the X, Y, and Z axis (mm):'))
        self.position_form_group = FormGroup(FormGroup.Layout.Horizontal)
        self.x_position = FormControl('X', 0.0, required=True, number=True)
        self.y_position = FormControl('Y', 0.0, required=True, number=True)
        self.z_position = FormControl('Z', 0.0, required=True, number=True)
        self.position_form_group.addControl(self.x_position)
        self.position_form_group.addControl(self.y_position)
        self.position_form_group.addControl(self.z_position)
        self.position_form_group.groupValidation.connect(self.formValidation)
        self.main_layout.addWidget(self.position_form_group)

        # Rotation inputs, limited to a full turn in either direction.
        self.main_layout.addWidget(QtWidgets.QLabel('Rotation around the X, Y, and Z axis (degrees):'))
        self.orientation_form_group = FormGroup(FormGroup.Layout.Horizontal)
        self.x_rotation = FormControl('X', 0.0, required=True, number=True)
        self.x_rotation.range(-360.0, 360.0)
        self.y_rotation = FormControl('Y', 0.0, required=True, number=True)
        self.y_rotation.range(-360.0, 360.0)
        self.z_rotation = FormControl('Z', 0.0, required=True, number=True)
        self.z_rotation.range(-360.0, 360.0)
        self.orientation_form_group.addControl(self.x_rotation)
        self.orientation_form_group.addControl(self.y_rotation)
        self.orientation_form_group.addControl(self.z_rotation)
        self.orientation_form_group.groupValidation.connect(self.formValidation)
        self.main_layout.addWidget(self.orientation_form_group)

        button_layout = QtWidgets.QHBoxLayout()
        self.execute_button = QtWidgets.QPushButton('Align Sample')
        self.execute_button.clicked.connect(self.executeButtonClicked)
        button_layout.addWidget(self.execute_button)
        button_layout.addStretch(1)
        self.main_layout.addLayout(button_layout)
        self.main_layout.addStretch(1)

    def formValidation(self):
        """Enables the execute button only when both form groups are valid."""
        if self.position_form_group.valid and self.orientation_form_group.valid:
            self.execute_button.setEnabled(True)
        else:
            self.execute_button.setDisabled(True)

    def executeButtonClicked(self):
        """Builds the pose and asks the presenter to align the sample with it."""
        # Pose order is XYZ position followed by ZYX rotation angles.
        pose = [self.x_position.value, self.y_position.value, self.z_position.value,
                self.z_rotation.value, self.y_rotation.value, self.x_rotation.value]

        self.parent.presenter.alignSampleWithPose(pose)
| true | true |
f71a74d3749d44e5926e7af02f116135904cbcf5 | 3,574 | py | Python | src/vacuum/webserver.py | nesyamun/vacuum | e58c24e4ff9f88d674e75b17a96c705d88189422 | [
"MIT"
] | 2 | 2021-03-15T15:44:23.000Z | 2021-04-08T20:58:24.000Z | src/vacuum/webserver.py | nesyamun/vacuum | e58c24e4ff9f88d674e75b17a96c705d88189422 | [
"MIT"
] | null | null | null | src/vacuum/webserver.py | nesyamun/vacuum | e58c24e4ff9f88d674e75b17a96c705d88189422 | [
"MIT"
] | 3 | 2021-03-15T15:44:37.000Z | 2022-03-05T03:44:23.000Z | from asyncio import AbstractEventLoop, Task, get_event_loop
from dataclasses import asdict
from datetime import datetime
from functools import wraps
from typing import Callable, Optional, Tuple
from quart import Quart, request
from werkzeug.exceptions import HTTPException
from .config import config
from .logger import get_logger, set_quart_logger_formatter
from .postgres import POSTGRES_HEALTHCHECK_TASK_NAME, postgres_healthcheck
from .state import state
from .streamer import STREAMING_TASK_NAME, stream
logger = get_logger(__name__)
app = Quart(__name__)
set_quart_logger_formatter()
def response(func: Callable) -> Callable:
    """Decorator that wraps a handler's dict result into the standard 200 JSON
    envelope.

    The envelope merges the global streaming state with request metadata;
    keys returned by the handler override the defaults (and a falsy/None
    result defaults to ``{"success": True}``).
    """
    @wraps(func)
    async def inner(*args, **kwargs) -> dict:
        extra: Optional[dict] = await func(*args, **kwargs)
        if not extra:
            extra = {"success": True}
        return {
            **asdict(state),
            **{
                "server_time": datetime.now(),
                "path": request.path,
                "method": request.method,
                "status": "200 OK",
                "status_code": 200,
            },
            **extra,
        }

    return inner
def error(code: int, status: str) -> Callable:
    """Decorator factory for error handlers that return the standard JSON
    envelope together with the given HTTP status code and reason phrase.

    The handler's returned dict (if any) is merged over the defaults.
    """
    def wrapper(func: Callable) -> Callable:
        @wraps(func)
        async def inner(*args, **kwargs) -> Tuple[dict, int]:
            extra: Optional[dict] = await func(*args, **kwargs)
            if not extra:
                extra = {}
            return (
                {
                    **{
                        "server_time": datetime.now(),
                        "success": False,
                        "path": request.path,
                        "method": request.method,
                        "status": f"{code} {status}",
                        "status_code": code,
                    },
                    **extra,
                },
                code,
            )

        return inner

    return wrapper
@app.route("/healthz", methods=["GET"])
async def healthz() -> Tuple[str, int]:
    """Liveness probe endpoint; returns an empty 200 response."""
    return "", 200
@app.route("/status", methods=["GET"])
@response
async def status() -> None:
    """Returns the standard status envelope (state plus request metadata);
    the @response decorator supplies the whole body."""
    pass
@app.route("/start", methods=["POST"])
@response
async def start() -> dict:
    """Starts the streaming background task.

    No-op (but still success) when already streaming; fails when postgres is
    not available.
    """
    logger.info("starting")

    if state.streaming:
        return {"success": True, "message": "Currently streaming"}

    if not state.postgres:
        return {"success": False, "message": "Postgres not available"}

    loop: AbstractEventLoop = get_event_loop()
    loop.create_task(stream(), name=STREAMING_TASK_NAME)
    state.streaming = True
    return {"success": True, "message": "Started streaming"}
@app.route("/stop", methods=["POST"])
@response
async def stop() -> dict:
    """Stops the streaming background task.

    Finds the streaming task by name among the running loop's tasks and
    cancels it; no-op (but still success) when not streaming.
    """
    logger.info("stopping")

    if not state.streaming:
        return {"success": True, "message": "Not currently streaming"}

    # Fix: Task.all_tasks() was deprecated in Python 3.7 and removed in 3.9;
    # asyncio.all_tasks() is the supported replacement (valid here since this
    # coroutine always runs inside the event loop).
    from asyncio import all_tasks

    for task in all_tasks():
        if task.get_name() == STREAMING_TASK_NAME:
            task.cancel()
            break

    state.streaming = False
    return {"success": True, "message": "Stopped streaming"}
@app.errorhandler(404)
@error(404, "Not Found")
async def page_not_found(e: HTTPException) -> None:
    """404 handler; the @error decorator supplies the response body."""
    pass
@app.errorhandler(405)
@error(405, "Method Not Allowed")
async def method_not_allowed(e: HTTPException) -> None:
    """405 handler; the @error decorator supplies the response body."""
    pass
@app.before_serving
async def startup() -> None:
    """Launches the postgres healthcheck background task before the app
    starts serving requests."""
    loop: AbstractEventLoop = get_event_loop()
    loop.create_task(postgres_healthcheck(), name=POSTGRES_HEALTHCHECK_TASK_NAME)
def webserver() -> None:
    """Runs the Quart app on the configured host and port (blocking)."""
    app.run(host=config["webserver"]["host"], port=config["webserver"]["port"])
| 25.528571 | 81 | 0.592334 | from asyncio import AbstractEventLoop, Task, get_event_loop
from dataclasses import asdict
from datetime import datetime
from functools import wraps
from typing import Callable, Optional, Tuple
from quart import Quart, request
from werkzeug.exceptions import HTTPException
from .config import config
from .logger import get_logger, set_quart_logger_formatter
from .postgres import POSTGRES_HEALTHCHECK_TASK_NAME, postgres_healthcheck
from .state import state
from .streamer import STREAMING_TASK_NAME, stream
logger = get_logger(__name__)
app = Quart(__name__)
set_quart_logger_formatter()
def response(func: Callable) -> Callable:
@wraps(func)
async def inner(*args, **kwargs) -> dict:
extra: Optional[dict] = await func(*args, **kwargs)
if not extra:
extra = {"success": True}
return {
**asdict(state),
**{
"server_time": datetime.now(),
"path": request.path,
"method": request.method,
"status": "200 OK",
"status_code": 200,
},
**extra,
}
return inner
def error(code: int, status: str) -> Callable:
def wrapper(func: Callable) -> Callable:
@wraps(func)
async def inner(*args, **kwargs) -> Tuple[dict, int]:
extra: Optional[dict] = await func(*args, **kwargs)
if not extra:
extra = {}
return (
{
**{
"server_time": datetime.now(),
"success": False,
"path": request.path,
"method": request.method,
"status": f"{code} {status}",
"status_code": code,
},
**extra,
},
code,
)
return inner
return wrapper
@app.route("/healthz", methods=["GET"])
async def healthz() -> Tuple[str, int]:
return "", 200
@app.route("/status", methods=["GET"])
@response
async def status() -> None:
pass
@app.route("/start", methods=["POST"])
@response
async def start() -> dict:
logger.info("starting")
if state.streaming:
return {"success": True, "message": "Currently streaming"}
if not state.postgres:
return {"success": False, "message": "Postgres not available"}
loop: AbstractEventLoop = get_event_loop()
loop.create_task(stream(), name=STREAMING_TASK_NAME)
state.streaming = True
return {"success": True, "message": "Started streaming"}
@app.route("/stop", methods=["POST"])
@response
async def stop() -> dict:
logger.info("stopping")
if not state.streaming:
return {"success": True, "message": "Not currently streaming"}
for task in Task.all_tasks():
if task.get_name() == STREAMING_TASK_NAME:
task.cancel()
break
state.streaming = False
return {"success": True, "message": "Stopped streaming"}
@app.errorhandler(404)
@error(404, "Not Found")
async def page_not_found(e: HTTPException) -> None:
pass
@app.errorhandler(405)
@error(405, "Method Not Allowed")
async def method_not_allowed(e: HTTPException) -> None:
pass
@app.before_serving
async def startup() -> None:
loop: AbstractEventLoop = get_event_loop()
loop.create_task(postgres_healthcheck(), name=POSTGRES_HEALTHCHECK_TASK_NAME)
def webserver() -> None:
app.run(host=config["webserver"]["host"], port=config["webserver"]["port"])
| true | true |
f71a75c9c5f86132584053248cbb481ec3e2449c | 6,138 | py | Python | poetry/console/config/application_config.py | michielboekhoff/poetry | 92b1e61c45f13868ffab663fa3e9be2e26e8c368 | [
"MIT"
] | null | null | null | poetry/console/config/application_config.py | michielboekhoff/poetry | 92b1e61c45f13868ffab663fa3e9be2e26e8c368 | [
"MIT"
] | null | null | null | poetry/console/config/application_config.py | michielboekhoff/poetry | 92b1e61c45f13868ffab663fa3e9be2e26e8c368 | [
"MIT"
] | null | null | null | import logging
from cleo.config import ApplicationConfig as BaseApplicationConfig
from clikit.api.event import PRE_HANDLE
from clikit.api.event import PreHandleEvent
from clikit.api.formatter import Style
from clikit.api.io import Input
from clikit.api.io import InputStream
from clikit.api.io import Output
from clikit.api.io import OutputStream
from clikit.api.io.flags import DEBUG
from clikit.api.io.flags import VERBOSE
from clikit.api.io.flags import VERY_VERBOSE
from clikit.formatter import AnsiFormatter
from clikit.formatter import PlainFormatter
from clikit.io.input_stream import StandardInputStream
from clikit.io.output_stream import ErrorOutputStream
from clikit.io.output_stream import StandardOutputStream
from poetry.console.commands.command import Command
from poetry.console.commands.env_command import EnvCommand
from poetry.console.logging import IOFormatter
from poetry.console.logging import IOHandler
class ApplicationConfig(BaseApplicationConfig):
    """Console application configuration.

    Registers Poetry's output styles and hooks PRE_HANDLE listeners that wire
    command loggers and the virtualenv into each command before it runs.
    """

    def configure(self):
        super(ApplicationConfig, self).configure()
        # Output styles used across Poetry's console messages.
        self.add_style(Style("c1").fg("cyan"))
        self.add_style(Style("info").fg("blue"))
        self.add_style(Style("comment").fg("green"))
        self.add_style(Style("error").fg("red").bold())
        self.add_style(Style("warning").fg("yellow"))
        self.add_style(Style("debug").fg("black").bold())

        self.add_event_listener(PRE_HANDLE, self.register_command_loggers)
        self.add_event_listener(PRE_HANDLE, self.set_env)

    def register_command_loggers(
        self, event, event_name, _  # type: PreHandleEvent  # type: str
    ):  # type: (...) -> None
        """Routes each logger declared by the command to the command's IO,
        at a level matching the requested verbosity."""
        command = event.command.config.handler
        if not isinstance(command, Command):
            return

        io = event.io

        if not command.loggers:
            return

        handler = IOHandler(io)
        handler.setFormatter(IOFormatter())

        for logger in command.loggers:
            logger = logging.getLogger(logger)

            logger.handlers = [handler]
            # Prevent duplicate output through ancestor/root handlers.
            logger.propagate = False

            level = logging.WARNING
            if io.is_debug():
                level = logging.DEBUG
            elif io.is_very_verbose() or io.is_verbose():
                level = logging.INFO

            logger.setLevel(level)

    def set_env(self, event, event_name, _):  # type: (PreHandleEvent, str, _) -> None
        """Creates (or reuses) the project's virtualenv and attaches it to
        env-aware commands before they are handled."""
        # NOTE(review): parse_constraint is imported but never used here.
        from poetry.semver import parse_constraint
        from poetry.utils.env import EnvManager

        command = event.command.config.handler  # type: EnvCommand
        if not isinstance(command, EnvCommand):
            return

        io = event.io
        poetry = command.poetry

        env_manager = EnvManager(poetry)
        env = env_manager.create_venv(io)

        if env.is_venv() and io.is_verbose():
            io.write_line("Using virtualenv: <comment>{}</>".format(env.path))

        command.set_env(env)

    def resolve_help_command(
        self, event, event_name, dispatcher
    ):  # type: (PreResolveEvent, str, EventDispatcher) -> None
        """Redirects -h/--help invocations to the 'help' command, except for
        'run' where options belong to the executed command."""
        args = event.raw_args
        application = event.application

        if args.has_option_token("-h") or args.has_option_token("--help"):
            from clikit.api.resolver import ResolvedCommand

            resolved_command = self.command_resolver.resolve(args, application)
            # If the current command is the run one, skip option
            # check and interpret them as part of the executed command
            if resolved_command.command.name == "run":
                event.set_resolved_command(resolved_command)

                return event.stop_propagation()

            command = application.get_command("help")

            # Enable lenient parsing
            parsed_args = command.parse(args, True)

            event.set_resolved_command(ResolvedCommand(command, parsed_args))
            event.stop_propagation()

    def create_io(
        self,
        application,
        args,
        input_stream=None,
        output_stream=None,
        error_stream=None,
    ):  # type: (Application, RawArgs, InputStream, OutputStream, OutputStream) -> IO
        """Builds the application IO, choosing ANSI or plain formatting per
        stream and applying verbosity/interaction flags from the raw args."""
        if input_stream is None:
            input_stream = StandardInputStream()

        if output_stream is None:
            output_stream = StandardOutputStream()

        if error_stream is None:
            error_stream = ErrorOutputStream()

        style_set = application.config.style_set

        if output_stream.supports_ansi():
            output_formatter = AnsiFormatter(style_set)
        else:
            output_formatter = PlainFormatter(style_set)

        if error_stream.supports_ansi():
            error_formatter = AnsiFormatter(style_set)
        else:
            error_formatter = PlainFormatter(style_set)

        io = self.io_class(
            Input(input_stream),
            Output(output_stream, output_formatter),
            Output(error_stream, error_formatter),
        )

        resolved_command = application.resolve_command(args)
        # If the current command is the run one, skip option
        # check and interpret them as part of the executed command
        if resolved_command.command.name == "run":
            return io

        if args.has_option_token("--no-ansi"):
            formatter = PlainFormatter(style_set)
            io.output.set_formatter(formatter)
            io.error_output.set_formatter(formatter)
        elif args.has_option_token("--ansi"):
            formatter = AnsiFormatter(style_set, True)
            io.output.set_formatter(formatter)
            io.error_output.set_formatter(formatter)

        if args.has_option_token("-vvv") or self.is_debug():
            io.set_verbosity(DEBUG)
        elif args.has_option_token("-vv"):
            io.set_verbosity(VERY_VERBOSE)
        elif args.has_option_token("-v"):
            io.set_verbosity(VERBOSE)

        if args.has_option_token("--quiet") or args.has_option_token("-q"):
            io.set_quiet(True)

        if args.has_option_token("--no-interaction") or args.has_option_token("-n"):
            io.set_interactive(False)

        return io
| 34.677966 | 86 | 0.655914 | import logging
from cleo.config import ApplicationConfig as BaseApplicationConfig
from clikit.api.event import PRE_HANDLE
from clikit.api.event import PreHandleEvent
from clikit.api.formatter import Style
from clikit.api.io import Input
from clikit.api.io import InputStream
from clikit.api.io import Output
from clikit.api.io import OutputStream
from clikit.api.io.flags import DEBUG
from clikit.api.io.flags import VERBOSE
from clikit.api.io.flags import VERY_VERBOSE
from clikit.formatter import AnsiFormatter
from clikit.formatter import PlainFormatter
from clikit.io.input_stream import StandardInputStream
from clikit.io.output_stream import ErrorOutputStream
from clikit.io.output_stream import StandardOutputStream
from poetry.console.commands.command import Command
from poetry.console.commands.env_command import EnvCommand
from poetry.console.logging import IOFormatter
from poetry.console.logging import IOHandler
class ApplicationConfig(BaseApplicationConfig):
def configure(self):
super(ApplicationConfig, self).configure()
self.add_style(Style("c1").fg("cyan"))
self.add_style(Style("info").fg("blue"))
self.add_style(Style("comment").fg("green"))
self.add_style(Style("error").fg("red").bold())
self.add_style(Style("warning").fg("yellow"))
self.add_style(Style("debug").fg("black").bold())
self.add_event_listener(PRE_HANDLE, self.register_command_loggers)
self.add_event_listener(PRE_HANDLE, self.set_env)
def register_command_loggers(
self, event, event_name, _ command = event.command.config.handler
if not isinstance(command, Command):
return
io = event.io
if not command.loggers:
return
handler = IOHandler(io)
handler.setFormatter(IOFormatter())
for logger in command.loggers:
logger = logging.getLogger(logger)
logger.handlers = [handler]
logger.propagate = False
level = logging.WARNING
if io.is_debug():
level = logging.DEBUG
elif io.is_very_verbose() or io.is_verbose():
level = logging.INFO
logger.setLevel(level)
def set_env(self, event, event_name, _):
from poetry.semver import parse_constraint
from poetry.utils.env import EnvManager
command = event.command.config.handler
if not isinstance(command, EnvCommand):
return
io = event.io
poetry = command.poetry
env_manager = EnvManager(poetry)
env = env_manager.create_venv(io)
if env.is_venv() and io.is_verbose():
io.write_line("Using virtualenv: <comment>{}</>".format(env.path))
command.set_env(env)
def resolve_help_command(
self, event, event_name, dispatcher
):
args = event.raw_args
application = event.application
if args.has_option_token("-h") or args.has_option_token("--help"):
from clikit.api.resolver import ResolvedCommand
resolved_command = self.command_resolver.resolve(args, application)
if resolved_command.command.name == "run":
event.set_resolved_command(resolved_command)
return event.stop_propagation()
command = application.get_command("help")
parsed_args = command.parse(args, True)
event.set_resolved_command(ResolvedCommand(command, parsed_args))
event.stop_propagation()
def create_io(
self,
application,
args,
input_stream=None,
output_stream=None,
error_stream=None,
):
if input_stream is None:
input_stream = StandardInputStream()
if output_stream is None:
output_stream = StandardOutputStream()
if error_stream is None:
error_stream = ErrorOutputStream()
style_set = application.config.style_set
if output_stream.supports_ansi():
output_formatter = AnsiFormatter(style_set)
else:
output_formatter = PlainFormatter(style_set)
if error_stream.supports_ansi():
error_formatter = AnsiFormatter(style_set)
else:
error_formatter = PlainFormatter(style_set)
io = self.io_class(
Input(input_stream),
Output(output_stream, output_formatter),
Output(error_stream, error_formatter),
)
resolved_command = application.resolve_command(args)
if resolved_command.command.name == "run":
return io
if args.has_option_token("--no-ansi"):
formatter = PlainFormatter(style_set)
io.output.set_formatter(formatter)
io.error_output.set_formatter(formatter)
elif args.has_option_token("--ansi"):
formatter = AnsiFormatter(style_set, True)
io.output.set_formatter(formatter)
io.error_output.set_formatter(formatter)
if args.has_option_token("-vvv") or self.is_debug():
io.set_verbosity(DEBUG)
elif args.has_option_token("-vv"):
io.set_verbosity(VERY_VERBOSE)
elif args.has_option_token("-v"):
io.set_verbosity(VERBOSE)
if args.has_option_token("--quiet") or args.has_option_token("-q"):
io.set_quiet(True)
if args.has_option_token("--no-interaction") or args.has_option_token("-n"):
io.set_interactive(False)
return io
| true | true |
f71a76296b3a7b1e16734137964be646122469c5 | 8,766 | py | Python | userbot/__init__.py | PratikGoswamiPM/OpenUserBot | 1ba7845522a5d5619d2705421a303aa82ce35abb | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 1 | 2021-07-18T06:57:28.000Z | 2021-07-18T06:57:28.000Z | userbot/__init__.py | PratikGoswamiPM/OpenUserBot | 1ba7845522a5d5619d2705421a303aa82ce35abb | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | userbot/__init__.py | PratikGoswamiPM/OpenUserBot | 1ba7845522a5d5619d2705421a303aa82ce35abb | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | # Copyright (C) 2019 The Raphielscape Company LLC.
#
# Licensed under the Raphielscape Public License, Version 1.c (the "License");
# you may not use this file except in compliance with the License.
#
# thanks to penn5 for bug fixing
""" Userbot initialization. """
import os
from sys import version_info
from logging import basicConfig, getLogger, INFO, DEBUG
from distutils.util import strtobool as sb
from pymongo import MongoClient
from redis import StrictRedis
from pylast import LastFMNetwork, md5
from pySmartDL import SmartDL
from dotenv import load_dotenv
from requests import get
from telethon import TelegramClient
from telethon.sessions import StringSession
load_dotenv("config.env")
# Bot Logs setup:
CONSOLE_LOGGER_VERBOSE = sb(os.environ.get("CONSOLE_LOGGER_VERBOSE", "False"))
if CONSOLE_LOGGER_VERBOSE:
basicConfig(
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
level=DEBUG,
)
else:
basicConfig(format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
level=INFO)
LOGS = getLogger(__name__)
if version_info[0] < 3 or version_info[1] < 8:
LOGS.info("You MUST have a python version of at least 3.8."
"Multiple features depend on this. Bot quitting.")
quit(1)
# Check if the config was edited by using the already used variable.
# Basically, its the 'virginity check' for the config file ;)
CONFIG_CHECK = os.environ.get(
"___________PLOX_______REMOVE_____THIS_____LINE__________", None)
if CONFIG_CHECK:
LOGS.info(
"Please remove the line mentioned in the first hashtag from the config.env file"
)
quit(1)
# Telegram App KEY and HASH
API_KEY = os.environ.get("API_KEY", None)
API_HASH = os.environ.get("API_HASH", None)
# Photo Chat - Get this value from http://antiddos.systems
API_TOKEN = os.environ.get("API_TOKEN", None)
API_URL = os.environ.get("API_URL", "http://antiddos.systems")
# Userbot Session String
STRING_SESSION = os.environ.get("STRING_SESSION", None)
# Logging channel/group ID configuration.
BOTLOG_CHATID = int(os.environ.get("BOTLOG_CHATID", None))
# set to True if you want to log PMs to your PM_LOGGR_BOT_API_ID
NC_LOG_P_M_S = bool(os.environ.get("NC_LOG_P_M_S", False))
# send .get_id in any channel to forward all your NEW PMs to this group
PM_LOGGR_BOT_API_ID = int(os.environ.get("PM_LOGGR_BOT_API_ID", "-100"))
# Userbot logging feature switch.
BOTLOG = sb(os.environ.get("BOTLOG", "False"))
LOGSPAMMER = sb(os.environ.get("LOGSPAMMER", "False"))
# Bleep Blop, this is a bot ;)
PM_AUTO_BAN = sb(os.environ.get("PM_AUTO_BAN", "False"))
# Heroku Credentials for updater.
HEROKU_MEMEZ = sb(os.environ.get("HEROKU_MEMEZ", "False"))
HEROKU_APP_NAME = os.environ.get("HEROKU_APP_NAME", None)
HEROKU_API_KEY = os.environ.get("HEROKU_API_KEY", None)
# Github Credentials for updater and Gitupload.
GIT_REPO_NAME = os.environ.get("GIT_REPO_NAME", None)
GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", None)
# Custom (forked) repo URL for updater.
UPSTREAM_REPO_URL = os.environ.get(
"UPSTREAM_REPO_URL",
"https://github.com/mkaraniya/OpenUserBot.git")
# Console verbose logging
CONSOLE_LOGGER_VERBOSE = sb(os.environ.get("CONSOLE_LOGGER_VERBOSE", "False"))
# SQL Database URI
DB_URI = os.environ.get("DATABASE_URL", None)
# For MONGO based DataBase
MONGO_URI = os.environ.get("MONGO_URI", None)
# OCR API key
OCR_SPACE_API_KEY = os.environ.get("OCR_SPACE_API_KEY", None)
# remove.bg API key
REM_BG_API_KEY = os.environ.get("REM_BG_API_KEY", None)
# Chrome Driver and Headless Google Chrome Binaries
CHROME_DRIVER = os.environ.get("CHROME_DRIVER", None)
GOOGLE_CHROME_BIN = os.environ.get("GOOGLE_CHROME_BIN", None)
# OpenWeatherMap API Key
OPEN_WEATHER_MAP_APPID = os.environ.get("OPEN_WEATHER_MAP_APPID", None)
WEATHER_DEFCITY = os.environ.get("WEATHER_DEFCITY", None)
# Lydia API
LYDIA_API_KEY = os.environ.get("LYDIA_API_KEY", None)
# set blacklist_chats where you do not want userbot's features
UB_BLACK_LIST_CHAT = os.environ.get("UB_BLACK_LIST_CHAT", "")
# Telegraph
TELEGRAPH_SHORT_NAME = os.environ.get("TELEGRAPH_SHORT_NAME", None)
# Anti Spambot Config
ANTI_SPAMBOT = sb(os.environ.get("ANTI_SPAMBOT", "False"))
ANTI_SPAMBOT_SHOUT = sb(os.environ.get("ANTI_SPAMBOT_SHOUT", "False"))
# Youtube API key
YOUTUBE_API_KEY = os.environ.get("YOUTUBE_API_KEY", None)
# Default .alive name
ALIVE_NAME = os.environ.get("ALIVE_NAME", None)
# Time & Date - Country and Time Zone
COUNTRY = str(os.environ.get("COUNTRY", ""))
TZ_NUMBER = int(os.environ.get("TZ_NUMBER", 1))
TERM_ALIAS = os.environ.get("TERM_ALIAS", "OUB")
# Clean Welcome
CLEAN_WELCOME = sb(os.environ.get("CLEAN_WELCOME", "True"))
# Last.fm Module
# Last.fm credentials; the module is disabled when any of them is missing.
BIO_PREFIX = os.environ.get("BIO_PREFIX", None)
DEFAULT_BIO = os.environ.get("DEFAULT_BIO", None)
LASTFM_API = os.environ.get("LASTFM_API", None)
LASTFM_SECRET = os.environ.get("LASTFM_SECRET", None)
LASTFM_USERNAME = os.environ.get("LASTFM_USERNAME", None)
LASTFM_PASSWORD_PLAIN = os.environ.get("LASTFM_PASSWORD", None)
# pylast's md5() encodes its argument, so hashing None would raise a
# TypeError at import time whenever LASTFM_PASSWORD is not configured.
# Only hash when a password is actually present.
LASTFM_PASS = md5(LASTFM_PASSWORD_PLAIN) if LASTFM_PASSWORD_PLAIN else None
if LASTFM_API and LASTFM_SECRET and LASTFM_USERNAME and LASTFM_PASS:
    lastfm = LastFMNetwork(api_key=LASTFM_API,
                           api_secret=LASTFM_SECRET,
                           username=LASTFM_USERNAME,
                           password_hash=LASTFM_PASS)
else:
    lastfm = None
# Google Drive Module
G_DRIVE_DATA = os.environ.get("G_DRIVE_DATA", None)
G_DRIVE_CLIENT_ID = os.environ.get("G_DRIVE_CLIENT_ID", None)
G_DRIVE_CLIENT_SECRET = os.environ.get("G_DRIVE_CLIENT_SECRET", None)
G_DRIVE_AUTH_TOKEN_DATA = os.environ.get("G_DRIVE_AUTH_TOKEN_DATA", None)
GDRIVE_FOLDER_ID = os.environ.get("GDRIVE_FOLDER_ID", None)
TEMP_DOWNLOAD_DIRECTORY = os.environ.get("TMP_DOWNLOAD_DIRECTORY",
"./downloads")
# Genius lyrics get this value from https://genius.com/developers both has same values
GENIUS_API_TOKEN = os.environ.get("GENIUS", None)
# Genius lyrics get this value from https://genius.com/developers both has same values
GENIUS = os.environ.get("GENIUS_API_TOKEN", None)
# Init Mongo
MONGOCLIENT = MongoClient(MONGO_URI, 27017, serverSelectionTimeoutMS=1)
MONGO = MONGOCLIENT.userbot
# bit.ly module
BITLY_TOKEN = os.environ.get("BITLY_TOKEN", None)
def is_mongo_alive():
    """Return True when the configured MongoDB server answers server_info()."""
    try:
        MONGOCLIENT.server_info()
        return True
    except BaseException:
        # Any failure (timeout, auth, DNS, ...) means the DB is unreachable.
        return False
# Init Redis
# Redis will be hosted inside the docker container that hosts the bot
# We need redis for just caching, so we just leave it to non-persistent
REDIS = StrictRedis(host='localhost', port=6379, db=0)
def is_redis_alive():
    """Return True when the local Redis instance responds to PING."""
    try:
        REDIS.ping()
    except BaseException:
        # Any failure (connection refused, timeout, ...) counts as down.
        return False
    return True
# Setting Up CloudMail.ru and MEGA.nz extractor binaries,
# and giving them correct perms to work properly.
if not os.path.exists('bin'):
os.mkdir('bin')
binaries = {
"https://raw.githubusercontent.com/adekmaulana/megadown/master/megadown":
"bin/megadown",
"https://raw.githubusercontent.com/yshalsager/cmrudl.py/master/cmrudl.py":
"bin/cmrudl"
}
for binary, path in binaries.items():
downloader = SmartDL(binary, path, progress_bar=False)
downloader.start()
os.chmod(path, 0o755)
# 'bot' variable
if STRING_SESSION:
# pylint: disable=invalid-name
bot = TelegramClient(StringSession(STRING_SESSION), API_KEY, API_HASH)
else:
# pylint: disable=invalid-name
bot = TelegramClient("userbot", API_KEY, API_HASH)
async def check_botlog_chatid():
    """Validate the BOTLOG_CHATID setting at startup; quit(1) on misconfiguration.

    Checks that a chat ID is configured when logging features are enabled,
    and that the account may post into that chat.
    """
    if not BOTLOG_CHATID and LOGSPAMMER:
        LOGS.info(
            "You must set up the BOTLOG_CHATID variable in the config.env or environment variables, for the private error log storage to work."
        )
        quit(1)
    elif not BOTLOG_CHATID and BOTLOG:
        LOGS.info(
            "You must set up the BOTLOG_CHATID variable in the config.env or environment variables, for the userbot logging feature to work."
        )
        quit(1)
    elif not BOTLOG or not LOGSPAMMER:
        # NOTE(review): this returns when EITHER feature is off, so the
        # entity/permission check below only runs when both BOTLOG and
        # LOGSPAMMER are enabled — confirm that is intended for
        # BOTLOG-only configurations.
        return
    # Resolve the chat and make sure the account can send messages there.
    entity = await bot.get_entity(BOTLOG_CHATID)
    if entity.default_banned_rights.send_messages:
        LOGS.info(
            "Your account doesn't have rights to send messages to BOTLOG_CHATID "
            "group. Check if you typed the Chat ID correctly.")
        quit(1)
with bot:
try:
bot.loop.run_until_complete(check_botlog_chatid())
except:
LOGS.info(
"BOTLOG_CHATID environment variable isn't a "
"valid entity. Check your environment variables/config.env file.")
quit(1)
# Global Variables
COUNT_MSG = 0
USERS = {}
COUNT_PM = {}
LASTMSG = {}
ENABLE_KILLME = True
CMD_HELP = {}
ISAFK = False
AFKREASON = None
| 31.876364 | 143 | 0.722222 |
import os
from sys import version_info
from logging import basicConfig, getLogger, INFO, DEBUG
from distutils.util import strtobool as sb
from pymongo import MongoClient
from redis import StrictRedis
from pylast import LastFMNetwork, md5
from pySmartDL import SmartDL
from dotenv import load_dotenv
from requests import get
from telethon import TelegramClient
from telethon.sessions import StringSession
load_dotenv("config.env")
CONSOLE_LOGGER_VERBOSE = sb(os.environ.get("CONSOLE_LOGGER_VERBOSE", "False"))
if CONSOLE_LOGGER_VERBOSE:
basicConfig(
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
level=DEBUG,
)
else:
basicConfig(format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
level=INFO)
LOGS = getLogger(__name__)
if version_info[0] < 3 or version_info[1] < 8:
LOGS.info("You MUST have a python version of at least 3.8."
"Multiple features depend on this. Bot quitting.")
quit(1)
CONFIG_CHECK = os.environ.get(
"___________PLOX_______REMOVE_____THIS_____LINE__________", None)
if CONFIG_CHECK:
LOGS.info(
"Please remove the line mentioned in the first hashtag from the config.env file"
)
quit(1)
API_KEY = os.environ.get("API_KEY", None)
API_HASH = os.environ.get("API_HASH", None)
API_TOKEN = os.environ.get("API_TOKEN", None)
API_URL = os.environ.get("API_URL", "http://antiddos.systems")
STRING_SESSION = os.environ.get("STRING_SESSION", None)
BOTLOG_CHATID = int(os.environ.get("BOTLOG_CHATID", None))
NC_LOG_P_M_S = bool(os.environ.get("NC_LOG_P_M_S", False))
PM_LOGGR_BOT_API_ID = int(os.environ.get("PM_LOGGR_BOT_API_ID", "-100"))
BOTLOG = sb(os.environ.get("BOTLOG", "False"))
LOGSPAMMER = sb(os.environ.get("LOGSPAMMER", "False"))
PM_AUTO_BAN = sb(os.environ.get("PM_AUTO_BAN", "False"))
HEROKU_MEMEZ = sb(os.environ.get("HEROKU_MEMEZ", "False"))
HEROKU_APP_NAME = os.environ.get("HEROKU_APP_NAME", None)
HEROKU_API_KEY = os.environ.get("HEROKU_API_KEY", None)
GIT_REPO_NAME = os.environ.get("GIT_REPO_NAME", None)
GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", None)
UPSTREAM_REPO_URL = os.environ.get(
"UPSTREAM_REPO_URL",
"https://github.com/mkaraniya/OpenUserBot.git")
CONSOLE_LOGGER_VERBOSE = sb(os.environ.get("CONSOLE_LOGGER_VERBOSE", "False"))
DB_URI = os.environ.get("DATABASE_URL", None)
MONGO_URI = os.environ.get("MONGO_URI", None)
OCR_SPACE_API_KEY = os.environ.get("OCR_SPACE_API_KEY", None)
REM_BG_API_KEY = os.environ.get("REM_BG_API_KEY", None)
CHROME_DRIVER = os.environ.get("CHROME_DRIVER", None)
GOOGLE_CHROME_BIN = os.environ.get("GOOGLE_CHROME_BIN", None)
OPEN_WEATHER_MAP_APPID = os.environ.get("OPEN_WEATHER_MAP_APPID", None)
WEATHER_DEFCITY = os.environ.get("WEATHER_DEFCITY", None)
LYDIA_API_KEY = os.environ.get("LYDIA_API_KEY", None)
UB_BLACK_LIST_CHAT = os.environ.get("UB_BLACK_LIST_CHAT", "")
# Telegraph
TELEGRAPH_SHORT_NAME = os.environ.get("TELEGRAPH_SHORT_NAME", None)
# Anti Spambot Config
ANTI_SPAMBOT = sb(os.environ.get("ANTI_SPAMBOT", "False"))
ANTI_SPAMBOT_SHOUT = sb(os.environ.get("ANTI_SPAMBOT_SHOUT", "False"))
# Youtube API key
YOUTUBE_API_KEY = os.environ.get("YOUTUBE_API_KEY", None)
# Default .alive name
ALIVE_NAME = os.environ.get("ALIVE_NAME", None)
# Time & Date - Country and Time Zone
COUNTRY = str(os.environ.get("COUNTRY", ""))
TZ_NUMBER = int(os.environ.get("TZ_NUMBER", 1))
TERM_ALIAS = os.environ.get("TERM_ALIAS", "OUB")
# Clean Welcome
CLEAN_WELCOME = sb(os.environ.get("CLEAN_WELCOME", "True"))
# Last.fm Module
BIO_PREFIX = os.environ.get("BIO_PREFIX", None)
DEFAULT_BIO = os.environ.get("DEFAULT_BIO", None)
LASTFM_API = os.environ.get("LASTFM_API", None)
LASTFM_SECRET = os.environ.get("LASTFM_SECRET", None)
LASTFM_USERNAME = os.environ.get("LASTFM_USERNAME", None)
LASTFM_PASSWORD_PLAIN = os.environ.get("LASTFM_PASSWORD", None)
LASTFM_PASS = md5(LASTFM_PASSWORD_PLAIN)
if LASTFM_API and LASTFM_SECRET and LASTFM_USERNAME and LASTFM_PASS:
lastfm = LastFMNetwork(api_key=LASTFM_API,
api_secret=LASTFM_SECRET,
username=LASTFM_USERNAME,
password_hash=LASTFM_PASS)
else:
lastfm = None
# Google Drive Module
G_DRIVE_DATA = os.environ.get("G_DRIVE_DATA", None)
G_DRIVE_CLIENT_ID = os.environ.get("G_DRIVE_CLIENT_ID", None)
G_DRIVE_CLIENT_SECRET = os.environ.get("G_DRIVE_CLIENT_SECRET", None)
G_DRIVE_AUTH_TOKEN_DATA = os.environ.get("G_DRIVE_AUTH_TOKEN_DATA", None)
GDRIVE_FOLDER_ID = os.environ.get("GDRIVE_FOLDER_ID", None)
TEMP_DOWNLOAD_DIRECTORY = os.environ.get("TMP_DOWNLOAD_DIRECTORY",
"./downloads")
# Genius lyrics get this value from https://genius.com/developers both has same values
GENIUS_API_TOKEN = os.environ.get("GENIUS", None)
# Genius lyrics get this value from https://genius.com/developers both has same values
GENIUS = os.environ.get("GENIUS_API_TOKEN", None)
# Init Mongo
MONGOCLIENT = MongoClient(MONGO_URI, 27017, serverSelectionTimeoutMS=1)
MONGO = MONGOCLIENT.userbot
# bit.ly module
BITLY_TOKEN = os.environ.get("BITLY_TOKEN", None)
def is_mongo_alive():
try:
MONGOCLIENT.server_info()
except BaseException:
return False
return True
# Init Redis
# Redis will be hosted inside the docker container that hosts the bot
# We need redis for just caching, so we just leave it to non-persistent
REDIS = StrictRedis(host='localhost', port=6379, db=0)
def is_redis_alive():
try:
REDIS.ping()
return True
except BaseException:
return False
# Setting Up CloudMail.ru and MEGA.nz extractor binaries,
# and giving them correct perms to work properly.
if not os.path.exists('bin'):
os.mkdir('bin')
binaries = {
"https://raw.githubusercontent.com/adekmaulana/megadown/master/megadown":
"bin/megadown",
"https://raw.githubusercontent.com/yshalsager/cmrudl.py/master/cmrudl.py":
"bin/cmrudl"
}
for binary, path in binaries.items():
downloader = SmartDL(binary, path, progress_bar=False)
downloader.start()
os.chmod(path, 0o755)
# 'bot' variable
if STRING_SESSION:
# pylint: disable=invalid-name
bot = TelegramClient(StringSession(STRING_SESSION), API_KEY, API_HASH)
else:
# pylint: disable=invalid-name
bot = TelegramClient("userbot", API_KEY, API_HASH)
async def check_botlog_chatid():
if not BOTLOG_CHATID and LOGSPAMMER:
LOGS.info(
"You must set up the BOTLOG_CHATID variable in the config.env or environment variables, for the private error log storage to work."
)
quit(1)
elif not BOTLOG_CHATID and BOTLOG:
LOGS.info(
"You must set up the BOTLOG_CHATID variable in the config.env or environment variables, for the userbot logging feature to work."
)
quit(1)
elif not BOTLOG or not LOGSPAMMER:
return
entity = await bot.get_entity(BOTLOG_CHATID)
if entity.default_banned_rights.send_messages:
LOGS.info(
"Your account doesn't have rights to send messages to BOTLOG_CHATID "
"group. Check if you typed the Chat ID correctly.")
quit(1)
with bot:
try:
bot.loop.run_until_complete(check_botlog_chatid())
except:
LOGS.info(
"BOTLOG_CHATID environment variable isn't a "
"valid entity. Check your environment variables/config.env file.")
quit(1)
# Global Variables
COUNT_MSG = 0
USERS = {}
COUNT_PM = {}
LASTMSG = {}
ENABLE_KILLME = True
CMD_HELP = {}
ISAFK = False
AFKREASON = None
| true | true |
f71a763946c4caf38418e8a819b9202fc549a816 | 15,744 | py | Python | superset/connectors/druid/views.py | whelan9453/incubator-superset | 4e3cea45a5136a28442eea50fddc6cf423a9ddd5 | [
"Apache-2.0"
] | null | null | null | superset/connectors/druid/views.py | whelan9453/incubator-superset | 4e3cea45a5136a28442eea50fddc6cf423a9ddd5 | [
"Apache-2.0"
] | 2 | 2019-11-11T11:16:32.000Z | 2019-12-13T07:12:09.000Z | superset/connectors/druid/views.py | whelan9453/incubator-superset | 4e3cea45a5136a28442eea50fddc6cf423a9ddd5 | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
import json
import logging
from datetime import datetime
from flask import flash, Markup, redirect
from flask_appbuilder import CompactCRUDMixin, expose
from flask_appbuilder.fieldwidgets import Select2Widget
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_appbuilder.security.decorators import has_access
from flask_babel import gettext as __, lazy_gettext as _
from wtforms.ext.sqlalchemy.fields import QuerySelectField
from superset import appbuilder, db, security_manager
from superset.connectors.base.views import DatasourceModelView
from superset.connectors.connector_registry import ConnectorRegistry
from superset.utils import core as utils
from superset.views.base import (
BaseSupersetView,
DatasourceFilter,
DeleteMixin,
get_datasource_exist_error_msg,
ListWidgetWithCheckboxes,
SupersetModelView,
validate_json,
YamlExportMixin,
)
from . import models
class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView):
    """Inline CRUD view for Druid columns, embedded in the datasource view.

    The class-level attributes below are declarative Flask-AppBuilder
    configuration (titles, column lists, labels, widgets); they are
    consumed by the ModelView machinery, not read at runtime by this file.
    """
    datamodel = SQLAInterface(models.DruidColumn)
    list_title = _("Columns")
    show_title = _("Show Druid Column")
    add_title = _("Add Druid Column")
    edit_title = _("Edit Druid Column")
    list_widget = ListWidgetWithCheckboxes
    edit_columns = [
        "column_name",
        "verbose_name",
        "description",
        "dimension_spec_json",
        "datasource",
        "groupby",
        "filterable",
    ]
    add_columns = edit_columns
    list_columns = ["column_name", "verbose_name", "type", "groupby", "filterable"]
    can_delete = False
    page_size = 500
    label_columns = {
        "column_name": _("Column"),
        "type": _("Type"),
        "datasource": _("Datasource"),
        "groupby": _("Groupable"),
        "filterable": _("Filterable"),
    }
    description_columns = {
        "filterable": _(
            "Whether this column is exposed in the `Filters` section "
            "of the explore view."
        ),
        "dimension_spec_json": utils.markdown(
            "this field can be used to specify "
            "a `dimensionSpec` as documented [here]"
            "(http://druid.io/docs/latest/querying/dimensionspecs.html). "
            "Make sure to input valid JSON and that the "
            "`outputName` matches the `column_name` defined "
            "above.",
            True,
        ),
    }
    # The datasource selector is rendered with a "readonly" extra class on
    # both the add and edit forms.
    add_form_extra_fields = {
        "datasource": QuerySelectField(
            "Datasource",
            query_factory=lambda: db.session().query(models.DruidDatasource),
            allow_blank=True,
            widget=Select2Widget(extra_classes="readonly"),
        )
    }
    edit_form_extra_fields = add_form_extra_fields
    def pre_update(self, col):
        """Validate `dimension_spec_json` before the column is saved.

        Raises ValueError when the spec is not valid JSON, not a JSON
        object, missing `outputName`/`dimension`, or when `outputName`
        differs from the column name.
        """
        # If a dimension spec JSON is given, ensure that it is
        # valid JSON and that `outputName` is specified
        if col.dimension_spec_json:
            try:
                dimension_spec = json.loads(col.dimension_spec_json)
            except ValueError as e:
                raise ValueError("Invalid Dimension Spec JSON: " + str(e))
            if not isinstance(dimension_spec, dict):
                raise ValueError("Dimension Spec must be a JSON object")
            if "outputName" not in dimension_spec:
                raise ValueError("Dimension Spec does not contain `outputName`")
            if "dimension" not in dimension_spec:
                raise ValueError("Dimension Spec is missing `dimension`")
            # `outputName` should be the same as the `column_name`
            if dimension_spec["outputName"] != col.column_name:
                raise ValueError(
                    "`outputName` [{}] unequal to `column_name` [{}]".format(
                        dimension_spec["outputName"], col.column_name
                    )
                )
    def post_update(self, col):
        # Re-derive the column's metrics after any edit.
        col.refresh_metrics()
    def post_add(self, col):
        # A freshly added column gets the same metric refresh as an update.
        self.post_update(col)
appbuilder.add_view_no_menu(DruidColumnInlineView)
class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView):
    """Inline CRUD view for Druid metrics, embedded in the datasource view.

    All class-level attributes are declarative Flask-AppBuilder
    configuration consumed by the ModelView machinery.
    """
    datamodel = SQLAInterface(models.DruidMetric)
    list_title = _("Metrics")
    show_title = _("Show Druid Metric")
    add_title = _("Add Druid Metric")
    edit_title = _("Edit Druid Metric")
    list_columns = ["metric_name", "verbose_name", "metric_type"]
    edit_columns = [
        "metric_name",
        "description",
        "verbose_name",
        "metric_type",
        "json",
        "datasource",
        "d3format",
        "warning_text",
    ]
    add_columns = edit_columns
    page_size = 500
    # The metric definition must be valid JSON to be accepted by the form.
    validators_columns = {"json": [validate_json]}
    description_columns = {
        "metric_type": utils.markdown(
            "use `postagg` as the metric type if you are defining a "
            "[Druid Post Aggregation]"
            "(http://druid.io/docs/latest/querying/post-aggregations.html)",
            True,
        )
    }
    label_columns = {
        "metric_name": _("Metric"),
        "description": _("Description"),
        "verbose_name": _("Verbose Name"),
        "metric_type": _("Type"),
        "json": _("JSON"),
        "datasource": _("Druid Datasource"),
        "warning_text": _("Warning Message"),
    }
    # Datasource selector rendered with a "readonly" extra class on both
    # the add and edit forms.
    add_form_extra_fields = {
        "datasource": QuerySelectField(
            "Datasource",
            query_factory=lambda: db.session().query(models.DruidDatasource),
            allow_blank=True,
            widget=Select2Widget(extra_classes="readonly"),
        )
    }
    edit_form_extra_fields = add_form_extra_fields
appbuilder.add_view_no_menu(DruidMetricInlineView)
class DruidClusterModelView(SupersetModelView, DeleteMixin, YamlExportMixin):
    """CRUD view for Druid clusters (broker connection details).

    Class-level attributes are declarative Flask-AppBuilder configuration.
    YamlExportMixin exports clusters under the `databases` key (see
    `yaml_dict_key` below).
    """
    datamodel = SQLAInterface(models.DruidCluster)
    list_title = _("Druid Clusters")
    show_title = _("Show Druid Cluster")
    add_title = _("Add Druid Cluster")
    edit_title = _("Edit Druid Cluster")
    add_columns = [
        "verbose_name",
        "broker_host",
        "broker_port",
        "broker_user",
        "broker_pass",
        "broker_endpoint",
        "cache_timeout",
        "cluster_name",
    ]
    edit_columns = add_columns
    list_columns = ["cluster_name", "metadata_last_refreshed"]
    search_columns = ("cluster_name",)
    label_columns = {
        "cluster_name": _("Cluster"),
        "broker_host": _("Broker Host"),
        "broker_port": _("Broker Port"),
        "broker_user": _("Broker Username"),
        "broker_pass": _("Broker Password"),
        "broker_endpoint": _("Broker Endpoint"),
        "verbose_name": _("Verbose Name"),
        "cache_timeout": _("Cache Timeout"),
        "metadata_last_refreshed": _("Metadata Last Refreshed"),
    }
    description_columns = {
        "cache_timeout": _(
            "Duration (in seconds) of the caching timeout for this cluster. "
            "A timeout of 0 indicates that the cache never expires. "
            "Note this defaults to the global timeout if undefined."
        ),
        "broker_user": _(
            "Druid supports basic authentication. See "
            "[auth](http://druid.io/docs/latest/design/auth.html) and "
            "druid-basic-security extension"
        ),
        "broker_pass": _(
            "Druid supports basic authentication. See "
            "[auth](http://druid.io/docs/latest/design/auth.html) and "
            "druid-basic-security extension"
        ),
    }
    yaml_dict_key = "databases"
    # The cluster name cannot be changed after creation: the edit form
    # renders it with a "readonly" extra class.
    edit_form_extra_fields = {
        "cluster_name": QuerySelectField(
            "Cluster",
            query_factory=lambda: db.session().query(models.DruidCluster),
            widget=Select2Widget(extra_classes="readonly"),
        )
    }
    def pre_add(self, cluster):
        # Register a database_access permission for the new cluster so it
        # can be granted to roles.
        security_manager.add_permission_view_menu("database_access", cluster.perm)
    def pre_update(self, cluster):
        # Updates go through the same permission registration as adds.
        self.pre_add(cluster)
    def _delete(self, pk):
        DeleteMixin._delete(self, pk)
appbuilder.add_view(
DruidClusterModelView,
name="Druid Clusters",
label=__("Druid Clusters"),
icon="fa-cubes",
category="Sources",
category_label=__("Sources"),
category_icon="fa-database",
)
class DruidDatasourceModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):
    """CRUD view for Druid datasources.

    Class-level attributes are declarative Flask-AppBuilder configuration;
    the DruidColumnInlineView and DruidMetricInlineView are embedded via
    `related_views`.
    """
    datamodel = SQLAInterface(models.DruidDatasource)
    list_title = _("Druid Datasources")
    show_title = _("Show Druid Datasource")
    add_title = _("Add Druid Datasource")
    edit_title = _("Edit Druid Datasource")
    list_columns = ["datasource_link", "cluster", "changed_by_", "modified"]
    order_columns = ["datasource_link", "modified"]
    related_views = [DruidColumnInlineView, DruidMetricInlineView]
    edit_columns = [
        "datasource_name",
        "cluster",
        "description",
        "owners",
        "is_hidden",
        "filter_select_enabled",
        "fetch_values_from",
        "default_endpoint",
        "offset",
        "cache_timeout",
    ]
    search_columns = ("datasource_name", "cluster", "description", "owners")
    add_columns = edit_columns
    show_columns = add_columns + ["perm", "slices"]
    page_size = 500
    base_order = ("datasource_name", "asc")
    description_columns = {
        "slices": _(
            "The list of charts associated with this table. By "
            "altering this datasource, you may change how these associated "
            "charts behave. "
            "Also note that charts need to point to a datasource, so "
            "this form will fail at saving if removing charts from a "
            "datasource. If you want to change the datasource for a chart, "
            "overwrite the chart from the 'explore view'"
        ),
        "offset": _("Timezone offset (in hours) for this datasource"),
        "description": Markup(
            'Supports <a href="'
            'https://daringfireball.net/projects/markdown/">markdown</a>'
        ),
        "fetch_values_from": _(
            "Time expression to use as a predicate when retrieving "
            "distinct values to populate the filter component. "
            "Only applies when `Enable Filter Select` is on. If "
            "you enter `7 days ago`, the distinct list of values in "
            "the filter will be populated based on the distinct value over "
            "the past week"
        ),
        "filter_select_enabled": _(
            "Whether to populate the filter's dropdown in the explore "
            "view's filter section with a list of distinct values fetched "
            "from the backend on the fly"
        ),
        "default_endpoint": _(
            "Redirects to this endpoint when clicking on the datasource "
            "from the datasource list"
        ),
        "cache_timeout": _(
            "Duration (in seconds) of the caching timeout for this datasource. "
            "A timeout of 0 indicates that the cache never expires. "
            "Note this defaults to the cluster timeout if undefined."
        ),
    }
    # Restrict list results to datasources the current user may access.
    base_filters = [["id", DatasourceFilter, lambda: []]]
    label_columns = {
        "slices": _("Associated Charts"),
        "datasource_link": _("Data Source"),
        "cluster": _("Cluster"),
        "description": _("Description"),
        "owners": _("Owners"),
        "is_hidden": _("Is Hidden"),
        "filter_select_enabled": _("Enable Filter Select"),
        "default_endpoint": _("Default Endpoint"),
        "offset": _("Time Offset"),
        "cache_timeout": _("Cache Timeout"),
        "datasource_name": _("Datasource Name"),
        "fetch_values_from": _("Fetch Values From"),
        "changed_by_": _("Changed By"),
        "modified": _("Modified"),
    }
    def pre_add(self, datasource):
        """Reject the add when a datasource with the same name already exists
        on the same cluster."""
        with db.session.no_autoflush:
            query = db.session.query(models.DruidDatasource).filter(
                models.DruidDatasource.datasource_name == datasource.datasource_name,
                models.DruidDatasource.cluster_name == datasource.cluster.id,
            )
            if db.session.query(query.exists()).scalar():
                raise Exception(get_datasource_exist_error_msg(datasource.full_name))
    def post_add(self, datasource):
        """Refresh metrics and register access permissions for a new datasource."""
        datasource.refresh_metrics()
        security_manager.add_permission_view_menu(
            "datasource_access", datasource.get_perm()
        )
        if datasource.schema:
            security_manager.add_permission_view_menu(
                "schema_access", datasource.schema_perm
            )
    def post_update(self, datasource):
        # Updates need the same metric refresh and permission registration.
        self.post_add(datasource)
    def _delete(self, pk):
        DeleteMixin._delete(self, pk)
appbuilder.add_view(
DruidDatasourceModelView,
"Druid Datasources",
label=__("Druid Datasources"),
category="Sources",
category_label=__("Sources"),
icon="fa-cube",
)
class Druid(BaseSupersetView):
"""The base views for Superset!"""
@has_access
@expose("/refresh_datasources/")
def refresh_datasources(self, refresh_all=True):
"""endpoint that refreshes druid datasources metadata"""
session = db.session()
DruidCluster = ConnectorRegistry.sources["druid"].cluster_class
for cluster in session.query(DruidCluster).all():
cluster_name = cluster.cluster_name
valid_cluster = True
try:
cluster.refresh_datasources(refresh_all=refresh_all)
except Exception as e:
valid_cluster = False
flash(
"Error while processing cluster '{}'\n{}".format(
cluster_name, utils.error_msg_from_exception(e)
),
"danger",
)
logging.exception(e)
pass
if valid_cluster:
cluster.metadata_last_refreshed = datetime.now()
flash(
_("Refreshed metadata from cluster [{}]").format(
cluster.cluster_name
),
"info",
)
session.commit()
return redirect("/druiddatasourcemodelview/list/")
@has_access
@expose("/scan_new_datasources/")
def scan_new_datasources(self):
"""
Calling this endpoint will cause a scan for new
datasources only and add them.
"""
return self.refresh_datasources(refresh_all=False)
appbuilder.add_view_no_menu(Druid)
appbuilder.add_link(
"Scan New Datasources",
label=__("Scan New Datasources"),
href="/druid/scan_new_datasources/",
category="Sources",
category_label=__("Sources"),
category_icon="fa-database",
icon="fa-refresh",
)
appbuilder.add_link(
"Refresh Druid Metadata",
label=__("Refresh Druid Metadata"),
href="/druid/refresh_datasources/",
category="Sources",
category_label=__("Sources"),
category_icon="fa-database",
icon="fa-cog",
)
appbuilder.add_separator("Sources")
| 34.151844 | 85 | 0.624111 |
import json
import logging
from datetime import datetime
from flask import flash, Markup, redirect
from flask_appbuilder import CompactCRUDMixin, expose
from flask_appbuilder.fieldwidgets import Select2Widget
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_appbuilder.security.decorators import has_access
from flask_babel import gettext as __, lazy_gettext as _
from wtforms.ext.sqlalchemy.fields import QuerySelectField
from superset import appbuilder, db, security_manager
from superset.connectors.base.views import DatasourceModelView
from superset.connectors.connector_registry import ConnectorRegistry
from superset.utils import core as utils
from superset.views.base import (
BaseSupersetView,
DatasourceFilter,
DeleteMixin,
get_datasource_exist_error_msg,
ListWidgetWithCheckboxes,
SupersetModelView,
validate_json,
YamlExportMixin,
)
from . import models
class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView):
datamodel = SQLAInterface(models.DruidColumn)
list_title = _("Columns")
show_title = _("Show Druid Column")
add_title = _("Add Druid Column")
edit_title = _("Edit Druid Column")
list_widget = ListWidgetWithCheckboxes
edit_columns = [
"column_name",
"verbose_name",
"description",
"dimension_spec_json",
"datasource",
"groupby",
"filterable",
]
add_columns = edit_columns
list_columns = ["column_name", "verbose_name", "type", "groupby", "filterable"]
can_delete = False
page_size = 500
label_columns = {
"column_name": _("Column"),
"type": _("Type"),
"datasource": _("Datasource"),
"groupby": _("Groupable"),
"filterable": _("Filterable"),
}
description_columns = {
"filterable": _(
"Whether this column is exposed in the `Filters` section "
"of the explore view."
),
"dimension_spec_json": utils.markdown(
"this field can be used to specify "
"a `dimensionSpec` as documented [here]"
"(http://druid.io/docs/latest/querying/dimensionspecs.html). "
"Make sure to input valid JSON and that the "
"`outputName` matches the `column_name` defined "
"above.",
True,
),
}
add_form_extra_fields = {
"datasource": QuerySelectField(
"Datasource",
query_factory=lambda: db.session().query(models.DruidDatasource),
allow_blank=True,
widget=Select2Widget(extra_classes="readonly"),
)
}
edit_form_extra_fields = add_form_extra_fields
def pre_update(self, col):
if col.dimension_spec_json:
try:
dimension_spec = json.loads(col.dimension_spec_json)
except ValueError as e:
raise ValueError("Invalid Dimension Spec JSON: " + str(e))
if not isinstance(dimension_spec, dict):
raise ValueError("Dimension Spec must be a JSON object")
if "outputName" not in dimension_spec:
raise ValueError("Dimension Spec does not contain `outputName`")
if "dimension" not in dimension_spec:
raise ValueError("Dimension Spec is missing `dimension`")
if dimension_spec["outputName"] != col.column_name:
raise ValueError(
"`outputName` [{}] unequal to `column_name` [{}]".format(
dimension_spec["outputName"], col.column_name
)
)
def post_update(self, col):
col.refresh_metrics()
def post_add(self, col):
self.post_update(col)
appbuilder.add_view_no_menu(DruidColumnInlineView)
class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView):
datamodel = SQLAInterface(models.DruidMetric)
list_title = _("Metrics")
show_title = _("Show Druid Metric")
add_title = _("Add Druid Metric")
edit_title = _("Edit Druid Metric")
list_columns = ["metric_name", "verbose_name", "metric_type"]
edit_columns = [
"metric_name",
"description",
"verbose_name",
"metric_type",
"json",
"datasource",
"d3format",
"warning_text",
]
add_columns = edit_columns
page_size = 500
validators_columns = {"json": [validate_json]}
description_columns = {
"metric_type": utils.markdown(
"use `postagg` as the metric type if you are defining a "
"[Druid Post Aggregation]"
"(http://druid.io/docs/latest/querying/post-aggregations.html)",
True,
)
}
label_columns = {
"metric_name": _("Metric"),
"description": _("Description"),
"verbose_name": _("Verbose Name"),
"metric_type": _("Type"),
"json": _("JSON"),
"datasource": _("Druid Datasource"),
"warning_text": _("Warning Message"),
}
add_form_extra_fields = {
"datasource": QuerySelectField(
"Datasource",
query_factory=lambda: db.session().query(models.DruidDatasource),
allow_blank=True,
widget=Select2Widget(extra_classes="readonly"),
)
}
edit_form_extra_fields = add_form_extra_fields
appbuilder.add_view_no_menu(DruidMetricInlineView)
class DruidClusterModelView(SupersetModelView, DeleteMixin, YamlExportMixin):
datamodel = SQLAInterface(models.DruidCluster)
list_title = _("Druid Clusters")
show_title = _("Show Druid Cluster")
add_title = _("Add Druid Cluster")
edit_title = _("Edit Druid Cluster")
add_columns = [
"verbose_name",
"broker_host",
"broker_port",
"broker_user",
"broker_pass",
"broker_endpoint",
"cache_timeout",
"cluster_name",
]
edit_columns = add_columns
list_columns = ["cluster_name", "metadata_last_refreshed"]
search_columns = ("cluster_name",)
label_columns = {
"cluster_name": _("Cluster"),
"broker_host": _("Broker Host"),
"broker_port": _("Broker Port"),
"broker_user": _("Broker Username"),
"broker_pass": _("Broker Password"),
"broker_endpoint": _("Broker Endpoint"),
"verbose_name": _("Verbose Name"),
"cache_timeout": _("Cache Timeout"),
"metadata_last_refreshed": _("Metadata Last Refreshed"),
}
description_columns = {
"cache_timeout": _(
"Duration (in seconds) of the caching timeout for this cluster. "
"A timeout of 0 indicates that the cache never expires. "
"Note this defaults to the global timeout if undefined."
),
"broker_user": _(
"Druid supports basic authentication. See "
"[auth](http://druid.io/docs/latest/design/auth.html) and "
"druid-basic-security extension"
),
"broker_pass": _(
"Druid supports basic authentication. See "
"[auth](http://druid.io/docs/latest/design/auth.html) and "
"druid-basic-security extension"
),
}
yaml_dict_key = "databases"
edit_form_extra_fields = {
"cluster_name": QuerySelectField(
"Cluster",
query_factory=lambda: db.session().query(models.DruidCluster),
widget=Select2Widget(extra_classes="readonly"),
)
}
def pre_add(self, cluster):
security_manager.add_permission_view_menu("database_access", cluster.perm)
def pre_update(self, cluster):
self.pre_add(cluster)
def _delete(self, pk):
DeleteMixin._delete(self, pk)
appbuilder.add_view(
DruidClusterModelView,
name="Druid Clusters",
label=__("Druid Clusters"),
icon="fa-cubes",
category="Sources",
category_label=__("Sources"),
category_icon="fa-database",
)
class DruidDatasourceModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):
datamodel = SQLAInterface(models.DruidDatasource)
list_title = _("Druid Datasources")
show_title = _("Show Druid Datasource")
add_title = _("Add Druid Datasource")
edit_title = _("Edit Druid Datasource")
list_columns = ["datasource_link", "cluster", "changed_by_", "modified"]
order_columns = ["datasource_link", "modified"]
related_views = [DruidColumnInlineView, DruidMetricInlineView]
edit_columns = [
"datasource_name",
"cluster",
"description",
"owners",
"is_hidden",
"filter_select_enabled",
"fetch_values_from",
"default_endpoint",
"offset",
"cache_timeout",
]
search_columns = ("datasource_name", "cluster", "description", "owners")
add_columns = edit_columns
show_columns = add_columns + ["perm", "slices"]
page_size = 500
base_order = ("datasource_name", "asc")
description_columns = {
"slices": _(
"The list of charts associated with this table. By "
"altering this datasource, you may change how these associated "
"charts behave. "
"Also note that charts need to point to a datasource, so "
"this form will fail at saving if removing charts from a "
"datasource. If you want to change the datasource for a chart, "
"overwrite the chart from the 'explore view'"
),
"offset": _("Timezone offset (in hours) for this datasource"),
"description": Markup(
'Supports <a href="'
'https://daringfireball.net/projects/markdown/">markdown</a>'
),
"fetch_values_from": _(
"Time expression to use as a predicate when retrieving "
"distinct values to populate the filter component. "
"Only applies when `Enable Filter Select` is on. If "
"you enter `7 days ago`, the distinct list of values in "
"the filter will be populated based on the distinct value over "
"the past week"
),
"filter_select_enabled": _(
"Whether to populate the filter's dropdown in the explore "
"view's filter section with a list of distinct values fetched "
"from the backend on the fly"
),
"default_endpoint": _(
"Redirects to this endpoint when clicking on the datasource "
"from the datasource list"
),
"cache_timeout": _(
"Duration (in seconds) of the caching timeout for this datasource. "
"A timeout of 0 indicates that the cache never expires. "
"Note this defaults to the cluster timeout if undefined."
),
}
base_filters = [["id", DatasourceFilter, lambda: []]]
label_columns = {
"slices": _("Associated Charts"),
"datasource_link": _("Data Source"),
"cluster": _("Cluster"),
"description": _("Description"),
"owners": _("Owners"),
"is_hidden": _("Is Hidden"),
"filter_select_enabled": _("Enable Filter Select"),
"default_endpoint": _("Default Endpoint"),
"offset": _("Time Offset"),
"cache_timeout": _("Cache Timeout"),
"datasource_name": _("Datasource Name"),
"fetch_values_from": _("Fetch Values From"),
"changed_by_": _("Changed By"),
"modified": _("Modified"),
}
def pre_add(self, datasource):
with db.session.no_autoflush:
query = db.session.query(models.DruidDatasource).filter(
models.DruidDatasource.datasource_name == datasource.datasource_name,
models.DruidDatasource.cluster_name == datasource.cluster.id,
)
if db.session.query(query.exists()).scalar():
raise Exception(get_datasource_exist_error_msg(datasource.full_name))
def post_add(self, datasource):
datasource.refresh_metrics()
security_manager.add_permission_view_menu(
"datasource_access", datasource.get_perm()
)
if datasource.schema:
security_manager.add_permission_view_menu(
"schema_access", datasource.schema_perm
)
def post_update(self, datasource):
self.post_add(datasource)
def _delete(self, pk):
DeleteMixin._delete(self, pk)
appbuilder.add_view(
DruidDatasourceModelView,
"Druid Datasources",
label=__("Druid Datasources"),
category="Sources",
category_label=__("Sources"),
icon="fa-cube",
)
class Druid(BaseSupersetView):
@has_access
@expose("/refresh_datasources/")
def refresh_datasources(self, refresh_all=True):
session = db.session()
DruidCluster = ConnectorRegistry.sources["druid"].cluster_class
for cluster in session.query(DruidCluster).all():
cluster_name = cluster.cluster_name
valid_cluster = True
try:
cluster.refresh_datasources(refresh_all=refresh_all)
except Exception as e:
valid_cluster = False
flash(
"Error while processing cluster '{}'\n{}".format(
cluster_name, utils.error_msg_from_exception(e)
),
"danger",
)
logging.exception(e)
pass
if valid_cluster:
cluster.metadata_last_refreshed = datetime.now()
flash(
_("Refreshed metadata from cluster [{}]").format(
cluster.cluster_name
),
"info",
)
session.commit()
return redirect("/druiddatasourcemodelview/list/")
@has_access
@expose("/scan_new_datasources/")
def scan_new_datasources(self):
return self.refresh_datasources(refresh_all=False)
appbuilder.add_view_no_menu(Druid)
appbuilder.add_link(
"Scan New Datasources",
label=__("Scan New Datasources"),
href="/druid/scan_new_datasources/",
category="Sources",
category_label=__("Sources"),
category_icon="fa-database",
icon="fa-refresh",
)
appbuilder.add_link(
"Refresh Druid Metadata",
label=__("Refresh Druid Metadata"),
href="/druid/refresh_datasources/",
category="Sources",
category_label=__("Sources"),
category_icon="fa-database",
icon="fa-cog",
)
appbuilder.add_separator("Sources")
| true | true |
f71a774da43e92d9a5b2ea6f28b39201e558710f | 2,045 | py | Python | speech_recognition.py | pmaen/biopython | b6cafe09b3670762d0768cbf2df36fb21b4bd5af | [
"MIT"
] | 1 | 2020-12-24T13:06:31.000Z | 2020-12-24T13:06:31.000Z | speech_recognition.py | pmaen/biopython | b6cafe09b3670762d0768cbf2df36fb21b4bd5af | [
"MIT"
] | null | null | null | speech_recognition.py | pmaen/biopython | b6cafe09b3670762d0768cbf2df36fb21b4bd5af | [
"MIT"
] | null | null | null | import os.path
import speech_recognition as sr
import moviepy.editor as mp
from pydub import AudioSegment
from pydub.utils import make_chunks
import time
import glob
import re
import math
from pathlib import Path
import soundfile as sf
lang = input("Please choose the language for voice recognition by language code. (deutsch: de-DE)\n")
filename = input("Please enter the whole file path including the extension:\n")
fileaudio = filename + ".wav"
title = input("What's the topic?\n")
start_time= time.time()
clip = mp.VideoFileClip(filename)
clip.audio.write_audiofile(fileaudio)
myaudio = AudioSegment.from_file(fileaudio, "wav")
chunk_length_ms = 60000 # pydub calculates in millisec
chunks = make_chunks(myaudio,chunk_length_ms)
r = sr.Recognizer()
for i, chunk in enumerate(chunks):
chunk_name = "{0}.wav".format(i)
print ("exporting", chunk_name)
chunk.export(chunk_name, format="wav")
audio = sr.AudioFile(chunk_name)
x, fs = sf.read(chunk_name)
vol_rms = x.max() - x.min()
if vol_rms <= 6.103515625e-05:
os.remove(chunk_name)
print(chunk_name + "was empty and therefore deleted.")
else:
with audio as source:
audio_file = r.record(source)
result = r.recognize_google(audio_file, language=lang)
with open(chunk_name + ".rectext" ,mode ='w') as file:
file.write(result)
print("Part " + str(i) + " finished.")
os.remove(chunk_name)
file_pattern = re.compile(r'.*?(\d+).*?')
def get_order(file):
match = file_pattern.match(Path(file).name)
if not match:
return math.inf
return int(match.groups()[0])
read_files = sorted(glob.glob("*.rectext"), key=get_order)
with open(filename + "_transcript.txt", "w") as outfile:
for f in read_files:
with open(f, "r") as infile:
outfile.write(infile.read())
outfile.write("\n")
cleanup = glob.glob("*.rectext")
for rectextfile in cleanup:
os.remove(rectextfile)
print("Done after %.2f seconds."% (time.time() - start_time))
| 31.953125 | 101 | 0.681174 | import os.path
import speech_recognition as sr
import moviepy.editor as mp
from pydub import AudioSegment
from pydub.utils import make_chunks
import time
import glob
import re
import math
from pathlib import Path
import soundfile as sf
lang = input("Please choose the language for voice recognition by language code. (deutsch: de-DE)\n")
filename = input("Please enter the whole file path including the extension:\n")
fileaudio = filename + ".wav"
title = input("What's the topic?\n")
start_time= time.time()
clip = mp.VideoFileClip(filename)
clip.audio.write_audiofile(fileaudio)
myaudio = AudioSegment.from_file(fileaudio, "wav")
chunk_length_ms = 60000 # pydub calculates in millisec
chunks = make_chunks(myaudio,chunk_length_ms)
r = sr.Recognizer()
for i, chunk in enumerate(chunks):
chunk_name = "{0}.wav".format(i)
print ("exporting", chunk_name)
chunk.export(chunk_name, format="wav")
audio = sr.AudioFile(chunk_name)
x, fs = sf.read(chunk_name)
vol_rms = x.max() - x.min()
if vol_rms <= 6.103515625e-05:
os.remove(chunk_name)
print(chunk_name + "was empty and therefore deleted.")
else:
with audio as source:
audio_file = r.record(source)
result = r.recognize_google(audio_file, language=lang)
with open(chunk_name + ".rectext" ,mode ='w') as file:
file.write(result)
print("Part " + str(i) + " finished.")
os.remove(chunk_name)
file_pattern = re.compile(r'.*?(\d+).*?')
def get_order(file):
match = file_pattern.match(Path(file).name)
if not match:
return math.inf
return int(match.groups()[0])
read_files = sorted(glob.glob("*.rectext"), key=get_order)
with open(filename + "_transcript.txt", "w") as outfile:
for f in read_files:
with open(f, "r") as infile:
outfile.write(infile.read())
outfile.write("\n")
cleanup = glob.glob("*.rectext")
for rectextfile in cleanup:
os.remove(rectextfile)
print("Done after %.2f seconds."% (time.time() - start_time))
| true | true |
f71a77a49def227a97ac06d0cce2532e8e039b8f | 2,042 | py | Python | code/game/goldspinner.py | LordZagreus/LodeRunner | 68aab36be47cabe31e52f3ee43520bdafcdf3c95 | [
"MIT"
] | 1 | 2017-10-31T22:26:22.000Z | 2017-10-31T22:26:22.000Z | code/game/goldspinner.py | team-sparrow/LodeRunner | 68aab36be47cabe31e52f3ee43520bdafcdf3c95 | [
"MIT"
] | 2 | 2019-07-05T03:17:18.000Z | 2019-07-08T16:15:29.000Z | code/game/goldspinner.py | team-sparrow/LodeRunner | 68aab36be47cabe31e52f3ee43520bdafcdf3c95 | [
"MIT"
] | 1 | 2020-10-15T09:03:20.000Z | 2020-10-15T09:03:20.000Z | import math
from particle import Particle
#from glfunctions import draw_sprite
from code.constants.common import GOLD_SPINNER_LIFESPAN, TILE_WIDTH, TILE_HEIGHT
from code.controllers.intervalcontroller import IntervalController
class GoldSpinner(Particle):
def __init__(self, x, y, dest_x, dest_y):
Particle.__init__(self, x, y, 0, 0, 0) # I don't care about tile index / particle index stuff
# No alpha delay
self.alpha_wait = 0
# These things don't have gravity...
self.gravity = 0
self.max_gravity = 0
# Calculate the distance between spawn and target
distance = math.sqrt( ((x - dest_x) * (x - dest_x)) + ((y - dest_y) * (y - dest_y)) )
# Calculate the angle between the spawn location and the target location...
radians = (math.pi / 4)
# Prevent division by 0
if (dest_x != x):
radians = math.atan( float(abs(dest_y - y)) / float(abs(dest_x - x)) )
# The gold spinner has a given lifspan. We must cross the distance in that duration...
speed = float(distance) / float(GOLD_SPINNER_LIFESPAN)
# Define rate of movement
self.dx = int( math.cos(radians) * speed )
self.dy = int( math.sin(radians) * speed )
# Adjust +/- for the direction this gold is headed...
if (x > dest_x):
self.dx *= -1
if (y > dest_y):
self.dy *= -1
# Based on destination coordinates and the time this particle is allowed to exist,
# calculate an appropriate alpha fade speed...
self.alpha_controller.set_speed_out( (1 / float(GOLD_SPINNER_LIFESPAN)) )
# Define a rotational speed
self.rotational_speed = -10
def render(self, sx, sy, gold_sprite, window_controller):
window_controller.get_gfx_controller().draw_sprite(sx + self.get_x(), sy + self.get_y(), TILE_WIDTH, TILE_HEIGHT, gold_sprite, frame = 0, gl_color = (1, 1, 1, self.alpha_controller.get_interval()), degrees = self.degrees)
| 31.90625 | 229 | 0.639079 | import math
from particle import Particle
from code.constants.common import GOLD_SPINNER_LIFESPAN, TILE_WIDTH, TILE_HEIGHT
from code.controllers.intervalcontroller import IntervalController
class GoldSpinner(Particle):
def __init__(self, x, y, dest_x, dest_y):
Particle.__init__(self, x, y, 0, 0, 0)
# No alpha delay
self.alpha_wait = 0
# These things don't have gravity...
self.gravity = 0
self.max_gravity = 0
distance = math.sqrt( ((x - dest_x) * (x - dest_x)) + ((y - dest_y) * (y - dest_y)) )
radians = (math.pi / 4)
if (dest_x != x):
radians = math.atan( float(abs(dest_y - y)) / float(abs(dest_x - x)) )
speed = float(distance) / float(GOLD_SPINNER_LIFESPAN)
self.dx = int( math.cos(radians) * speed )
self.dy = int( math.sin(radians) * speed )
if (x > dest_x):
self.dx *= -1
if (y > dest_y):
self.dy *= -1
self.alpha_controller.set_speed_out( (1 / float(GOLD_SPINNER_LIFESPAN)) )
self.rotational_speed = -10
def render(self, sx, sy, gold_sprite, window_controller):
window_controller.get_gfx_controller().draw_sprite(sx + self.get_x(), sy + self.get_y(), TILE_WIDTH, TILE_HEIGHT, gold_sprite, frame = 0, gl_color = (1, 1, 1, self.alpha_controller.get_interval()), degrees = self.degrees)
| true | true |
f71a77c1632e053843e6fa96b6402b20781b54ae | 561 | py | Python | audiovisual/indico_audiovisual/blueprint.py | pferreir/indico-plugins-cern | 0fc2eb6b1aa3c3083a813477886a6632f148a4d9 | [
"MIT"
] | null | null | null | audiovisual/indico_audiovisual/blueprint.py | pferreir/indico-plugins-cern | 0fc2eb6b1aa3c3083a813477886a6632f148a4d9 | [
"MIT"
] | null | null | null | audiovisual/indico_audiovisual/blueprint.py | pferreir/indico-plugins-cern | 0fc2eb6b1aa3c3083a813477886a6632f148a4d9 | [
"MIT"
] | null | null | null | # This file is part of the CERN Indico plugins.
# Copyright (C) 2014 - 2019 CERN
#
# The CERN Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License; see
# the LICENSE file for more details.
from __future__ import unicode_literals
from indico.core.plugins import IndicoPluginBlueprint
from indico_audiovisual.controllers import RHRequestList
blueprint = IndicoPluginBlueprint('audiovisual', __name__, url_prefix='/service/audiovisual')
blueprint.add_url_rule('/', 'request_list', RHRequestList)
| 33 | 93 | 0.798574 |
from __future__ import unicode_literals
from indico.core.plugins import IndicoPluginBlueprint
from indico_audiovisual.controllers import RHRequestList
blueprint = IndicoPluginBlueprint('audiovisual', __name__, url_prefix='/service/audiovisual')
blueprint.add_url_rule('/', 'request_list', RHRequestList)
| true | true |
f71a787e6cf602bff2ff9c173e3363f87c7e53c4 | 42,908 | py | Python | ForgeSVN/forgesvn/tests/model/test_repository.py | rohankumardubey/allura | 9c490a051ca912d28b81ce656441d6fed100cb24 | [
"Apache-2.0"
] | 113 | 2015-03-25T10:33:37.000Z | 2022-02-16T20:55:06.000Z | ForgeSVN/forgesvn/tests/model/test_repository.py | rohankumardubey/allura | 9c490a051ca912d28b81ce656441d6fed100cb24 | [
"Apache-2.0"
] | 4 | 2017-08-04T16:19:07.000Z | 2020-06-08T19:01:33.000Z | ForgeSVN/forgesvn/tests/model/test_repository.py | rohankumardubey/allura | 9c490a051ca912d28b81ce656441d6fed100cb24 | [
"Apache-2.0"
] | 36 | 2015-08-14T16:27:39.000Z | 2022-02-16T20:54:35.000Z | # coding: utf-8
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
import os
import shutil
import unittest
from unittest import skipUnless
import pkg_resources
from itertools import count, product
from datetime import datetime
from zipfile import ZipFile
from io import BytesIO
from collections import defaultdict
from tg import tmpl_context as c, app_globals as g
import mock
from alluratest.tools import assert_equal, assert_in
from datadiff.tools import assert_equals
import tg
import ming
from ming.base import Object
from ming.orm import session, ThreadLocalORMSession
from testfixtures import TempDirectory
from alluratest.controller import setup_basic_test, setup_global_objects
from allura import model as M
from allura.model.repo_refresh import send_notifications
from allura.lib import helpers as h
from allura.webhooks import RepoPushWebhookSender
from allura.tests.model.test_repo import RepoImplTestBase
from forgesvn import model as SM
from forgesvn.model.svn import svn_path_exists
from forgesvn.tests import with_svn
from allura.tests.decorators import with_tool
import six
from io import open
from six.moves import range
class TestNewRepo(unittest.TestCase):
    """Exercise the Commit/Tree model API for a freshly-refreshed SVN repo."""

    def setUp(self):
        # Bootstrap the base test environment, then wire up the SVN fixture.
        setup_basic_test()
        self.setup_with_tools()

    @with_svn
    def setup_with_tools(self):
        """Point the 'src' SVN tool at the bundled 'testsvn' fixture and index it."""
        setup_global_objects()
        h.set_context('test', 'src', neighborhood='Projects')
        repo_dir = pkg_resources.resource_filename(
            'forgesvn', 'tests/data/')
        c.app.repo.name = 'testsvn'
        c.app.repo.fs_path = repo_dir
        self.repo = c.app.repo
        # Index the on-disk repository into the model (commits, trees, LCDs).
        self.repo.refresh()
        self.rev = self.repo.commit('HEAD')
        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()

    def test_last_commit_for(self):
        # Every entry in the root tree listing carries last-commit author info.
        tree = self.rev.tree
        for row in tree.ls():
            assert row['last_commit']['author'] is not None

    def test_commit(self):
        """Smoke-test the public Commit API surface against the HEAD revision."""
        # The fixture repository has 7 revisions; bump this if commits are added.
        latest_rev = 7
        assert self.rev.primary() is self.rev
        assert self.rev.index_id().startswith('allura/model/repo/Commit#')
        # These URL properties must be accessible without raising.
        self.rev.author_url
        self.rev.committer_url
        assert_equal(self.rev.tree._id, self.rev.tree_id)
        assert_equal(self.rev.shorthand_id(), '[r{}]'.format(latest_rev))
        assert_equal(self.rev.symbolic_ids, ([], []))
        assert_equal(self.rev.url(), '/p/test/src/{}/'.format(latest_rev))
        all_cis = list(self.repo.log(self.rev._id, limit=25))
        assert_equal(len(all_cis), latest_rev)
        # Tree navigation: listing, README detection, path/url rendering.
        self.rev.tree.ls()
        assert_equal(self.rev.tree.readme(), ('README', 'This is readme\nAnother Line\n'))
        assert_equal(self.rev.tree.path(), '/')
        assert_equal(self.rev.tree.url(), '/p/test/src/{}/tree/'.format(latest_rev))
        self.rev.tree.by_name['README']
        assert self.rev.tree.is_blob('README') is True
        assert_equal(self.rev.tree['a']['b']['c'].ls(), [])
        # Missing paths raise KeyError rather than returning a placeholder.
        self.assertRaises(KeyError, lambda: self.rev.tree['a']['b']['d'])
        # Fixture commits carry no email, so no Allura users are matched.
        assert_equal(self.rev.authored_user, None)
        assert_equal(self.rev.committed_user, None)
        assert_equal(
            sorted(self.rev.webhook_info.keys()),
            sorted(['id', 'url', 'timestamp', 'message', 'author',
                    'committer', 'added', 'removed', 'renamed', 'modified', 'copied']))
class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
    def setUp(self):
        # Bootstrap the base test environment, then attach the SVN fixture repos.
        setup_basic_test()
        self.setup_with_tools()
    @with_svn
    @with_tool('test', 'SVN', 'svn-tags', 'SVN with tags')
    def setup_with_tools(self):
        """Attach and index two fixture repos.

        self.repo is the plain 'testsvn' fixture on the 'src' tool;
        self.svn_tags is the trunk/tags/branches fixture on 'svn-tags'.
        """
        setup_global_objects()
        repo_dir = pkg_resources.resource_filename(
            'forgesvn', 'tests/data/')
        with h.push_context('test', 'src', neighborhood='Projects'):
            c.app.repo.name = 'testsvn'
            c.app.repo.fs_path = repo_dir
            self.repo = c.app.repo
            self.repo.refresh()
            ThreadLocalORMSession.flush_all()
            ThreadLocalORMSession.close_all()
        with h.push_context('test', 'svn-tags', neighborhood='Projects'):
            c.app.repo.name = 'testsvn-trunk-tags-branches'
            c.app.repo.fs_path = repo_dir
            self.svn_tags = c.app.repo
            self.svn_tags.refresh()
            ThreadLocalORMSession.flush_all()
            ThreadLocalORMSession.close_all()
        # Leave the plain 'src' repo as the active context for the tests.
        h.set_context('test', 'src', neighborhood='Projects')
def test_init(self):
repo = SM.Repository(
name='testsvn',
fs_path=g.tmpdir + '/',
url_path='/test/',
tool='svn',
status='creating')
dirname = os.path.join(repo.fs_path, repo.name)
if os.path.exists(dirname):
shutil.rmtree(dirname)
repo.init()
shutil.rmtree(dirname)
def test_fork(self):
repo = SM.Repository(
name='testsvn',
fs_path=g.tmpdir + '/',
url_path='/test/',
tool='svn',
status='creating')
repo_path = pkg_resources.resource_filename(
'forgesvn', 'tests/data/testsvn')
dirname = os.path.join(repo.fs_path, repo.name)
if os.path.exists(dirname):
shutil.rmtree(dirname)
repo.init()
repo._impl.clone_from('file://' + repo_path)
assert not os.path.exists(
os.path.join(g.tmpdir, 'testsvn/hooks/pre-revprop-change'))
assert os.path.exists(
os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'))
assert os.access(
os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'), os.X_OK)
with open(os.path.join(g.tmpdir, 'testsvn/hooks/post-commit')) as f:
hook_data = f.read()
self.assertIn(
'curl -s http://localhost/auth/refresh_repo/p/test/src/\n',
hook_data)
self.assertIn('exec $DIR/post-commit-user "$@"\n', hook_data)
repo.refresh(notify=False)
assert len(list(repo.log(limit=100)))
shutil.rmtree(dirname)
@mock.patch('forgesvn.model.svn.tg')
def test_can_hotcopy(self, tg):
from forgesvn.model.svn import SVNImplementation
func = SVNImplementation.can_hotcopy
obj = mock.Mock(spec=SVNImplementation)
for combo in product(
['file:///myfile', 'http://myfile'],
[True, False],
['version 1.7', 'version 1.6', 'version 2.0.3']):
source_url = combo[0]
tg.config = {'scm.svn.hotcopy': combo[1]}
stdout = combo[2]
obj.check_call.return_value = stdout, '', 0
expected = (source_url.startswith('file://') and
tg.config['scm.svn.hotcopy'] and
stdout != 'version 1.6')
result = func(obj, source_url)
assert result == expected
@mock.patch('forgesvn.model.svn.g.post_event')
def test_clone(self, post_event):
repo = SM.Repository(
name='testsvn',
fs_path=g.tmpdir + '/',
url_path='/test/',
tool='svn',
status='creating')
repo_path = pkg_resources.resource_filename(
'forgesvn', 'tests/data/testsvn')
dirname = os.path.join(repo.fs_path, repo.name)
if os.path.exists(dirname):
shutil.rmtree(dirname)
repo.init()
repo._impl.clone_from('file://' + repo_path)
assert not os.path.exists(
os.path.join(g.tmpdir, 'testsvn/hooks/pre-revprop-change'))
assert os.path.exists(
os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'))
assert os.access(
os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'), os.X_OK)
with open(os.path.join(g.tmpdir, 'testsvn/hooks/post-commit')) as f:
c = f.read()
self.assertIn(
'curl -s http://localhost/auth/refresh_repo/p/test/src/\n', c)
self.assertIn('exec $DIR/post-commit-user "$@"\n', c)
repo.refresh(notify=False)
assert len(list(repo.log(limit=100)))
shutil.rmtree(dirname)
    def test_index(self):
        # The search-index document labels the repo by its tool type.
        i = self.repo.index()
        assert i['type_s'] == 'SVN Repository', i
    def test_log_id_only(self):
        # With id_only, log() yields bare SVN revision numbers, newest first.
        entries = list(self.repo.log(id_only=True, limit=25))
        assert_equal(entries, [7, 6, 5, 4, 3, 2, 1])
def test_log(self):
entries = list(self.repo.log(id_only=False, limit=25))
assert_equal(entries[len(entries)-6:], # only 6, so this test doesn't have to change when commits added
[
{'parents': [5],
'refs': [],
'committed': {
'date': datetime(2013, 11, 8, 13, 38, 11, 152821),
'name': 'coldmind', 'email': ''},
'message': '',
'rename_details': {},
'id': 6,
'authored': {
'date': datetime(2013, 11, 8, 13, 38, 11, 152821),
'name': 'coldmind',
'email': ''
}, 'size': None},
{'parents': [4],
'refs': [],
'committed': {
'date': datetime(2010, 11, 18, 20, 14, 21, 515743),
'name': 'rick446',
'email': ''},
'message': 'Copied a => b',
'rename_details': {},
'id': 5,
'authored': {
'date': datetime(2010, 11, 18, 20, 14, 21, 515743),
'name': 'rick446',
'email': ''},
'size': None},
{'parents': [3],
'refs': [],
'committed': {
'date': datetime(2010, 10, 8, 15, 32, 59, 383719),
'name': 'rick446',
'email': ''},
'message': 'Remove hello.txt',
'rename_details': {},
'id': 4,
'authored': {
'date': datetime(2010, 10, 8, 15, 32, 59, 383719),
'name': 'rick446',
'email': ''},
'size': None},
{'parents': [2],
'refs': [],
'committed': {
'date': datetime(2010, 10, 8, 15, 32, 48, 272296),
'name': 'rick446',
'email': ''},
'message': 'Modify readme',
'rename_details': {},
'id': 3,
'authored':
{'date': datetime(2010, 10, 8, 15, 32, 48, 272296),
'name': 'rick446',
'email': ''},
'size': None},
{'parents': [1],
'refs': [],
'committed': {
'date': datetime(2010, 10, 8, 15, 32, 36, 221863),
'name': 'rick446',
'email': ''},
'message': 'Add path',
'rename_details': {},
'id': 2,
'authored': {
'date': datetime(2010, 10, 8, 15, 32, 36, 221863),
'name': 'rick446',
'email': ''},
'size': None},
{'parents': [],
'refs': [],
'committed': {
'date': datetime(2010, 10, 8, 15, 32, 7, 238375),
'name': 'rick446',
'email': ''},
'message': 'Create readme',
'rename_details': {},
'id': 1,
'authored': {
'date': datetime(2010, 10, 8, 15, 32, 7, 238375),
'name': 'rick446',
'email': ''},
'size': None}])
def test_log_file(self):
entries = list(self.repo.log(path='/README', id_only=False, limit=25))
assert_equal(entries, [
{'authored': {'date': datetime(2010, 10, 8, 15, 32, 48, 272296),
'email': '',
'name': 'rick446'},
'committed': {'date': datetime(2010, 10, 8, 15, 32, 48, 272296),
'email': '',
'name': 'rick446'},
'id': 3,
'message': 'Modify readme',
'parents': [2],
'refs': [],
'size': 28,
'rename_details': {}},
{'authored': {'date': datetime(2010, 10, 8, 15, 32, 7, 238375),
'email': '',
'name': 'rick446'},
'committed': {'date': datetime(2010, 10, 8, 15, 32, 7, 238375),
'email': '',
'name': 'rick446'},
'id': 1,
'message': 'Create readme',
'parents': [],
'refs': [],
'size': 15,
'rename_details': {}},
])
def test_is_file(self):
assert self.repo.is_file('/README')
assert not self.repo.is_file('/a')
def test_paged_diffs(self):
entry = self.repo.commit(next(self.repo.log(2, id_only=True, limit=1)))
self.assertEqual(entry.diffs, entry.paged_diffs())
self.assertEqual(entry.diffs, entry.paged_diffs(start=0))
added_expected = entry.diffs.added[1:3]
expected = dict(
copied=[], changed=[], removed=[], renamed=[],
added=added_expected, total=4)
actual = entry.paged_diffs(start=1, end=3)
self.assertEqual(expected, actual)
fake_id = self.repo._impl._oid(100)
empty = M.repository.Commit(_id=fake_id, repo=self.repo).paged_diffs()
self.assertEqual(sorted(actual.keys()), sorted(empty.keys()))
def test_diff_create_file(self):
entry = self.repo.commit(next(self.repo.log(1, id_only=True, limit=1)))
self.assertEqual(
entry.diffs, dict(
copied=[], changed=[], renamed=[],
removed=[], added=['/README'], total=1))
def test_diff_create_path(self):
entry = self.repo.commit(next(self.repo.log(2, id_only=True, limit=1)))
actual = entry.diffs
actual.added = sorted(actual.added)
self.assertEqual(
entry.diffs, dict(
copied=[], changed=[], removed=[], renamed=[],
added=sorted([
'/a', '/a/b', '/a/b/c',
'/a/b/c/hello.txt']), total=4))
def test_diff_modify_file(self):
entry = self.repo.commit(next(self.repo.log(3, id_only=True, limit=1)))
self.assertEqual(
entry.diffs, dict(
copied=[], changed=['/README'], renamed=[],
removed=[], added=[], total=1))
def test_diff_delete(self):
entry = self.repo.commit(next(self.repo.log(4, id_only=True, limit=1)))
self.assertEqual(
entry.diffs, dict(
copied=[], changed=[], renamed=[],
removed=['/a/b/c/hello.txt'], added=[], total=1))
def test_diff_copy(self):
entry = self.repo.commit(next(self.repo.log(5, id_only=True, limit=1)))
assert_equals(dict(entry.diffs), dict(
copied=[{'new': '/b', 'old': '/a', 'ratio': 1}], renamed=[],
changed=[], removed=[], added=[], total=1))
def test_commit(self):
entry = self.repo.commit(1)
assert entry.committed.name == 'rick446'
assert entry.message
def test_svn_path_exists(self):
repo_path = pkg_resources.resource_filename(
'forgesvn', 'tests/data/testsvn')
assert svn_path_exists("file://%s/a" % repo_path)
assert svn_path_exists("file://%s" % repo_path)
assert not svn_path_exists("file://%s/badpath" % repo_path)
with mock.patch('forgesvn.model.svn.pysvn') as pysvn:
svn_path_exists('dummy')
pysvn.Client.return_value.info2.assert_called_once_with(
'dummy',
revision=pysvn.Revision.return_value,
recurse=False)
@skipUnless(os.path.exists(tg.config.get('scm.repos.tarball.zip_binary', '/usr/bin/zip')), 'zip binary is missing')
def test_tarball(self):
tmpdir = tg.config['scm.repos.tarball.root']
assert_equal(self.repo.tarball_path,
os.path.join(tmpdir, 'svn/t/te/test/testsvn'))
assert_equal(self.repo.tarball_url('1'),
'file:///svn/t/te/test/testsvn/test-src-r1.zip')
self.repo.tarball('1')
assert os.path.isfile(
os.path.join(tmpdir, "svn/t/te/test/testsvn/test-src-r1.zip"))
tarball_zip = ZipFile(
os.path.join(tmpdir, 'svn/t/te/test/testsvn/test-src-r1.zip'), 'r')
assert_equal(tarball_zip.namelist(),
['test-src-r1/', 'test-src-r1/README'])
shutil.rmtree(self.repo.tarball_path.encode('utf-8'),
ignore_errors=True)
@skipUnless(os.path.exists(tg.config.get('scm.repos.tarball.zip_binary', '/usr/bin/zip')), 'zip binary is missing')
def test_tarball_paths(self):
rev = '19'
h.set_context('test', 'svn-tags', neighborhood='Projects')
tmpdir = tg.config['scm.repos.tarball.root']
tarball_path = os.path.join(tmpdir, 'svn/t/te/test/testsvn-trunk-tags-branches/')
# a tag
self.svn_tags.tarball(rev, '/tags/tag-1.0/')
fn = tarball_path + 'test-svn-tags-r19-tags-tag-1.0.zip'
assert os.path.isfile(fn), fn
snapshot = ZipFile(fn, 'r')
tag_content = sorted(['test-svn-tags-r19-tags-tag-1.0/',
'test-svn-tags-r19-tags-tag-1.0/svn-commit.tmp',
'test-svn-tags-r19-tags-tag-1.0/README'])
assert_equal(sorted(snapshot.namelist()), tag_content)
os.remove(fn)
# a directory (of tags)
self.svn_tags.tarball(rev, '/tags/')
fn = tarball_path + 'test-svn-tags-r19-tags.zip'
assert os.path.isfile(fn), fn
snapshot = ZipFile(fn, 'r')
tags_content = sorted(['test-svn-tags-r19-tags/',
'test-svn-tags-r19-tags/tag-1.0/',
'test-svn-tags-r19-tags/tag-1.0/svn-commit.tmp',
'test-svn-tags-r19-tags/tag-1.0/README'])
assert_equal(sorted(snapshot.namelist()), tags_content)
os.remove(fn)
# no path, but there are trunk in the repo
# expect snapshot of trunk
self.svn_tags.tarball(rev)
fn = tarball_path + 'test-svn-tags-r19-trunk.zip'
assert os.path.isfile(fn), fn
snapshot = ZipFile(fn, 'r')
trunk_content = sorted(['test-svn-tags-r19-trunk/',
'test-svn-tags-r19-trunk/aaa.txt',
'test-svn-tags-r19-trunk/bbb.txt',
'test-svn-tags-r19-trunk/ccc.txt',
'test-svn-tags-r19-trunk/README'])
assert_equal(sorted(snapshot.namelist()), trunk_content)
os.remove(fn)
# no path, and no trunk dir
# expect snapshot of repo root
h.set_context('test', 'src', neighborhood='Projects')
fn = os.path.join(tmpdir, 'svn/t/te/test/testsvn/test-src-r1.zip')
self.repo.tarball('1')
assert os.path.isfile(fn), fn
snapshot = ZipFile(fn, 'r')
assert_equal(snapshot.namelist(), ['test-src-r1/', 'test-src-r1/README'])
shutil.rmtree(os.path.join(tmpdir, 'svn/t/te/test/testsvn/'),
ignore_errors=True)
shutil.rmtree(tarball_path, ignore_errors=True)
def test_is_empty(self):
assert not self.repo.is_empty()
with TempDirectory() as d:
repo2 = SM.Repository(
name='test',
fs_path=d.path,
url_path='/test/',
tool='svn',
status='creating')
repo2.init()
assert repo2.is_empty()
repo2.refresh()
ThreadLocalORMSession.flush_all()
assert repo2.is_empty()
def test_webhook_payload(self):
sender = RepoPushWebhookSender()
all_commits = list(self.repo.all_commit_ids())
start = len(all_commits) - 6 # only get a few so test doesn't have to change after new testdata commits
cids = all_commits[start:start+2]
payload = sender.get_payload(commit_ids=cids)
expected_payload = {
'size': 2,
'after': 'r6',
'before': 'r4',
'commits': [{
'id': 'r6',
'url': 'http://localhost/p/test/src/6/',
'timestamp': datetime(2013, 11, 8, 13, 38, 11, 152000),
'message': '',
'author': {'name': 'coldmind',
'email': '',
'username': ''},
'committer': {'name': 'coldmind',
'email': '',
'username': ''},
'added': ['/ЗРЯЧИЙ_ТА_ПОБАЧИТЬ'],
'removed': [],
'modified': [],
'copied': [],
'renamed': [],
}, {
'id': 'r5',
'url': 'http://localhost/p/test/src/5/',
'timestamp': datetime(2010, 11, 18, 20, 14, 21, 515000),
'message': 'Copied a => b',
'author': {'name': 'rick446',
'email': '',
'username': ''},
'committer': {'name': 'rick446',
'email': '',
'username': ''},
'added': [],
'removed': [],
'modified': [],
'copied': [
{'new': '/b', 'old': '/a', 'ratio': 1},
],
'renamed': [],
}],
'repository': {
'name': 'SVN',
'full_name': '/p/test/src/',
'url': 'http://localhost/p/test/src/',
},
}
assert_equals(payload, expected_payload)
class TestSVNRev(unittest.TestCase):
def setUp(self):
setup_basic_test()
self.setup_with_tools()
@with_svn
def setup_with_tools(self):
setup_global_objects()
h.set_context('test', 'src', neighborhood='Projects')
repo_dir = pkg_resources.resource_filename(
'forgesvn', 'tests/data/')
c.app.repo.name = 'testsvn'
c.app.repo.fs_path = repo_dir
self.repo = c.app.repo
self.repo.refresh()
self.rev = self.repo.commit(1)
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_url(self):
assert self.rev.url().endswith('/1/')
def test_primary(self):
assert self.rev.primary() == self.rev
def test_shorthand(self):
assert self.rev.shorthand_id() == '[r1]'
def test_diff(self):
diffs = (self.rev.diffs.added
+ self.rev.diffs.removed
+ self.rev.diffs.changed
+ self.rev.diffs.copied)
for d in diffs:
print(d)
def _oid(self, rev_id):
return '%s:%s' % (self.repo._id, rev_id)
def test_log(self):
# path only
commits = list(self.repo.log(self.repo.head, id_only=True, limit=25))
assert_equal(commits, [7, 6, 5, 4, 3, 2, 1])
commits = list(self.repo.log(self.repo.head, 'README', id_only=True, limit=25))
assert_equal(commits, [3, 1])
commits = list(self.repo.log(1, 'README', id_only=True, limit=25))
assert_equal(commits, [1])
commits = list(self.repo.log(self.repo.head, 'a/b/c/', id_only=True, limit=25))
assert_equal(commits, [4, 2])
commits = list(self.repo.log(3, 'a/b/c/', id_only=True, limit=25))
assert_equal(commits, [2])
assert_equal(
list(self.repo.log(self.repo.head, 'does/not/exist', id_only=True, limit=25)), [])
def test_notification_email(self):
setup_global_objects()
h.set_context('test', 'src', neighborhood='Projects')
repo_dir = pkg_resources.resource_filename(
'forgesvn', 'tests/data/')
self.repo = SM.Repository(
name='testsvn',
fs_path=repo_dir,
url_path='/test/',
tool='svn',
status='creating')
self.repo.refresh()
ThreadLocalORMSession.flush_all()
send_notifications(self.repo, [self.repo.rev_to_commit_id(1)])
ThreadLocalORMSession.flush_all()
n = M.Notification.query.find({'subject': '[test:src] New commit [r1] by rick446'}).first()
assert n
assert_in('By rick446', n.text)
assert_in('Create readme', n.text)
class _Test(unittest.TestCase):
idgen = ('obj_%d' % i for i in count())
def _make_tree(self, object_id, **kwargs):
t, isnew = M.repository.Tree.upsert(object_id)
repo = getattr(self, 'repo', None)
t.repo = repo
for k, v in six.iteritems(kwargs):
if isinstance(v, six.string_types):
obj = M.repository.Blob(
t, k, next(self.idgen))
t.blob_ids.append(Object(
name=k, id=obj._id))
else:
obj = self._make_tree(next(self.idgen), **v)
t.tree_ids.append(Object(
name=k, id=obj._id))
session(t).flush()
return t
def _make_commit(self, object_id, **tree_parts):
ci, isnew = M.repository.Commit.upsert(object_id)
if isnew:
ci.committed.email = c.user.email_addresses[0]
ci.authored.email = c.user.email_addresses[0]
dt = datetime.utcnow()
# BSON datetime resolution is to 1 millisecond, not 1 microsecond
# like Python. Round this now so it'll match the value that's
# pulled from MongoDB in the tests.
ci.authored.date = dt.replace(microsecond=dt.microsecond // 1000 * 1000)
ci.message = 'summary\n\nddescription'
ci.set_context(self.repo)
ci.tree_id = 't_' + object_id
ci.tree = self._make_tree(ci.tree_id, **tree_parts)
return ci, isnew
def _make_log(self, ci):
session(ci).flush(ci)
def setUp(self):
setup_basic_test()
setup_global_objects()
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
self.prefix = tg.config.get('scm.repos.root', '/')
class _TestWithRepo(_Test):
def setUp(self):
super(_TestWithRepo, self).setUp()
h.set_context('test', neighborhood='Projects')
c.project.install_app('svn', 'test1')
h.set_context('test', 'test1', neighborhood='Projects')
self.repo = M.Repository(name='test1', tool='svn')
self.repo._impl = mock.Mock(spec=M.RepositoryImplementation())
self.repo._impl.shorthand_for_commit = M.RepositoryImplementation.shorthand_for_commit
self.repo._impl.url_for_commit = (
lambda *a, **kw: M.RepositoryImplementation.url_for_commit(
self.repo._impl, *a, **kw))
self.repo._impl._repo = self.repo
self.repo._impl.all_commit_ids = lambda *a, **kw: []
self.repo._impl.commit().symbolic_ids = None
ThreadLocalORMSession.flush_all()
class _TestWithRepoAndCommit(_TestWithRepo):
def setUp(self):
super(_TestWithRepoAndCommit, self).setUp()
self.ci, isnew = self._make_commit('foo')
ThreadLocalORMSession.flush_all()
# ThreadLocalORMSession.close_all()
class TestRepo(_TestWithRepo):
def test_create(self):
assert self.repo.fs_path == os.path.join(self.prefix, 'svn/p/test/')
assert self.repo.url_path == '/p/test/'
assert self.repo.full_fs_path == os.path.join(
self.prefix, 'svn/p/test/test1')
def test_passthrough(self):
argless = ['init']
for fn in argless:
getattr(self.repo, fn)()
getattr(self.repo._impl, fn).assert_called_with()
unary = ['commit', 'open_blob']
for fn in unary:
getattr(self.repo, fn)('foo')
getattr(self.repo._impl, fn).assert_called_with('foo')
def test_shorthand_for_commit(self):
self.assertEqual(
self.repo.shorthand_for_commit('a' * 40),
'[aaaaaa]')
def test_url_for_commit(self):
self.assertEqual(
self.repo.url_for_commit('a' * 40),
'/p/test/test1/ci/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/')
@mock.patch('allura.model.repository.g.post_event')
def test_init_as_clone(self, post_event):
self.repo.init_as_clone('srcpath', 'srcname', 'srcurl')
assert self.repo.upstream_repo.name == 'srcname'
assert self.repo.upstream_repo.url == 'srcurl'
assert self.repo._impl.clone_from.called_with('srcpath')
post_event.assert_called_once_with('repo_cloned', 'srcurl', 'srcpath')
def test_latest(self):
ci = mock.Mock()
self.repo._impl.commit = mock.Mock(return_value=ci)
assert self.repo.latest() is ci
def test_index(self):
i = self.repo.index()
assert i['type_s'] == 'Repository', i
assert i['name_s'] == 'test1', i
def test_scm_host_url(self):
assert_equal(self.repo.clone_url('rw', 'nobody'),
'svn+ssh://nobody@localhost:8022/scm-repo/p/test/test1/')
assert_equal(self.repo.clone_url('https', 'nobody'),
'https://nobody@localhost:8022/scm-repo/p/test/test1/')
with h.push_config(self.repo.app.config.options, external_checkout_url='https://$username@foo.com/'):
assert_equal(self.repo.clone_url('https', 'user'),
'https://user@foo.com/')
def test_guess_type(self):
assert self.repo.guess_type('foo.txt') == ('text/plain', None)
assert self.repo.guess_type('foo.gbaer') == (
'application/octet-stream', None)
assert self.repo.guess_type('foo.html') == ('text/html', None)
assert self.repo.guess_type('.gitignore') == ('text/plain', None)
def test_refresh(self):
committer_name = 'Test Committer'
committer_email = 'test@example.com'
ci = mock.Mock()
ci.authored.name = committer_name
ci.committed.name = committer_name
ci.committed.email = committer_email
ci.author_url = '/u/test-committer/'
ci.activity_name = '[deadbeef]'
ci.activity_url = 'url'
ci.activity_extras = {}
del ci.node_id
self.repo._impl.commit = mock.Mock(return_value=ci)
self.repo._impl.new_commits = mock.Mock(
return_value=['foo%d' % i for i in range(100)])
self.repo._impl.all_commit_ids = mock.Mock(
return_value=['foo%d' % i for i in range(100)])
self.repo.symbolics_for_commit = mock.Mock(
return_value=[['master', 'branch'], []])
def refresh_commit_info(oid, seen, lazy=False):
M.repository.CommitDoc(dict(
authored=dict(
name=committer_name,
date=datetime(2010, 10, 8, 15, 32, 48, 0),
email=committer_email),
_id=oid)).m.insert()
self.repo._impl.refresh_commit_info = refresh_commit_info
_id = lambda oid: getattr(oid, '_id', str(oid))
self.repo.shorthand_for_commit = lambda oid: '[' + _id(oid) + ']'
self.repo.url_for_commit = lambda oid: '/ci/' + _id(oid) + '/'
self.repo.refresh()
ThreadLocalORMSession.flush_all()
notifications = M.Notification.query.find().all()
for n in notifications:
if '100 new commits' in n.subject:
assert_in('By Test Committer on 10/08/2010 15:32', n.text)
assert_in('http://localhost/ci/foo99/', n.text)
break
else:
assert False, 'Did not find notification'
assert M.Feed.query.find(dict(
author_name=committer_name)).count() == 100
def test_refresh_private(self):
ci = mock.Mock()
self.repo._impl.commit = mock.Mock(return_value=ci)
self.repo._impl.new_commits = mock.Mock(
return_value=['foo%d' % i for i in range(100)])
# make unreadable by *anonymous, so additional notification logic
# executes
self.repo.acl = []
c.project.acl = []
self.repo.refresh()
def test_push_upstream_context(self):
self.repo.init_as_clone('srcpath', '/p/test/svn/', '/p/test/svn/')
old_app_instance = M.Project.app_instance
try:
M.Project.app_instance = mock.Mock(return_value=ming.base.Object(
config=ming.base.Object(_id=None)))
with self.repo.push_upstream_context():
assert c.project.shortname == 'test'
finally:
M.Project.app_instance = old_app_instance
def test_pending_upstream_merges(self):
self.repo.init_as_clone('srcpath', '/p/test/svn/', '/p/test/svn/')
old_app_instance = M.Project.app_instance
try:
M.Project.app_instance = mock.Mock(return_value=ming.base.Object(
config=ming.base.Object(_id=None)))
self.repo.pending_upstream_merges()
finally:
M.Project.app_instance = old_app_instance
class TestRepoObject(_TestWithRepoAndCommit):
def test_upsert(self):
obj0, isnew0 = M.repository.Tree.upsert('foo1')
obj1, isnew1 = M.repository.Tree.upsert('foo1')
assert obj0 is obj1
assert isnew0 and not isnew1
def test_artifact_methods(self):
assert self.ci.index_id(
) == 'allura/model/repo/Commit#foo', self.ci.index_id()
assert self.ci.primary() is self.ci, self.ci.primary()
class TestCommit(_TestWithRepo):
def setUp(self):
super(TestCommit, self).setUp()
self.ci, isnew = self._make_commit(
'foo',
a=dict(
a=dict(
a='',
b='',),
b=''))
self.tree = self.ci.tree
impl = M.RepositoryImplementation()
impl._repo = self.repo
self.repo._impl.shorthand_for_commit = impl.shorthand_for_commit
self.repo._impl.url_for_commit = impl.url_for_commit
def test_upsert(self):
obj0, isnew0 = M.repository.Commit.upsert('foo')
obj1, isnew1 = M.repository.Commit.upsert('foo')
assert obj0 is obj1
assert not isnew1
u = M.User.by_username('test-admin')
assert self.ci.author_url == u.url()
assert self.ci.committer_url == u.url()
assert self.ci.tree is self.tree
assert self.ci.summary == 'summary'
assert self.ci.shorthand_id() == '[foo]'
assert self.ci.url() == '/p/test/test1/ci/foo/'
def test_get_path(self):
b = self.ci.get_path('a/a/a')
assert isinstance(b, M.repository.Blob)
x = self.ci.get_path('a/a')
assert isinstance(x, M.repository.Tree)
def _unique_blobs(self):
def counter():
counter.i += 1
return counter.i
counter.i = 0
blobs = defaultdict(counter)
return lambda blob: BytesIO(str(blobs[blob.path()]))
def test_diffs_file_renames(self):
def open_blob(blob):
blobs = {
'a': 'Leia',
'/b/a/a': 'Darth Vader',
'/b/a/b': 'Luke Skywalker',
'/b/b': 'Death Star will destroy you',
'/b/c': 'Luke Skywalker', # moved from /b/a/b
# moved from /b/b and modified
'/b/a/z': 'Death Star will destroy you\nALL',
}
return BytesIO(blobs.get(blob.path(), ''))
self.repo._impl.open_blob = open_blob
self.repo._impl.commit = mock.Mock(return_value=self.ci)
self.repo._impl.paged_diffs.return_value = {
'added': ['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'],
'changed': [],
'copied': [],
'renamed': [],
'removed': [],
'total': 5,
}
assert_equal(self.ci.diffs.added,
['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'])
assert (self.ci.diffs.copied
== self.ci.diffs.changed
== self.ci.diffs.removed
== [])
ci, isnew = self._make_commit(
'bar',
b=dict(
a=dict(
a='',
b='',),
b=''))
ci.parent_ids = ['foo']
self._make_log(ci)
self.repo._impl.paged_diffs.return_value = {
'added': ['b', 'b/a', 'b/a/a', 'b/a/b', 'b/b'],
'renamed': [],
'copied': [],
'changed': [],
'removed': ['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'],
'total': 10,
}
assert_equal(ci.diffs.added, ['b', 'b/a', 'b/a/a', 'b/a/b', 'b/b'])
assert_equal(ci.diffs.removed, ['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'])
assert (ci.diffs.copied
== ci.diffs.changed
== [])
ci, isnew = self._make_commit(
'baz',
b=dict(
a=dict(
z=''),
c=''))
ci.parent_ids = ['bar']
self._make_log(ci)
self.repo._impl.paged_diffs.return_value = {
'added': ['b/c', 'b/a/z'],
'removed': ['/b/a/b', 'b/b'],
'changed': [],
'copied': [
{
'new': 'b/c',
'old': 'b/a/b',
'ratio': 1,
'diff': '',
},
{
'new': 'b/a/z',
'old': 'b/b',
'ratio': 1,
'diff': '',
},
],
'renamed': [],
'total': 2
}
assert_equal(ci.diffs.added, ['b/a/z', 'b/c'])
assert_equal(ci.diffs.changed, [])
assert_equal(ci.diffs.removed, ['/b/a/b', 'b/b'])
# see mock for open_blob
assert_equal(len(ci.diffs.copied), 2)
assert_equal(ci.diffs.copied[1]['old'], 'b/a/b')
assert_equal(ci.diffs.copied[1]['new'], 'b/c')
assert_equal(ci.diffs.copied[1]['ratio'], 1)
assert_equal(ci.diffs.copied[1]['diff'], '')
assert_equal(ci.diffs.copied[0]['old'], 'b/b')
assert_equal(ci.diffs.copied[0]['new'], 'b/a/z')
def test_context(self):
self.ci.context()
class TestRename(unittest.TestCase):
def setUp(self):
setup_basic_test()
self.setup_with_tools()
@with_svn
def setup_with_tools(self):
setup_global_objects()
h.set_context('test', 'src', neighborhood='Projects')
repo_dir = pkg_resources.resource_filename(
'forgesvn', 'tests/data/')
c.app.repo.name = 'testsvn-rename'
c.app.repo.fs_path = repo_dir
self.repo = c.app.repo
self.repo.refresh()
self.rev = self.repo.commit('HEAD')
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_log_file_with_rename(self):
entry = list(self.repo.log(path='/dir/b.txt', id_only=False, limit=1))[0]
assert_equal(entry['id'], 3)
assert_equal(entry['rename_details']['path'], '/dir/a.txt')
assert_equal(
entry['rename_details']['commit_url'],
self.repo.url_for_commit(2) # previous revision
)
def test_check_changed_path(self):
changed_path = {'copyfrom_path': '/test/path', 'path': '/test/path2'}
result = self.repo._impl._check_changed_path(
changed_path, '/test/path2/file.txt')
assert_equal({'path': '/test/path2/file.txt',
'copyfrom_path': '/test/path/file.txt'}, result)
class TestDirectRepoAccess(object):
def setUp(self):
setup_basic_test()
self.setup_with_tools()
@with_svn
def setup_with_tools(self):
setup_global_objects()
h.set_context('test', 'src', neighborhood='Projects')
repo_dir = pkg_resources.resource_filename(
'forgesvn', 'tests/data/')
c.app.repo.name = 'testsvn'
c.app.repo.fs_path = repo_dir
self.repo = c.app.repo
self.repo.refresh()
self.rev = self.repo.commit('HEAD')
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_paged_diffs(self):
_id = self.repo._impl._oid(6)
diffs = self.repo.commit(_id).diffs
expected = {
'added': ['/ЗРЯЧИЙ_ТА_ПОБАЧИТЬ'],
'removed': [],
'changed': [],
'copied': [],
'renamed': [],
'total': 1,
}
assert_equals(diffs, expected)
_id = self.repo._impl._oid(2)
diffs = self.repo.commit(_id).diffs
expected = {
'added': ['/a', '/a/b', '/a/b/c', '/a/b/c/hello.txt'],
'removed': [],
'changed': [],
'renamed': [],
'copied': [],
'total': 4,
}
assert_equals(diffs, expected)
_id = self.repo._impl._oid(3)
diffs = self.repo.commit(_id).diffs
expected = {
'added': [],
'removed': [],
'renamed': [],
'changed': ['/README'],
'copied': [],
'total': 1,
}
assert_equals(diffs, expected)
_id = self.repo._impl._oid(4)
diffs = self.repo.commit(_id).diffs
expected = {
'added': [],
'removed': ['/a/b/c/hello.txt'],
'changed': [],
'renamed': [],
'copied': [],
'total': 1,
}
assert_equals(diffs, expected)
| 37.837743 | 119 | 0.53969 |
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
import os
import shutil
import unittest
from unittest import skipUnless
import pkg_resources
from itertools import count, product
from datetime import datetime
from zipfile import ZipFile
from io import BytesIO
from collections import defaultdict
from tg import tmpl_context as c, app_globals as g
import mock
from alluratest.tools import assert_equal, assert_in
from datadiff.tools import assert_equals
import tg
import ming
from ming.base import Object
from ming.orm import session, ThreadLocalORMSession
from testfixtures import TempDirectory
from alluratest.controller import setup_basic_test, setup_global_objects
from allura import model as M
from allura.model.repo_refresh import send_notifications
from allura.lib import helpers as h
from allura.webhooks import RepoPushWebhookSender
from allura.tests.model.test_repo import RepoImplTestBase
from forgesvn import model as SM
from forgesvn.model.svn import svn_path_exists
from forgesvn.tests import with_svn
from allura.tests.decorators import with_tool
import six
from io import open
from six.moves import range
class TestNewRepo(unittest.TestCase):
def setUp(self):
setup_basic_test()
self.setup_with_tools()
@with_svn
def setup_with_tools(self):
setup_global_objects()
h.set_context('test', 'src', neighborhood='Projects')
repo_dir = pkg_resources.resource_filename(
'forgesvn', 'tests/data/')
c.app.repo.name = 'testsvn'
c.app.repo.fs_path = repo_dir
self.repo = c.app.repo
self.repo.refresh()
self.rev = self.repo.commit('HEAD')
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_last_commit_for(self):
tree = self.rev.tree
for row in tree.ls():
assert row['last_commit']['author'] is not None
def test_commit(self):
latest_rev = 7
assert self.rev.primary() is self.rev
assert self.rev.index_id().startswith('allura/model/repo/Commit#')
self.rev.author_url
self.rev.committer_url
assert_equal(self.rev.tree._id, self.rev.tree_id)
assert_equal(self.rev.shorthand_id(), '[r{}]'.format(latest_rev))
assert_equal(self.rev.symbolic_ids, ([], []))
assert_equal(self.rev.url(), '/p/test/src/{}/'.format(latest_rev))
all_cis = list(self.repo.log(self.rev._id, limit=25))
assert_equal(len(all_cis), latest_rev)
self.rev.tree.ls()
assert_equal(self.rev.tree.readme(), ('README', 'This is readme\nAnother Line\n'))
assert_equal(self.rev.tree.path(), '/')
assert_equal(self.rev.tree.url(), '/p/test/src/{}/tree/'.format(latest_rev))
self.rev.tree.by_name['README']
assert self.rev.tree.is_blob('README') is True
assert_equal(self.rev.tree['a']['b']['c'].ls(), [])
self.assertRaises(KeyError, lambda: self.rev.tree['a']['b']['d'])
assert_equal(self.rev.authored_user, None)
assert_equal(self.rev.committed_user, None)
assert_equal(
sorted(self.rev.webhook_info.keys()),
sorted(['id', 'url', 'timestamp', 'message', 'author',
'committer', 'added', 'removed', 'renamed', 'modified', 'copied']))
class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
def setUp(self):
setup_basic_test()
self.setup_with_tools()
@with_svn
@with_tool('test', 'SVN', 'svn-tags', 'SVN with tags')
def setup_with_tools(self):
setup_global_objects()
repo_dir = pkg_resources.resource_filename(
'forgesvn', 'tests/data/')
with h.push_context('test', 'src', neighborhood='Projects'):
c.app.repo.name = 'testsvn'
c.app.repo.fs_path = repo_dir
self.repo = c.app.repo
self.repo.refresh()
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
with h.push_context('test', 'svn-tags', neighborhood='Projects'):
c.app.repo.name = 'testsvn-trunk-tags-branches'
c.app.repo.fs_path = repo_dir
self.svn_tags = c.app.repo
self.svn_tags.refresh()
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
h.set_context('test', 'src', neighborhood='Projects')
def test_init(self):
repo = SM.Repository(
name='testsvn',
fs_path=g.tmpdir + '/',
url_path='/test/',
tool='svn',
status='creating')
dirname = os.path.join(repo.fs_path, repo.name)
if os.path.exists(dirname):
shutil.rmtree(dirname)
repo.init()
shutil.rmtree(dirname)
def test_fork(self):
repo = SM.Repository(
name='testsvn',
fs_path=g.tmpdir + '/',
url_path='/test/',
tool='svn',
status='creating')
repo_path = pkg_resources.resource_filename(
'forgesvn', 'tests/data/testsvn')
dirname = os.path.join(repo.fs_path, repo.name)
if os.path.exists(dirname):
shutil.rmtree(dirname)
repo.init()
repo._impl.clone_from('file://' + repo_path)
assert not os.path.exists(
os.path.join(g.tmpdir, 'testsvn/hooks/pre-revprop-change'))
assert os.path.exists(
os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'))
assert os.access(
os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'), os.X_OK)
with open(os.path.join(g.tmpdir, 'testsvn/hooks/post-commit')) as f:
hook_data = f.read()
self.assertIn(
'curl -s http://localhost/auth/refresh_repo/p/test/src/\n',
hook_data)
self.assertIn('exec $DIR/post-commit-user "$@"\n', hook_data)
repo.refresh(notify=False)
assert len(list(repo.log(limit=100)))
shutil.rmtree(dirname)
@mock.patch('forgesvn.model.svn.tg')
def test_can_hotcopy(self, tg):
from forgesvn.model.svn import SVNImplementation
func = SVNImplementation.can_hotcopy
obj = mock.Mock(spec=SVNImplementation)
for combo in product(
['file:///myfile', 'http://myfile'],
[True, False],
['version 1.7', 'version 1.6', 'version 2.0.3']):
source_url = combo[0]
tg.config = {'scm.svn.hotcopy': combo[1]}
stdout = combo[2]
obj.check_call.return_value = stdout, '', 0
expected = (source_url.startswith('file://') and
tg.config['scm.svn.hotcopy'] and
stdout != 'version 1.6')
result = func(obj, source_url)
assert result == expected
@mock.patch('forgesvn.model.svn.g.post_event')
def test_clone(self, post_event):
repo = SM.Repository(
name='testsvn',
fs_path=g.tmpdir + '/',
url_path='/test/',
tool='svn',
status='creating')
repo_path = pkg_resources.resource_filename(
'forgesvn', 'tests/data/testsvn')
dirname = os.path.join(repo.fs_path, repo.name)
if os.path.exists(dirname):
shutil.rmtree(dirname)
repo.init()
repo._impl.clone_from('file://' + repo_path)
assert not os.path.exists(
os.path.join(g.tmpdir, 'testsvn/hooks/pre-revprop-change'))
assert os.path.exists(
os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'))
assert os.access(
os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'), os.X_OK)
with open(os.path.join(g.tmpdir, 'testsvn/hooks/post-commit')) as f:
c = f.read()
self.assertIn(
'curl -s http://localhost/auth/refresh_repo/p/test/src/\n', c)
self.assertIn('exec $DIR/post-commit-user "$@"\n', c)
repo.refresh(notify=False)
assert len(list(repo.log(limit=100)))
shutil.rmtree(dirname)
def test_index(self):
i = self.repo.index()
assert i['type_s'] == 'SVN Repository', i
def test_log_id_only(self):
entries = list(self.repo.log(id_only=True, limit=25))
assert_equal(entries, [7, 6, 5, 4, 3, 2, 1])
    def test_log(self):
        """Full log entries (id_only=False) carry complete commit
        metadata; check the oldest six revisions field-by-field."""
        entries = list(self.repo.log(id_only=False, limit=25))
        assert_equal(entries[len(entries)-6:],
                     [
            {'parents': [5],
             'refs': [],
             'committed': {
                 'date': datetime(2013, 11, 8, 13, 38, 11, 152821),
                 'name': 'coldmind', 'email': ''},
             'message': '',
             'rename_details': {},
             'id': 6,
             'authored': {
                 'date': datetime(2013, 11, 8, 13, 38, 11, 152821),
                 'name': 'coldmind',
                 'email': ''
             }, 'size': None},
            {'parents': [4],
             'refs': [],
             'committed': {
                 'date': datetime(2010, 11, 18, 20, 14, 21, 515743),
                 'name': 'rick446',
                 'email': ''},
             'message': 'Copied a => b',
             'rename_details': {},
             'id': 5,
             'authored': {
                 'date': datetime(2010, 11, 18, 20, 14, 21, 515743),
                 'name': 'rick446',
                 'email': ''},
             'size': None},
            {'parents': [3],
             'refs': [],
             'committed': {
                 'date': datetime(2010, 10, 8, 15, 32, 59, 383719),
                 'name': 'rick446',
                 'email': ''},
             'message': 'Remove hello.txt',
             'rename_details': {},
             'id': 4,
             'authored': {
                 'date': datetime(2010, 10, 8, 15, 32, 59, 383719),
                 'name': 'rick446',
                 'email': ''},
             'size': None},
            {'parents': [2],
             'refs': [],
             'committed': {
                 'date': datetime(2010, 10, 8, 15, 32, 48, 272296),
                 'name': 'rick446',
                 'email': ''},
             'message': 'Modify readme',
             'rename_details': {},
             'id': 3,
             'authored':
             {'date': datetime(2010, 10, 8, 15, 32, 48, 272296),
              'name': 'rick446',
              'email': ''},
             'size': None},
            {'parents': [1],
             'refs': [],
             'committed': {
                 'date': datetime(2010, 10, 8, 15, 32, 36, 221863),
                 'name': 'rick446',
                 'email': ''},
             'message': 'Add path',
             'rename_details': {},
             'id': 2,
             'authored': {
                 'date': datetime(2010, 10, 8, 15, 32, 36, 221863),
                 'name': 'rick446',
                 'email': ''},
             'size': None},
            {'parents': [],
             'refs': [],
             'committed': {
                 'date': datetime(2010, 10, 8, 15, 32, 7, 238375),
                 'name': 'rick446',
                 'email': ''},
             'message': 'Create readme',
             'rename_details': {},
             'id': 1,
             'authored': {
                 'date': datetime(2010, 10, 8, 15, 32, 7, 238375),
                 'name': 'rick446',
                 'email': ''},
             'size': None}])
    def test_log_file(self):
        """Log restricted to a single path returns only the revisions
        that touched it, with the per-revision file size filled in."""
        entries = list(self.repo.log(path='/README', id_only=False, limit=25))
        assert_equal(entries, [
            {'authored': {'date': datetime(2010, 10, 8, 15, 32, 48, 272296),
                          'email': '',
                          'name': 'rick446'},
             'committed': {'date': datetime(2010, 10, 8, 15, 32, 48, 272296),
                           'email': '',
                           'name': 'rick446'},
             'id': 3,
             'message': 'Modify readme',
             'parents': [2],
             'refs': [],
             'size': 28,
             'rename_details': {}},
            {'authored': {'date': datetime(2010, 10, 8, 15, 32, 7, 238375),
                          'email': '',
                          'name': 'rick446'},
             'committed': {'date': datetime(2010, 10, 8, 15, 32, 7, 238375),
                           'email': '',
                           'name': 'rick446'},
             'id': 1,
             'message': 'Create readme',
             'parents': [],
             'refs': [],
             'size': 15,
             'rename_details': {}},
        ])
def test_is_file(self):
assert self.repo.is_file('/README')
assert not self.repo.is_file('/a')
    def test_paged_diffs(self):
        """paged_diffs() defaults to the full diff set and supports
        start/end slicing of the 'added' paths."""
        entry = self.repo.commit(next(self.repo.log(2, id_only=True, limit=1)))
        self.assertEqual(entry.diffs, entry.paged_diffs())
        self.assertEqual(entry.diffs, entry.paged_diffs(start=0))
        added_expected = entry.diffs.added[1:3]
        expected = dict(
            copied=[], changed=[], removed=[], renamed=[],
            added=added_expected, total=4)
        actual = entry.paged_diffs(start=1, end=3)
        self.assertEqual(expected, actual)
        # an id unknown to the repo still yields the same result shape
        fake_id = self.repo._impl._oid(100)
        empty = M.repository.Commit(_id=fake_id, repo=self.repo).paged_diffs()
        self.assertEqual(sorted(actual.keys()), sorted(empty.keys()))
def test_diff_create_file(self):
entry = self.repo.commit(next(self.repo.log(1, id_only=True, limit=1)))
self.assertEqual(
entry.diffs, dict(
copied=[], changed=[], renamed=[],
removed=[], added=['/README'], total=1))
    def test_diff_create_path(self):
        """r2 added the /a/b/c directory tree plus hello.txt."""
        entry = self.repo.commit(next(self.repo.log(2, id_only=True, limit=1)))
        actual = entry.diffs
        # sort in place: ``actual`` aliases ``entry.diffs``, so the
        # comparison below sees the sorted 'added' list
        actual.added = sorted(actual.added)
        self.assertEqual(
            entry.diffs, dict(
                copied=[], changed=[], removed=[], renamed=[],
                added=sorted([
                    '/a', '/a/b', '/a/b/c',
                    '/a/b/c/hello.txt']), total=4))
def test_diff_modify_file(self):
entry = self.repo.commit(next(self.repo.log(3, id_only=True, limit=1)))
self.assertEqual(
entry.diffs, dict(
copied=[], changed=['/README'], renamed=[],
removed=[], added=[], total=1))
def test_diff_delete(self):
entry = self.repo.commit(next(self.repo.log(4, id_only=True, limit=1)))
self.assertEqual(
entry.diffs, dict(
copied=[], changed=[], renamed=[],
removed=['/a/b/c/hello.txt'], added=[], total=1))
def test_diff_copy(self):
entry = self.repo.commit(next(self.repo.log(5, id_only=True, limit=1)))
assert_equals(dict(entry.diffs), dict(
copied=[{'new': '/b', 'old': '/a', 'ratio': 1}], renamed=[],
changed=[], removed=[], added=[], total=1))
def test_commit(self):
entry = self.repo.commit(1)
assert entry.committed.name == 'rick446'
assert entry.message
    def test_svn_path_exists(self):
        """svn_path_exists() reports real paths in a file:// repo and
        queries pysvn with a single non-recursive info2 call."""
        repo_path = pkg_resources.resource_filename(
            'forgesvn', 'tests/data/testsvn')
        assert svn_path_exists("file://%s/a" % repo_path)
        assert svn_path_exists("file://%s" % repo_path)
        assert not svn_path_exists("file://%s/badpath" % repo_path)
        # verify the exact pysvn call shape (one info2, recurse off)
        with mock.patch('forgesvn.model.svn.pysvn') as pysvn:
            svn_path_exists('dummy')
            pysvn.Client.return_value.info2.assert_called_once_with(
                'dummy',
                revision=pysvn.Revision.return_value,
                recurse=False)
    @skipUnless(os.path.exists(tg.config.get('scm.repos.tarball.zip_binary', '/usr/bin/zip')), 'zip binary is missing')
    def test_tarball(self):
        """A snapshot zip for r1 lands at the expected path with the
        expected member list."""
        tmpdir = tg.config['scm.repos.tarball.root']
        assert_equal(self.repo.tarball_path,
                     os.path.join(tmpdir, 'svn/t/te/test/testsvn'))
        assert_equal(self.repo.tarball_url('1'),
                     'file:///svn/t/te/test/testsvn/test-src-r1.zip')
        self.repo.tarball('1')
        assert os.path.isfile(
            os.path.join(tmpdir, "svn/t/te/test/testsvn/test-src-r1.zip"))
        tarball_zip = ZipFile(
            os.path.join(tmpdir, 'svn/t/te/test/testsvn/test-src-r1.zip'), 'r')
        assert_equal(tarball_zip.namelist(),
                     ['test-src-r1/', 'test-src-r1/README'])
        # clean up so later runs (and other tests) start fresh
        shutil.rmtree(self.repo.tarball_path.encode('utf-8'),
                      ignore_errors=True)
    @skipUnless(os.path.exists(tg.config.get('scm.repos.tarball.zip_binary', '/usr/bin/zip')), 'zip binary is missing')
    def test_tarball_paths(self):
        """Snapshot zips for a tag, a directory of tags, the implicit
        trunk default, and a trunk-less repo root."""
        rev = '19'
        h.set_context('test', 'svn-tags', neighborhood='Projects')
        tmpdir = tg.config['scm.repos.tarball.root']
        tarball_path = os.path.join(tmpdir, 'svn/t/te/test/testsvn-trunk-tags-branches/')
        # a tag
        self.svn_tags.tarball(rev, '/tags/tag-1.0/')
        fn = tarball_path + 'test-svn-tags-r19-tags-tag-1.0.zip'
        assert os.path.isfile(fn), fn
        snapshot = ZipFile(fn, 'r')
        tag_content = sorted(['test-svn-tags-r19-tags-tag-1.0/',
                              'test-svn-tags-r19-tags-tag-1.0/svn-commit.tmp',
                              'test-svn-tags-r19-tags-tag-1.0/README'])
        assert_equal(sorted(snapshot.namelist()), tag_content)
        os.remove(fn)
        # a directory (of tags)
        self.svn_tags.tarball(rev, '/tags/')
        fn = tarball_path + 'test-svn-tags-r19-tags.zip'
        assert os.path.isfile(fn), fn
        snapshot = ZipFile(fn, 'r')
        tags_content = sorted(['test-svn-tags-r19-tags/',
                               'test-svn-tags-r19-tags/tag-1.0/',
                               'test-svn-tags-r19-tags/tag-1.0/svn-commit.tmp',
                               'test-svn-tags-r19-tags/tag-1.0/README'])
        assert_equal(sorted(snapshot.namelist()), tags_content)
        os.remove(fn)
        # no path, but there are trunk in the repo
        # expect snapshot of trunk
        self.svn_tags.tarball(rev)
        fn = tarball_path + 'test-svn-tags-r19-trunk.zip'
        assert os.path.isfile(fn), fn
        snapshot = ZipFile(fn, 'r')
        trunk_content = sorted(['test-svn-tags-r19-trunk/',
                                'test-svn-tags-r19-trunk/aaa.txt',
                                'test-svn-tags-r19-trunk/bbb.txt',
                                'test-svn-tags-r19-trunk/ccc.txt',
                                'test-svn-tags-r19-trunk/README'])
        assert_equal(sorted(snapshot.namelist()), trunk_content)
        os.remove(fn)
        # no path, and no trunk dir
        # expect snapshot of repo root
        h.set_context('test', 'src', neighborhood='Projects')
        fn = os.path.join(tmpdir, 'svn/t/te/test/testsvn/test-src-r1.zip')
        self.repo.tarball('1')
        assert os.path.isfile(fn), fn
        snapshot = ZipFile(fn, 'r')
        assert_equal(snapshot.namelist(), ['test-src-r1/', 'test-src-r1/README'])
        shutil.rmtree(os.path.join(tmpdir, 'svn/t/te/test/testsvn/'),
                      ignore_errors=True)
        shutil.rmtree(tarball_path, ignore_errors=True)
    def test_is_empty(self):
        """The fixture repo has commits; a freshly-initialized repo is
        empty both before and after refresh."""
        assert not self.repo.is_empty()
        with TempDirectory() as d:
            repo2 = SM.Repository(
                name='test',
                fs_path=d.path,
                url_path='/test/',
                tool='svn',
                status='creating')
            repo2.init()
            assert repo2.is_empty()
            repo2.refresh()
            ThreadLocalORMSession.flush_all()
            assert repo2.is_empty()
    def test_webhook_payload(self):
        """The repo-push webhook payload has before/after markers, one
        entry per commit, and repository metadata."""
        sender = RepoPushWebhookSender()
        all_commits = list(self.repo.all_commit_ids())
        start = len(all_commits) - 6  # only get a few so test doesn't have to change after new testdata commits
        cids = all_commits[start:start+2]
        payload = sender.get_payload(commit_ids=cids)
        expected_payload = {
            'size': 2,
            'after': 'r6',
            'before': 'r4',
            'commits': [{
                'id': 'r6',
                'url': 'http://localhost/p/test/src/6/',
                'timestamp': datetime(2013, 11, 8, 13, 38, 11, 152000),
                'message': '',
                'author': {'name': 'coldmind',
                           'email': '',
                           'username': ''},
                'committer': {'name': 'coldmind',
                              'email': '',
                              'username': ''},
                'added': ['/ЗРЯЧИЙ_ТА_ПОБАЧИТЬ'],
                'removed': [],
                'modified': [],
                'copied': [],
                'renamed': [],
            }, {
                'id': 'r5',
                'url': 'http://localhost/p/test/src/5/',
                'timestamp': datetime(2010, 11, 18, 20, 14, 21, 515000),
                'message': 'Copied a => b',
                'author': {'name': 'rick446',
                           'email': '',
                           'username': ''},
                'committer': {'name': 'rick446',
                              'email': '',
                              'username': ''},
                'added': [],
                'removed': [],
                'modified': [],
                'copied': [
                    {'new': '/b', 'old': '/a', 'ratio': 1},
                ],
                'renamed': [],
            }],
            'repository': {
                'name': 'SVN',
                'full_name': '/p/test/src/',
                'url': 'http://localhost/p/test/src/',
            },
        }
        assert_equals(payload, expected_payload)
class TestSVNRev(unittest.TestCase):
    """Tests for a single SVN revision (commit) object of the bundled
    'testsvn' fixture repository."""
    def setUp(self):
        setup_basic_test()
        self.setup_with_tools()
    @with_svn
    def setup_with_tools(self):
        """Point the app at the fixture repo and grab commit 1."""
        setup_global_objects()
        h.set_context('test', 'src', neighborhood='Projects')
        repo_dir = pkg_resources.resource_filename(
            'forgesvn', 'tests/data/')
        c.app.repo.name = 'testsvn'
        c.app.repo.fs_path = repo_dir
        self.repo = c.app.repo
        self.repo.refresh()
        self.rev = self.repo.commit(1)
        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()
    def test_url(self):
        assert self.rev.url().endswith('/1/')
    def test_primary(self):
        assert self.rev.primary() == self.rev
    def test_shorthand(self):
        assert self.rev.shorthand_id() == '[r1]'
    def test_diff(self):
        # smoke test: just make sure diff collections can be materialized
        diffs = (self.rev.diffs.added
                 + self.rev.diffs.removed
                 + self.rev.diffs.changed
                 + self.rev.diffs.copied)
        for d in diffs:
            print(d)
    def _oid(self, rev_id):
        # repo-scoped object id for a revision number
        return '%s:%s' % (self.repo._id, rev_id)
    def test_log(self):
        """Revision log filtering by starting rev and path."""
        commits = list(self.repo.log(self.repo.head, id_only=True, limit=25))
        assert_equal(commits, [7, 6, 5, 4, 3, 2, 1])
        commits = list(self.repo.log(self.repo.head, 'README', id_only=True, limit=25))
        assert_equal(commits, [3, 1])
        commits = list(self.repo.log(1, 'README', id_only=True, limit=25))
        assert_equal(commits, [1])
        commits = list(self.repo.log(self.repo.head, 'a/b/c/', id_only=True, limit=25))
        assert_equal(commits, [4, 2])
        commits = list(self.repo.log(3, 'a/b/c/', id_only=True, limit=25))
        assert_equal(commits, [2])
        assert_equal(
            list(self.repo.log(self.repo.head, 'does/not/exist', id_only=True, limit=25)), [])
    def test_notification_email(self):
        """A new-commit notification includes author and commit message."""
        setup_global_objects()
        h.set_context('test', 'src', neighborhood='Projects')
        repo_dir = pkg_resources.resource_filename(
            'forgesvn', 'tests/data/')
        self.repo = SM.Repository(
            name='testsvn',
            fs_path=repo_dir,
            url_path='/test/',
            tool='svn',
            status='creating')
        self.repo.refresh()
        ThreadLocalORMSession.flush_all()
        send_notifications(self.repo, [self.repo.rev_to_commit_id(1)])
        ThreadLocalORMSession.flush_all()
        n = M.Notification.query.find({'subject': '[test:src] New commit [r1] by rick446'}).first()
        assert n
        assert_in('By rick446', n.text)
        assert_in('Create readme', n.text)
class _Test(unittest.TestCase):
    """Base fixture with factories that build fake Tree/Commit model
    objects directly in the datastore (no real SCM behind them)."""
    # shared generator of unique artifact ids for factory-created objects
    idgen = ('obj_%d' % i for i in count())
    def _make_tree(self, object_id, **kwargs):
        """Upsert a Tree; string-valued kwargs become blobs, dict-valued
        kwargs become subtrees built recursively."""
        t, isnew = M.repository.Tree.upsert(object_id)
        repo = getattr(self, 'repo', None)
        t.repo = repo
        for k, v in six.iteritems(kwargs):
            if isinstance(v, six.string_types):
                obj = M.repository.Blob(
                    t, k, next(self.idgen))
                t.blob_ids.append(Object(
                    name=k, id=obj._id))
            else:
                obj = self._make_tree(next(self.idgen), **v)
                t.tree_ids.append(Object(
                    name=k, id=obj._id))
        session(t).flush()
        return t
    def _make_commit(self, object_id, **tree_parts):
        """Upsert a Commit authored by the current user, with a tree
        built from ``tree_parts`` via :meth:`_make_tree`."""
        ci, isnew = M.repository.Commit.upsert(object_id)
        if isnew:
            ci.committed.email = c.user.email_addresses[0]
            ci.authored.email = c.user.email_addresses[0]
            dt = datetime.utcnow()
            # truncate to millisecond resolution to match BSON datetimes
            ci.authored.date = dt.replace(microsecond=dt.microsecond // 1000 * 1000)
            ci.message = 'summary\n\nddescription'
            ci.set_context(self.repo)
            ci.tree_id = 't_' + object_id
            ci.tree = self._make_tree(ci.tree_id, **tree_parts)
        return ci, isnew
    def _make_log(self, ci):
        # persist the commit so log-based queries can see it
        session(ci).flush(ci)
    def setUp(self):
        setup_basic_test()
        setup_global_objects()
        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()
        self.prefix = tg.config.get('scm.repos.root', '/')
class _TestWithRepo(_Test):
    """Fixture with a project-scoped svn Repository whose implementation
    is a mock, so model-level behavior is tested without touching svn."""
    def setUp(self):
        super(_TestWithRepo, self).setUp()
        h.set_context('test', neighborhood='Projects')
        c.project.install_app('svn', 'test1')
        h.set_context('test', 'test1', neighborhood='Projects')
        self.repo = M.Repository(name='test1', tool='svn')
        self.repo._impl = mock.Mock(spec=M.RepositoryImplementation())
        # delegate the url/shorthand helpers back to real implementations
        self.repo._impl.shorthand_for_commit = M.RepositoryImplementation.shorthand_for_commit
        self.repo._impl.url_for_commit = (
            lambda *a, **kw: M.RepositoryImplementation.url_for_commit(
                self.repo._impl, *a, **kw))
        self.repo._impl._repo = self.repo
        self.repo._impl.all_commit_ids = lambda *a, **kw: []
        self.repo._impl.commit().symbolic_ids = None
        ThreadLocalORMSession.flush_all()
class _TestWithRepoAndCommit(_TestWithRepo):
    """Like _TestWithRepo, but with one pre-made commit ('foo') on self.ci."""
    def setUp(self):
        super(_TestWithRepoAndCommit, self).setUp()
        commit, _isnew = self._make_commit('foo')
        self.ci = commit
        ThreadLocalORMSession.flush_all()
class TestRepo(_TestWithRepo):
    """Model-level Repository behavior, exercised against a mocked
    RepositoryImplementation (see _TestWithRepo.setUp)."""
    def test_create(self):
        """Paths are derived from the configured repos root and project."""
        assert self.repo.fs_path == os.path.join(self.prefix, 'svn/p/test/')
        assert self.repo.url_path == '/p/test/'
        assert self.repo.full_fs_path == os.path.join(
            self.prefix, 'svn/p/test/test1')
    def test_passthrough(self):
        """Repository methods delegate straight to the implementation."""
        argless = ['init']
        for fn in argless:
            getattr(self.repo, fn)()
            getattr(self.repo._impl, fn).assert_called_with()
        unary = ['commit', 'open_blob']
        for fn in unary:
            getattr(self.repo, fn)('foo')
            getattr(self.repo._impl, fn).assert_called_with('foo')
    def test_shorthand_for_commit(self):
        self.assertEqual(
            self.repo.shorthand_for_commit('a' * 40),
            '[aaaaaa]')
    def test_url_for_commit(self):
        self.assertEqual(
            self.repo.url_for_commit('a' * 40),
            '/p/test/test1/ci/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/')
    @mock.patch('allura.model.repository.g.post_event')
    def test_init_as_clone(self, post_event):
        """Cloning records the upstream repo and fires repo_cloned."""
        self.repo.init_as_clone('srcpath', 'srcname', 'srcurl')
        assert self.repo.upstream_repo.name == 'srcname'
        assert self.repo.upstream_repo.url == 'srcurl'
        # was ``assert ...clone_from.called_with('srcpath')`` which is
        # always truthy (``called_with`` is just a child Mock); use the
        # real mock assertion instead
        self.repo._impl.clone_from.assert_called_with('srcpath')
        post_event.assert_called_once_with('repo_cloned', 'srcurl', 'srcpath')
    def test_latest(self):
        ci = mock.Mock()
        self.repo._impl.commit = mock.Mock(return_value=ci)
        assert self.repo.latest() is ci
    def test_index(self):
        i = self.repo.index()
        assert i['type_s'] == 'Repository', i
        assert i['name_s'] == 'test1', i
    def test_scm_host_url(self):
        """Clone URLs per protocol, including the external override."""
        assert_equal(self.repo.clone_url('rw', 'nobody'),
                     'svn+ssh://nobody@localhost:8022/scm-repo/p/test/test1/')
        assert_equal(self.repo.clone_url('https', 'nobody'),
                     'https://nobody@localhost:8022/scm-repo/p/test/test1/')
        with h.push_config(self.repo.app.config.options, external_checkout_url='https://$username@foo.com/'):
            assert_equal(self.repo.clone_url('https', 'user'),
                         'https://user@foo.com/')
    def test_guess_type(self):
        assert self.repo.guess_type('foo.txt') == ('text/plain', None)
        assert self.repo.guess_type('foo.gbaer') == (
            'application/octet-stream', None)
        assert self.repo.guess_type('foo.html') == ('text/html', None)
        assert self.repo.guess_type('.gitignore') == ('text/plain', None)
    def test_refresh(self):
        """Refreshing 100 mocked commits produces a batched notification
        and one feed entry per commit."""
        committer_name = 'Test Committer'
        committer_email = 'test@example.com'
        ci = mock.Mock()
        ci.authored.name = committer_name
        ci.committed.name = committer_name
        ci.committed.email = committer_email
        ci.author_url = '/u/test-committer/'
        ci.activity_name = '[deadbeef]'
        ci.activity_url = 'url'
        ci.activity_extras = {}
        del ci.node_id
        self.repo._impl.commit = mock.Mock(return_value=ci)
        self.repo._impl.new_commits = mock.Mock(
            return_value=['foo%d' % i for i in range(100)])
        self.repo._impl.all_commit_ids = mock.Mock(
            return_value=['foo%d' % i for i in range(100)])
        self.repo.symbolics_for_commit = mock.Mock(
            return_value=[['master', 'branch'], []])
        def refresh_commit_info(oid, seen, lazy=False):
            # fake the per-commit doc refresh that a real impl would do
            M.repository.CommitDoc(dict(
                authored=dict(
                    name=committer_name,
                    date=datetime(2010, 10, 8, 15, 32, 48, 0),
                    email=committer_email),
                _id=oid)).m.insert()
        self.repo._impl.refresh_commit_info = refresh_commit_info
        _id = lambda oid: getattr(oid, '_id', str(oid))
        self.repo.shorthand_for_commit = lambda oid: '[' + _id(oid) + ']'
        self.repo.url_for_commit = lambda oid: '/ci/' + _id(oid) + '/'
        self.repo.refresh()
        ThreadLocalORMSession.flush_all()
        notifications = M.Notification.query.find().all()
        for n in notifications:
            if '100 new commits' in n.subject:
                assert_in('By Test Committer on 10/08/2010 15:32', n.text)
                assert_in('http://localhost/ci/foo99/', n.text)
                break
        else:
            assert False, 'Did not find notification'
        assert M.Feed.query.find(dict(
            author_name=committer_name)).count() == 100
    def test_refresh_private(self):
        """Refreshing a repo with empty ACLs must not blow up."""
        ci = mock.Mock()
        self.repo._impl.commit = mock.Mock(return_value=ci)
        self.repo._impl.new_commits = mock.Mock(
            return_value=['foo%d' % i for i in range(100)])
        self.repo.acl = []
        c.project.acl = []
        self.repo.refresh()
    def test_push_upstream_context(self):
        """push_upstream_context() switches into the upstream project."""
        self.repo.init_as_clone('srcpath', '/p/test/svn/', '/p/test/svn/')
        old_app_instance = M.Project.app_instance
        try:
            M.Project.app_instance = mock.Mock(return_value=ming.base.Object(
                config=ming.base.Object(_id=None)))
            with self.repo.push_upstream_context():
                assert c.project.shortname == 'test'
        finally:
            M.Project.app_instance = old_app_instance
    def test_pending_upstream_merges(self):
        """Smoke test: pending_upstream_merges() runs with mocked app."""
        self.repo.init_as_clone('srcpath', '/p/test/svn/', '/p/test/svn/')
        old_app_instance = M.Project.app_instance
        try:
            M.Project.app_instance = mock.Mock(return_value=ming.base.Object(
                config=ming.base.Object(_id=None)))
            self.repo.pending_upstream_merges()
        finally:
            M.Project.app_instance = old_app_instance
class TestRepoObject(_TestWithRepoAndCommit):
    """Repository artifact objects: Tree upsert and Commit artifact API."""
    def test_upsert(self):
        """Upserting the same id twice returns one shared object; only
        the first call reports it as new."""
        first, first_is_new = M.repository.Tree.upsert('foo1')
        second, second_is_new = M.repository.Tree.upsert('foo1')
        assert first is second
        assert first_is_new
        assert not second_is_new
    def test_artifact_methods(self):
        """The fixture commit exposes the standard artifact identity API."""
        index_id = self.ci.index_id()
        assert index_id == 'allura/model/repo/Commit#foo', index_id
        assert self.ci.primary() is self.ci, self.ci.primary()
class TestCommit(_TestWithRepo):
    """Commit model behavior driven by a mocked implementation, with
    paged_diffs responses canned per test."""
    def setUp(self):
        super(TestCommit, self).setUp()
        # commit 'foo' with tree: a/{a/{a,b}, b}
        self.ci, isnew = self._make_commit(
            'foo',
            a=dict(
                a=dict(
                    a='',
                    b='',),
                b=''))
        self.tree = self.ci.tree
        impl = M.RepositoryImplementation()
        impl._repo = self.repo
        # use the real shorthand/url helpers, not mock auto-attributes
        self.repo._impl.shorthand_for_commit = impl.shorthand_for_commit
        self.repo._impl.url_for_commit = impl.url_for_commit
    def test_upsert(self):
        """Re-upserting 'foo' returns the same object and derived
        attributes (urls, summary, shorthand) resolve correctly."""
        obj0, isnew0 = M.repository.Commit.upsert('foo')
        obj1, isnew1 = M.repository.Commit.upsert('foo')
        assert obj0 is obj1
        assert not isnew1
        u = M.User.by_username('test-admin')
        assert self.ci.author_url == u.url()
        assert self.ci.committer_url == u.url()
        assert self.ci.tree is self.tree
        assert self.ci.summary == 'summary'
        assert self.ci.shorthand_id() == '[foo]'
        assert self.ci.url() == '/p/test/test1/ci/foo/'
    def test_get_path(self):
        """get_path resolves to a Blob for files, Tree for directories."""
        b = self.ci.get_path('a/a/a')
        assert isinstance(b, M.repository.Blob)
        x = self.ci.get_path('a/a')
        assert isinstance(x, M.repository.Tree)
    def _unique_blobs(self):
        """Return an open_blob stub yielding distinct content per path.

        NOTE(review): ``BytesIO(str(...))`` is a py2-ism; on py3 this
        would need bytes — confirm whether this helper is still used.
        """
        def counter():
            counter.i += 1
            return counter.i
        counter.i = 0
        blobs = defaultdict(counter)
        return lambda blob: BytesIO(str(blobs[blob.path()]))
    def test_diffs_file_renames(self):
        """diffs reflect the canned paged_diffs responses, including
        copy detection, across a three-commit chain foo -> bar -> baz."""
        def open_blob(blob):
            # NOTE(review): str content in BytesIO is a py2-ism; appears
            # unused here since paged_diffs is mocked — confirm
            blobs = {
                'a': 'Leia',
                '/b/a/a': 'Darth Vader',
                '/b/a/b': 'Luke Skywalker',
                '/b/b': 'Death Star will destroy you',
                '/b/c': 'Luke Skywalker',
                '/b/a/z': 'Death Star will destroy you\nALL',
            }
            return BytesIO(blobs.get(blob.path(), ''))
        self.repo._impl.open_blob = open_blob
        self.repo._impl.commit = mock.Mock(return_value=self.ci)
        self.repo._impl.paged_diffs.return_value = {
            'added': ['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'],
            'changed': [],
            'copied': [],
            'renamed': [],
            'removed': [],
            'total': 5,
        }
        assert_equal(self.ci.diffs.added,
                     ['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'])
        assert (self.ci.diffs.copied
                == self.ci.diffs.changed
                == self.ci.diffs.removed
                == [])
        ci, isnew = self._make_commit(
            'bar',
            b=dict(
                a=dict(
                    a='',
                    b='',),
                b=''))
        ci.parent_ids = ['foo']
        self._make_log(ci)
        self.repo._impl.paged_diffs.return_value = {
            'added': ['b', 'b/a', 'b/a/a', 'b/a/b', 'b/b'],
            'renamed': [],
            'copied': [],
            'changed': [],
            'removed': ['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'],
            'total': 10,
        }
        assert_equal(ci.diffs.added, ['b', 'b/a', 'b/a/a', 'b/a/b', 'b/b'])
        assert_equal(ci.diffs.removed, ['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'])
        assert (ci.diffs.copied
                == ci.diffs.changed
                == [])
        ci, isnew = self._make_commit(
            'baz',
            b=dict(
                a=dict(
                    z=''),
                c=''))
        ci.parent_ids = ['bar']
        self._make_log(ci)
        self.repo._impl.paged_diffs.return_value = {
            'added': ['b/c', 'b/a/z'],
            'removed': ['/b/a/b', 'b/b'],
            'changed': [],
            'copied': [
                {
                    'new': 'b/c',
                    'old': 'b/a/b',
                    'ratio': 1,
                    'diff': '',
                },
                {
                    'new': 'b/a/z',
                    'old': 'b/b',
                    'ratio': 1,
                    'diff': '',
                },
            ],
            'renamed': [],
            'total': 2
        }
        assert_equal(ci.diffs.added, ['b/a/z', 'b/c'])
        assert_equal(ci.diffs.changed, [])
        assert_equal(ci.diffs.removed, ['/b/a/b', 'b/b'])
        assert_equal(len(ci.diffs.copied), 2)
        assert_equal(ci.diffs.copied[1]['old'], 'b/a/b')
        assert_equal(ci.diffs.copied[1]['new'], 'b/c')
        assert_equal(ci.diffs.copied[1]['ratio'], 1)
        assert_equal(ci.diffs.copied[1]['diff'], '')
        assert_equal(ci.diffs.copied[0]['old'], 'b/b')
        assert_equal(ci.diffs.copied[0]['new'], 'b/a/z')
    def test_context(self):
        # smoke test: context() runs without error
        self.ci.context()
class TestRename(unittest.TestCase):
    """Rename tracking against the bundled 'testsvn-rename' fixture repo."""
    def setUp(self):
        setup_basic_test()
        self.setup_with_tools()
    @with_svn
    def setup_with_tools(self):
        """Point the app at the rename fixture repo and refresh it."""
        setup_global_objects()
        h.set_context('test', 'src', neighborhood='Projects')
        repo_dir = pkg_resources.resource_filename(
            'forgesvn', 'tests/data/')
        c.app.repo.name = 'testsvn-rename'
        c.app.repo.fs_path = repo_dir
        self.repo = c.app.repo
        self.repo.refresh()
        self.rev = self.repo.commit('HEAD')
        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()
    def test_log_file_with_rename(self):
        """Log of a renamed file reports the old path and rename commit."""
        entry = list(self.repo.log(path='/dir/b.txt', id_only=False, limit=1))[0]
        assert_equal(entry['id'], 3)
        assert_equal(entry['rename_details']['path'], '/dir/a.txt')
        assert_equal(
            entry['rename_details']['commit_url'],
            self.repo.url_for_commit(2)
        )
    def test_check_changed_path(self):
        """_check_changed_path maps a file inside a copied directory back
        to its copyfrom path."""
        changed_path = {'copyfrom_path': '/test/path', 'path': '/test/path2'}
        result = self.repo._impl._check_changed_path(
            changed_path, '/test/path2/file.txt')
        assert_equal({'path': '/test/path2/file.txt',
                      'copyfrom_path': '/test/path/file.txt'}, result)
class TestDirectRepoAccess(object):
    """Diff computation via direct repo access on the 'testsvn' fixture."""
    def setUp(self):
        setup_basic_test()
        self.setup_with_tools()
    @with_svn
    def setup_with_tools(self):
        """Point the test app at the bundled fixture repo and refresh."""
        setup_global_objects()
        h.set_context('test', 'src', neighborhood='Projects')
        repo_dir = pkg_resources.resource_filename(
            'forgesvn', 'tests/data/')
        c.app.repo.name = 'testsvn'
        c.app.repo.fs_path = repo_dir
        self.repo = c.app.repo
        self.repo.refresh()
        self.rev = self.repo.commit('HEAD')
        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()
    def test_paged_diffs(self):
        """Each revision's diffs classify paths correctly.

        The original four copy-pasted assertion stanzas are folded into
        one data-driven loop over (revision, expected-diffs) pairs.
        """
        def _expected(**overrides):
            # all-empty diff dict with total=0, overridden per case
            base = {'added': [], 'removed': [], 'changed': [],
                    'copied': [], 'renamed': [], 'total': 0}
            base.update(overrides)
            return base
        cases = [
            # r6 added a single (non-ascii) path
            (6, _expected(added=['/ЗРЯЧИЙ_ТА_ПОБАЧИТЬ'], total=1)),
            # r2 added the /a/b/c tree plus hello.txt
            (2, _expected(added=['/a', '/a/b', '/a/b/c', '/a/b/c/hello.txt'],
                          total=4)),
            # r3 modified /README
            (3, _expected(changed=['/README'], total=1)),
            # r4 removed hello.txt
            (4, _expected(removed=['/a/b/c/hello.txt'], total=1)),
        ]
        for rev, expected in cases:
            _id = self.repo._impl._oid(rev)
            diffs = self.repo.commit(_id).diffs
            assert_equals(diffs, expected)
| true | true |
f71a788ec1af6202640b3afb171e260ba38421a6 | 18,525 | py | Python | lib/spack/spack/cmd/install.py | padamson/spack | d3f67a48552691b4846ccc4a10f76740b154090c | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | lib/spack/spack/cmd/install.py | padamson/spack | d3f67a48552691b4846ccc4a10f76740b154090c | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 11 | 2021-03-15T09:26:41.000Z | 2022-02-28T15:08:23.000Z | lib/spack/spack/cmd/install.py | padamson/spack | d3f67a48552691b4846ccc4a10f76740b154090c | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import os
import shutil
import sys
import textwrap
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import spack.build_environment
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.fetch_strategy
import spack.monitor
import spack.paths
import spack.report
from spack.error import SpackError
from spack.installer import PackageInstaller
description = "build and install packages"
section = "build"
level = "short"
def update_kwargs_from_args(args, kwargs):
    """Parse cli arguments and construct a dictionary
    that will be passed to the package installer.

    Mutates ``kwargs`` in place; returns None.
    """
    kwargs.update({
        'fail_fast': args.fail_fast,
        'keep_prefix': args.keep_prefix,
        'keep_stage': args.keep_stage,
        'restage': not args.dont_restage,
        'install_source': args.install_source,
        'verbose': args.verbose,
        'fake': args.fake,
        'dirty': args.dirty,
        'use_cache': args.use_cache,
        'cache_only': args.cache_only,
        'include_build_deps': args.include_build_deps,
        'explicit': True,  # Always true for install command
        'stop_at': args.until,
        'unsigned': args.unsigned,
        'full_hash_match': args.full_hash_match,
        'install_deps': 'dependencies' in args.things_to_install,
        'install_package': 'package' in args.things_to_install,
    })
    if hasattr(args, 'setup'):
        # each --setup value may itself be a comma-separated list; flatten
        kwargs['setup'] = {
            name.strip()
            for arg_list in args.setup
            for name in arg_list.split(',')
        }
        tty.msg('Setup={0}'.format(kwargs['setup']))
def setup_parser(subparser):
    """Register all ``spack install`` command-line options on ``subparser``."""
    subparser.add_argument(
        '--only',
        default='package,dependencies',
        dest='things_to_install',
        choices=['package', 'dependencies'],
        help="""select the mode of installation.
the default is to install the package along with all its dependencies.
alternatively one can decide to install only the package or only
the dependencies"""
    )
    subparser.add_argument(
        '-u', '--until', type=str, dest='until', default=None,
        help="phase to stop after when installing (default None)")
    arguments.add_common_arguments(subparser, ['jobs'])
    subparser.add_argument(
        '--overwrite', action='store_true',
        help="reinstall an existing spec, even if it has dependents")
    subparser.add_argument(
        '--fail-fast', action='store_true',
        help="stop all builds if any build fails (default is best effort)")
    subparser.add_argument(
        '--keep-prefix', action='store_true',
        help="don't remove the install prefix if installation fails")
    subparser.add_argument(
        '--keep-stage', action='store_true',
        help="don't remove the build stage if installation succeeds")
    subparser.add_argument(
        '--dont-restage', action='store_true',
        help="if a partial install is detected, don't delete prior state")
    # --use-cache/--no-cache/--cache-only are mutually exclusive
    cache_group = subparser.add_mutually_exclusive_group()
    cache_group.add_argument(
        '--use-cache', action='store_true', dest='use_cache', default=True,
        help="check for pre-built Spack packages in mirrors (default)")
    cache_group.add_argument(
        '--no-cache', action='store_false', dest='use_cache', default=True,
        help="do not check for pre-built Spack packages in mirrors")
    cache_group.add_argument(
        '--cache-only', action='store_true', dest='cache_only', default=False,
        help="only install package from binary mirrors")
    monitor_group = spack.monitor.get_monitor_group(subparser)  # noqa
    subparser.add_argument(
        '--include-build-deps', action='store_true', dest='include_build_deps',
        default=False, help="""include build deps when installing from cache,
which is useful for CI pipeline troubleshooting""")
    subparser.add_argument(
        '--no-check-signature', action='store_true',
        dest='unsigned', default=False,
        help="do not check signatures of binary packages")
    subparser.add_argument(
        '--require-full-hash-match', action='store_true',
        dest='full_hash_match', default=False, help="""when installing from
binary mirrors, do not install binary package unless the full hash of the
remote spec matches that of the local spec""")
    subparser.add_argument(
        '--show-log-on-error', action='store_true',
        help="print full build log to stderr if build fails")
    subparser.add_argument(
        '--source', action='store_true', dest='install_source',
        help="install source files in prefix")
    arguments.add_common_arguments(subparser, ['no_checksum', 'deprecated'])
    subparser.add_argument(
        '-v', '--verbose', action='store_true',
        help="display verbose build output while installing")
    subparser.add_argument(
        '--fake', action='store_true',
        help="fake install for debug purposes.")
    subparser.add_argument(
        '--only-concrete', action='store_true', default=False,
        help='(with environment) only install already concretized specs')
    subparser.add_argument(
        '--no-add', action='store_true', default=False,
        help="""(with environment) only install specs provided as argument
if they are already in the concretized environment""")
    subparser.add_argument(
        '-f', '--file', action='append', default=[],
        dest='specfiles', metavar='SPEC_YAML_FILE',
        help="install from file. Read specs to install from .yaml files")
    cd_group = subparser.add_mutually_exclusive_group()
    arguments.add_common_arguments(cd_group, ['clean', 'dirty'])
    # --test and --run-tests are mutually exclusive ways to enable tests
    testing = subparser.add_mutually_exclusive_group()
    testing.add_argument(
        '--test', default=None,
        choices=['root', 'all'],
        help="""If 'root' is chosen, run package tests during
installation for top-level packages (but skip tests for dependencies).
if 'all' is chosen, run package tests during installation for all
packages. If neither are chosen, don't run tests for any packages."""
    )
    testing.add_argument(
        '--run-tests', action='store_true',
        help='run package tests during installation (same as --test=all)'
    )
    subparser.add_argument(
        '--log-format',
        default=None,
        choices=spack.report.valid_formats,
        help="format to be used for log files"
    )
    subparser.add_argument(
        '--log-file',
        default=None,
        help="filename for the log file. if not passed a default will be used"
    )
    subparser.add_argument(
        '--help-cdash',
        action='store_true',
        help="Show usage instructions for CDash reporting"
    )
    arguments.add_cdash_args(subparser, False)
    arguments.add_common_arguments(subparser, ['yes_to_all', 'spec'])
def default_log_file(spec):
    """Return the default junit log file path for ``spec``.

    The filename encodes the spec's name, version and DAG hash; the
    containing ``<reports_path>/junit`` directory is created if missing.

    Args:
        spec (spack.spec.Spec): concrete spec being installed

    Returns:
        str: path of the log file
    """
    fmt = 'test-{x.name}-{x.version}-{hash}.xml'
    basename = fmt.format(x=spec, hash=spec.dag_hash())
    # use the directly-imported ``os.path`` instead of reaching through
    # the ``fs`` module's re-exported ``fs.os`` attribute
    dirname = os.path.join(spack.paths.reports_path, 'junit')
    fs.mkdirp(dirname)
    return os.path.join(dirname, basename)
def install_specs(cli_args, kwargs, specs):
"""Do the actual installation.
Args:
cli_args (argparse.Namespace): argparse namespace with command arguments
kwargs (dict): keyword arguments
specs (list): list of (abstract, concrete) spec tuples
"""
# handle active environment, if any
env = ev.get_env(cli_args, 'install')
try:
if env:
specs_to_install = []
specs_to_add = []
for abstract, concrete in specs:
# This won't find specs added to the env since last
# concretize, therefore should we consider enforcing
# concretization of the env before allowing to install
# specs?
m_spec = env.matching_spec(abstract)
# If there is any ambiguity in the above call to matching_spec
# (i.e. if more than one spec in the environment matches), then
# SpackEnvironmentError is rasied, with a message listing the
# the matches. Getting to this point means there were either
# no matches or exactly one match.
if not m_spec:
tty.debug('{0} matched nothing in the env'.format(
abstract.name))
# no matches in the env
if cli_args.no_add:
msg = ('You asked to install {0} without adding it ' +
'(--no-add), but no such spec exists in ' +
'environment').format(abstract.name)
tty.die(msg)
else:
tty.debug('adding {0} as a root'.format(abstract.name))
specs_to_add.append((abstract, concrete))
continue
tty.debug('exactly one match for {0} in env -> {1}'.format(
m_spec.name, m_spec.dag_hash()))
if m_spec in env.roots() or cli_args.no_add:
# either the single match is a root spec (and --no-add is
# the default for roots) or --no-add was stated explictly
tty.debug('just install {0}'.format(m_spec.name))
specs_to_install.append(m_spec)
else:
# the single match is not a root (i.e. it's a dependency),
# and --no-add was not specified, so we'll add it as a
# root before installing
tty.debug('add {0} then install it'.format(m_spec.name))
specs_to_add.append((abstract, concrete))
if specs_to_add:
tty.debug('Adding the following specs as roots:')
for abstract, concrete in specs_to_add:
tty.debug(' {0}'.format(abstract.name))
with env.write_transaction():
specs_to_install.append(
env.concretize_and_add(abstract, concrete))
env.write(regenerate=False)
# Install the validated list of cli specs
if specs_to_install:
tty.debug('Installing the following cli specs:')
for s in specs_to_install:
tty.debug(' {0}'.format(s.name))
env.install_specs(specs_to_install, args=cli_args, **kwargs)
else:
installs = [(concrete.package, kwargs) for _, concrete in specs]
builder = PackageInstaller(installs)
builder.install()
except spack.build_environment.InstallError as e:
if cli_args.show_log_on_error:
e.print_context()
if not os.path.exists(e.pkg.build_log_path):
tty.error("'spack install' created no log.")
else:
sys.stderr.write('Full build log:\n')
with open(e.pkg.build_log_path) as log:
shutil.copyfileobj(log, sys.stderr)
raise
def install(parser, args, **kwargs):
if args.help_cdash:
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=textwrap.dedent('''\
environment variables:
SPACK_CDASH_AUTH_TOKEN
authentication token to present to CDash
'''))
arguments.add_cdash_args(parser, True)
parser.print_help()
return
# The user wants to monitor builds using github.com/spack/spack-monitor
if args.use_monitor:
monitor = spack.monitor.get_client(
host=args.monitor_host,
prefix=args.monitor_prefix,
disable_auth=args.monitor_disable_auth,
tags=args.monitor_tags,
save_local=args.monitor_save_local,
)
reporter = spack.report.collect_info(
spack.package.PackageInstaller, '_install_task', args.log_format, args)
if args.log_file:
reporter.filename = args.log_file
if args.run_tests:
tty.warn("Deprecated option: --run-tests: use --test=all instead")
def get_tests(specs):
if args.test == 'all' or args.run_tests:
return True
elif args.test == 'root':
return [spec.name for spec in specs]
else:
return False
if not args.spec and not args.specfiles:
# if there are no args but an active environment
# then install the packages from it.
env = ev.get_env(args, 'install')
if env:
tests = get_tests(env.user_specs)
kwargs['tests'] = tests
if not args.only_concrete:
with env.write_transaction():
concretized_specs = env.concretize(tests=tests)
ev.display_specs(concretized_specs)
# save view regeneration for later, so that we only do it
# once, as it can be slow.
env.write(regenerate=False)
specs = env.all_specs()
if not args.log_file and not reporter.filename:
reporter.filename = default_log_file(specs[0])
reporter.specs = specs
# Tell the monitor about the specs
if args.use_monitor and specs:
monitor.new_configuration(specs)
tty.msg("Installing environment {0}".format(env.name))
with reporter('build'):
env.install_all(args, **kwargs)
tty.debug("Regenerating environment views for {0}"
.format(env.name))
with env.write_transaction():
# write env to trigger view generation and modulefile
# generation
env.write()
return
else:
msg = "install requires a package argument or active environment"
if 'spack.yaml' in os.listdir(os.getcwd()):
# There's a spack.yaml file in the working dir, the user may
# have intended to use that
msg += "\n\n"
msg += "Did you mean to install using the `spack.yaml`"
msg += " in this directory? Try: \n"
msg += " spack env activate .\n"
msg += " spack install\n"
msg += " OR\n"
msg += " spack --env . install"
tty.die(msg)
if args.no_checksum:
spack.config.set('config:checksum', False, scope='command_line')
if args.deprecated:
spack.config.set('config:deprecated', True, scope='command_line')
# Parse cli arguments and construct a dictionary
# that will be passed to the package installer
update_kwargs_from_args(args, kwargs)
# 1. Abstract specs from cli
abstract_specs = spack.cmd.parse_specs(args.spec)
tests = get_tests(abstract_specs)
kwargs['tests'] = tests
try:
specs = spack.cmd.parse_specs(
args.spec, concretize=True, tests=tests)
except SpackError as e:
tty.debug(e)
reporter.concretization_report(e.message)
raise
# 2. Concrete specs from yaml files
for file in args.specfiles:
with open(file, 'r') as f:
s = spack.spec.Spec.from_yaml(f)
concretized = s.concretized()
if concretized.dag_hash() != s.dag_hash():
msg = 'skipped invalid file "{0}". '
msg += 'The file does not contain a concrete spec.'
tty.warn(msg.format(file))
continue
abstract_specs.append(s)
specs.append(concretized)
if len(specs) == 0:
tty.die('The `spack install` command requires a spec to install.')
if not args.log_file and not reporter.filename:
reporter.filename = default_log_file(specs[0])
reporter.specs = specs
with reporter('build'):
if args.overwrite:
installed = list(filter(lambda x: x,
map(spack.store.db.query_one, specs)))
if not args.yes_to_all:
display_args = {
'long': True,
'show_flags': True,
'variants': True
}
if installed:
tty.msg('The following package specs will be '
'reinstalled:\n')
spack.cmd.display_specs(installed, **display_args)
not_installed = list(filter(lambda x: x not in installed,
specs))
if not_installed:
tty.msg('The following package specs are not installed and'
' the --overwrite flag was given. The package spec'
' will be newly installed:\n')
spack.cmd.display_specs(not_installed, **display_args)
# We have some specs, so one of the above must have been true
answer = tty.get_yes_or_no(
'Do you want to proceed?', default=False
)
if not answer:
tty.die('Reinstallation aborted.')
# overwrite all concrete explicit specs from this build
kwargs['overwrite'] = [spec.dag_hash() for spec in specs]
# Update install_args with the monitor args, needed for build task
kwargs.update({
"monitor_disable_auth": args.monitor_disable_auth,
"monitor_keep_going": args.monitor_keep_going,
"monitor_host": args.monitor_host,
"use_monitor": args.use_monitor,
"monitor_prefix": args.monitor_prefix,
})
# If we are using the monitor, we send configs. and create build
# The full_hash is the main package id, the build_hash for others
if args.use_monitor and specs:
monitor.new_configuration(specs)
install_specs(args, kwargs, zip(abstract_specs, specs))
| 39.33121 | 80 | 0.601404 |
import argparse
import os
import shutil
import sys
import textwrap
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import spack.build_environment
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.fetch_strategy
import spack.monitor
import spack.paths
import spack.report
from spack.error import SpackError
from spack.installer import PackageInstaller
description = "build and install packages"
section = "build"
level = "short"
def update_kwargs_from_args(args, kwargs):
kwargs.update({
'fail_fast': args.fail_fast,
'keep_prefix': args.keep_prefix,
'keep_stage': args.keep_stage,
'restage': not args.dont_restage,
'install_source': args.install_source,
'verbose': args.verbose,
'fake': args.fake,
'dirty': args.dirty,
'use_cache': args.use_cache,
'cache_only': args.cache_only,
'include_build_deps': args.include_build_deps,
'explicit': True,
'stop_at': args.until,
'unsigned': args.unsigned,
'full_hash_match': args.full_hash_match,
})
kwargs.update({
'install_deps': ('dependencies' in args.things_to_install),
'install_package': ('package' in args.things_to_install)
})
if hasattr(args, 'setup'):
setups = set()
for arglist_s in args.setup:
for arg in [x.strip() for x in arglist_s.split(',')]:
setups.add(arg)
kwargs['setup'] = setups
tty.msg('Setup={0}'.format(kwargs['setup']))
def setup_parser(subparser):
subparser.add_argument(
'--only',
default='package,dependencies',
dest='things_to_install',
choices=['package', 'dependencies'],
help="""select the mode of installation.
the default is to install the package along with all its dependencies.
alternatively one can decide to install only the package or only
the dependencies"""
)
subparser.add_argument(
'-u', '--until', type=str, dest='until', default=None,
help="phase to stop after when installing (default None)")
arguments.add_common_arguments(subparser, ['jobs'])
subparser.add_argument(
'--overwrite', action='store_true',
help="reinstall an existing spec, even if it has dependents")
subparser.add_argument(
'--fail-fast', action='store_true',
help="stop all builds if any build fails (default is best effort)")
subparser.add_argument(
'--keep-prefix', action='store_true',
help="don't remove the install prefix if installation fails")
subparser.add_argument(
'--keep-stage', action='store_true',
help="don't remove the build stage if installation succeeds")
subparser.add_argument(
'--dont-restage', action='store_true',
help="if a partial install is detected, don't delete prior state")
cache_group = subparser.add_mutually_exclusive_group()
cache_group.add_argument(
'--use-cache', action='store_true', dest='use_cache', default=True,
help="check for pre-built Spack packages in mirrors (default)")
cache_group.add_argument(
'--no-cache', action='store_false', dest='use_cache', default=True,
help="do not check for pre-built Spack packages in mirrors")
cache_group.add_argument(
'--cache-only', action='store_true', dest='cache_only', default=False,
help="only install package from binary mirrors")
monitor_group = spack.monitor.get_monitor_group(subparser) # noqa
subparser.add_argument(
'--include-build-deps', action='store_true', dest='include_build_deps',
default=False, help="""include build deps when installing from cache,
which is useful for CI pipeline troubleshooting""")
subparser.add_argument(
'--no-check-signature', action='store_true',
dest='unsigned', default=False,
help="do not check signatures of binary packages")
subparser.add_argument(
'--require-full-hash-match', action='store_true',
dest='full_hash_match', default=False, help="""when installing from
binary mirrors, do not install binary package unless the full hash of the
remote spec matches that of the local spec""")
subparser.add_argument(
'--show-log-on-error', action='store_true',
help="print full build log to stderr if build fails")
subparser.add_argument(
'--source', action='store_true', dest='install_source',
help="install source files in prefix")
arguments.add_common_arguments(subparser, ['no_checksum', 'deprecated'])
subparser.add_argument(
'-v', '--verbose', action='store_true',
help="display verbose build output while installing")
subparser.add_argument(
'--fake', action='store_true',
help="fake install for debug purposes.")
subparser.add_argument(
'--only-concrete', action='store_true', default=False,
help='(with environment) only install already concretized specs')
subparser.add_argument(
'--no-add', action='store_true', default=False,
help="""(with environment) only install specs provided as argument
if they are already in the concretized environment""")
subparser.add_argument(
'-f', '--file', action='append', default=[],
dest='specfiles', metavar='SPEC_YAML_FILE',
help="install from file. Read specs to install from .yaml files")
cd_group = subparser.add_mutually_exclusive_group()
arguments.add_common_arguments(cd_group, ['clean', 'dirty'])
testing = subparser.add_mutually_exclusive_group()
testing.add_argument(
'--test', default=None,
choices=['root', 'all'],
help="""If 'root' is chosen, run package tests during
installation for top-level packages (but skip tests for dependencies).
if 'all' is chosen, run package tests during installation for all
packages. If neither are chosen, don't run tests for any packages."""
)
testing.add_argument(
'--run-tests', action='store_true',
help='run package tests during installation (same as --test=all)'
)
subparser.add_argument(
'--log-format',
default=None,
choices=spack.report.valid_formats,
help="format to be used for log files"
)
subparser.add_argument(
'--log-file',
default=None,
help="filename for the log file. if not passed a default will be used"
)
subparser.add_argument(
'--help-cdash',
action='store_true',
help="Show usage instructions for CDash reporting"
)
arguments.add_cdash_args(subparser, False)
arguments.add_common_arguments(subparser, ['yes_to_all', 'spec'])
def default_log_file(spec):
fmt = 'test-{x.name}-{x.version}-{hash}.xml'
basename = fmt.format(x=spec, hash=spec.dag_hash())
dirname = fs.os.path.join(spack.paths.reports_path, 'junit')
fs.mkdirp(dirname)
return fs.os.path.join(dirname, basename)
def install_specs(cli_args, kwargs, specs):
env = ev.get_env(cli_args, 'install')
try:
if env:
specs_to_install = []
specs_to_add = []
for abstract, concrete in specs:
# concretize, therefore should we consider enforcing
# concretization of the env before allowing to install
# specs?
m_spec = env.matching_spec(abstract)
# If there is any ambiguity in the above call to matching_spec
# (i.e. if more than one spec in the environment matches), then
# SpackEnvironmentError is rasied, with a message listing the
# the matches. Getting to this point means there were either
# no matches or exactly one match.
if not m_spec:
tty.debug('{0} matched nothing in the env'.format(
abstract.name))
# no matches in the env
if cli_args.no_add:
msg = ('You asked to install {0} without adding it ' +
'(--no-add), but no such spec exists in ' +
'environment').format(abstract.name)
tty.die(msg)
else:
tty.debug('adding {0} as a root'.format(abstract.name))
specs_to_add.append((abstract, concrete))
continue
tty.debug('exactly one match for {0} in env -> {1}'.format(
m_spec.name, m_spec.dag_hash()))
if m_spec in env.roots() or cli_args.no_add:
# either the single match is a root spec (and --no-add is
# the default for roots) or --no-add was stated explictly
tty.debug('just install {0}'.format(m_spec.name))
specs_to_install.append(m_spec)
else:
# the single match is not a root (i.e. it's a dependency),
# root before installing
tty.debug('add {0} then install it'.format(m_spec.name))
specs_to_add.append((abstract, concrete))
if specs_to_add:
tty.debug('Adding the following specs as roots:')
for abstract, concrete in specs_to_add:
tty.debug(' {0}'.format(abstract.name))
with env.write_transaction():
specs_to_install.append(
env.concretize_and_add(abstract, concrete))
env.write(regenerate=False)
# Install the validated list of cli specs
if specs_to_install:
tty.debug('Installing the following cli specs:')
for s in specs_to_install:
tty.debug(' {0}'.format(s.name))
env.install_specs(specs_to_install, args=cli_args, **kwargs)
else:
installs = [(concrete.package, kwargs) for _, concrete in specs]
builder = PackageInstaller(installs)
builder.install()
except spack.build_environment.InstallError as e:
if cli_args.show_log_on_error:
e.print_context()
if not os.path.exists(e.pkg.build_log_path):
tty.error("'spack install' created no log.")
else:
sys.stderr.write('Full build log:\n')
with open(e.pkg.build_log_path) as log:
shutil.copyfileobj(log, sys.stderr)
raise
def install(parser, args, **kwargs):
if args.help_cdash:
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=textwrap.dedent('''\
environment variables:
SPACK_CDASH_AUTH_TOKEN
authentication token to present to CDash
'''))
arguments.add_cdash_args(parser, True)
parser.print_help()
return
# The user wants to monitor builds using github.com/spack/spack-monitor
if args.use_monitor:
monitor = spack.monitor.get_client(
host=args.monitor_host,
prefix=args.monitor_prefix,
disable_auth=args.monitor_disable_auth,
tags=args.monitor_tags,
save_local=args.monitor_save_local,
)
reporter = spack.report.collect_info(
spack.package.PackageInstaller, '_install_task', args.log_format, args)
if args.log_file:
reporter.filename = args.log_file
if args.run_tests:
tty.warn("Deprecated option: --run-tests: use --test=all instead")
def get_tests(specs):
if args.test == 'all' or args.run_tests:
return True
elif args.test == 'root':
return [spec.name for spec in specs]
else:
return False
if not args.spec and not args.specfiles:
# if there are no args but an active environment
# then install the packages from it.
env = ev.get_env(args, 'install')
if env:
tests = get_tests(env.user_specs)
kwargs['tests'] = tests
if not args.only_concrete:
with env.write_transaction():
concretized_specs = env.concretize(tests=tests)
ev.display_specs(concretized_specs)
# save view regeneration for later, so that we only do it
# once, as it can be slow.
env.write(regenerate=False)
specs = env.all_specs()
if not args.log_file and not reporter.filename:
reporter.filename = default_log_file(specs[0])
reporter.specs = specs
# Tell the monitor about the specs
if args.use_monitor and specs:
monitor.new_configuration(specs)
tty.msg("Installing environment {0}".format(env.name))
with reporter('build'):
env.install_all(args, **kwargs)
tty.debug("Regenerating environment views for {0}"
.format(env.name))
with env.write_transaction():
# write env to trigger view generation and modulefile
# generation
env.write()
return
else:
msg = "install requires a package argument or active environment"
if 'spack.yaml' in os.listdir(os.getcwd()):
# There's a spack.yaml file in the working dir, the user may
msg += "\n\n"
msg += "Did you mean to install using the `spack.yaml`"
msg += " in this directory? Try: \n"
msg += " spack env activate .\n"
msg += " spack install\n"
msg += " OR\n"
msg += " spack --env . install"
tty.die(msg)
if args.no_checksum:
spack.config.set('config:checksum', False, scope='command_line')
if args.deprecated:
spack.config.set('config:deprecated', True, scope='command_line')
update_kwargs_from_args(args, kwargs)
abstract_specs = spack.cmd.parse_specs(args.spec)
tests = get_tests(abstract_specs)
kwargs['tests'] = tests
try:
specs = spack.cmd.parse_specs(
args.spec, concretize=True, tests=tests)
except SpackError as e:
tty.debug(e)
reporter.concretization_report(e.message)
raise
for file in args.specfiles:
with open(file, 'r') as f:
s = spack.spec.Spec.from_yaml(f)
concretized = s.concretized()
if concretized.dag_hash() != s.dag_hash():
msg = 'skipped invalid file "{0}". '
msg += 'The file does not contain a concrete spec.'
tty.warn(msg.format(file))
continue
abstract_specs.append(s)
specs.append(concretized)
if len(specs) == 0:
tty.die('The `spack install` command requires a spec to install.')
if not args.log_file and not reporter.filename:
reporter.filename = default_log_file(specs[0])
reporter.specs = specs
with reporter('build'):
if args.overwrite:
installed = list(filter(lambda x: x,
map(spack.store.db.query_one, specs)))
if not args.yes_to_all:
display_args = {
'long': True,
'show_flags': True,
'variants': True
}
if installed:
tty.msg('The following package specs will be '
'reinstalled:\n')
spack.cmd.display_specs(installed, **display_args)
not_installed = list(filter(lambda x: x not in installed,
specs))
if not_installed:
tty.msg('The following package specs are not installed and'
' the --overwrite flag was given. The package spec'
' will be newly installed:\n')
spack.cmd.display_specs(not_installed, **display_args)
answer = tty.get_yes_or_no(
'Do you want to proceed?', default=False
)
if not answer:
tty.die('Reinstallation aborted.')
kwargs['overwrite'] = [spec.dag_hash() for spec in specs]
kwargs.update({
"monitor_disable_auth": args.monitor_disable_auth,
"monitor_keep_going": args.monitor_keep_going,
"monitor_host": args.monitor_host,
"use_monitor": args.use_monitor,
"monitor_prefix": args.monitor_prefix,
})
if args.use_monitor and specs:
monitor.new_configuration(specs)
install_specs(args, kwargs, zip(abstract_specs, specs))
| true | true |
f71a7a522882e618e8873734efaa5c00541a1526 | 2,196 | py | Python | onnx/backend/test/case/node/batchnorm.py | cnheider/onnx | 8e9c7d57f7c5aa6f6eb7ee7abb0ba2a243781933 | [
"MIT"
] | 137 | 2020-04-28T12:28:32.000Z | 2022-03-18T10:48:25.000Z | onnx/backend/test/case/node/batchnorm.py | cnheider/onnx | 8e9c7d57f7c5aa6f6eb7ee7abb0ba2a243781933 | [
"MIT"
] | 24 | 2020-05-06T08:06:42.000Z | 2021-12-31T07:46:13.000Z | Fujitsu/benchmarks/resnet/implementations/implementation_open/mxnet/3rdparty/onnx-tensorrt/third_party/onnx/onnx/backend/test/case/node/batchnorm.py | lablup/training_results_v0.7 | f5bb59aa0f8b18b602763abe47d1d24d0d54b197 | [
"Apache-2.0"
] | 51 | 2019-07-12T05:10:25.000Z | 2021-07-28T16:19:06.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np # type: ignore
import onnx
from ..base import Base
from . import expect
class BatchNormalization(Base):
@staticmethod
def export(): # type: () -> None
def _batchnorm_test_mode(x, s, bias, mean, var, epsilon=1e-5): # type: ignore
dims_x = len(x.shape)
dim_ones = (1,) * (dims_x - 2)
s = s.reshape(-1, *dim_ones)
bias = bias.reshape(-1, *dim_ones)
mean = mean.reshape(-1, *dim_ones)
var = var.reshape(-1, *dim_ones)
return s * (x - mean) / np.sqrt(var + epsilon) + bias
# input size: (1, 2, 1, 3)
x = np.array([[[[-1, 0, 1]], [[2, 3, 4]]]]).astype(np.float32)
s = np.array([1.0, 1.5]).astype(np.float32)
bias = np.array([0, 1]).astype(np.float32)
mean = np.array([0, 3]).astype(np.float32)
var = np.array([1, 1.5]).astype(np.float32)
y = _batchnorm_test_mode(x, s, bias, mean, var).astype(np.float32)
node = onnx.helper.make_node(
'BatchNormalization',
inputs=['x', 's', 'bias', 'mean', 'var'],
outputs=['y'],
)
# output size: (1, 2, 1, 3)
expect(node, inputs=[x, s, bias, mean, var], outputs=[y],
name='test_batchnorm_example')
# input size: (2, 3, 4, 5)
x = np.random.randn(2, 3, 4, 5).astype(np.float32)
s = np.random.randn(3).astype(np.float32)
bias = np.random.randn(3).astype(np.float32)
mean = np.random.randn(3).astype(np.float32)
var = np.random.rand(3).astype(np.float32)
epsilon = 1e-2
y = _batchnorm_test_mode(x, s, bias, mean, var, epsilon).astype(np.float32)
node = onnx.helper.make_node(
'BatchNormalization',
inputs=['x', 's', 'bias', 'mean', 'var'],
outputs=['y'],
epsilon=epsilon,
)
# output size: (2, 3, 4, 5)
expect(node, inputs=[x, s, bias, mean, var], outputs=[y],
name='test_batchnorm_epsilon')
| 34.857143 | 86 | 0.551002 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import onnx
from ..base import Base
from . import expect
class BatchNormalization(Base):
@staticmethod
def export():
def _batchnorm_test_mode(x, s, bias, mean, var, epsilon=1e-5):
dims_x = len(x.shape)
dim_ones = (1,) * (dims_x - 2)
s = s.reshape(-1, *dim_ones)
bias = bias.reshape(-1, *dim_ones)
mean = mean.reshape(-1, *dim_ones)
var = var.reshape(-1, *dim_ones)
return s * (x - mean) / np.sqrt(var + epsilon) + bias
x = np.array([[[[-1, 0, 1]], [[2, 3, 4]]]]).astype(np.float32)
s = np.array([1.0, 1.5]).astype(np.float32)
bias = np.array([0, 1]).astype(np.float32)
mean = np.array([0, 3]).astype(np.float32)
var = np.array([1, 1.5]).astype(np.float32)
y = _batchnorm_test_mode(x, s, bias, mean, var).astype(np.float32)
node = onnx.helper.make_node(
'BatchNormalization',
inputs=['x', 's', 'bias', 'mean', 'var'],
outputs=['y'],
)
expect(node, inputs=[x, s, bias, mean, var], outputs=[y],
name='test_batchnorm_example')
x = np.random.randn(2, 3, 4, 5).astype(np.float32)
s = np.random.randn(3).astype(np.float32)
bias = np.random.randn(3).astype(np.float32)
mean = np.random.randn(3).astype(np.float32)
var = np.random.rand(3).astype(np.float32)
epsilon = 1e-2
y = _batchnorm_test_mode(x, s, bias, mean, var, epsilon).astype(np.float32)
node = onnx.helper.make_node(
'BatchNormalization',
inputs=['x', 's', 'bias', 'mean', 'var'],
outputs=['y'],
epsilon=epsilon,
)
expect(node, inputs=[x, s, bias, mean, var], outputs=[y],
name='test_batchnorm_epsilon')
| true | true |
f71a7adb1bcafed077beb9dfb10755cea07d0e0b | 1,239 | py | Python | examples/example.py | Mizux/cmake-pybind11 | d3b89746546734990eae5a86532674bf3462a2f3 | [
"Apache-2.0"
] | null | null | null | examples/example.py | Mizux/cmake-pybind11 | d3b89746546734990eae5a86532674bf3462a2f3 | [
"Apache-2.0"
] | null | null | null | examples/example.py | Mizux/cmake-pybind11 | d3b89746546734990eae5a86532674bf3462a2f3 | [
"Apache-2.0"
] | null | null | null | import cmakepybind11
from cmakepybind11.foo import pyFoo
from cmakepybind11.bar import pyBar
from cmakepybind11.foobar import pyFooBar
print(f'version: {cmakepybind11.__version__}')
# foo
print(f'Foo: {dir(pyFoo.Foo)}')
pyFoo.free_function(2147483647) # max int
pyFoo.free_function(2147483647+1) # max int + 1
f = pyFoo.Foo()
print(f'class Foo: {dir(f)}')
f.static_function(1)
f.static_function(2147483647)
f.static_function(2147483647+1)
f.int = 13
assert(f.int == 13)
f.int64 = 31
assert(f.int64 == 31)
# bar
print(f'Bar: {dir(pyBar.Bar)}')
pyBar.free_function(2147483647) # max int
pyBar.free_function(2147483647+1) # max int + 1
b = pyBar.Bar()
print(f'class Bar: {dir(b)}')
b.static_function(1)
b.static_function(2147483647)
b.static_function(2147483647+1)
b.int = 13
assert(b.int == 13)
b.int64 = 31
assert(b.int64 == 31)
# foobar
print(f'FooBar: {dir(pyFooBar.FooBar)}')
pyFooBar.free_function(2147483647) # max int
pyFooBar.free_function(2147483647+1) # max int + 1
fb = pyFooBar.FooBar()
print(f'class FooBar: {dir(fb)}')
fb.static_function(1)
fb.static_function(2147483647)
fb.static_function(2147483647+1)
fb.foo_int = 13
fb.bar_int = 17
assert(fb.int == 30)
fb.foo_int64 = 31
fb.bar_int64 = 37
assert(fb.int64 == 68)
| 21.736842 | 50 | 0.736885 | import cmakepybind11
from cmakepybind11.foo import pyFoo
from cmakepybind11.bar import pyBar
from cmakepybind11.foobar import pyFooBar
print(f'version: {cmakepybind11.__version__}')
print(f'Foo: {dir(pyFoo.Foo)}')
pyFoo.free_function(2147483647)
pyFoo.free_function(2147483647+1)
f = pyFoo.Foo()
print(f'class Foo: {dir(f)}')
f.static_function(1)
f.static_function(2147483647)
f.static_function(2147483647+1)
f.int = 13
assert(f.int == 13)
f.int64 = 31
assert(f.int64 == 31)
print(f'Bar: {dir(pyBar.Bar)}')
pyBar.free_function(2147483647)
pyBar.free_function(2147483647+1)
b = pyBar.Bar()
print(f'class Bar: {dir(b)}')
b.static_function(1)
b.static_function(2147483647)
b.static_function(2147483647+1)
b.int = 13
assert(b.int == 13)
b.int64 = 31
assert(b.int64 == 31)
print(f'FooBar: {dir(pyFooBar.FooBar)}')
pyFooBar.free_function(2147483647)
pyFooBar.free_function(2147483647+1)
fb = pyFooBar.FooBar()
print(f'class FooBar: {dir(fb)}')
fb.static_function(1)
fb.static_function(2147483647)
fb.static_function(2147483647+1)
fb.foo_int = 13
fb.bar_int = 17
assert(fb.int == 30)
fb.foo_int64 = 31
fb.bar_int64 = 37
assert(fb.int64 == 68)
| true | true |
f71a7b8e9aca170c7b5bfc6407b13285000df309 | 19,338 | py | Python | .history/implementations/pixelda/pixelda_20190101224024.py | Napkin-DL/PyTorch-GAN | 4668fb434a74a4e4771631944e4abfb0ec1c8795 | [
"MIT"
] | null | null | null | .history/implementations/pixelda/pixelda_20190101224024.py | Napkin-DL/PyTorch-GAN | 4668fb434a74a4e4771631944e4abfb0ec1c8795 | [
"MIT"
] | null | null | null | .history/implementations/pixelda/pixelda_20190101224024.py | Napkin-DL/PyTorch-GAN | 4668fb434a74a4e4771631944e4abfb0ec1c8795 | [
"MIT"
] | null | null | null | import argparse
import os
import numpy as np
import math
import itertools
import torchvision.transforms as transforms
from torchvision.utils import save_image
from torch.utils.data import DataLoader
from torchvision import datasets
from torch.autograd import Variable
from mnistm import MNISTM
import torch.nn as nn
import torch.nn.functional as F
import torch
# Directory where sampled image grids are written during training.
os.makedirs('images', exist_ok=True)

# Command-line hyperparameters for the PixelDA training run.
parser = argparse.ArgumentParser()
parser.add_argument('--n_epochs', type=int, default=200, help='number of epochs of training')
parser.add_argument('--batch_size', type=int, default=64, help='size of the batches')
parser.add_argument('--lr', type=float, default=0.0002, help='adam: learning rate')
parser.add_argument('--b1', type=float, default=0.5, help='adam: decay of first order momentum of gradient')
parser.add_argument('--b2', type=float, default=0.999, help='adam: decay of first order momentum of gradient')
parser.add_argument('--n_cpu', type=int, default=8, help='number of cpu threads to use during batch generation')
parser.add_argument('--n_residual_blocks', type=int, default=1, help='number of residual blocks in generator')
parser.add_argument('--latent_dim', type=int, default=10, help='dimensionality of the noise input')
parser.add_argument('--img_size', type=int, default=32, help='size of each image dimension')
parser.add_argument('--channels', type=int, default=3, help='number of image channels')
parser.add_argument('--n_classes', type=int, default=10, help='number of classes in the dataset')
parser.add_argument('--sample_interval', type=int, default=300, help='interval betwen image samples')
opt = parser.parse_args()
print(opt)

# Calculate output of image discriminator (PatchGAN)
# 2**4 is the discriminator's total downsampling factor — presumably four
# stride-2 layers; TODO confirm against the discriminator definition.
patch = int(opt.img_size / 2**4)
patch = (1, patch, patch)  # (channels, height, width) of the PatchGAN output map

cuda = True if torch.cuda.is_available() else False
print("cuda : {}".format(cuda))
def weights_init_normal(m):
    """DCGAN-style weight initializer, intended for ``net.apply(...)``.

    Conv-family layers (``Conv2d``/``ConvTranspose2d`` — matched by name)
    get N(0, 0.02) weights; BatchNorm layers get N(1, 0.02) scale and zero
    bias. Any other module type is left untouched.

    Args:
        m: a submodule handed in by ``nn.Module.apply``.
    """
    classname = m.__class__.__name__
    # Fix: dropped the leftover debug print that fired once per submodule
    # every time a network was initialized.
    if classname.find('Conv') != -1:
        torch.nn.init.normal_(m.weight.data, 0.0, 0.02)
    elif classname.find('BatchNorm') != -1:
        torch.nn.init.normal_(m.weight.data, 1.0, 0.02)
        torch.nn.init.constant_(m.bias.data, 0.0)
class ResidualBlock_back(nn.Module):
    """Residual block: two 3x3 same-padding convs with BatchNorm, added
    back onto the input (channel count and spatial size preserved).

    Note: ``out_features`` is accepted for signature compatibility but is
    unused — the block keeps ``in_features`` channels throughout.
    """

    def __init__(self, in_features=64, out_features=64):
        # BUG FIX: the original called super(ResidualBlock, self) but the
        # class was renamed to ResidualBlock_back, so constructing it
        # raised NameError on the undefined name `ResidualBlock`.
        super(ResidualBlock_back, self).__init__()

        self.block = nn.Sequential(
            nn.Conv2d(in_features, in_features, 3, 1, 1),
            nn.BatchNorm2d(in_features),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_features, in_features, 3, 1, 1),
            nn.BatchNorm2d(in_features)
        )

    def forward(self, x):
        # Identity shortcut around the conv stack.
        return x + self.block(x)
class sencode_ResidualBlock(nn.Module):
    """Source-domain encoder: two strided conv stages that downsample the
    input while widening channels from ``in_features`` to 4x and then 8x.

    ``forward`` hands back both the untouched input and its encoding, so a
    matching decoder can be trained around this block.
    """

    def __init__(self, in_features=64, out_features=64):
        super(sencode_ResidualBlock, self).__init__()

        width = in_features
        layers = [
            # Stage 1: 1x -> 4x channels, spatial /2, no padding.
            nn.Conv2d(in_channels=width, out_channels=4 * width,
                      kernel_size=(3, 3), stride=(2, 2), padding=0),
            nn.BatchNorm2d(4 * width),
            nn.LeakyReLU(inplace=True),
            # Stage 2: 4x -> 8x channels, spatial /2, padding 1.
            nn.Conv2d(in_channels=4 * width, out_channels=8 * width,
                      kernel_size=(3, 3), stride=(2, 2), padding=1),
            nn.BatchNorm2d(8 * width),
            nn.LeakyReLU(inplace=True),
        ]
        # Plain positional Sequential keeps the original state_dict keys
        # (sencode_block.0 .. sencode_block.5).
        self.sencode_block = nn.Sequential(*layers)

    def forward(self, x):
        # Return the raw input alongside its latent code.
        return x, self.sencode_block(x)
class sdecode_ResidualBlock(nn.Module):
    """Source-domain decoder: mirrors ``sencode_ResidualBlock`` by
    upsampling the 8x-channel latent back to ``in_features`` channels with
    two transposed convolutions, then squashing outputs into (0, 1).

    Note: ``out_features`` is accepted for signature compatibility but is
    unused.
    """

    def __init__(self, in_features=64, out_features=64):
        super(sdecode_ResidualBlock, self).__init__()
        self.sdecode_block = nn.Sequential(
            nn.ConvTranspose2d(in_channels=8 * in_features, out_channels=4 * in_features,
                               kernel_size=(3, 3), stride=(2, 2), padding=0),
            nn.BatchNorm2d(4 * in_features),
            nn.LeakyReLU(inplace=True),
            nn.ConvTranspose2d(in_channels=4 * in_features, out_channels=1 * in_features,
                               kernel_size=(3, 3), stride=(2, 2), padding=1),
            nn.BatchNorm2d(1 * in_features),
            nn.LeakyReLU(inplace=True),
        )

    def forward(self, encode_x):
        decode_x = self.sdecode_block(encode_x)
        # The transposed convs overshoot by one pixel per spatial dim; crop
        # the trailing row/column (for the 32x32 images used here,
        # 32 -> 15 -> 8 encodes, 8 -> 17 -> 33 decodes, crop gives 32).
        decode_x = decode_x[:, :, :-1, :-1]
        # Fix: torch.sigmoid replaces the deprecated F.sigmoid (same math).
        return torch.sigmoid(decode_x)
class tencode_ResidualBlock(nn.Module):
    """Target-domain encoder stage.

    Two stride-2 convolutions downsample the feature map while widening the
    channels 1x -> 4x -> 8x. ``forward`` returns both the untouched input
    (kept for the decoder's skip connection) and the encoded features.
    """

    def __init__(self, in_features=64, out_features=64):
        super(tencode_ResidualBlock, self).__init__()
        layers = [
            nn.Conv2d(in_channels=1 * in_features, out_channels=4 * in_features,
                      kernel_size=(3, 3), stride=(2, 2), padding=0),
            nn.BatchNorm2d(4 * in_features),
            nn.LeakyReLU(inplace=True),
            nn.Conv2d(in_channels=4 * in_features, out_channels=8 * in_features,
                      kernel_size=(3, 3), stride=(2, 2), padding=1),
            nn.BatchNorm2d(8 * in_features),
            nn.LeakyReLU(inplace=True),
        ]
        self.tencode_block = nn.Sequential(*layers)

    def forward(self, x):
        # Hand the raw input through untouched alongside its encoding.
        return x, self.tencode_block(x)
class tdecode_ResidualBlock(nn.Module):
    """Target-domain decoder stage mirroring ``tencode_ResidualBlock``.

    Two stride-2 transposed convolutions upsample while narrowing channels
    8x -> 4x -> 1x; the one-pixel overshoot the transposed convs introduce is
    cropped off, and the result is squashed to [0, 1].
    """

    def __init__(self, in_features=64, out_features=64):
        super(tdecode_ResidualBlock, self).__init__()
        self.tdecode_block = nn.Sequential(
            nn.ConvTranspose2d(in_channels=8 * in_features, out_channels=4 * in_features,
                               kernel_size=(3, 3), stride=(2, 2), padding=0),
            nn.BatchNorm2d(4 * in_features),
            nn.LeakyReLU(inplace=True),
            nn.ConvTranspose2d(in_channels=4 * in_features, out_channels=1 * in_features,
                               kernel_size=(3, 3), stride=(2, 2), padding=1),
            nn.BatchNorm2d(1 * in_features),
            nn.LeakyReLU(inplace=True),
        )

    def forward(self, encode_x):
        decode_x = self.tdecode_block(encode_x)
        # Drop the last row/column so the output realigns with the encoder input.
        decode_x = decode_x[:, :, :-1, :-1]
        # torch.sigmoid replaces the deprecated F.sigmoid (identical function).
        return torch.sigmoid(decode_x)
class target_encode_Generator(nn.Module):
    """Target-domain encoder generator.

    Projects noise ``z`` into an image-shaped volume, concatenates it with the
    input image channel-wise, and encodes the pair through the target encoder
    residual stack. Returns (pre-encoding features, encoded features).
    """

    def __init__(self):
        super(target_encode_Generator, self).__init__()
        # Lift the noise vector to a full image volume so it can be
        # concatenated with the real image along the channel axis.
        self.tfc = nn.Linear(opt.latent_dim, opt.channels * opt.img_size ** 2)
        self.tl1 = nn.Sequential(nn.Conv2d(opt.channels * 2, 64, 3, 1, 1), nn.ReLU(inplace=True))
        # NOTE(review): each encoder stage returns a tuple, so this Sequential
        # only composes cleanly for n_residual_blocks == 1 — confirm before
        # raising the block count.
        self.tencode_resblocks = nn.Sequential(
            *[tencode_ResidualBlock() for _ in range(opt.n_residual_blocks)])

    def forward(self, img, z):
        noise_img = self.tfc(z).view(*img.shape)
        features = self.tl1(torch.cat((img, noise_img), 1))
        x, encode_out = self.tencode_resblocks(features)
        return x, encode_out
class source_encode_Generator(nn.Module):
    """Source-domain encoder generator.

    Projects noise ``z`` into an image-shaped volume, concatenates it with the
    input image channel-wise, and encodes the pair through the source encoder
    residual stack. Returns (pre-encoding features, encoded features).
    """

    def __init__(self):
        super(source_encode_Generator, self).__init__()
        # Lift the noise vector to a full image volume so it can be
        # concatenated with the real image along the channel axis.
        self.sfc = nn.Linear(opt.latent_dim, opt.channels * opt.img_size ** 2)
        self.sl1 = nn.Sequential(nn.Conv2d(opt.channels * 2, 64, 3, 1, 1), nn.ReLU(inplace=True))
        # NOTE(review): each encoder stage returns a tuple, so this Sequential
        # only composes cleanly for n_residual_blocks == 1 — confirm before
        # raising the block count.
        self.sencode_resblocks = nn.Sequential(
            *[sencode_ResidualBlock() for _ in range(opt.n_residual_blocks)])

    def forward(self, img, z):
        noise_img = self.sfc(z).view(*img.shape)
        features = self.sl1(torch.cat((img, noise_img), 1))
        x, encode_out = self.sencode_resblocks(features)
        return x, encode_out
class target_decode_Generator(nn.Module):
    """Target-domain decoder generator.

    Decodes encoder features back to a 64-channel map, adds the pre-encoding
    skip features, and projects to an image through a Tanh head.
    """

    def __init__(self):
        super(target_decode_Generator, self).__init__()
        self.target_decode_resblocks = nn.Sequential(
            *[tdecode_ResidualBlock() for _ in range(opt.n_residual_blocks)])
        self.tl2 = nn.Sequential(nn.Conv2d(64, opt.channels, 3, 1, 1), nn.Tanh())

    def forward(self, img, encode_out):
        # Skip connection: decoded features are added onto the pre-encoding
        # feature map `img` produced by the encoder generator.
        merged = img + self.target_decode_resblocks(encode_out)
        return self.tl2(merged)
class source_decode_Generator(nn.Module):
    """Source-domain decoder generator.

    Decodes encoder features back to a 64-channel map, adds the pre-encoding
    skip features, and projects to an image through a Tanh head.
    """

    def __init__(self):
        super(source_decode_Generator, self).__init__()
        self.source_decode_resblocks = nn.Sequential(
            *[sdecode_ResidualBlock() for _ in range(opt.n_residual_blocks)])
        self.sl2 = nn.Sequential(nn.Conv2d(64, opt.channels, 3, 1, 1), nn.Tanh())

    def forward(self, img, encode_out):
        # Skip connection: decoded features are added onto the pre-encoding
        # feature map `img` produced by the encoder generator.
        merged = img + self.source_decode_resblocks(encode_out)
        return self.sl2(merged)
class encode_Discriminator(nn.Module):
    """PatchGAN-style discriminator over encoder feature maps.

    The encoder stages emit 8 * 64 = 512 channels, so the first stage must
    accept 512 input channels.
    """

    def __init__(self):
        super(encode_Discriminator, self).__init__()

        def block(in_features, out_features, normalization=True):
            """One downsampling stage: stride-2 conv + LeakyReLU (+ InstanceNorm)."""
            layers = [nn.Conv2d(in_features, out_features, 3, stride=2, padding=1),
                      nn.LeakyReLU(0.2, inplace=True)]
            if normalization:
                layers.append(nn.InstanceNorm2d(out_features))
            return layers

        # Bug fix: the first conv originally expected 256 input channels while
        # the encoders produce 512 (8 * 64), so every call in the training loop
        # (encode_discriminator(encode_fake_B)) raised a shape error. The stage
        # still outputs 512 channels, keeping the rest of the network intact.
        self.model = nn.Sequential(
            *block(512, 512, normalization=False),
            *block(512, 1024),
            nn.Conv2d(1024, 1, 3, 1, 1)
        )

    def forward(self, encode_x):
        return self.model(encode_x)
class Discriminator(nn.Module):
    """PatchGAN image discriminator: four stride-2 conv stages followed by a
    1-channel patch-validity map."""

    def __init__(self):
        super(Discriminator, self).__init__()

        def block(in_features, out_features, normalization=True):
            """One downsampling stage: stride-2 conv + LeakyReLU (+ InstanceNorm)."""
            stage = [nn.Conv2d(in_features, out_features, 3, stride=2, padding=1),
                     nn.LeakyReLU(0.2, inplace=True)]
            if normalization:
                stage.append(nn.InstanceNorm2d(out_features))
            return stage

        self.model = nn.Sequential(
            *block(opt.channels, 64, normalization=False),
            *block(64, 128),
            *block(128, 256),
            *block(256, 512),
            nn.Conv2d(512, 1, 3, 1, 1)
        )

    def forward(self, img):
        return self.model(img)
class encode_Classifier(nn.Module):
    """Classifier over encoder feature maps.

    NOTE(review): this class is never instantiated in the script; its first
    conv expects 256 input channels while the encoders emit 8 * 64 = 512, and
    `input_size` assumes four downsamplings of the raw image — confirm the
    intended input before wiring it in.
    """

    def __init__(self):
        super(encode_Classifier, self).__init__()

        def block(in_features, out_features, normalization=True):
            """One downsampling stage: stride-2 conv + LeakyReLU (+ InstanceNorm)."""
            layers = [nn.Conv2d(in_features, out_features, 3, stride=2, padding=1),
                      nn.LeakyReLU(0.2, inplace=True)]
            if normalization:
                layers.append(nn.InstanceNorm2d(out_features))
            return layers

        # Bug fix: the original was missing the comma after *block(512, 1024),
        # which multiplied the two layer lists together and raised TypeError
        # as soon as the class was constructed.
        self.model = nn.Sequential(
            *block(256, 512, normalization=False),
            *block(512, 1024),
            *block(1024, 2048)
        )
        input_size = opt.img_size // 2**4
        self.output_layer = nn.Sequential(
            nn.Linear(2048*input_size**2, opt.n_classes),
            # Explicit dim=1 matches the legacy implicit behavior for 2-D input
            # and silences the implicit-dim deprecation warning.
            nn.Softmax(dim=1)
        )

    def forward(self, img):
        feature_repr = self.model(img)
        feature_repr = feature_repr.view(feature_repr.size(0), -1)
        return self.output_layer(feature_repr)
class Classifier(nn.Module):
    """Digit classifier operating directly on images.

    NOTE(review): the final Softmax output is fed to torch.nn.CrossEntropyLoss
    in the training loop; CrossEntropyLoss applies log-softmax internally and
    expects raw logits, so probabilities are effectively softmaxed twice —
    confirm whether the Softmax layer is intended.
    """

    def __init__(self):
        super(Classifier, self).__init__()

        def block(in_features, out_features, normalization=True):
            """One downsampling stage: stride-2 conv + LeakyReLU (+ InstanceNorm)."""
            layers = [nn.Conv2d(in_features, out_features, 3, stride=2, padding=1),
                      nn.LeakyReLU(0.2, inplace=True)]
            if normalization:
                layers.append(nn.InstanceNorm2d(out_features))
            return layers

        self.model = nn.Sequential(
            *block(opt.channels, 64, normalization=False),
            *block(64, 128),
            *block(128, 256),
            *block(256, 512)
        )
        # Four stride-2 stages shrink each spatial dim by 2**4.
        input_size = opt.img_size // 2**4
        self.output_layer = nn.Sequential(
            nn.Linear(512*input_size**2, opt.n_classes),
            # Explicit dim=1 matches the legacy implicit behavior for 2-D input
            # and silences the implicit-dim deprecation warning.
            nn.Softmax(dim=1)
        )

    def forward(self, img):
        feature_repr = self.model(img)
        feature_repr = feature_repr.view(feature_repr.size(0), -1)
        return self.output_layer(feature_repr)
# Loss functions: LSGAN-style MSE for both adversarial games (image level and
# encoder-feature level) plus cross-entropy for the classification task.
adversarial_loss = torch.nn.MSELoss()
encode_adversarial_loss = torch.nn.MSELoss()
task_loss = torch.nn.CrossEntropyLoss()
# Loss weights (the 0.1 factor on the feature-level adversarial term is
# hard-coded in the training loop).
lambda_adv = 1
lambda_task = 0.1
# Instantiate generators, discriminators and the task classifier.
target_encode_generator = target_encode_Generator()
target_decode_generator = target_decode_Generator()
source_encode_generator = source_encode_Generator()
source_decode_generator = source_decode_Generator()
encode_discriminator = encode_Discriminator()
discriminator = Discriminator()
classifier = Classifier()
if cuda:
    # Move every module and loss to the GPU when one is available.
    target_encode_generator.cuda()
    target_decode_generator.cuda()
    source_encode_generator.cuda()
    source_decode_generator.cuda()
    encode_discriminator.cuda()
    discriminator.cuda()
    classifier.cuda()
    adversarial_loss.cuda()
    encode_adversarial_loss.cuda()
    task_loss.cuda()
# Initialize all weights with the DCGAN scheme.
target_encode_generator.apply(weights_init_normal)
target_decode_generator.apply(weights_init_normal)
source_encode_generator.apply(weights_init_normal)
source_decode_generator.apply(weights_init_normal)
encode_discriminator.apply(weights_init_normal)
discriminator.apply(weights_init_normal)
classifier.apply(weights_init_normal)
# Data loaders: domain A = MNIST (source), domain B = MNIST-M (target).
os.makedirs('../../data/mnist', exist_ok=True)
dataloader_A = torch.utils.data.DataLoader(
    datasets.MNIST('../../data/mnist', train=True, download=True,
                   transform=transforms.Compose([
                       transforms.Resize(opt.img_size),
                       transforms.ToTensor(),
                       transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
                   ])),
    batch_size=opt.batch_size, shuffle=True)
os.makedirs('../../data/mnistm', exist_ok=True)
dataloader_B = torch.utils.data.DataLoader(
    MNISTM('../../data/mnistm', train=True, download=True,
           transform=transforms.Compose([
               transforms.Resize(opt.img_size),
               transforms.ToTensor(),
               transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
           ])),
    batch_size=opt.batch_size, shuffle=True)
# Optimizers: one Adam over all generator + classifier parameters, one over
# both discriminators.
optimizer_G = torch.optim.Adam( itertools.chain(target_encode_generator.parameters(),
                                source_encode_generator.parameters(), target_decode_generator.parameters(),
                                source_decode_generator.parameters(),
                                classifier.parameters()),
                                lr=opt.lr, betas=(opt.b1, opt.b2))
optimizer_D = torch.optim.Adam(itertools.chain(encode_discriminator.parameters(), discriminator.parameters()), lr=opt.lr, betas=(opt.b1, opt.b2))
# Tensor aliases so the training loop works on both CPU and GPU.
FloatTensor = torch.cuda.FloatTensor if cuda else torch.FloatTensor
LongTensor = torch.cuda.LongTensor if cuda else torch.LongTensor
# ----------
#  Training
# ----------
# Rolling windows of the last 100 accuracy measurements.
task_performance = []
target_performance = []
for epoch in range(opt.n_epochs):
    for i, ((imgs_A, labels_A), (imgs_B, labels_B)) in enumerate(zip(dataloader_A, dataloader_B)):

        batch_size = imgs_A.size(0)

        # Adversarial ground truths (PatchGAN: one label per output patch).
        valid = Variable(FloatTensor(batch_size, *patch).fill_(1.0), requires_grad=False)
        fake = Variable(FloatTensor(batch_size, *patch).fill_(0.0), requires_grad=False)

        # Configure input; grayscale MNIST is expanded to 3 channels.
        imgs_A = Variable(imgs_A.type(FloatTensor).expand(batch_size, 3, opt.img_size, opt.img_size))
        labels_A = Variable(labels_A.type(LongTensor))
        imgs_B = Variable(imgs_B.type(FloatTensor))

        # -----------------
        #  Train Generator
        # -----------------

        optimizer_G.zero_grad()

        # Sample noise
        z = Variable(FloatTensor(np.random.uniform(-1, 1, (batch_size, opt.latent_dim))))

        # Translate source images into the target domain.
        imgs_A_x, encode_fake_B = source_encode_generator(imgs_A, z)
        decode_fake_B = source_decode_generator(imgs_A_x, encode_fake_B)

        # Perform task on translated source image
        label_pred = classifier(decode_fake_B)

        # Task loss on both the translated and the original source images.
        task_loss_ = (task_loss(label_pred, labels_A) + \
                      task_loss(classifier(imgs_A), labels_A)) / 2

        # Generator tries to fool both the image-level and the
        # encoder-feature-level discriminators.
        g_loss = lambda_adv * adversarial_loss(discriminator(decode_fake_B), valid) + \
                 0.1 * encode_adversarial_loss(encode_discriminator(encode_fake_B), valid) + \
                 lambda_task * task_loss_

        g_loss.backward()
        optimizer_G.step()

        # ---------------------
        #  Train Discriminator
        # ---------------------

        optimizer_D.zero_grad()

        imgs_B_x, encode_real_B = target_encode_generator(imgs_B, z)
        decode_real_B = target_decode_generator(imgs_B_x, encode_real_B)

        # Measure discriminator's ability to classify real from generated samples
        encode_real_loss = adversarial_loss(encode_discriminator(encode_real_B), valid)
        encode_fake_loss = adversarial_loss(encode_discriminator(encode_fake_B.detach()), fake)
        decode_real_loss = adversarial_loss(discriminator(decode_real_B), valid)
        decode_fake_loss = adversarial_loss(discriminator(decode_fake_B.detach()), fake)
        encode_d_loss = (encode_real_loss + encode_fake_loss) / 2
        decode_d_loss = (decode_real_loss + decode_fake_loss) / 2
        d_loss = encode_d_loss + decode_d_loss

        d_loss.backward()
        optimizer_D.step()

        # ---------------------------------------
        #  Evaluate Performance on target domain
        # ---------------------------------------

        # Accuracy on translated Domain A (rolling window of 100 batches).
        acc = np.mean(np.argmax(label_pred.data.cpu().numpy(), axis=1) == labels_A.data.cpu().numpy())
        task_performance.append(acc)
        if len(task_performance) > 100:
            task_performance.pop(0)

        # Accuracy on Domain B (rolling window of 100 batches).
        pred_B = classifier(imgs_B)
        target_acc = np.mean(np.argmax(pred_B.data.cpu().numpy(), axis=1) == labels_B.numpy())
        target_performance.append(target_acc)
        if len(target_performance) > 100:
            target_performance.pop(0)

        print("[Epoch %d/%d] [Batch %d/%d] [D loss: %f] [G loss: %f] [CLF acc: %3d%% (%3d%%), target_acc: %3d%% (%3d%%)]" %
              (epoch, opt.n_epochs,
               i, len(dataloader_A),
               d_loss.item(), g_loss.item(),
               100*acc, 100*np.mean(task_performance),
               100*target_acc, 100*np.mean(target_performance)))

        batches_done = len(dataloader_A) * epoch + i
        if batches_done % opt.sample_interval == 0:
            # Bug fix: the original referenced an undefined name `fake_B`,
            # raising NameError on the very first batch (0 % sample_interval
            # == 0). The translated batch is named decode_fake_B.
            sample = torch.cat((imgs_A.data[:5], decode_fake_B.data[:5], imgs_B.data[:5]), -2)
            save_image(sample, 'images/%d.png' % batches_done, nrow=int(math.sqrt(batch_size)), normalize=True)
| 37.917647 | 145 | 0.631554 | import argparse
import os
import numpy as np
import math
import itertools
import torchvision.transforms as transforms
from torchvision.utils import save_image
from torch.utils.data import DataLoader
from torchvision import datasets
from torch.autograd import Variable
from mnistm import MNISTM
import torch.nn as nn
import torch.nn.functional as F
import torch
os.makedirs('images', exist_ok=True)
parser = argparse.ArgumentParser()
parser.add_argument('--n_epochs', type=int, default=200, help='number of epochs of training')
parser.add_argument('--batch_size', type=int, default=64, help='size of the batches')
parser.add_argument('--lr', type=float, default=0.0002, help='adam: learning rate')
parser.add_argument('--b1', type=float, default=0.5, help='adam: decay of first order momentum of gradient')
parser.add_argument('--b2', type=float, default=0.999, help='adam: decay of first order momentum of gradient')
parser.add_argument('--n_cpu', type=int, default=8, help='number of cpu threads to use during batch generation')
parser.add_argument('--n_residual_blocks', type=int, default=1, help='number of residual blocks in generator')
parser.add_argument('--latent_dim', type=int, default=10, help='dimensionality of the noise input')
parser.add_argument('--img_size', type=int, default=32, help='size of each image dimension')
parser.add_argument('--channels', type=int, default=3, help='number of image channels')
parser.add_argument('--n_classes', type=int, default=10, help='number of classes in the dataset')
parser.add_argument('--sample_interval', type=int, default=300, help='interval betwen image samples')
opt = parser.parse_args()
print(opt)
patch = int(opt.img_size / 2**4)
patch = (1, patch, patch)
cuda = True if torch.cuda.is_available() else False
print("cuda : {}".format(cuda))
def weights_init_normal(m):
classname = m.__class__.__name__
print("classname : {}".format(classname))
if classname.find('Conv') != -1:
torch.nn.init.normal_(m.weight.data, 0.0, 0.02)
elif classname.find('BatchNorm') != -1:
torch.nn.init.normal_(m.weight.data, 1.0, 0.02)
torch.nn.init.constant_(m.bias.data, 0.0)
class ResidualBlock_back(nn.Module):
def __init__(self, in_features=64, out_features=64):
super(ResidualBlock, self).__init__()
self.block = nn.Sequential(
nn.Conv2d(in_features, in_features, 3, 1, 1),
nn.BatchNorm2d(in_features),
nn.ReLU(inplace=True),
nn.Conv2d(in_features, in_features, 3, 1, 1),
nn.BatchNorm2d(in_features)
)
def forward(self, x):
return x + self.block(x)
class sencode_ResidualBlock(nn.Module):
def __init__(self, in_features=64, out_features=64):
super(sencode_ResidualBlock, self).__init__()
de_block = nn.Sequential(
nn.Conv2d(in_channels=1*in_features,out_channels=4*in_features,kernel_size=(3, 3),stride=(2, 2),padding=0),
nn.BatchNorm2d(4*in_features),
nn.LeakyReLU(inplace=True),
nn.Conv2d(in_channels=4*in_features,out_channels=8*in_features,kernel_size=(3, 3),stride=(2, 2),padding=1),
nn.BatchNorm2d(8*in_features),
nn.LeakyReLU(inplace=True)
)
def forward(self, x):
encode_x = self.sencode_block(x)
return x, encode_x
class sdecode_ResidualBlock(nn.Module):
def __init__(self, in_features=64, out_features=64):
super(sdecode_ResidualBlock, self).__init__()
self.sdecode_block = nn.Sequential(
nn.ConvTranspose2d(in_channels=8*in_features,out_channels=4*in_features,kernel_size=(3, 3),stride=(2, 2), padding=0),
nn.BatchNorm2d(4*in_features),
nn.LeakyReLU(inplace=True),
nn.ConvTranspose2d(in_channels=4*in_features,out_channels=1*in_features,kernel_size=(3, 3),stride=(2, 2),padding=1),
nn.BatchNorm2d(1*in_features),
nn.LeakyReLU(inplace=True),
)
def forward(self, encode_x):
decode_x = self.sdecode_block(encode_x)
decode_x = decode_x[:, :, :-1, :-1]
decode_x = F.sigmoid(decode_x)
return decode_x
class tencode_ResidualBlock(nn.Module):
def __init__(self, in_features=64, out_features=64):
super(tencode_ResidualBlock, self).__init__()
de_block = nn.Sequential(
nn.Conv2d(in_channels=1*in_features,out_channels=4*in_features,kernel_size=(3, 3),stride=(2, 2),padding=0),
nn.BatchNorm2d(4*in_features),
nn.LeakyReLU(inplace=True),
nn.Conv2d(in_channels=4*in_features,out_channels=8*in_features,kernel_size=(3, 3),stride=(2, 2),padding=1),
nn.BatchNorm2d(8*in_features),
nn.LeakyReLU(inplace=True)
)
def forward(self, x):
encode_x = self.tencode_block(x)
return x, encode_x
class tdecode_ResidualBlock(nn.Module):
def __init__(self, in_features=64, out_features=64):
super(tdecode_ResidualBlock, self).__init__()
self.tdecode_block = nn.Sequential(
nn.ConvTranspose2d(in_channels=8*in_features,out_channels=4*in_features,kernel_size=(3, 3),stride=(2, 2), padding=0),
nn.BatchNorm2d(4*in_features),
nn.LeakyReLU(inplace=True),
nn.ConvTranspose2d(in_channels=4*in_features,out_channels=1*in_features,kernel_size=(3, 3),stride=(2, 2),padding=1),
nn.BatchNorm2d(1*in_features),
nn.LeakyReLU(inplace=True),
)
def forward(self, encode_x):
decode_x = self.tdecode_block(encode_x)
decode_x = decode_x[:, :, :-1, :-1]
decode_x = F.sigmoid(decode_x)
return decode_x
class target_encode_Generator(nn.Module):
def __init__(self):
super(target_encode_Generator, self).__init__()
self.tfc = nn.Linear(opt.latent_dim, opt.channels*opt.img_size**2)
self.tl1 = nn.Sequential(nn.Conv2d(opt.channels*2, 64, 3, 1, 1), nn.ReLU(inplace=True))
resblocks = []
for _ in range(opt.n_residual_blocks):
resblocks.append(tencode_ResidualBlock())
self.tencode_resblocks = nn.Sequential(*resblocks)
def forward(self, img, z):
gen_input = torch.cat((img, self.tfc(z).view(*img.shape)), 1)
out = self.tl1(gen_input)
x, encode_out = self.tencode_resblocks(out)
return x, encode_out
class source_encode_Generator(nn.Module):
def __init__(self):
super(source_encode_Generator, self).__init__()
self.sfc = nn.Linear(opt.latent_dim, opt.channels*opt.img_size**2)
self.sl1 = nn.Sequential(nn.Conv2d(opt.channels*2, 64, 3, 1, 1), nn.ReLU(inplace=True))
resblocks = []
for _ in range(opt.n_residual_blocks):
resblocks.append(sencode_ResidualBlock())
self.sencode_resblocks = nn.Sequential(*resblocks)
def forward(self, img, z):
gen_input = torch.cat((img, self.sfc(z).view(*img.shape)), 1)
out = self.sl1(gen_input)
x, encode_out = self.sencode_resblocks(out)
return x, encode_out
class target_decode_Generator(nn.Module):
def __init__(self):
super(target_decode_Generator, self).__init__()
resblocks = []
for _ in range(opt.n_residual_blocks):
resblocks.append(tdecode_ResidualBlock())
self.target_decode_resblocks = nn.Sequential(*resblocks)
self.tl2 = nn.Sequential(nn.Conv2d(64, opt.channels, 3, 1, 1), nn.Tanh())
def forward(self, img, encode_out):
out = img + self.target_decode_resblocks(encode_out)
img_ = self.tl2(out)
return img_
class source_decode_Generator(nn.Module):
def __init__(self):
super(source_decode_Generator, self).__init__()
resblocks = []
for _ in range(opt.n_residual_blocks):
resblocks.append(sdecode_ResidualBlock())
self.source_decode_resblocks = nn.Sequential(*resblocks)
self.sl2 = nn.Sequential(nn.Conv2d(64, opt.channels, 3, 1, 1), nn.Tanh())
def forward(self, img, encode_out):
out = img + self.source_decode_resblocks(encode_out)
img_ = self.sl2(out)
return img_
class encode_Discriminator(nn.Module):
def __init__(self):
super(encode_Discriminator, self).__init__()
def block(in_features, out_features, normalization=True):
layers = [ nn.Conv2d(in_features, out_features, 3, stride=2, padding=1),
nn.LeakyReLU(0.2, inplace=True) ]
if normalization:
layers.append(nn.InstanceNorm2d(out_features))
return layers
self.model = nn.Sequential(
*block(256, 512, normalization=False),
*block(512, 1024),
nn.Conv2d(1024, 1, 3, 1, 1)
)
def forward(self, encode_x):
validity = self.model(encode_x)
return validity
class Discriminator(nn.Module):
def __init__(self):
super(Discriminator, self).__init__()
def block(in_features, out_features, normalization=True):
layers = [ nn.Conv2d(in_features, out_features, 3, stride=2, padding=1),
nn.LeakyReLU(0.2, inplace=True) ]
if normalization:
layers.append(nn.InstanceNorm2d(out_features))
return layers
self.model = nn.Sequential(
*block(opt.channels, 64, normalization=False),
*block(64, 128),
*block(128, 256),
*block(256, 512),
nn.Conv2d(512, 1, 3, 1, 1)
)
def forward(self, img):
validity = self.model(img)
return validity
class encode_Classifier(nn.Module):
def __init__(self):
super(encode_Classifier, self).__init__()
def block(in_features, out_features, normalization=True):
layers = [ nn.Conv2d(in_features, out_features, 3, stride=2, padding=1),
nn.LeakyReLU(0.2, inplace=True) ]
if normalization:
layers.append(nn.InstanceNorm2d(out_features))
return layers
self.model = nn.Sequential(
*block(256, 512, normalization=False),
*block(512, 1024)
*block(1024, 2048)
)
input_size = opt.img_size // 2**4
self.output_layer = nn.Sequential(
nn.Linear(2048*input_size**2, opt.n_classes),
nn.Softmax()
)
def forward(self, img):
feature_repr = self.model(img)
feature_repr = feature_repr.view(feature_repr.size(0), -1)
label = self.output_layer(feature_repr)
return label
class Classifier(nn.Module):
def __init__(self):
super(Classifier, self).__init__()
def block(in_features, out_features, normalization=True):
layers = [ nn.Conv2d(in_features, out_features, 3, stride=2, padding=1),
nn.LeakyReLU(0.2, inplace=True) ]
if normalization:
layers.append(nn.InstanceNorm2d(out_features))
return layers
self.model = nn.Sequential(
*block(opt.channels, 64, normalization=False),
*block(64, 128),
*block(128, 256),
*block(256, 512)
)
input_size = opt.img_size // 2**4
self.output_layer = nn.Sequential(
nn.Linear(512*input_size**2, opt.n_classes),
nn.Softmax()
)
def forward(self, img):
feature_repr = self.model(img)
feature_repr = feature_repr.view(feature_repr.size(0), -1)
label = self.output_layer(feature_repr)
return label
adversarial_loss = torch.nn.MSELoss()
encode_adversarial_loss = torch.nn.MSELoss()
task_loss = torch.nn.CrossEntropyLoss()
lambda_adv = 1
lambda_task = 0.1
target_encode_generator = target_encode_Generator()
target_decode_generator = target_decode_Generator()
source_encode_generator = source_encode_Generator()
source_decode_generator = source_decode_Generator()
encode_discriminator = encode_Discriminator()
discriminator = Discriminator()
classifier = Classifier()
if cuda:
target_encode_generator.cuda()
target_decode_generator.cuda()
source_encode_generator.cuda()
source_decode_generator.cuda()
encode_discriminator.cuda()
discriminator.cuda()
classifier.cuda()
adversarial_loss.cuda()
encode_adversarial_loss.cuda()
task_loss.cuda()
target_encode_generator.apply(weights_init_normal)
target_decode_generator.apply(weights_init_normal)
source_encode_generator.apply(weights_init_normal)
source_decode_generator.apply(weights_init_normal)
encode_discriminator.apply(weights_init_normal)
discriminator.apply(weights_init_normal)
classifier.apply(weights_init_normal)
os.makedirs('../../data/mnist', exist_ok=True)
dataloader_A = torch.utils.data.DataLoader(
datasets.MNIST('../../data/mnist', train=True, download=True,
transform=transforms.Compose([
transforms.Resize(opt.img_size),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
])),
batch_size=opt.batch_size, shuffle=True)
os.makedirs('../../data/mnistm', exist_ok=True)
dataloader_B = torch.utils.data.DataLoader(
MNISTM('../../data/mnistm', train=True, download=True,
transform=transforms.Compose([
transforms.Resize(opt.img_size),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
])),
batch_size=opt.batch_size, shuffle=True)
optimizer_G = torch.optim.Adam( itertools.chain(target_encode_generator.parameters(),
source_encode_generator.parameters(), target_decode_generator.parameters(),
source_decode_generator.parameters(),
classifier.parameters()),
lr=opt.lr, betas=(opt.b1, opt.b2))
optimizer_D = torch.optim.Adam(itertools.chain(encode_discriminator.parameters(), discriminator.parameters()), lr=opt.lr, betas=(opt.b1, opt.b2))
FloatTensor = torch.cuda.FloatTensor if cuda else torch.FloatTensor
LongTensor = torch.cuda.LongTensor if cuda else torch.LongTensor
task_performance = []
target_performance = []
for epoch in range(opt.n_epochs):
for i, ((imgs_A, labels_A), (imgs_B, labels_B)) in enumerate(zip(dataloader_A, dataloader_B)):
batch_size = imgs_A.size(0)
valid = Variable(FloatTensor(batch_size, *patch).fill_(1.0), requires_grad=False)
fake = Variable(FloatTensor(batch_size, *patch).fill_(0.0), requires_grad=False)
imgs_A = Variable(imgs_A.type(FloatTensor).expand(batch_size, 3, opt.img_size, opt.img_size))
labels_A = Variable(labels_A.type(LongTensor))
imgs_B = Variable(imgs_B.type(FloatTensor))
optimizer_G.zero_grad()
z = Variable(FloatTensor(np.random.uniform(-1, 1, (batch_size, opt.latent_dim))))
imgs_A_x, encode_fake_B = source_encode_generator(imgs_A, z)
decode_fake_B = source_decode_generator(imgs_A_x, encode_fake_B)
label_pred = classifier(decode_fake_B)
task_loss_ = (task_loss(label_pred, labels_A) + \
task_loss(classifier(imgs_A), labels_A)) / 2
g_loss = lambda_adv * adversarial_loss(discriminator(decode_fake_B), valid) + \
0.1 * encode_adversarial_loss(encode_discriminator(encode_fake_B), valid) + \
lambda_task * task_loss_
g_loss.backward()
optimizer_G.step()
# ---------------------
# Train Discriminator
# ---------------------
optimizer_D.zero_grad()
imgs_B_x, encode_real_B = target_encode_generator(imgs_B, z)
decode_real_B = target_decode_generator(imgs_B_x, encode_real_B)
# Measure discriminator's ability to classify real from generated samples
encode_real_loss = adversarial_loss(encode_discriminator(encode_real_B), valid)
encode_fake_loss = adversarial_loss(encode_discriminator(encode_fake_B.detach()), fake)
decode_real_loss = adversarial_loss(discriminator(decode_real_B), valid)
decode_fake_loss = adversarial_loss(discriminator(decode_fake_B.detach()), fake)
encode_d_loss = (encode_real_loss + encode_fake_loss) / 2
decode_d_loss = (decode_real_loss + decode_fake_loss) / 2
d_loss = encode_d_loss + decode_d_loss
d_loss.backward()
optimizer_D.step()
acc = np.mean(np.argmax(label_pred.data.cpu().numpy(), axis=1) == labels_A.data.cpu().numpy())
task_performance.append(acc)
if len(task_performance) > 100:
task_performance.pop(0)
pred_B = classifier(imgs_B)
target_acc = np.mean(np.argmax(pred_B.data.cpu().numpy(), axis=1) == labels_B.numpy())
target_performance.append(target_acc)
if len(target_performance) > 100:
target_performance.pop(0)
print ("[Epoch %d/%d] [Batch %d/%d] [D loss: %f] [G loss: %f] [CLF acc: %3d%% (%3d%%), target_acc: %3d%% (%3d%%)]" %
(epoch, opt.n_epochs,
i, len(dataloader_A),
d_loss.item(), g_loss.item(),
100*acc, 100*np.mean(task_performance),
100*target_acc, 100*np.mean(target_performance)))
batches_done = len(dataloader_A) * epoch + i
if batches_done % opt.sample_interval == 0:
sample = torch.cat((imgs_A.data[:5], fake_B.data[:5], imgs_B.data[:5]), -2)
save_image(sample, 'images/%d.png' % batches_done, nrow=int(math.sqrt(batch_size)), normalize=True)
| true | true |
f71a7bc3530cae6fe552775aa2d6f0317c406877 | 481 | py | Python | ocean_utils/http_requests/requests_session.py | oceanprotocol/common-utils-py | f577f4762841496584e114baaec0d476e73c700e | [
"Apache-2.0"
] | 1 | 2020-12-02T13:49:43.000Z | 2020-12-02T13:49:43.000Z | common_utils_py/http_requests/requests_session.py | nevermined-io/common-utils-py | 4a02843d4f4771935b6f057badac844fef6f6f13 | [
"Apache-2.0"
] | 2 | 2021-08-24T13:14:47.000Z | 2021-12-01T17:06:29.000Z | common_utils_py/http_requests/requests_session.py | nevermined-io/common-utils-py | 4a02843d4f4771935b6f057badac844fef6f6f13 | [
"Apache-2.0"
] | null | null | null | import requests
from requests.adapters import HTTPAdapter
def get_requests_session():
    """Create a `requests` session with enlarged, blocking connection pools.

    Avoids urllib3 `connection pool full` warnings under concurrent use.

    :return: configured requests session
    """
    pool_opts = dict(pool_connections=25, pool_maxsize=25, pool_block=True)
    session = requests.sessions.Session()
    for scheme in ('http://', 'https://'):
        session.mount(scheme, HTTPAdapter(**pool_opts))
    return session
| 32.066667 | 97 | 0.738046 | import requests
from requests.adapters import HTTPAdapter
def get_requests_session():
session = requests.sessions.Session()
session.mount('http://', HTTPAdapter(pool_connections=25, pool_maxsize=25, pool_block=True))
session.mount('https://', HTTPAdapter(pool_connections=25, pool_maxsize=25, pool_block=True))
return session
| true | true |
f71a7d056b1aa807f43b720faed6745239c9c75f | 1,872 | py | Python | app.py | ssvfx41/tk-houdini-geometrynode | 03454d3c6773b0a48531ab24ace60928f11c4a4e | [
"MIT"
] | null | null | null | app.py | ssvfx41/tk-houdini-geometrynode | 03454d3c6773b0a48531ab24ace60928f11c4a4e | [
"MIT"
] | null | null | null | app.py | ssvfx41/tk-houdini-geometrynode | 03454d3c6773b0a48531ab24ace60928f11c4a4e | [
"MIT"
] | null | null | null | # Copyright (c) 2015 Pixomondo
#
# CONFIDENTIAL AND PROPRIETARY
#
# This work is provided "AS IS" and subject to the MIT License included in this
# distribution package. See LICENSE.
# By accessing, using, copying or modifying this work you indicate your
# agreement to the MIT License. All rights
# not expressly granted therein are reserved by Pixomondo.
"""
Geometry Output App for Houdini
"""
import sgtk
class GeometryOutputNode(sgtk.platform.Application):
    """Toolkit app wrapping the Houdini Geometry output node integration."""

    def init_app(self):
        """Import the app module and create the node handler."""
        module = self.import_module("tk_houdini_geometrynode")
        self.handler = module.ToolkitGeometryNodeHandler(self)

    def convert_to_geometry_nodes(self):
        """
        Convert all Shotgun Geometry nodes found in the current Script to
        regular Geometry nodes. Additional toolkit information will be stored
        in user data named 'tk_*'.
        """
        self.handler.convert_sg_to_geometry_nodes()

    def convert_from_geometry_nodes(self):
        """
        Convert all regular Geometry nodes that have previously been converted
        from Shotgun Geometry nodes, back into Shotgun Geometry nodes.
        """
        self.handler.convert_geometry_to_sg_nodes()

    def get_nodes(self):
        """
        Return a list of hou.node objects for each tk geometry node.

        Example usage::

            >>> import sgtk
            >>> eng = sgtk.platform.current_engine()
            >>> app = eng.apps["tk-houdini-geometrynode"]
            >>> tk_alembic_nodes = app.get_nodes()
        """
        self.log_debug("Retrieving tk-houdini-geometrynode nodes...")
        handler_module = self.import_module("tk_houdini_geometrynode")
        found_nodes = handler_module.ToolkitGeometryNodeHandler.\
            get_all_tk_geometry_nodes()
        self.log_debug("Found %s tk-houdini-geometrynode nodes." % (len(found_nodes),))
        return found_nodes
| 33.428571 | 81 | 0.6875 |
import sgtk
class GeometryOutputNode(sgtk.platform.Application):
def init_app(self):
module = self.import_module("tk_houdini_geometrynode")
self.handler = module.ToolkitGeometryNodeHandler(self)
def convert_to_geometry_nodes(self):
self.handler.convert_sg_to_geometry_nodes()
def convert_from_geometry_nodes(self):
self.handler.convert_geometry_to_sg_nodes()
def get_nodes(self):
self.log_debug("Retrieving tk-houdini-geometrynode nodes...")
tk_houdini_geometrynode = self.import_module("tk_houdini_geometrynode")
nodes = tk_houdini_geometrynode.ToolkitGeometryNodeHandler.\
get_all_tk_geometry_nodes()
self.log_debug("Found %s tk-houdini-geometrynode nodes." % (len(nodes),))
return nodes
| true | true |
f71a7ea56d84335a1f6f15af7d71e033e8ced3a1 | 996 | py | Python | tests/test_inference.py | weiyx16/mmsegmentation | 6d35d76195f173fbc6b119a7d7815e67d78024c6 | [
"Apache-2.0"
] | 21 | 2022-01-11T14:06:25.000Z | 2022-03-29T06:42:13.000Z | tests/test_inference.py | weiyx16/mmsegmentation | 6d35d76195f173fbc6b119a7d7815e67d78024c6 | [
"Apache-2.0"
] | 13 | 2022-02-15T20:05:18.000Z | 2022-02-15T20:05:21.000Z | tests/test_inference.py | weiyx16/mmsegmentation | 6d35d76195f173fbc6b119a7d7815e67d78024c6 | [
"Apache-2.0"
] | 11 | 2022-01-11T16:05:24.000Z | 2022-03-17T01:58:52.000Z | # Copyright (c) OpenMMLab. All rights reserved.
import os.path as osp
import mmcv
from mmseg.apis import inference_segmentor, init_segmentor
def test_test_time_augmentation_on_cpu():
    """Run PSPNet inference on CPU with flip test-time augmentation enabled."""
    cfg_path = 'configs/pspnet/pspnet_r50-d8_512x1024_40k_cityscapes.py'
    cfg = mmcv.Config.fromfile(cfg_path)

    # No pretrained weights are needed for this test.
    cfg.model.pretrained = None

    # SyncBN only works in distributed GPU runs; plain BN runs on CPU.
    bn_cfg = dict(type='BN', requires_grad=True)
    for head in (cfg.model.backbone, cfg.model.decode_head,
                 cfg.model.auxiliary_head):
        head.norm_cfg = bn_cfg

    # Switch on flip-based test time augmentation.
    cfg.data.test.pipeline[1].flip = True

    model = init_segmentor(cfg, None, device='cpu')
    image = mmcv.imread(
        osp.join(osp.dirname(__file__), 'data/color.jpg'), 'color')
    output = inference_segmentor(model, image)
    assert output[0].shape == (288, 512)
| 32.129032 | 75 | 0.73494 |
import os.path as osp
import mmcv
from mmseg.apis import inference_segmentor, init_segmentor
def test_test_time_augmentation_on_cpu():
    """Smoke-test CPU inference for PSPNet with flip test-time augmentation."""
    config_file = 'configs/pspnet/pspnet_r50-d8_512x1024_40k_cityscapes.py'
    config = mmcv.Config.fromfile(config_file)
    # Skip the pretrained-weight download for this test.
    config.model.pretrained = None
    # SyncBN requires a distributed GPU setup; swap in plain BN so the
    # model can be built and run on CPU.
    norm_cfg = dict(type='BN', requires_grad=True)
    config.model.backbone.norm_cfg = norm_cfg
    config.model.decode_head.norm_cfg = norm_cfg
    config.model.auxiliary_head.norm_cfg = norm_cfg
    # Enable flip-based test time augmentation.
    config.data.test.pipeline[1].flip = True
    checkpoint_file = None
    model = init_segmentor(config, checkpoint_file, device='cpu')
    img = mmcv.imread(
        osp.join(osp.dirname(__file__), 'data/color.jpg'), 'color')
    result = inference_segmentor(model, img)
    # Expected spatial shape of the segmentation map for data/color.jpg.
    assert result[0].shape == (288, 512)
| true | true |
f71a801eee241a74789c0995cf4813e2cdb9335f | 19,958 | py | Python | sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio.py | RusOr10n/beam | ede14d4aa7d239f74d5565a28a7c4433eaaa7d47 | [
"Apache-2.0"
] | 1 | 2019-12-05T04:36:46.000Z | 2019-12-05T04:36:46.000Z | sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio.py | RusOr10n/beam | ede14d4aa7d239f74d5565a28a7c4433eaaa7d47 | [
"Apache-2.0"
] | 14 | 2020-02-12T22:20:41.000Z | 2021-11-09T19:41:23.000Z | sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio.py | violalyu/beam | dd605e568d70b1a6ebea60c15b2aec3e240f3914 | [
"Apache-2.0"
] | null | null | null | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
A connector for reading from and writing to Google Cloud Datastore.
Please use this module for Datastore I/O since
``apache_beam.io.gcp.datastore.v1.datastoreio`` will be deprecated in the
next Beam major release.
This module uses the newer google-cloud-datastore package. Its API was different
enough to require extensive changes to this and associated modules.
This module is experimental, no backwards compatibility guarantees.
"""
from __future__ import absolute_import
from __future__ import division
import logging
import time
from builtins import round
from apache_beam import typehints
from apache_beam.io.gcp.datastore.v1 import util
from apache_beam.io.gcp.datastore.v1.adaptive_throttler import AdaptiveThrottler
from apache_beam.io.gcp.datastore.v1new import helper
from apache_beam.io.gcp.datastore.v1new import query_splitter
from apache_beam.io.gcp.datastore.v1new import types
from apache_beam.metrics.metric import Metrics
from apache_beam.transforms import Create
from apache_beam.transforms import DoFn
from apache_beam.transforms import ParDo
from apache_beam.transforms import PTransform
from apache_beam.transforms import Reshuffle
from apache_beam.utils import retry
__all__ = ['ReadFromDatastore', 'WriteToDatastore', 'DeleteFromDatastore']
@typehints.with_output_types(types.Entity)
class ReadFromDatastore(PTransform):
  """A ``PTransform`` for querying Google Cloud Datastore.

  To read a ``PCollection[Entity]`` from a Cloud Datastore ``Query``, use
  the ``ReadFromDatastore`` transform by providing a `query` to
  read from. The project and optional namespace are set in the query.

  The query will be split into multiple queries to allow for parallelism. The
  degree of parallelism is automatically determined, but can be overridden by
  setting `num_splits` to a value of 1 or greater.

  Note: Normally, a runner will read from Cloud Datastore in parallel across
  many workers. However, when the `query` is configured with a `limit` or if
  the query contains inequality filters like `GREATER_THAN, LESS_THAN` etc.,
  then all the returned results will be read by a single worker in order to
  ensure correct data. Since data is read from a single worker, this could
  have significant impact on the performance of the job. Using a
  :class:`~apache_beam.transforms.util.Reshuffle` transform after the read in
  this case might be beneficial for parallelizing work across workers.

  The semantics for query splitting is defined below:

  1. If `num_splits` is equal to 0, then the number of splits will be chosen
  dynamically at runtime based on the query data size.

  2. Any value of `num_splits` greater than
  `ReadFromDatastore._NUM_QUERY_SPLITS_MAX` will be capped at that value.

  3. If the `query` has a user limit set, or contains inequality filters, then
  `num_splits` will be ignored and no split will be performed.

  4. Under certain cases Cloud Datastore is unable to split query to the
  requested number of splits. In such cases we just use whatever Cloud
  Datastore returns.

  See https://developers.google.com/datastore/ for more details on Google
  Cloud Datastore.
  """

  # An upper bound on the number of splits for a query.
  _NUM_QUERY_SPLITS_MAX = 50000
  # A lower bound on the number of splits for a query. This is to ensure that
  # we parallelize the query even when Datastore statistics are not available.
  _NUM_QUERY_SPLITS_MIN = 12
  # Default bundle size of 64MB.
  _DEFAULT_BUNDLE_SIZE_BYTES = 64 * 1024 * 1024

  def __init__(self, query, num_splits=0):
    """Initialize the `ReadFromDatastore` transform.

    This transform outputs elements of type
    :class:`~apache_beam.io.gcp.datastore.v1new.types.Entity`.

    Args:
      query: (:class:`~apache_beam.io.gcp.datastore.v1new.types.Query`) query
        used to fetch entities.
      num_splits: (:class:`int`) (optional) Number of splits for the query.

    Raises:
      ValueError: if `query` is empty, `query.project` is empty, or
        `num_splits` is negative.
    """
    super(ReadFromDatastore, self).__init__()

    # Validate the query object itself before touching its attributes;
    # otherwise a None/empty query would raise AttributeError from
    # `query.project` instead of the intended ValueError.
    if not query:
      raise ValueError("query cannot be empty")
    if not query.project:
      raise ValueError("query.project cannot be empty")
    if num_splits < 0:
      raise ValueError("num_splits must be greater than or equal 0")

    self._project = query.project
    # using _namespace conflicts with DisplayData._namespace
    self._datastore_namespace = query.namespace
    self._query = query
    self._num_splits = num_splits

  def expand(self, pcoll):
    # This is a composite transform involves the following:
    #   1. Create a singleton of the user provided `query` and apply a
    #      ``ParDo`` that splits the query into `num_splits` queries if
    #      possible. If `num_splits` is 0, the number of splits is computed
    #      dynamically based on the size of the data for the `query`.
    #   2. The resulting ``PCollection`` is sharded across workers using a
    #      ``Reshuffle`` operation.
    #   3. A ``ParDo`` reads entities for each query and outputs a
    #      ``PCollection[Entity]``.
    return (pcoll.pipeline
            | 'UserQuery' >> Create([self._query])
            | 'SplitQuery' >> ParDo(ReadFromDatastore._SplitQueryFn(
                self._num_splits))
            | Reshuffle()
            | 'Read' >> ParDo(ReadFromDatastore._QueryFn()))

  def display_data(self):
    disp_data = {'project': self._query.project,
                 'query': str(self._query),
                 'num_splits': self._num_splits}
    if self._datastore_namespace is not None:
      disp_data['namespace'] = self._datastore_namespace
    return disp_data

  @typehints.with_input_types(types.Query)
  @typehints.with_output_types(types.Query)
  class _SplitQueryFn(DoFn):
    """A `DoFn` that splits a given query into multiple sub-queries."""

    def __init__(self, num_splits):
      super(ReadFromDatastore._SplitQueryFn, self).__init__()
      self._num_splits = num_splits

    def process(self, query, *args, **kwargs):
      client = helper.get_client(query.project, query.namespace)
      try:
        # Short circuit estimating num_splits if split is not possible.
        query_splitter.validate_split(query)

        if self._num_splits == 0:
          estimated_num_splits = self.get_estimated_num_splits(client, query)
        else:
          estimated_num_splits = self._num_splits

        logging.info("Splitting the query into %d splits",
                     estimated_num_splits)
        query_splits = query_splitter.get_splits(
            client, query, estimated_num_splits)
      except query_splitter.QuerySplitterError:
        # Fall back to reading the whole query with a single worker.
        logging.info("Unable to parallelize the given query: %s", query,
                     exc_info=True)
        query_splits = [query]

      return query_splits

    def display_data(self):
      disp_data = {'num_splits': self._num_splits}
      return disp_data

    @staticmethod
    def query_latest_statistics_timestamp(client):
      """Fetches the latest timestamp of statistics from Cloud Datastore.

      Cloud Datastore system tables with statistics are periodically updated.
      This method fetches the latest timestamp (in microseconds) of statistics
      update using the `__Stat_Total__` table.
      """
      if client.namespace is None:
        kind = '__Stat_Total__'
      else:
        kind = '__Stat_Ns_Total__'
      query = client.query(kind=kind, order=["-timestamp", ])
      entities = list(query.fetch(limit=1))
      if not entities:
        raise RuntimeError("Datastore total statistics unavailable.")
      return entities[0]['timestamp']

    @staticmethod
    def get_estimated_size_bytes(client, query):
      """Get the estimated size of the data returned by this instance's query.

      Cloud Datastore provides no way to get a good estimate of how large the
      result of a query is going to be. Hence we use the __Stat_Kind__ system
      table to get size of the entire kind as an approximate estimate,
      assuming exactly 1 kind is specified in the query.
      See https://cloud.google.com/datastore/docs/concepts/stats.
      """
      kind_name = query.kind
      latest_timestamp = (
          ReadFromDatastore._SplitQueryFn
          .query_latest_statistics_timestamp(client))
      logging.info('Latest stats timestamp for kind %s is %s',
                   kind_name, latest_timestamp)

      if client.namespace is None:
        kind = '__Stat_Kind__'
      else:
        kind = '__Stat_Ns_Kind__'
      query = client.query(kind=kind)
      query.add_filter('kind_name', '=', kind_name)
      query.add_filter('timestamp', '=', latest_timestamp)

      entities = list(query.fetch(limit=1))
      if not entities:
        raise RuntimeError(
            'Datastore statistics for kind %s unavailable' % kind_name)
      return entities[0]['entity_bytes']

    @staticmethod
    def get_estimated_num_splits(client, query):
      """Computes the number of splits to be performed on the query."""
      try:
        estimated_size_bytes = (
            ReadFromDatastore._SplitQueryFn
            .get_estimated_size_bytes(client, query))
        logging.info('Estimated size bytes for query: %s',
                     estimated_size_bytes)
        num_splits = int(min(ReadFromDatastore._NUM_QUERY_SPLITS_MAX, round(
            (float(estimated_size_bytes) /
             ReadFromDatastore._DEFAULT_BUNDLE_SIZE_BYTES))))
      except Exception as e:
        logging.warning('Failed to fetch estimated size bytes: %s', e)
        # Fallback in case estimated size is unavailable.
        num_splits = ReadFromDatastore._NUM_QUERY_SPLITS_MIN

      return max(num_splits, ReadFromDatastore._NUM_QUERY_SPLITS_MIN)

  @typehints.with_input_types(types.Query)
  @typehints.with_output_types(types.Entity)
  class _QueryFn(DoFn):
    """A DoFn that fetches entities from Cloud Datastore, for a given query."""

    def process(self, query, *unused_args, **unused_kwargs):
      _client = helper.get_client(query.project, query.namespace)
      client_query = query._to_client_query(_client)
      for client_entity in client_query.fetch(query.limit):
        yield types.Entity.from_client_entity(client_entity)
class _Mutate(PTransform):
  """A ``PTransform`` that writes mutations to Cloud Datastore.

  Only idempotent Datastore mutation operations (upsert and delete) are
  supported, as the commits are retried when failures occur.
  """

  def __init__(self, mutate_fn):
    """Initializes a Mutate transform.

    Args:
      mutate_fn: Instance of `DatastoreMutateFn` to use.
    """
    self._mutate_fn = mutate_fn

  def expand(self, pcoll):
    return pcoll | 'Write Batch to Datastore' >> ParDo(self._mutate_fn)

  class DatastoreMutateFn(DoFn):
    """A ``DoFn`` that write mutations to Datastore.

    Mutations are written in batches, where the maximum batch size is
    `util.WRITE_BATCH_SIZE`.

    Commits are non-transactional. If a commit fails because of a conflict
    over an entity group, the commit will be retried. This means that the
    mutation should be idempotent (`upsert` and `delete` mutations) to
    prevent duplicate data or errors.
    """

    def __init__(self, project):
      """
      Args:
        project: (str) cloud project id
      """
      self._project = project
      # Client is created lazily in start_bundle (clients are not picklable).
      self._client = None
      self._rpc_successes = Metrics.counter(
          _Mutate.DatastoreMutateFn, "datastoreRpcSuccesses")
      self._rpc_errors = Metrics.counter(
          _Mutate.DatastoreMutateFn, "datastoreRpcErrors")
      self._throttled_secs = Metrics.counter(
          _Mutate.DatastoreMutateFn, "cumulativeThrottlingSeconds")
      self._throttler = AdaptiveThrottler(window_ms=120000, bucket_ms=1000,
                                          overload_ratio=1.25)

    def _update_rpc_stats(self, successes=0, errors=0, throttled_secs=0):
      # Record RPC outcome metrics; used as the callback for write_mutations.
      self._rpc_successes.inc(successes)
      self._rpc_errors.inc(errors)
      self._throttled_secs.inc(throttled_secs)

    def start_bundle(self):
      self._client = helper.get_client(self._project, namespace=None)
      self._init_batch()

      # Dynamically sized batches: the target size adapts to observed
      # commit latency.
      self._batch_sizer = util.DynamicBatchSizer()
      self._target_batch_size = self._batch_sizer.get_batch_size(
          time.time() * 1000)

    def element_to_client_batch_item(self, element):
      # Subclasses convert an input element into a client-library object
      # that can be added to the current batch.
      raise NotImplementedError

    def add_to_batch(self, client_batch_item):
      # Subclasses add the converted item to self._batch.
      raise NotImplementedError

    @retry.with_exponential_backoff(num_retries=5,
                                    retry_filter=helper.retry_on_rpc_error)
    def write_mutations(self, throttler, rpc_stats_callback, throttle_delay=1):
      """Writes a batch of mutations to Cloud Datastore.

      If a commit fails, it will be retried up to 5 times. All mutations in
      the batch will be committed again, even if the commit was partially
      successful. If the retry limit is exceeded, the last exception from
      Cloud Datastore will be raised.

      Assumes that the Datastore client library does not perform any retries
      on commits. It has not been determined how such retries would interact
      with the retries and throttler used here.
      See ``google.cloud.datastore_v1.gapic.datastore_client_config`` for
      retry config.

      Args:
        rpc_stats_callback: a function to call with arguments `successes` and
          `failures` and `throttled_secs`; this is called to record successful
          and failed RPCs to Datastore and time spent waiting for throttling.
        throttler: (``apache_beam.io.gcp.datastore.v1.adaptive_throttler.
          AdaptiveThrottler``)
          Throttler instance used to select requests to be throttled.
        throttle_delay: (:class:`float`) time in seconds to sleep when
          throttled.

      Returns:
        (int) The latency of the successful RPC in milliseconds.
      """
      # Client-side throttling.
      while throttler.throttle_request(time.time() * 1000):
        logging.info("Delaying request for %ds due to previous failures",
                     throttle_delay)
        time.sleep(throttle_delay)
        rpc_stats_callback(throttled_secs=throttle_delay)

      if self._batch is None:
        # this will only happen when we re-try previously failed batch
        self._batch = self._client.batch()
        self._batch.begin()
        for element in self._batch_elements:
          self.add_to_batch(element)

      try:
        start_time = time.time()
        self._batch.commit()
        end_time = time.time()

        rpc_stats_callback(successes=1)
        throttler.successful_request(start_time * 1000)
        commit_time_ms = int((end_time-start_time) * 1000)
        return commit_time_ms
      except Exception:
        # Drop the failed batch object; it is rebuilt from
        # self._batch_elements on the next retry attempt.
        self._batch = None
        rpc_stats_callback(errors=1)
        raise

    def process(self, element):
      client_element = self.element_to_client_batch_item(element)
      self._batch_elements.append(client_element)
      self.add_to_batch(client_element)
      self._batch_bytes_size += self._batch.mutations[-1].ByteSize()
      # Flush once either the adaptive mutation-count target or the byte
      # size limit is reached.
      if (len(self._batch.mutations) >= self._target_batch_size or
          self._batch_bytes_size > util.WRITE_BATCH_MAX_BYTES_SIZE):
        self._flush_batch()

    def finish_bundle(self):
      # Flush any trailing partial batch.
      if self._batch_elements:
        self._flush_batch()

    def _init_batch(self):
      # Reset batch state: byte counter, client batch object, and the
      # element list kept for rebuilding the batch on retry.
      self._batch_bytes_size = 0
      self._batch = self._client.batch()
      self._batch.begin()
      self._batch_elements = []

    def _flush_batch(self):
      # Flush the current batch of mutations to Cloud Datastore.
      latency_ms = self.write_mutations(
          self._throttler,
          rpc_stats_callback=self._update_rpc_stats,
          throttle_delay=util.WRITE_BATCH_TARGET_LATENCY_MS // 1000)

      logging.debug("Successfully wrote %d mutations in %dms.",
                    len(self._batch.mutations), latency_ms)

      # Feed the observed latency back into the batch sizer so the next
      # batch size adapts.
      now = time.time() * 1000
      self._batch_sizer.report_latency(
          now, latency_ms, len(self._batch.mutations))
      self._target_batch_size = self._batch_sizer.get_batch_size(now)

      self._init_batch()
@typehints.with_input_types(types.Entity)
class WriteToDatastore(_Mutate):
  """
  Writes elements of type
  :class:`~apache_beam.io.gcp.datastore.v1new.types.Entity` to Cloud
  Datastore.

  Entity keys must be complete. The ``project`` field in each key must match
  the project ID passed to this transform. If ``project`` field in entity or
  property key is empty then it is filled with the project ID passed to this
  transform.
  """

  def __init__(self, project):
    """Initialize the `WriteToDatastore` transform.

    Args:
      project: (:class:`str`) The ID of the project to write entities to.
    """
    super(WriteToDatastore, self).__init__(
        WriteToDatastore._DatastoreWriteFn(project))

  class _DatastoreWriteFn(_Mutate.DatastoreMutateFn):
    def element_to_client_batch_item(self, element):
      # Reject anything that is not the expected Entity wrapper type.
      if not isinstance(element, types.Entity):
        raise ValueError('apache_beam.io.gcp.datastore.v1new.datastoreio.Entity'
                         ' expected, got: %s' % type(element))
      # Fill in a missing project from this transform's configuration.
      if not element.key.project:
        element.key.project = self._project
      entity = element.to_client_entity()
      if entity.key.is_partial:
        raise ValueError('Entities to be written to Cloud Datastore must '
                         'have complete keys:\n%s' % entity)
      return entity

    def add_to_batch(self, client_entity):
      self._batch.put(client_entity)

    def display_data(self):
      return {
          'mutation': 'Write (upsert)',
          'project': self._project,
      }
@typehints.with_input_types(types.Key)
class DeleteFromDatastore(_Mutate):
  """
  Deletes elements matching input
  :class:`~apache_beam.io.gcp.datastore.v1new.types.Key` elements from Cloud
  Datastore.

  Keys must be complete. The ``project`` field in each key must match the
  project ID passed to this transform. If ``project`` field in key is empty
  then it is filled with the project ID passed to this transform.
  """

  def __init__(self, project):
    """Initialize the `DeleteFromDatastore` transform.

    Args:
      project: (:class:`str`) The ID of the project from which the entities
        will be deleted.
    """
    super(DeleteFromDatastore, self).__init__(
        DeleteFromDatastore._DatastoreDeleteFn(project))

  class _DatastoreDeleteFn(_Mutate.DatastoreMutateFn):
    def element_to_client_batch_item(self, element):
      # Reject anything that is not the expected Key wrapper type.
      if not isinstance(element, types.Key):
        raise ValueError('apache_beam.io.gcp.datastore.v1new.datastoreio.Key'
                         ' expected, got: %s' % type(element))
      # Fill in a missing project from this transform's configuration.
      if not element.project:
        element.project = self._project
      key = element.to_client_key()
      if key.is_partial:
        raise ValueError('Keys to be deleted from Cloud Datastore must be '
                         'complete:\n%s' % key)
      return key

    def add_to_batch(self, client_key):
      self._batch.delete(client_key)

    def display_data(self):
      return {
          'mutation': 'Delete',
          'project': self._project,
      }
| 38.980469 | 80 | 0.702225 |
from __future__ import absolute_import
from __future__ import division
import logging
import time
from builtins import round
from apache_beam import typehints
from apache_beam.io.gcp.datastore.v1 import util
from apache_beam.io.gcp.datastore.v1.adaptive_throttler import AdaptiveThrottler
from apache_beam.io.gcp.datastore.v1new import helper
from apache_beam.io.gcp.datastore.v1new import query_splitter
from apache_beam.io.gcp.datastore.v1new import types
from apache_beam.metrics.metric import Metrics
from apache_beam.transforms import Create
from apache_beam.transforms import DoFn
from apache_beam.transforms import ParDo
from apache_beam.transforms import PTransform
from apache_beam.transforms import Reshuffle
from apache_beam.utils import retry
__all__ = ['ReadFromDatastore', 'WriteToDatastore', 'DeleteFromDatastore']
@typehints.with_output_types(types.Entity)
class ReadFromDatastore(PTransform):
  """A ``PTransform`` that reads a ``PCollection[Entity]`` from a Cloud
  Datastore query.

  The query is split into multiple sub-queries for parallel reading. When
  `num_splits` is 0 the number of splits is chosen dynamically from the
  estimated query data size.
  """

  # Upper bound on the number of splits for a query.
  _NUM_QUERY_SPLITS_MAX = 50000
  # Lower bound, used when Datastore statistics are unavailable.
  _NUM_QUERY_SPLITS_MIN = 12
  # Default target bundle size of 64MB.
  _DEFAULT_BUNDLE_SIZE_BYTES = 64 * 1024 * 1024

  def __init__(self, query, num_splits=0):
    """Initialize the transform.

    Args:
      query: Query used to fetch entities (carries project and namespace).
      num_splits: Optional number of splits; 0 means choose dynamically.

    Raises:
      ValueError: if `query` is empty, `query.project` is empty, or
        `num_splits` is negative.
    """
    super(ReadFromDatastore, self).__init__()

    # Check `query` itself first; reading query.project from a None query
    # would raise AttributeError instead of the intended ValueError.
    if not query:
      raise ValueError("query cannot be empty")
    if not query.project:
      raise ValueError("query.project cannot be empty")
    if num_splits < 0:
      raise ValueError("num_splits must be greater than or equal 0")

    self._project = query.project
    # `_namespace` would conflict with DisplayData._namespace.
    self._datastore_namespace = query.namespace
    self._query = query
    self._num_splits = num_splits

  def expand(self, pcoll):
    # Split the user query, reshuffle the splits across workers, then read
    # entities for each split.
    return (pcoll.pipeline
            | 'UserQuery' >> Create([self._query])
            | 'SplitQuery' >> ParDo(ReadFromDatastore._SplitQueryFn(
                self._num_splits))
            | Reshuffle()
            | 'Read' >> ParDo(ReadFromDatastore._QueryFn()))

  def display_data(self):
    disp_data = {'project': self._query.project,
                 'query': str(self._query),
                 'num_splits': self._num_splits}
    if self._datastore_namespace is not None:
      disp_data['namespace'] = self._datastore_namespace
    return disp_data

  @typehints.with_input_types(types.Query)
  @typehints.with_output_types(types.Query)
  class _SplitQueryFn(DoFn):
    """Splits a query into multiple sub-queries."""

    def __init__(self, num_splits):
      super(ReadFromDatastore._SplitQueryFn, self).__init__()
      self._num_splits = num_splits

    def process(self, query, *args, **kwargs):
      client = helper.get_client(query.project, query.namespace)
      try:
        # Short-circuit split estimation if the query cannot be split.
        query_splitter.validate_split(query)
        if self._num_splits == 0:
          estimated_num_splits = self.get_estimated_num_splits(client, query)
        else:
          estimated_num_splits = self._num_splits
        logging.info("Splitting the query into %d splits",
                     estimated_num_splits)
        query_splits = query_splitter.get_splits(
            client, query, estimated_num_splits)
      except query_splitter.QuerySplitterError:
        # Fall back to a single unsplit query.
        logging.info("Unable to parallelize the given query: %s", query,
                     exc_info=True)
        query_splits = [query]
      return query_splits

    def display_data(self):
      disp_data = {'num_splits': self._num_splits}
      return disp_data

    @staticmethod
    def query_latest_statistics_timestamp(client):
      """Return the timestamp of the latest Datastore statistics update."""
      if client.namespace is None:
        kind = '__Stat_Total__'
      else:
        kind = '__Stat_Ns_Total__'
      query = client.query(kind=kind, order=["-timestamp", ])
      entities = list(query.fetch(limit=1))
      if not entities:
        raise RuntimeError("Datastore total statistics unavailable.")
      return entities[0]['timestamp']

    @staticmethod
    def get_estimated_size_bytes(client, query):
      """Estimate the query result size from the __Stat_Kind__ table."""
      kind_name = query.kind
      latest_timestamp = (
          ReadFromDatastore._SplitQueryFn
          .query_latest_statistics_timestamp(client))
      logging.info('Latest stats timestamp for kind %s is %s',
                   kind_name, latest_timestamp)
      if client.namespace is None:
        kind = '__Stat_Kind__'
      else:
        kind = '__Stat_Ns_Kind__'
      query = client.query(kind=kind)
      query.add_filter('kind_name', '=', kind_name)
      query.add_filter('timestamp', '=', latest_timestamp)
      entities = list(query.fetch(limit=1))
      if not entities:
        raise RuntimeError(
            'Datastore statistics for kind %s unavailable' % kind_name)
      return entities[0]['entity_bytes']

    @staticmethod
    def get_estimated_num_splits(client, query):
      """Compute the number of splits from the estimated result size."""
      try:
        estimated_size_bytes = (
            ReadFromDatastore._SplitQueryFn
            .get_estimated_size_bytes(client, query))
        logging.info('Estimated size bytes for query: %s',
                     estimated_size_bytes)
        num_splits = int(min(ReadFromDatastore._NUM_QUERY_SPLITS_MAX, round(
            (float(estimated_size_bytes) /
             ReadFromDatastore._DEFAULT_BUNDLE_SIZE_BYTES))))
      except Exception as e:
        logging.warning('Failed to fetch estimated size bytes: %s', e)
        # Fallback when the estimated size is unavailable.
        num_splits = ReadFromDatastore._NUM_QUERY_SPLITS_MIN
      return max(num_splits, ReadFromDatastore._NUM_QUERY_SPLITS_MIN)

  @typehints.with_input_types(types.Query)
  @typehints.with_output_types(types.Entity)
  class _QueryFn(DoFn):
    """Fetches entities from Cloud Datastore for a given query."""

    def process(self, query, *unused_args, **unused_kwargs):
      _client = helper.get_client(query.project, query.namespace)
      client_query = query._to_client_query(_client)
      for client_entity in client_query.fetch(query.limit):
        yield types.Entity.from_client_entity(client_entity)
class _Mutate(PTransform):
  """A ``PTransform`` that writes mutations to Cloud Datastore.

  Commits are retried, so only idempotent mutations (upsert, delete) are
  supported.
  """

  def __init__(self, mutate_fn):
    # mutate_fn: the DatastoreMutateFn instance that performs the writes.
    self._mutate_fn = mutate_fn

  def expand(self, pcoll):
    return pcoll | 'Write Batch to Datastore' >> ParDo(self._mutate_fn)

  class DatastoreMutateFn(DoFn):
    """A ``DoFn`` that writes batched, retried mutations to Datastore."""

    def __init__(self, project):
      # project: (str) cloud project id.
      self._project = project
      # Client is created lazily in start_bundle.
      self._client = None
      self._rpc_successes = Metrics.counter(
          _Mutate.DatastoreMutateFn, "datastoreRpcSuccesses")
      self._rpc_errors = Metrics.counter(
          _Mutate.DatastoreMutateFn, "datastoreRpcErrors")
      self._throttled_secs = Metrics.counter(
          _Mutate.DatastoreMutateFn, "cumulativeThrottlingSeconds")
      self._throttler = AdaptiveThrottler(window_ms=120000, bucket_ms=1000,
                                          overload_ratio=1.25)

    def _update_rpc_stats(self, successes=0, errors=0, throttled_secs=0):
      # Metrics callback used by write_mutations.
      self._rpc_successes.inc(successes)
      self._rpc_errors.inc(errors)
      self._throttled_secs.inc(throttled_secs)

    def start_bundle(self):
      self._client = helper.get_client(self._project, namespace=None)
      self._init_batch()
      # Batch size adapts to the observed commit latency.
      self._batch_sizer = util.DynamicBatchSizer()
      self._target_batch_size = self._batch_sizer.get_batch_size(
          time.time() * 1000)

    def element_to_client_batch_item(self, element):
      # Subclasses convert an input element into a client-library object.
      raise NotImplementedError

    def add_to_batch(self, client_batch_item):
      # Subclasses add the converted item to self._batch.
      raise NotImplementedError

    @retry.with_exponential_backoff(num_retries=5,
                                    retry_filter=helper.retry_on_rpc_error)
    def write_mutations(self, throttler, rpc_stats_callback, throttle_delay=1):
      """Commit the current batch; retried up to 5 times on RPC errors.

      Returns the commit latency of the successful RPC in milliseconds.
      """
      # Client-side throttling before issuing the RPC.
      while throttler.throttle_request(time.time() * 1000):
        logging.info("Delaying request for %ds due to previous failures",
                     throttle_delay)
        time.sleep(throttle_delay)
        rpc_stats_callback(throttled_secs=throttle_delay)

      if self._batch is None:
        # Only happens when retrying a previously failed batch: rebuild it
        # from the retained elements.
        self._batch = self._client.batch()
        self._batch.begin()
        for element in self._batch_elements:
          self.add_to_batch(element)

      try:
        start_time = time.time()
        self._batch.commit()
        end_time = time.time()
        rpc_stats_callback(successes=1)
        throttler.successful_request(start_time * 1000)
        commit_time_ms = int((end_time-start_time) * 1000)
        return commit_time_ms
      except Exception:
        # Drop the failed batch so the retry path rebuilds it above.
        self._batch = None
        rpc_stats_callback(errors=1)
        raise

    def process(self, element):
      client_element = self.element_to_client_batch_item(element)
      self._batch_elements.append(client_element)
      self.add_to_batch(client_element)
      self._batch_bytes_size += self._batch.mutations[-1].ByteSize()
      # Flush when either the adaptive count target or byte limit is hit.
      if (len(self._batch.mutations) >= self._target_batch_size or
          self._batch_bytes_size > util.WRITE_BATCH_MAX_BYTES_SIZE):
        self._flush_batch()

    def finish_bundle(self):
      # Flush any trailing partial batch.
      if self._batch_elements:
        self._flush_batch()

    def _init_batch(self):
      # Reset batch state, including the element list used for retries.
      self._batch_bytes_size = 0
      self._batch = self._client.batch()
      self._batch.begin()
      self._batch_elements = []

    def _flush_batch(self):
      # Commit the current batch and feed the latency back into the sizer.
      latency_ms = self.write_mutations(
          self._throttler,
          rpc_stats_callback=self._update_rpc_stats,
          throttle_delay=util.WRITE_BATCH_TARGET_LATENCY_MS // 1000)
      logging.debug("Successfully wrote %d mutations in %dms.",
                    len(self._batch.mutations), latency_ms)
      now = time.time() * 1000
      self._batch_sizer.report_latency(
          now, latency_ms, len(self._batch.mutations))
      self._target_batch_size = self._batch_sizer.get_batch_size(now)
      self._init_batch()
@typehints.with_input_types(types.Entity)
class WriteToDatastore(_Mutate):
  """Writes ``types.Entity`` elements to Cloud Datastore.

  Entity keys must be complete; a missing key ``project`` is filled in from
  the project passed to this transform.
  """

  def __init__(self, project):
    # project: (str) the ID of the project to write entities to.
    mutate_fn = WriteToDatastore._DatastoreWriteFn(project)
    super(WriteToDatastore, self).__init__(mutate_fn)

  class _DatastoreWriteFn(_Mutate.DatastoreMutateFn):
    def element_to_client_batch_item(self, element):
      if not isinstance(element, types.Entity):
        raise ValueError('apache_beam.io.gcp.datastore.v1new.datastoreio.Entity'
                         ' expected, got: %s' % type(element))
      # Fill a missing project from this transform's configuration.
      if not element.key.project:
        element.key.project = self._project
      client_entity = element.to_client_entity()
      if client_entity.key.is_partial:
        raise ValueError('Entities to be written to Cloud Datastore must '
                         'have complete keys:\n%s' % client_entity)
      return client_entity

    def add_to_batch(self, client_entity):
      self._batch.put(client_entity)

    def display_data(self):
      return {
          'mutation': 'Write (upsert)',
          'project': self._project,
      }
@typehints.with_input_types(types.Key)
class DeleteFromDatastore(_Mutate):
  """Deletes entities matching input ``types.Key`` elements from Cloud
  Datastore.

  Keys must be complete; a missing key ``project`` is filled in from the
  project passed to this transform.
  """

  def __init__(self, project):
    # project: (str) the ID of the project to delete entities from.
    mutate_fn = DeleteFromDatastore._DatastoreDeleteFn(project)
    super(DeleteFromDatastore, self).__init__(mutate_fn)

  class _DatastoreDeleteFn(_Mutate.DatastoreMutateFn):
    def element_to_client_batch_item(self, element):
      if not isinstance(element, types.Key):
        raise ValueError('apache_beam.io.gcp.datastore.v1new.datastoreio.Key'
                         ' expected, got: %s' % type(element))
      # Fill a missing project from this transform's configuration.
      if not element.project:
        element.project = self._project
      client_key = element.to_client_key()
      if client_key.is_partial:
        raise ValueError('Keys to be deleted from Cloud Datastore must be '
                         'complete:\n%s' % client_key)
      return client_key

    def add_to_batch(self, client_key):
      self._batch.delete(client_key)

    def display_data(self):
      return {
          'mutation': 'Delete',
          'project': self._project,
      }
| true | true |
f71a808666b13ce290442e22bb59d1788d36b370 | 1,950 | py | Python | tools/find_run_binary.py | pospx/external_skia | 7a135275c9fc2a4b3cbdcf9a96e7102724752234 | [
"BSD-3-Clause"
] | 6,304 | 2015-01-05T23:45:12.000Z | 2022-03-31T09:48:13.000Z | third_party/skia/tools/find_run_binary.py | w4454962/miniblink49 | b294b6eacb3333659bf7b94d670d96edeeba14c0 | [
"Apache-2.0"
] | 459 | 2016-09-29T00:51:38.000Z | 2022-03-07T14:37:46.000Z | third_party/skia/tools/find_run_binary.py | w4454962/miniblink49 | b294b6eacb3333659bf7b94d670d96edeeba14c0 | [
"Apache-2.0"
] | 1,231 | 2015-01-05T03:17:39.000Z | 2022-03-31T22:54:58.000Z | #!/usr/bin/python
# Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Module that finds and runs a binary by looking in the likely locations."""
import os
import subprocess
import sys
def run_command(args):
  """Runs a program from the command line and returns stdout.

  Args:
    args: Command line to run, as a list of string parameters. args[0] is the
        binary to run.

  Returns:
    stdout from the program, as a single string.

  Raises:
    Exception: the program exited with a nonzero return code.
  """
  proc = subprocess.Popen(args,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
  (stdout, stderr) = proc.communicate()
  # Use `!=` instead of `is not`: identity comparison against an int literal
  # only works by accident of CPython's small-int caching.
  if proc.returncode != 0:
    raise Exception('command "%s" failed: %s' % (args, stderr))
  return stdout
def find_path_to_program(program):
  """Returns path to an existing program binary.

  Args:
    program: Basename of the program to find (e.g., 'render_pictures').

  Returns:
    Absolute path to the program binary, as a string.

  Raises:
    Exception: unable to find the program binary.
  """
  trunk_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                            os.pardir))
  # Search Release before Debug, plain names before Windows .exe names,
  # matching the historical lookup order.
  possible_paths = []
  for suffix in ('', '.exe'):
    for build_type in ('Release', 'Debug'):
      possible_paths.append(
          os.path.join(trunk_path, 'out', build_type, program + suffix))
  for candidate in possible_paths:
    if os.path.isfile(candidate):
      return candidate
  raise Exception('cannot find %s in paths %s; maybe you need to '
                  'build %s?' % (program, possible_paths, program))
| 31.451613 | 77 | 0.615385 |
import os
import subprocess
import sys
def run_command(args):
  """Run `args` (argv list) and return its stdout; raise on nonzero exit."""
  proc = subprocess.Popen(args,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
  (stdout, stderr) = proc.communicate()
  # `!=` rather than `is not`: identity comparison with an int literal only
  # works by accident of CPython's small-int caching.
  if proc.returncode != 0:
    raise Exception('command "%s" failed: %s' % (args, stderr))
  return stdout
def find_path_to_program(program):
  """Returns path to an existing program binary.
  Looks in the likely build-output locations under <trunk>/out.
  Args:
    program: Basename of the program to find (e.g., 'render_pictures').
  Returns:
    Absolute path to the program binary, as a string.
  Raises:
    Exception: unable to find the program binary.
  """
  trunk_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                            os.pardir))
  # Checked in order: Release, Debug, then the Windows '.exe' variants.
  possible_paths = [os.path.join(trunk_path, 'out', 'Release', program),
                    os.path.join(trunk_path, 'out', 'Debug', program),
                    os.path.join(trunk_path, 'out', 'Release',
                                 program + '.exe'),
                    os.path.join(trunk_path, 'out', 'Debug',
                                 program + '.exe')]
  for try_path in possible_paths:
    if os.path.isfile(try_path):
      return try_path
  raise Exception('cannot find %s in paths %s; maybe you need to '
                  'build %s?' % (program, possible_paths, program))
| true | true |
f71a80a3465b00ac689f97b6980266f31c3f42cb | 1,463 | py | Python | medium/binary-tree-level-order-traversal.py | therealabdi2/LeetcodeQuestions | 4c45ee836482a2c7b59906f7a7a99b5b3fa17317 | [
"MIT"
] | null | null | null | medium/binary-tree-level-order-traversal.py | therealabdi2/LeetcodeQuestions | 4c45ee836482a2c7b59906f7a7a99b5b3fa17317 | [
"MIT"
] | null | null | null | medium/binary-tree-level-order-traversal.py | therealabdi2/LeetcodeQuestions | 4c45ee836482a2c7b59906f7a7a99b5b3fa17317 | [
"MIT"
] | null | null | null | '''
Given the root of a binary tree, return the level order traversal of its nodes' values.
(i.e., from left to right, level by level).
Example 1:
Input: root = [3,9,20,null,null,15,7]
Output: [[3],[9,20],[15,7]]
Example 2:
Input: root = [1]
Output: [[1]]
Example 3:
Input: root = []
Output: []
'''
# Definition for a binary tree node.
import collections
from collections import deque
from typing import Optional, List
class TreeNode:
    """Binary tree node with a value and optional left/right children."""
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right
class Solution:
    """LeetCode 102: binary tree level order traversal."""
    def levelOrder(self, root: Optional[TreeNode]) -> List[List[int]]:
        """Return the node values grouped by depth, left to right."""
        levels: List[List[int]] = []
        # Track one whole level at a time instead of a shared queue.
        frontier = [root] if root is not None else []
        while frontier:
            levels.append([node.val for node in frontier])
            next_frontier = []
            for node in frontier:
                if node.left is not None:
                    next_frontier.append(node.left)
                if node.right is not None:
                    next_frontier.append(node.right)
            frontier = next_frontier
        return levels
| 22.859375 | 87 | 0.548189 |
import collections
from collections import deque
from typing import Optional, List
class TreeNode:
    """Binary tree node with a value and optional left/right children."""
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right
class Solution:
    def levelOrder(self, root: Optional[TreeNode]) -> List[List[int]]:
        """Return the node values grouped by depth, left to right (BFS).
        BUG FIX: this copy's indentation was mangled -- 'if level:' had slid
        into the inner for loop and 'return res' into the while loop, so only
        the first level was ever returned; restored the standard level-order
        structure.
        """
        res = []
        if root is None:
            return res
        q = collections.deque()
        q.append(root)
        while q:
            # Snapshot the queue length so each pass handles one full level.
            q_len = len(q)
            level = []
            for i in range(q_len):
                node = q.popleft()
                if node:
                    level.append(node.val)
                    # Children may be None; they are filtered on dequeue.
                    q.append(node.left)
                    q.append(node.right)
            if level:
                res.append(level)
        return res
| true | true |
f71a8102f3baad74119a666239cf334a1a7047ff | 23,415 | py | Python | sfa_dash/conftest.py | lboeman/solarforecastarbiter_dashboard | cd4dcffa922625b548d93f83be2b7c10457e1bbe | [
"MIT"
] | 4 | 2020-06-02T01:40:40.000Z | 2021-06-01T20:15:00.000Z | sfa_dash/conftest.py | lboeman/solarforecastarbiter_dashboard | cd4dcffa922625b548d93f83be2b7c10457e1bbe | [
"MIT"
] | 181 | 2020-05-14T15:42:55.000Z | 2021-12-02T20:27:44.000Z | sfa_dash/conftest.py | lboeman/solarforecastarbiter_dashboard | cd4dcffa922625b548d93f83be2b7c10457e1bbe | [
"MIT"
] | 2 | 2018-11-02T19:32:16.000Z | 2018-11-06T17:06:28.000Z | import os
import requests
import pymysql
import pytest
from flask import url_for
from solarforecastarbiter.datamodel import QualityFlagFilter as QFF
from sfa_dash import create_app
# Base URL of the dashboard under test.
BASE_URL = 'http://localhost'
# Default resample threshold percentage pulled from the datamodel; reused by
# the report fixture's quality-flag filters below.
resample_threshold = QFF.resample_threshold_percentage
@pytest.fixture(scope='session')
def auth_token():
    """Fetch a real Auth0 password-grant token for the testing account.

    Skips the requesting test when the token endpoint does not return 200.
    """
    token_req = requests.post(
        'https://solarforecastarbiter.auth0.com/oauth/token',
        headers={'content-type': 'application/json'},
        data=('{"grant_type": "password", '
              '"username": "testing@solarforecastarbiter.org",'
              '"password": "Thepassword123!", '
              '"audience": "https://api.solarforecastarbiter.org", '
              '"client_id": "c16EJo48lbTCQEhqSztGGlmxxxmZ4zX7"}'))
    if token_req.status_code != 200:
        pytest.skip('Cannot retrieve valid Auth0 token')
    else:
        token = token_req.json()
        return token
@pytest.fixture()
def expired_token():
    """Return a stored token dict whose access token expired in 2019.

    BUG FIX: the ``id_token`` literal had been split across two physical
    lines mid-string (leaving an unterminated string literal); the two
    fragments are rejoined into one value here.
    """
    stored = {
        'access_token': 'eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsImtpZCI6Ik5UZENSRGRFTlVNMk9FTTJNVGhCTWtRelFUSXpNRFF6TUVRd1JUZ3dNekV3T1VWR1FrRXpSUSJ9.eyJpc3MiOiJodHRwczovL3NvbGFyZm9yZWNhc3RhcmJpdGVyLmF1dGgwLmNvbS8iLCJzdWIiOiJhdXRoMHw1YmUzNDNkZjcwMjU0MDYyMzc4MjBiODUiLCJhdWQiOlsiaHR0cHM6Ly9hcGkuc29sYXJmb3JlY2FzdGFyYml0ZXIub3JnIiwiaHR0cHM6Ly9zb2xhcmZvcmVjYXN0YXJiaXRlci5hdXRoMC5jb20vdXNlcmluZm8iXSwiaWF0IjoxNTU1NDU0NzcwLCJleHAiOjE1NTU0NjU1NzAsImF6cCI6IlByRTM5QWR0R01QSTRnSzJoUnZXWjJhRFJhcmZwZzdBIiwic2NvcGUiOiJvcGVuaWQgcHJvZmlsZSBlbWFpbCBvZmZsaW5lX2FjY2VzcyJ9.lT1XPtLkYCVGUZjcAgWFCU6AJbKWtE077zw_KO4fhIaF0wo6TTpLTkZBmF9Sxmrwb5NfeR5XuJmkX3SPCjpzcZG9wdXIpPWRGhsOAAUdoSkoHKFzALoc46VPjA3A5SZxlGqNeh6RoKFlWRAp5EJN9Z-JcwT06JyJGrbx7ip4tCbAADqWuDY2tzkjKD3EHjHTO9OSJiCRxlNA22OCfMTF6B8-8RLUabZ414bypezw83S9g25mLLWtlGhQvzWGA8F7yhhVXbEsAPPC1QoyjevXzn8PBqL5dSDp6u1gL6T29PsbhZ0diZ1xt5jkm4iX-cryc7tqwq-5D5ZkC3wbhNpLuQ',  # NOQA
        'refresh_token': 'QlLHR9wyFS5cokItX-ym7jWlCCuLO1fC3AtZLUeDVX-mI',
        'id_token': 'eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsImtpZCI6Ik5UZENSRGRFTlVNMk9FTTJNVGhCTWtRelFUSXpNRFF6TUVRd1JUZ3dNekV3T1VWR1FrRXpSUSJ9.eyJuaWNrbmFtZSI6InRlc3RpbmciLCJuYW1lIjoidGVzdGluZ0Bzb2xhcmZvcmVjYXN0YXJiaXRlci5vcmciLCJwaWN0dXJlIjoiaHR0cHM6Ly9zLmdyYXZhdGFyLmNvbS9hdmF0YXIvY2MxMTNkZjY5NmY4ZTlmMjA2Nzc5OTQzMzUxNzRhYjY_cz00ODAmcj1wZyZkPWh0dHBzJTNBJTJGJTJGY2RuLmF1dGgwLmNvbSUyRmF2YXRhcnMlMkZ0ZS5wbmciLCJ1cGRhdGVkX2F0IjoiMjAxOS0wNC0xNlQyMjo0NjoxMC42NTdaIiwiZW1haWwiOiJ0ZXN0aW5nQHNvbGFyZm9yZWNhc3RhcmJpdGVyLm9yZyIsImVtYWlsX3ZlcmlmaWVkIjpmYWxzZSwiaXNzIjoiaHR0cHM6Ly9zb2xhcmZvcmVjYXN0YXJiaXRlci5hdXRoMC5jb20vIiwic3ViIjoiYXV0aDB8NWJlMzQzZGY3MDI1NDA2MjM3ODIwYjg1IiwiYXVkIjoiUHJFMzlBZHRHTVBJNGdLMmhSdldaMmFEUmFyZnBnN0EiLCJpYXQiOjE1NTU0NTQ3NzAsImV4cCI6MTU1NTQ5MDc3MH0.axw45-ms_LVIS_WsUdcCryZeOwpZVAn95zbUm9WO23bpIja7QaR1h6_Emb9nUNJIk44vp-J-zwKIZd4j7bg5_vaVcJER4_rL8vlc6f5lJdZAU20yeisTT4q1YcwlvQhg7avWMUkZaiO3SgK0eJ3371Gm2gJgK2b21bnpzmUHQ0vS906GLGngaVzb3VEE_g4CgR4u6qmBQRwq3Z6DyRBq572Qhn3TXk_0Xvj43Q9TyYjV5ioou5Xe-3T5HHsCoUWqDp0BZ3bP9FlYFws9DffnFzf1yVtpwfk9shmAe8V6Fn9N0OjuS4LJP0Tc-I7adspJlYfB9BeTEci6MKn58OQCrw',  # NOQA
        'scope': ['openid', 'profile', 'email', 'offline_access'],
        'expires_in': 0,
        'token_type': 'Bearer',
        'expires_at': 1555465570.9597309,
    }
    return stored
@pytest.fixture()
def mocked_storage(mocker, auth_token, expired_token):
    """Return a factory that builds a fake session-storage class.

    The produced class always returns the real ``auth_token`` when built
    with ``authenticated=True`` and the ``expired_token`` otherwise;
    ``set`` and ``delete`` are no-ops.
    """
    def make_storage(authenticated=False):
        if authenticated:
            token = auth_token
        else:
            token = expired_token
        class fake_storage:
            def __init__(*args, **kwargs):
                pass
            def get(self, *args):
                # Always hand back the captured token regardless of key.
                return token
            def set(self, *args):
                pass
            def delete(self, *args):
                pass
        return fake_storage
    return make_storage
@pytest.fixture()
def mocked_unauth_storage(mocker, mocked_storage):
    """Patch session storage so the dashboard sees an expired token."""
    mocker.patch('sfa_dash.session_storage',
                 new=mocked_storage())
@pytest.fixture()
def mocked_auth_storage(mocker, mocked_storage):
    """Patch session storage so the dashboard sees a valid token."""
    mocker.patch('sfa_dash.session_storage',
                 new=mocked_storage(True))
@pytest.fixture()
def app_unauth(mocked_unauth_storage):
    """Application configured with TestConfig and an expired session token."""
    # Allow OAuth over plain HTTP for local testing.
    os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'
    return create_app('sfa_dash.config.TestConfig')
@pytest.fixture()
def app(mocked_auth_storage):
    """Application configured with TestConfig and a valid session token."""
    os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'
    return create_app('sfa_dash.config.TestConfig')
@pytest.fixture()
def client(app):
    """Test client for the authenticated application."""
    yield app.test_client()
# Dashboard routes that take no URL arguments.
no_arg_routes_list = [
    '/sites/',
    '/observations/',
    '/forecasts/single/',
    '/forecasts/cdf/',
    '/reports/',
    '/aggregates/',
    '/sites/create',
    '/reports/deterministic/create',
    '/reports/event/create',
    '/aggregates/create',
]
@pytest.fixture(params=no_arg_routes_list)
def no_arg_route(request):
    """Parametrized fixture yielding each argument-less route."""
    return request.param
# Admin routes that take no URL arguments.
admin_routes_list = [
    '/admin/permissions/create/cdf_forecast_group',
    '/admin/permissions/create/observation',
    '/admin/permissions/create/forecast',
    '/admin/permissions/create/report',
    '/admin/permissions/create/site',
    '/admin/roles/create',
    '/admin/permissions/',
    '/admin/roles/',
    '/admin/users/',
]
@pytest.fixture(params=admin_routes_list)
def admin_route(request):
    """Parametrized fixture yielding each argument-less admin route."""
    return request.param
# Admin routes templated on more than one object id. The fixtures below all
# follow the same pattern: they yield a formatter function that fills the
# route template with the caller-supplied id(s).
admin_multiarg_route_list = [
    '/admin/permissions/{permission_id}/remove/{object_id}',
    '/admin/roles/{role_id}/remove/{permission_id}',
    '/admin/users/{user_id}/remove/{role_id}',
]
@pytest.fixture(params=admin_multiarg_route_list)
def admin_multiarg_route(request):
    """Yield a formatter taking every id a multi-arg admin route may need."""
    def fn(object_id, permission_id, user_id, role_id):
        return request.param.format(
            object_id=object_id,
            permission_id=permission_id,
            user_id=user_id,
            role_id=role_id)
    return fn
user_id_route_list = [
    '/admin/users/{user_id}',
    '/admin/users/{user_id}/add/',
]
@pytest.fixture(params=user_id_route_list)
def user_id_route(request):
    """Yield a formatter for routes templated on a user id."""
    def fn(user_id):
        return request.param.format(user_id=user_id)
    return fn
role_id_route_list = [
    '/admin/roles/{role_id}',
    '/admin/roles/{role_id}/delete',
    '/admin/roles/{role_id}/add/',
    '/admin/roles/{role_id}/grant/',
]
@pytest.fixture(params=role_id_route_list)
def role_id_route(request):
    """Yield a formatter for routes templated on a role id."""
    def fn(role_id):
        return request.param.format(role_id=role_id)
    return fn
permission_id_route_list = [
    '/admin/permissions/{permission_id}',
    '/admin/permissions/{permission_id}/delete',
    '/admin/permissions/{permission_id}/add',
]
@pytest.fixture(params=permission_id_route_list)
def permission_id_route(request):
    """Yield a formatter for routes templated on a permission id."""
    def fn(permission_id):
        return request.param.format(permission_id=permission_id)
    return fn
report_id_route_list = [
    '/reports/{report_id}',
    '/reports/{report_id}/delete',
]
@pytest.fixture(params=report_id_route_list)
def report_id_route(request):
    """Yield a formatter for routes templated on a report id."""
    def fn(report_id):
        return request.param.format(report_id=report_id)
    return fn
# Routes templated on a single data-object id; each fixture yields a
# formatter closing over one parametrized route template.
site_id_route_list = [
    '/sites/{site_id}/',
    '/sites/{site_id}/delete',
    '/sites/{site_id}/forecasts/single/create',
    '/sites/{site_id}/forecasts/cdf/create',
    '/sites/{site_id}/observations/create',
    '/sites/{site_id}/observations/create',
]
@pytest.fixture(params=site_id_route_list)
def site_id_route(request):
    """Yield a formatter for routes templated on a site id."""
    def fn(site_id):
        return request.param.format(site_id=site_id)
    return fn
observation_id_route_list = [
    '/observations/{observation_id}',
    '/observations/{observation_id}/delete',
]
@pytest.fixture(params=observation_id_route_list)
def observation_id_route(request):
    """Yield a formatter for routes templated on an observation id."""
    def fn(observation_id):
        return request.param.format(observation_id=observation_id)
    return fn
forecast_id_route_list = [
    '/forecasts/single/{forecast_id}',
    '/forecasts/single/{forecast_id}/delete',
]
@pytest.fixture(params=forecast_id_route_list)
def forecast_id_route(request):
    """Yield a formatter for routes templated on a forecast id."""
    def fn(forecast_id):
        return request.param.format(forecast_id=forecast_id)
    return fn
cdf_forecast_id_route_list = [
    '/forecasts/cdf/{forecast_id}',
    '/forecasts/cdf/{forecast_id}/delete',
]
@pytest.fixture(params=cdf_forecast_id_route_list)
def cdf_forecast_id_route(request):
    """Yield a formatter for cdf forecast group routes."""
    def fn(forecast_id):
        return request.param.format(forecast_id=forecast_id)
    return fn
cdf_forecast_single_id_routes_list = [
    '/forecasts/cdf/single/{forecast_id}',
]
@pytest.fixture(params=cdf_forecast_single_id_routes_list)
def cdf_forecast_single_id_route(request):
    """Yield a formatter for single cdf forecast routes."""
    def fn(forecast_id):
        return request.param.format(forecast_id=forecast_id)
    return fn
aggregate_id_route_list = [
    '/aggregates/{aggregate_id}',
    '/aggregates/{aggregate_id}/delete',
    '/aggregates/{aggregate_id}/add',
    '/aggregates/{aggregate_id}/forecasts/single/create',
    '/aggregates/{aggregate_id}/forecasts/cdf/create',
]
@pytest.fixture(params=aggregate_id_route_list)
def aggregate_id_route(request):
    """Yield a formatter for routes templated on an aggregate id."""
    def fn(aggregate_id):
        return request.param.format(aggregate_id=aggregate_id)
    return fn
clone_route_list = [
    '/sites/{site_id}/clone',
    '/observations/{observation_id}/clone',
    '/forecasts/single/{forecast_id}/clone',
]
@pytest.fixture(params=clone_route_list)
def clone_route(request):
    """Yield a formatter for clone routes; takes a dict of all ids."""
    def fn(uuids):
        # NOTE: expects a dict of all possible ids to use for formatting
        return request.param.format(**uuids)
    return fn
# Hard-coded object ids matching rows seeded in the arbiter_data test
# database; 'missing_id' intentionally matches nothing.
@pytest.fixture()
def missing_id():
    return '7d2c3208-5243-11e9-8647-d663bd873d93'
@pytest.fixture()
def observation_id():
    return '123e4567-e89b-12d3-a456-426655440000'
@pytest.fixture()
def cdf_forecast_group_id():
    return 'ef51e87c-50b9-11e9-8647-d663bd873d93'
@pytest.fixture()
def cdf_forecast_id():
    return '633f9396-50bb-11e9-8647-d663bd873d93'
@pytest.fixture()
def forecast_id():
    return '11c20780-76ae-4b11-bef1-7a75bdc784e3'
@pytest.fixture()
def site_id():
    return '123e4567-e89b-12d3-a456-426655440001'
@pytest.fixture()
def site_id_plant():
    return '123e4567-e89b-12d3-a456-426655440002'
@pytest.fixture()
def test_orgid():
    return 'b76ab62e-4fe1-11e9-9e44-64006a511e6f'
@pytest.fixture()
def user_id():
    return '0c90950a-7cca-11e9-a81f-54bf64606445'
@pytest.fixture()
def aggregate_id():
    return '458ffc27-df0b-11e9-b622-62adb5fd6af0'
@pytest.fixture()
def report_id():
    return '9f290dd4-42b8-11ea-abdf-f4939feddd82'
@pytest.fixture
def all_metadata_ids(
        observation_id, forecast_id, cdf_forecast_group_id, cdf_forecast_id,
        site_id, site_id_plant, aggregate_id, report_id):
    """Collect the data-object id fixtures into one dict keyed by name."""
    return {
        'observation_id': observation_id,
        'forecast_id': forecast_id,
        'cdf_forecast_group_id': cdf_forecast_group_id,
        'cdf_forecast_id': cdf_forecast_id,
        'site_id': site_id,
        'site_id_plant': site_id_plant,
        'aggregate_id': aggregate_id,
        'report_id': report_id,
    }
@pytest.fixture()
def test_url(app):
    """Return a helper mapping a view name to its external URL."""
    def fn(view):
        with app.test_request_context():
            return url_for(view, _external=True)
    return fn
@pytest.fixture(scope='session')
def connection():
    """Session-scoped connection to the arbiter_data test database.

    Host/port default to localhost:3306 but honor MYSQL_HOST/MYSQL_PORT.
    """
    connection = pymysql.connect(
        host=os.getenv('MYSQL_HOST', '127.0.0.1'),
        port=int(os.getenv('MYSQL_PORT', '3306')),
        user='root',
        password='testpassword',
        database='arbiter_data',
        binary_prefix=True)
    # with no connection.commit(), no data should stay in db
    return connection
@pytest.fixture()
def cursor(connection):
    """Fresh tuple cursor; rolls back any leftover transaction first."""
    connection.rollback()
    return connection.cursor()
@pytest.fixture()
def dictcursor(connection):
    """Fresh dict cursor; rolls back any leftover transaction first."""
    connection.rollback()
    return connection.cursor(cursor=pymysql.cursors.DictCursor)
@pytest.fixture()
def role_id(cursor):
    """UUID of the 'Test user role' row in the test database."""
    cursor.execute(
        'SELECT BIN_TO_UUID(id, 1) from arbiter_data.roles '
        'WHERE name = "Test user role"')
    role_id = cursor.fetchone()[0]
    return role_id
@pytest.fixture()
def permission_id(cursor, role_id):
    """UUID of one permission mapped to the test role."""
    cursor.execute(
        'SELECT BIN_TO_UUID(id, 1) FROM arbiter_data.permissions '
        'WHERE id IN (SELECT permission_id FROM '
        'arbiter_data.role_permission_mapping WHERE role_id '
        '= UUID_TO_BIN(%s, 1) ) LIMIT 1', role_id)
    permission_id = cursor.fetchone()[0]
    return permission_id
@pytest.fixture()
def permission_object_type(cursor, permission_id):
    """object_type string of the selected permission."""
    cursor.execute(
        'SELECT object_type FROM arbiter_data.permissions '
        'WHERE id = UUID_TO_BIN(%s, 1)', permission_id)
    return cursor.fetchone()[0]
@pytest.fixture()
def valid_permission_object_id(
        observation_id, forecast_id, cdf_forecast_group_id, aggregate_id,
        site_id, role_id, user_id, permission_id, report_id,
        permission_object_type):
    """Return an object id matching the permission's object_type.

    Returns None for an unknown object_type, matching the original
    fall-through behavior.
    """
    # BUG FIX: the aggregates branch was spelled 'agggregates' (three g's)
    # and could never match, so aggregate_id was never returned.
    object_ids = {
        'forecasts': forecast_id,
        'observations': observation_id,
        'cdf_forecasts': cdf_forecast_group_id,
        'aggregates': aggregate_id,
        'sites': site_id,
        'reports': report_id,
        'users': user_id,
        'permissions': permission_id,
        'roles': role_id,
    }
    return object_ids.get(permission_object_type)
@pytest.fixture()
def site():
    """API metadata dict for the seeded 'Weather Station' site."""
    return {
        'created_at': '2019-03-01T11:44:38+00:00',
        'elevation': 595.0,
        'extra_parameters': '{"network_api_abbreviation": "AS","network": "University of Oregon SRML","network_api_id": "94040"}',  # noqa
        'latitude': 42.19,
        'longitude': -122.7,
        'modeling_parameters': {'ac_capacity': None,
                                'ac_loss_factor': None,
                                'axis_azimuth': None,
                                'axis_tilt': None,
                                'backtrack': None,
                                'dc_capacity': None,
                                'dc_loss_factor': None,
                                'ground_coverage_ratio': None,
                                'max_rotation_angle': None,
                                'surface_azimuth': None,
                                'surface_tilt': None,
                                'temperature_coefficient': None,
                                'tracking_type': None},
        'modified_at': '2019-03-01T11:44:38+00:00',
        'name': 'Weather Station',
        'provider': 'Organization 1',
        'site_id': '123e4567-e89b-12d3-a456-426655440001',
        'timezone': 'Etc/GMT+8'}
@pytest.fixture()
def site_with_modeling_params():
    """API metadata dict for the seeded 'Power Plant 1' site (fixed tracking)."""
    return {
        'created_at': '2019-03-01T11:44:46+00:00',
        'elevation': 786.0,
        'extra_parameters': '',
        'latitude': 43.73403,
        'longitude': -96.62328,
        'modeling_parameters': {
            'ac_capacity': 0.015,
            'ac_loss_factor': 0.0,
            'axis_azimuth': None,
            'axis_tilt': None,
            'backtrack': None,
            'dc_capacity': 0.015,
            'dc_loss_factor': 0.0,
            'ground_coverage_ratio': None,
            'max_rotation_angle': None,
            'surface_azimuth': 180.0,
            'surface_tilt': 45.0,
            'temperature_coefficient': -0.2,
            'tracking_type': 'fixed'},
        'modified_at': '2019-03-01T11:44:46+00:00',
        'name': 'Power Plant 1',
        'provider': 'Organization 1',
        'site_id': '123e4567-e89b-12d3-a456-426655440002',
        'timezone': 'Etc/GMT+6'}
@pytest.fixture()
def observation():
    """API metadata dict for the seeded 'GHI Instrument 1' observation."""
    return {
        '_links': {'site': 'http://localhost:5000/sites/123e4567-e89b-12d3-a456-426655440001'},  # noqa
        'created_at': '2019-03-01T12:01:39+00:00',
        'extra_parameters': '{"instrument": "Ascension Technology Rotating Shadowband Pyranometer", "network": "UO SRML"}',  # noqa
        'interval_label': 'beginning',
        'interval_length': 5,
        'interval_value_type': 'interval_mean',
        'modified_at': '2019-03-01T12:01:39+00:00',
        'name': 'GHI Instrument 1',
        'observation_id': '123e4567-e89b-12d3-a456-426655440000',
        'provider': 'Organization 1',
        'site_id': '123e4567-e89b-12d3-a456-426655440001',
        'uncertainty': 0.1,
        'variable': 'ghi'}
@pytest.fixture()
def forecast():
    """API metadata dict for the seeded 'DA GHI' forecast."""
    return {
        '_links': {'aggregate': None,
                   'site': 'http://localhost:5000/sites/123e4567-e89b-12d3-a456-426655440001'},  # noqa
        'aggregate_id': None,
        'created_at': '2019-03-01T11:55:37+00:00',
        'extra_parameters': '',
        'forecast_id': '11c20780-76ae-4b11-bef1-7a75bdc784e3',
        'interval_label': 'beginning',
        'interval_length': 5,
        'interval_value_type': 'interval_mean',
        'issue_time_of_day': '06:00',
        'lead_time_to_start': 60,
        'modified_at': '2019-03-01T11:55:37+00:00',
        'name': 'DA GHI',
        'provider': 'Organization 1',
        'run_length': 1440,
        'site_id': '123e4567-e89b-12d3-a456-426655440001',
        'variable': 'ghi'}
@pytest.fixture()
def cdf_forecast():
    """API metadata dict for the seeded probabilistic 'DA GHI' forecast group.

    Five constant-value members at percentiles 5/20/50/80/95 along axis 'y'.
    """
    return {
        '_links': {'site': 'http://localhost:5000/sites/123e4567-e89b-12d3-a456-426655440001'},  # noqa
        'aggregate_id': None,
        'axis': 'y',
        'constant_values': [{'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9396-50bb-11e9-8647-d663bd873d93/values/timerange',  # noqa
                                        'values': 'http://localhost:5000/forecasts/cdf/single/633f9396-50bb-11e9-8647-d663bd873d93/values'},  # noqa
                             'constant_value': 5.0,
                             'forecast_id': '633f9396-50bb-11e9-8647-d663bd873d93'},
                            {'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9864-50bb-11e9-8647-d663bd873d93/values/timerange',  # noqa
                                        'values': 'http://localhost:5000/forecasts/cdf/single/633f9864-50bb-11e9-8647-d663bd873d93/values'},  # noqa
                             'constant_value': 20.0,
                             'forecast_id': '633f9864-50bb-11e9-8647-d663bd873d93'},
                            {'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9b2a-50bb-11e9-8647-d663bd873d93/values/timerange',  # noqa
                                        'values': 'http://localhost:5000/forecasts/cdf/single/633f9b2a-50bb-11e9-8647-d663bd873d93/values'},  # noqa
                             'constant_value': 50.0,
                             'forecast_id': '633f9b2a-50bb-11e9-8647-d663bd873d93'},
                            {'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9d96-50bb-11e9-8647-d663bd873d93/values/timerange',  # noqa
                                        'values': 'http://localhost:5000/forecasts/cdf/single/633f9d96-50bb-11e9-8647-d663bd873d93/values'},  # noqa
                             'constant_value': 80.0,
                             'forecast_id': '633f9d96-50bb-11e9-8647-d663bd873d93'},
                            {'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633fa548-50bb-11e9-8647-d663bd873d93/values/timerange',  # noqa
                                        'values': 'http://localhost:5000/forecasts/cdf/single/633fa548-50bb-11e9-8647-d663bd873d93/values'},  # noqa
                             'constant_value': 95.0,
                             'forecast_id': '633fa548-50bb-11e9-8647-d663bd873d93'}],
        'created_at': '2019-03-02T14:55:37+00:00',
        'extra_parameters': '',
        'forecast_id': 'ef51e87c-50b9-11e9-8647-d663bd873d93',
        'interval_label': 'beginning',
        'interval_length': 5,
        'interval_value_type': 'interval_mean',
        'issue_time_of_day': '06:00',
        'lead_time_to_start': 60,
        'modified_at': '2019-03-02T14:55:37+00:00',
        'name': 'DA GHI',
        'provider': 'Organization 1',
        'run_length': 1440,
        'site_id': '123e4567-e89b-12d3-a456-426655440001',
        'variable': 'ghi'}
@pytest.fixture()
def aggregate():
    """API metadata dict for the seeded GHI aggregate of three observations."""
    return {
        'aggregate_id': '458ffc27-df0b-11e9-b622-62adb5fd6af0',
        'aggregate_type': 'mean',
        'created_at': '2019-09-24T12:00:00+00:00',
        'description': 'ghi agg',
        'extra_parameters': 'extra',
        'interval_label': 'ending',
        'interval_length': 60,
        'interval_value_type': 'interval_mean',
        'modified_at': '2019-09-24T12:00:00+00:00',
        'name': 'Test Aggregate ghi',
        'observations': [
            {'_links': {'observation': 'http://localhost:5000/observations/123e4567-e89b-12d3-a456-426655440000/metadata'},  # noqa
             'created_at': '2019-09-25T00:00:00+00:00',
             'effective_from': '2019-01-01T00:00:00+00:00',
             'effective_until': None,
             'observation_deleted_at': None,
             'observation_id': '123e4567-e89b-12d3-a456-426655440000'},
            {'_links': {'observation': 'http://localhost:5000/observations/e0da0dea-9482-4073-84de-f1b12c304d23/metadata'},  # noqa
             'created_at': '2019-09-25T00:00:00+00:00',
             'effective_from': '2019-01-01T00:00:00+00:00',
             'effective_until': None,
             'observation_deleted_at': None,
             'observation_id': 'e0da0dea-9482-4073-84de-f1b12c304d23'},
            {'_links': {'observation': 'http://localhost:5000/observations/b1dfe2cb-9c8e-43cd-afcf-c5a6feaf81e2/metadata'},  # noqa
             'created_at': '2019-09-25T00:00:00+00:00',
             'effective_from': '2019-01-01T00:00:00+00:00',
             'effective_until': None,
             'observation_deleted_at': None,
             'observation_id': 'b1dfe2cb-9c8e-43cd-afcf-c5a6feaf81e2'}],
        'provider': 'Organization 1',
        'timezone': 'America/Denver',
        'variable': 'ghi'}
@pytest.fixture()
def report():
    """API metadata dict for a seeded failed report with one message."""
    return {
        'created_at': '2020-01-22T13:48:00+00:00',
        'modified_at': '2020-01-22T13:50:00+00:00',
        'provider': 'Organization 1',
        'raw_report': {
            'data_checksum': None,
            'generated_at': '2019-07-01T12:00:00+00:00',
            'messages': [
                {'function': 'fcn',
                 'level': 'error',
                 'message': 'FAILED',
                 'step': 'dunno'}],
            'metrics': [],
            'plots': None,
            'processed_forecasts_observations': [],
            'timezone': 'Etc/GMT+8',
            'versions': []},
        'report_id': '9f290dd4-42b8-11ea-abdf-f4939feddd82',
        'report_parameters': {
            'categories': ['total', 'date'],
            'end': '2019-06-01T06:59:00+00:00',
            # Both filters reuse the module-level resample_threshold default.
            'filters': [{'quality_flags': ['USER FLAGGED'],
                         'discard_before_resample': True,
                         'resample_threshold_percentage': resample_threshold,
                         },
                        {'quality_flags': ['STALE VALUES'],
                         'discard_before_resample': True,
                         'resample_threshold_percentage': resample_threshold,
                         }],
            'metrics': ['mae', 'rmse'],
            'name': 'NREL MIDC OASIS GHI Forecast Analysis',
            'object_pairs': [
                {'forecast': '11c20780-76ae-4b11-bef1-7a75bdc784e3',
                 'observation': '123e4567-e89b-12d3-a456-426655440000',
                 'reference_forecast': None,
                 'uncertainty': None,
                 'forecast_type': 'forecast',
                 }],
            'start': '2019-04-01T07:00:00+00:00',
            'costs': [{
                'name': 'example cost',
                'type': 'constant',
                'parameters': {
                    "cost": 1.1,
                    "aggregation": "sum",
                    "net": False,
                },
            }],
        },
        'status': 'failed',
        'values': [
            {'id': 'a2b6ed14-42d0-11ea-aa3c-f4939feddd82',
             'object_id': '123e4567-e89b-12d3-a456-426655440000',
             'processed_values': 'superencodedvalues'}]
    }
| 33.690647 | 2,216 | 0.658808 | import os
import requests
import pymysql
import pytest
from flask import url_for
from solarforecastarbiter.datamodel import QualityFlagFilter as QFF
from sfa_dash import create_app
BASE_URL = 'http://localhost'
resample_threshold = QFF.resample_threshold_percentage
@pytest.fixture(scope='session')
def auth_token():
token_req = requests.post(
'https://solarforecastarbiter.auth0.com/oauth/token',
headers={'content-type': 'application/json'},
data=('{"grant_type": "password", '
'"username": "testing@solarforecastarbiter.org",'
'"password": "Thepassword123!", '
'"audience": "https://api.solarforecastarbiter.org", '
'"client_id": "c16EJo48lbTCQEhqSztGGlmxxxmZ4zX7"}'))
if token_req.status_code != 200:
pytest.skip('Cannot retrieve valid Auth0 token')
else:
token = token_req.json()
return token
@pytest.fixture()
def expired_token():
stored = {'access_token': 'eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsImtpZCI6Ik5UZENSRGRFTlVNMk9FTTJNVGhCTWtRelFUSXpNRFF6TUVRd1JUZ3dNekV3T1VWR1FrRXpSUSJ9.eyJpc3MiOiJodHRwczovL3NvbGFyZm9yZWNhc3RhcmJpdGVyLmF1dGgwLmNvbS8iLCJzdWIiOiJhdXRoMHw1YmUzNDNkZjcwMjU0MDYyMzc4MjBiODUiLCJhdWQiOlsiaHR0cHM6Ly9hcGkuc29sYXJmb3JlY2FzdGFyYml0ZXIub3JnIiwiaHR0cHM6Ly9zb2xhcmZvcmVjYXN0YXJiaXRlci5hdXRoMC5jb20vdXNlcmluZm8iXSwiaWF0IjoxNTU1NDU0NzcwLCJleHAiOjE1NTU0NjU1NzAsImF6cCI6IlByRTM5QWR0R01QSTRnSzJoUnZXWjJhRFJhcmZwZzdBIiwic2NvcGUiOiJvcGVuaWQgcHJvZmlsZSBlbWFpbCBvZmZsaW5lX2FjY2VzcyJ9.lT1XPtLkYCVGUZjcAgWFCU6AJbKWtE077zw_KO4fhIaF0wo6TTpLTkZBmF9Sxmrwb5NfeR5XuJmkX3SPCjpzcZG9wdXIpPWRGhsOAAUdoSkoHKFzALoc46VPjA3A5SZxlGqNeh6RoKFlWRAp5EJN9Z-JcwT06JyJGrbx7ip4tCbAADqWuDY2tzkjKD3EHjHTO9OSJiCRxlNA22OCfMTF6B8-8RLUabZ414bypezw83S9g25mLLWtlGhQvzWGA8F7yhhVXbEsAPPC1QoyjevXzn8PBqL5dSDp6u1gL6T29PsbhZ0diZ1xt5jkm4iX-cryc7tqwq-5D5ZkC3wbhNpLuQ', 'refresh_token': 'QlLHR9wyFS5cokItX-ym7jWlCCuLO1fC3AtZLUeDVX-mI', 'id_token': 'eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsImtpZCI6Ik5UZENSRGRFTlVNMk9FTTJNVGhCTWtRelFUSXpNRFF6TUVRd1JUZ3dNekV3T1VWR1FrRXpSUSJ9.eyJuaWNrbmFtZSI6InRlc3RpbmciLCJuYW1lIjoidGVzdGluZ0Bzb2xhcmZvcmVjYXN0YXJiaXRlci5vcmciLCJwaWN0dXJlIjoiaHR0cHM6Ly9zLmdyYXZhdGFyLmNvbS9hdmF0YXIvY2MxMTNkZjY5NmY4ZTlmMjA2Nzc5OTQzMzUxNzRhYjY_cz00ODAmcj1wZyZkPWh0dHBzJTNBJTJGJTJGY2RuLmF1dGgwLmNvbSUyRmF2YXRhcnMlMkZ0ZS5wbmciLCJ1cGRhdGVkX2F0IjoiMjAxOS0wNC0xNlQyMjo0NjoxMC42NTdaIiwiZW1haWwiOiJ0ZXN0aW5nQHNvbGFyZm9yZWNhc3RhcmJpdGVyLm9yZyIsImVtYWlsX3ZlcmlmaWVkIjpmYWxzZSwiaXNzIjoiaHR0cHM6Ly9zb2xhcmZvcmVjYXN0YXJiaXRlci5hdXRoMC5jb20vIiwic3ViIjoiYXV0aDB8NWJlMzQzZGY3MDI1NDA2MjM3ODIwYjg1IiwiYXVkIjoiUHJFMzlBZHRHTVBJNGdLMmhSdldaMmFEUmFyZnBnN0EiLCJpYXQiOjE1NTU0NTQ3NzAsImV4cCI6MTU1NTQ5MDc3MH0.axw45-ms_LVIS_WsUdcCryZeOwpZVAn95zbUm9WO23bpIja7QaR1h6_Emb9nUNJIk44vp-J-zwKIZd4j7bg5_vaVcJER4_rL8vlc6f5lJdZAU20yeisTT4q1YcwlvQhg7avWMUkZaiO3SgK0eJ3371Gm2gJgK2b21bnpzmUHQ0vS906GLGngaVzb3VEE_g4CgR4u6qmBQRwq3Z6DyRBq572Qhn3TXk_0Xvj43Q9TyYjV5ioou5Xe-3T5HHsCoUWqDp0BZ3bP9FlYFws9Df
fnFzf1yVtpwfk9shmAe8V6Fn9N0OjuS4LJP0Tc-I7adspJlYfB9BeTEci6MKn58OQCrw', 'scope': ['openid', 'profile', 'email', 'offline_access'], 'expires_in': 0, 'token_type': 'Bearer', 'expires_at': 1555465570.9597309}
return stored
@pytest.fixture()
def mocked_storage(mocker, auth_token, expired_token):
def make_storage(authenticated=False):
if authenticated:
token = auth_token
else:
token = expired_token
class fake_storage:
def __init__(*args, **kwargs):
pass
def get(self, *args):
return token
def set(self, *args):
pass
def delete(self, *args):
pass
return fake_storage
return make_storage
@pytest.fixture()
def mocked_unauth_storage(mocker, mocked_storage):
mocker.patch('sfa_dash.session_storage',
new=mocked_storage())
@pytest.fixture()
def mocked_auth_storage(mocker, mocked_storage):
mocker.patch('sfa_dash.session_storage',
new=mocked_storage(True))
@pytest.fixture()
def app_unauth(mocked_unauth_storage):
os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'
return create_app('sfa_dash.config.TestConfig')
@pytest.fixture()
def app(mocked_auth_storage):
os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'
return create_app('sfa_dash.config.TestConfig')
@pytest.fixture()
def client(app):
yield app.test_client()
no_arg_routes_list = [
'/sites/',
'/observations/',
'/forecasts/single/',
'/forecasts/cdf/',
'/reports/',
'/aggregates/',
'/sites/create',
'/reports/deterministic/create',
'/reports/event/create',
'/aggregates/create',
]
@pytest.fixture(params=no_arg_routes_list)
def no_arg_route(request):
return request.param
admin_routes_list = [
'/admin/permissions/create/cdf_forecast_group',
'/admin/permissions/create/observation',
'/admin/permissions/create/forecast',
'/admin/permissions/create/report',
'/admin/permissions/create/site',
'/admin/roles/create',
'/admin/permissions/',
'/admin/roles/',
'/admin/users/',
]
@pytest.fixture(params=admin_routes_list)
def admin_route(request):
return request.param
admin_multiarg_route_list = [
'/admin/permissions/{permission_id}/remove/{object_id}',
'/admin/roles/{role_id}/remove/{permission_id}',
'/admin/users/{user_id}/remove/{role_id}',
]
@pytest.fixture(params=admin_multiarg_route_list)
def admin_multiarg_route(request):
def fn(object_id, permission_id, user_id, role_id):
return request.param.format(
object_id=object_id,
permission_id=permission_id,
user_id=user_id,
role_id=role_id)
return fn
user_id_route_list = [
'/admin/users/{user_id}',
'/admin/users/{user_id}/add/',
]
@pytest.fixture(params=user_id_route_list)
def user_id_route(request):
def fn(user_id):
return request.param.format(user_id=user_id)
return fn
role_id_route_list = [
'/admin/roles/{role_id}',
'/admin/roles/{role_id}/delete',
'/admin/roles/{role_id}/add/',
'/admin/roles/{role_id}/grant/',
]
@pytest.fixture(params=role_id_route_list)
def role_id_route(request):
def fn(role_id):
return request.param.format(role_id=role_id)
return fn
permission_id_route_list = [
'/admin/permissions/{permission_id}',
'/admin/permissions/{permission_id}/delete',
'/admin/permissions/{permission_id}/add',
]
@pytest.fixture(params=permission_id_route_list)
def permission_id_route(request):
def fn(permission_id):
return request.param.format(permission_id=permission_id)
return fn
report_id_route_list = [
'/reports/{report_id}',
'/reports/{report_id}/delete',
]
@pytest.fixture(params=report_id_route_list)
def report_id_route(request):
def fn(report_id):
return request.param.format(report_id=report_id)
return fn
site_id_route_list = [
'/sites/{site_id}/',
'/sites/{site_id}/delete',
'/sites/{site_id}/forecasts/single/create',
'/sites/{site_id}/forecasts/cdf/create',
'/sites/{site_id}/observations/create',
'/sites/{site_id}/observations/create',
]
@pytest.fixture(params=site_id_route_list)
def site_id_route(request):
def fn(site_id):
return request.param.format(site_id=site_id)
return fn
observation_id_route_list = [
'/observations/{observation_id}',
'/observations/{observation_id}/delete',
]
@pytest.fixture(params=observation_id_route_list)
def observation_id_route(request):
def fn(observation_id):
return request.param.format(observation_id=observation_id)
return fn
forecast_id_route_list = [
'/forecasts/single/{forecast_id}',
'/forecasts/single/{forecast_id}/delete',
]
@pytest.fixture(params=forecast_id_route_list)
def forecast_id_route(request):
def fn(forecast_id):
return request.param.format(forecast_id=forecast_id)
return fn
cdf_forecast_id_route_list = [
'/forecasts/cdf/{forecast_id}',
'/forecasts/cdf/{forecast_id}/delete',
]
@pytest.fixture(params=cdf_forecast_id_route_list)
def cdf_forecast_id_route(request):
def fn(forecast_id):
return request.param.format(forecast_id=forecast_id)
return fn
cdf_forecast_single_id_routes_list = [
'/forecasts/cdf/single/{forecast_id}',
]
@pytest.fixture(params=cdf_forecast_single_id_routes_list)
def cdf_forecast_single_id_route(request):
def fn(forecast_id):
return request.param.format(forecast_id=forecast_id)
return fn
aggregate_id_route_list = [
'/aggregates/{aggregate_id}',
'/aggregates/{aggregate_id}/delete',
'/aggregates/{aggregate_id}/add',
'/aggregates/{aggregate_id}/forecasts/single/create',
'/aggregates/{aggregate_id}/forecasts/cdf/create',
]
@pytest.fixture(params=aggregate_id_route_list)
def aggregate_id_route(request):
def fn(aggregate_id):
return request.param.format(aggregate_id=aggregate_id)
return fn
clone_route_list = [
'/sites/{site_id}/clone',
'/observations/{observation_id}/clone',
'/forecasts/single/{forecast_id}/clone',
]
@pytest.fixture(params=clone_route_list)
def clone_route(request):
def fn(uuids):
return request.param.format(**uuids)
return fn
# ---------------------------------------------------------------------------
# Canonical UUID fixtures.  These values match rows pre-seeded in the test
# database dump (see the `connection` fixture), so they must stay in sync
# with that dump -- they are not arbitrary.
# ---------------------------------------------------------------------------
@pytest.fixture()
def missing_id():
    """A UUID that exists nowhere in the test data (for 404 paths)."""
    return '7d2c3208-5243-11e9-8647-d663bd873d93'
@pytest.fixture()
def observation_id():
    return '123e4567-e89b-12d3-a456-426655440000'
@pytest.fixture()
def cdf_forecast_group_id():
    return 'ef51e87c-50b9-11e9-8647-d663bd873d93'
@pytest.fixture()
def cdf_forecast_id():
    return '633f9396-50bb-11e9-8647-d663bd873d93'
@pytest.fixture()
def forecast_id():
    return '11c20780-76ae-4b11-bef1-7a75bdc784e3'
@pytest.fixture()
def site_id():
    return '123e4567-e89b-12d3-a456-426655440001'
@pytest.fixture()
def site_id_plant():
    """Site that carries PV modeling parameters (a power plant)."""
    return '123e4567-e89b-12d3-a456-426655440002'
@pytest.fixture()
def test_orgid():
    return 'b76ab62e-4fe1-11e9-9e44-64006a511e6f'
@pytest.fixture()
def user_id():
    return '0c90950a-7cca-11e9-a81f-54bf64606445'
@pytest.fixture()
def aggregate_id():
    return '458ffc27-df0b-11e9-b622-62adb5fd6af0'
@pytest.fixture()
def report_id():
    return '9f290dd4-42b8-11ea-abdf-f4939feddd82'
@pytest.fixture
def all_metadata_ids(
        observation_id, forecast_id, cdf_forecast_group_id, cdf_forecast_id,
        site_id, site_id_plant, aggregate_id, report_id):
    """All metadata UUIDs bundled into one dict, keyed by fixture name."""
    return {
        'observation_id': observation_id,
        'forecast_id': forecast_id,
        'cdf_forecast_group_id': cdf_forecast_group_id,
        'cdf_forecast_id': cdf_forecast_id,
        'site_id': site_id,
        'site_id_plant': site_id_plant,
        'aggregate_id': aggregate_id,
        'report_id': report_id,
    }
@pytest.fixture()
def test_url(app):
    """Resolve a Flask view name to an absolute URL inside a request context."""
    def fn(view):
        with app.test_request_context():
            return url_for(view, _external=True)
    return fn
@pytest.fixture(scope='session')
def connection():
    """One pymysql connection to the seeded test database, shared per session.

    Host/port come from MYSQL_HOST/MYSQL_PORT (defaulting to a local
    server); root/'testpassword' are throwaway credentials for the test
    container only.
    """
    connection = pymysql.connect(
        host=os.getenv('MYSQL_HOST', '127.0.0.1'),
        port=int(os.getenv('MYSQL_PORT', '3306')),
        user='root',
        password='testpassword',
        database='arbiter_data',
        binary_prefix=True)
    return connection
@pytest.fixture()
def cursor(connection):
    """Fresh tuple-row cursor; rollback first to discard prior test state."""
    connection.rollback()
    return connection.cursor()
@pytest.fixture()
def dictcursor(connection):
    """Fresh dict-row cursor; rollback first to discard prior test state."""
    connection.rollback()
    return connection.cursor(cursor=pymysql.cursors.DictCursor)
@pytest.fixture()
def role_id(cursor):
    """UUID of the pre-seeded "Test user role" row."""
    cursor.execute(
        'SELECT BIN_TO_UUID(id, 1) from arbiter_data.roles '
        'WHERE name = "Test user role"')
    role_id = cursor.fetchone()[0]
    return role_id
@pytest.fixture()
def permission_id(cursor, role_id):
    """UUID of an arbitrary permission mapped to the test role (LIMIT 1)."""
    cursor.execute(
        'SELECT BIN_TO_UUID(id, 1) FROM arbiter_data.permissions '
        'WHERE id IN (SELECT permission_id FROM '
        'arbiter_data.role_permission_mapping WHERE role_id '
        '= UUID_TO_BIN(%s, 1) ) LIMIT 1', role_id)
    permission_id = cursor.fetchone()[0]
    return permission_id
@pytest.fixture()
def permission_object_type(cursor, permission_id):
    """The object_type column of that permission (e.g. 'forecasts')."""
    cursor.execute(
        'SELECT object_type FROM arbiter_data.permissions '
        'WHERE id = UUID_TO_BIN(%s, 1)', permission_id)
    return cursor.fetchone()[0]
@pytest.fixture()
def valid_permission_object_id(
        observation_id, forecast_id, cdf_forecast_group_id, aggregate_id,
        site_id, role_id, user_id, permission_id, report_id,
        permission_object_type):
    """Map the permission's object_type onto an existing object id of that type.

    Returns None for an unrecognized object_type (same as the original
    fall-through behaviour).
    """
    # Dict dispatch replaces the if-chain, and fixes the original
    # 'agggregates' (three g's) typo, which silently returned None for
    # aggregate permissions.
    id_by_type = {
        'forecasts': forecast_id,
        'observations': observation_id,
        'cdf_forecasts': cdf_forecast_group_id,
        'aggregates': aggregate_id,
        'sites': site_id,
        'reports': report_id,
        'users': user_id,
        'permissions': permission_id,
        'roles': role_id,
    }
    return id_by_type.get(permission_object_type)
@pytest.fixture()
def site():
return {
'created_at': '2019-03-01T11:44:38+00:00',
'elevation': 595.0,
'extra_parameters': '{"network_api_abbreviation": "AS","network": "University of Oregon SRML","network_api_id": "94040"}',
'latitude': 42.19,
'longitude': -122.7,
'modeling_parameters': {'ac_capacity': None,
'ac_loss_factor': None,
'axis_azimuth': None,
'axis_tilt': None,
'backtrack': None,
'dc_capacity': None,
'dc_loss_factor': None,
'ground_coverage_ratio': None,
'max_rotation_angle': None,
'surface_azimuth': None,
'surface_tilt': None,
'temperature_coefficient': None,
'tracking_type': None},
'modified_at': '2019-03-01T11:44:38+00:00',
'name': 'Weather Station',
'provider': 'Organization 1',
'site_id': '123e4567-e89b-12d3-a456-426655440001',
'timezone': 'Etc/GMT+8'}
@pytest.fixture()
def site_with_modeling_params():
return {
'created_at': '2019-03-01T11:44:46+00:00',
'elevation': 786.0,
'extra_parameters': '',
'latitude': 43.73403,
'longitude': -96.62328,
'modeling_parameters': {
'ac_capacity': 0.015,
'ac_loss_factor': 0.0,
'axis_azimuth': None,
'axis_tilt': None,
'backtrack': None,
'dc_capacity': 0.015,
'dc_loss_factor': 0.0,
'ground_coverage_ratio': None,
'max_rotation_angle': None,
'surface_azimuth': 180.0,
'surface_tilt': 45.0,
'temperature_coefficient': -0.2,
'tracking_type': 'fixed'},
'modified_at': '2019-03-01T11:44:46+00:00',
'name': 'Power Plant 1',
'provider': 'Organization 1',
'site_id': '123e4567-e89b-12d3-a456-426655440002',
'timezone': 'Etc/GMT+6'}
@pytest.fixture()
def observation():
return {
'_links': {'site': 'http://localhost:5000/sites/123e4567-e89b-12d3-a456-426655440001'},
'created_at': '2019-03-01T12:01:39+00:00',
'extra_parameters': '{"instrument": "Ascension Technology Rotating Shadowband Pyranometer", "network": "UO SRML"}',
'interval_label': 'beginning',
'interval_length': 5,
'interval_value_type': 'interval_mean',
'modified_at': '2019-03-01T12:01:39+00:00',
'name': 'GHI Instrument 1',
'observation_id': '123e4567-e89b-12d3-a456-426655440000',
'provider': 'Organization 1',
'site_id': '123e4567-e89b-12d3-a456-426655440001',
'uncertainty': 0.1,
'variable': 'ghi'}
@pytest.fixture()
def forecast():
return {
'_links': {'aggregate': None,
'site': 'http://localhost:5000/sites/123e4567-e89b-12d3-a456-426655440001'},
'aggregate_id': None,
'created_at': '2019-03-01T11:55:37+00:00',
'extra_parameters': '',
'forecast_id': '11c20780-76ae-4b11-bef1-7a75bdc784e3',
'interval_label': 'beginning',
'interval_length': 5,
'interval_value_type': 'interval_mean',
'issue_time_of_day': '06:00',
'lead_time_to_start': 60,
'modified_at': '2019-03-01T11:55:37+00:00',
'name': 'DA GHI',
'provider': 'Organization 1',
'run_length': 1440,
'site_id': '123e4567-e89b-12d3-a456-426655440001',
'variable': 'ghi'}
@pytest.fixture()
def cdf_forecast():
return {
'_links': {'site': 'http://localhost:5000/sites/123e4567-e89b-12d3-a456-426655440001'},
'aggregate_id': None,
'axis': 'y',
'constant_values': [{'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9396-50bb-11e9-8647-d663bd873d93/values/timerange',
'values': 'http://localhost:5000/forecasts/cdf/single/633f9396-50bb-11e9-8647-d663bd873d93/values'},
'constant_value': 5.0,
'forecast_id': '633f9396-50bb-11e9-8647-d663bd873d93'},
{'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9864-50bb-11e9-8647-d663bd873d93/values/timerange',
'values': 'http://localhost:5000/forecasts/cdf/single/633f9864-50bb-11e9-8647-d663bd873d93/values'},
'constant_value': 20.0,
'forecast_id': '633f9864-50bb-11e9-8647-d663bd873d93'},
{'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9b2a-50bb-11e9-8647-d663bd873d93/values/timerange',
'values': 'http://localhost:5000/forecasts/cdf/single/633f9b2a-50bb-11e9-8647-d663bd873d93/values'},
'constant_value': 50.0,
'forecast_id': '633f9b2a-50bb-11e9-8647-d663bd873d93'},
{'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633f9d96-50bb-11e9-8647-d663bd873d93/values/timerange',
'values': 'http://localhost:5000/forecasts/cdf/single/633f9d96-50bb-11e9-8647-d663bd873d93/values'},
'constant_value': 80.0,
'forecast_id': '633f9d96-50bb-11e9-8647-d663bd873d93'},
{'_links': {'timerange': 'http://localhost:5000/forecasts/cdf/single/633fa548-50bb-11e9-8647-d663bd873d93/values/timerange',
'values': 'http://localhost:5000/forecasts/cdf/single/633fa548-50bb-11e9-8647-d663bd873d93/values'},
'constant_value': 95.0,
'forecast_id': '633fa548-50bb-11e9-8647-d663bd873d93'}],
'created_at': '2019-03-02T14:55:37+00:00',
'extra_parameters': '',
'forecast_id': 'ef51e87c-50b9-11e9-8647-d663bd873d93',
'interval_label': 'beginning',
'interval_length': 5,
'interval_value_type': 'interval_mean',
'issue_time_of_day': '06:00',
'lead_time_to_start': 60,
'modified_at': '2019-03-02T14:55:37+00:00',
'name': 'DA GHI',
'provider': 'Organization 1',
'run_length': 1440,
'site_id': '123e4567-e89b-12d3-a456-426655440001',
'variable': 'ghi'}
@pytest.fixture()
def aggregate():
return {
'aggregate_id': '458ffc27-df0b-11e9-b622-62adb5fd6af0',
'aggregate_type': 'mean',
'created_at': '2019-09-24T12:00:00+00:00',
'description': 'ghi agg',
'extra_parameters': 'extra',
'interval_label': 'ending',
'interval_length': 60,
'interval_value_type': 'interval_mean',
'modified_at': '2019-09-24T12:00:00+00:00',
'name': 'Test Aggregate ghi',
'observations': [
{'_links': {'observation': 'http://localhost:5000/observations/123e4567-e89b-12d3-a456-426655440000/metadata'},
'created_at': '2019-09-25T00:00:00+00:00',
'effective_from': '2019-01-01T00:00:00+00:00',
'effective_until': None,
'observation_deleted_at': None,
'observation_id': '123e4567-e89b-12d3-a456-426655440000'},
{'_links': {'observation': 'http://localhost:5000/observations/e0da0dea-9482-4073-84de-f1b12c304d23/metadata'},
'created_at': '2019-09-25T00:00:00+00:00',
'effective_from': '2019-01-01T00:00:00+00:00',
'effective_until': None,
'observation_deleted_at': None,
'observation_id': 'e0da0dea-9482-4073-84de-f1b12c304d23'},
{'_links': {'observation': 'http://localhost:5000/observations/b1dfe2cb-9c8e-43cd-afcf-c5a6feaf81e2/metadata'},
'created_at': '2019-09-25T00:00:00+00:00',
'effective_from': '2019-01-01T00:00:00+00:00',
'effective_until': None,
'observation_deleted_at': None,
'observation_id': 'b1dfe2cb-9c8e-43cd-afcf-c5a6feaf81e2'}],
'provider': 'Organization 1',
'timezone': 'America/Denver',
'variable': 'ghi'}
@pytest.fixture()
def report():
return {
'created_at': '2020-01-22T13:48:00+00:00',
'modified_at': '2020-01-22T13:50:00+00:00',
'provider': 'Organization 1',
'raw_report': {
'data_checksum': None,
'generated_at': '2019-07-01T12:00:00+00:00',
'messages': [
{'function': 'fcn',
'level': 'error',
'message': 'FAILED',
'step': 'dunno'}],
'metrics': [],
'plots': None,
'processed_forecasts_observations': [],
'timezone': 'Etc/GMT+8',
'versions': []},
'report_id': '9f290dd4-42b8-11ea-abdf-f4939feddd82',
'report_parameters': {
'categories': ['total', 'date'],
'end': '2019-06-01T06:59:00+00:00',
'filters': [{'quality_flags': ['USER FLAGGED'],
'discard_before_resample': True,
'resample_threshold_percentage': resample_threshold,
},
{'quality_flags': ['STALE VALUES'],
'discard_before_resample': True,
'resample_threshold_percentage': resample_threshold,
}],
'metrics': ['mae', 'rmse'],
'name': 'NREL MIDC OASIS GHI Forecast Analysis',
'object_pairs': [
{'forecast': '11c20780-76ae-4b11-bef1-7a75bdc784e3',
'observation': '123e4567-e89b-12d3-a456-426655440000',
'reference_forecast': None,
'uncertainty': None,
'forecast_type': 'forecast',
}],
'start': '2019-04-01T07:00:00+00:00',
'costs': [{
'name': 'example cost',
'type': 'constant',
'parameters': {
"cost": 1.1,
"aggregation": "sum",
"net": False,
},
}],
},
'status': 'failed',
'values': [
{'id': 'a2b6ed14-42d0-11ea-aa3c-f4939feddd82',
'object_id': '123e4567-e89b-12d3-a456-426655440000',
'processed_values': 'superencodedvalues'}]
}
| true | true |
f71a810dff951c596883e731538ad5809e7693d4 | 676 | py | Python | pyutilscr/check.py | GoodMusic8596/pyutilscr | 5965d3ab66767e2ef14ab119739b673859bbab81 | [
"MIT"
] | 1 | 2021-12-25T16:09:41.000Z | 2021-12-25T16:09:41.000Z | pyutilscr/check.py | GoodMusic8596/pyutilscr | 5965d3ab66767e2ef14ab119739b673859bbab81 | [
"MIT"
] | null | null | null | pyutilscr/check.py | GoodMusic8596/pyutilscr | 5965d3ab66767e2ef14ab119739b673859bbab81 | [
"MIT"
] | null | null | null | import glob
def search(directory, searchElem:list, extension = ".txt"):
    """Search ``directory`` for files with ``extension`` whose contents
    contain at least one of the elements in ``searchElem``.

    directory format: /home/runner/project (a trailing separator is no
    longer required -- the pattern is joined with os.path.join)
    directory type: folder
    element type: list
    extensions requirments: MUST have a period before, such as ".txt"

    :param directory: folder to search
    :param searchElem:list: substrings to look for
    :param extension: (Default value = ".txt")
    :return: list of matching file paths, each listed at most once
    """
    import os  # local import keeps the module's import surface unchanged
    # os.path.join fixes the original ``directory + "*" + extension``
    # pattern, which only worked when the caller included a trailing "/".
    files = glob.glob(os.path.join(directory, "*" + extension))
    files_detected = []
    for file in files:
        # ``with`` closes the handle even on error (the original leaked
        # one open file per candidate).
        with open(file) as sf:
            stored = sf.read()
        # Append once per file; the original appended a duplicate entry
        # for every matching element.
        if any(elem in stored for elem in searchElem):
            files_detected.append(file)
    return files_detected
def search(directory, searchElem:list, extension = ".txt"):
files = glob.glob(directory+"*"+extension)
files_detected = []
for file in files:
sf = open(file)
stored = sf.read()
for elem in searchElem:
if elem in stored:
files_detected.append(file)
return files_detected | true | true |
f71a812d7ff386f03ab0d70d638640016bdd2c44 | 3,007 | py | Python | air_pollution_death_rate_related/scripts/air_pollution/predict_aqi.py | nghitrampham/air_pollution_death_rate_related | 3fd72b9684e8362de5706ba37c1d90b844d4afe0 | [
"MIT"
] | null | null | null | air_pollution_death_rate_related/scripts/air_pollution/predict_aqi.py | nghitrampham/air_pollution_death_rate_related | 3fd72b9684e8362de5706ba37c1d90b844d4afe0 | [
"MIT"
] | 15 | 2019-12-10T02:05:58.000Z | 2022-03-12T00:06:38.000Z | air_pollution_death_rate_related/scripts/air_pollution/predict_aqi.py | nghitrampham/CSE583_FinalProject | 3fd72b9684e8362de5706ba37c1d90b844d4afe0 | [
"MIT"
] | 1 | 2020-06-04T17:48:21.000Z | 2020-06-04T17:48:21.000Z | """
This module is used to predict the Air Quality Index model for 2019 for all counties.
"""
import pickle
import warnings
import pandas as pd
import numpy as np
from keras.models import load_model
import helpers
warnings.filterwarnings("ignore")
def main():
    """Predict AQI for each county using the per-county saved Keras models.

    Reads the 2019 raw AQI data, loads a saved model and MinMax scaler per
    county, writes per-county predictions for ``predicted_date`` into
    ``temp.csv``, then joins them with the county-code table into
    ``predicted_AQI<date>.csv`` for the interactive visualization map.
    """
    # NOTE: the original passed triple-quoted, multi-line strings to
    # read_csv, which embedded a newline plus indentation in the middle of
    # the path and could never resolve.  Adjacent-literal concatenation
    # keeps the long paths readable without corrupting them.
    data2019_raw = pd.read_csv(
        "air_pollution_death_rate_related/data/air_pollution/"
        "data_air_raw/daily_aqi_by_county_2019.csv")
    data2019 = helpers.data_cleaning(data2019_raw)
    predicted_date = "2019-03-12"

    # ``with`` guarantees temp.csv is flushed and closed before it is read
    # back below, even if a county fails part-way through.
    with open("temp.csv", "w") as file:
        file.write("date,state_county,AQI\n")
        # [:5] preserves the original behaviour of only predicting the
        # first five counties (presumably a debugging limit -- TODO confirm
        # before running a full production pass).
        for county in list(data2019["state_county"].unique())[:5]:
            print("---> Loading model for county {} ...".format(county))
            try:
                scaler_path = (
                    "air_pollution_death_rate_related/trained_model/"
                    "min_scaler_model/" + county + "_scaler.pickle")
                model_path = (
                    "air_pollution_death_rate_related/trained_model/"
                    "county_aqi/" + county + "_model.h5")
                model = load_model(model_path)
                # ``with`` closes the pickle handle (the original leaked it).
                with open(scaler_path, "rb") as scaler_file:
                    mm_scaler = pickle.load(scaler_file)

                ## feature engineering for model
                data_feature_temp = helpers.data_feature_engineering_for_test(
                    data2019, county, predicted_date)
                x_test, y_test = helpers.load_test_data(
                    data_feature_temp["data"], mm_scaler)

                ## predicting AQI
                print("Predicting ....")
                predictions = helpers.predict_point_by_point(model, x_test)
                # Re-attach the prediction to the 38-step input window, then
                # invert the MinMax scaling to recover a real AQI value.
                y_pred = np.append(
                    x_test, predictions.reshape(1, 1, 1)).reshape(1, 39)
                y_scale = mm_scaler.inverse_transform(y_pred)[-1][-1]
                file.write(
                    predicted_date + "," + county + "," + str(y_scale) + "\n")
            except Exception as exp:
                # Log and continue: one missing model/scaler should not
                # abort the whole run.  (The original mutated exp.args with
                # an error message copied from unrelated code.)
                print("Skipping county {}: {}".format(county, exp))

    ## join predictions with county codes for the visualization map
    county_code = pd.read_csv(
        "air_pollution_death_rate_related/data/air_pollution/"
        "data_misc/county_with_code.csv")
    df_prediction = pd.read_csv("temp.csv")
    df_result = pd.merge(
        county_code, df_prediction, how='inner', on=["state_county"])
    df_result.to_csv("predicted_AQI" + predicted_date + ".csv", index=False)
if __name__ == '__main__':
    main()
| 36.228916 | 96 | 0.606252 | import pickle
import warnings
import pandas as pd
import numpy as np
from keras.models import load_model
import helpers
warnings.filterwarnings("ignore")
def main():
data2019_raw = pd.read_csv("""air_pollution_death_rate_related/data/air_pollution/
data_air_raw/daily_aqi_by_county_2019.csv""")
data2019 = helpers.data_cleaning(data2019_raw)
predicted_date = "2019-03-12"
file = open("temp.csv", "w")
file.write("date,state_county,AQI\n")
for county in list(data2019["state_county"].unique())[:5]:
g model for county {} ...".format(county))
try:
scaler_path = ("air_pollution_death_rate_related/trained_model/min_scaler_model/" +
county + "_scaler.pickle")
model_path = ("air_pollution_death_rate_related/trained_model/county_aqi/" +
county + "_model.h5")
model = load_model(model_path)
mm_scaler = pickle.load(open(scaler_path, "rb"))
ng_for_test(
data2019,
county,
predicted_date)
x_test, y_test = helpers.load_test_data(data_feature_temp["data"], mm_scaler)
dictions = helpers.predict_point_by_point(model, x_test)
.")
y_pred = np.append(x_test, predictions.reshape(1, 1, 1)).reshape(1, 39)
y_scale = mm_scaler.inverse_transform(y_pred)[-1][-1]
file.write(predicted_date+","+county+","+str(y_scale)+"\n")
del data_feature_temp, scaler_path,\
model_path, model, mm_scaler, x_test, y_test, predictions, y_pred, y_scale
except Exception as exp:
print(exp)
exp.args += ('Path and list_year must not be empty', "check read_raw_data function")
file.close()
data_misc/county_with_code.csv""")
df_prediction = pd.read_csv("temp.csv")
df_result = (pd.merge(county_code, df_prediction,
how='inner',
left_on=["state_county"],
right_on=["state_county"])
)
df_result.to_csv("predicted_AQI" + predicted_date + ".csv", index=False)
if __name__ == '__main__':
main()
| true | true |
f71a8190b09e04bc1e4e8dc5cf8762becc12f3bd | 2,108 | py | Python | office365/directory/identities/userflows/language_configuration.py | juguerre/Office365-REST-Python-Client | dbadaddb14e7bad199499c898cdef1ada9694fc9 | [
"MIT"
] | null | null | null | office365/directory/identities/userflows/language_configuration.py | juguerre/Office365-REST-Python-Client | dbadaddb14e7bad199499c898cdef1ada9694fc9 | [
"MIT"
] | null | null | null | office365/directory/identities/userflows/language_configuration.py | juguerre/Office365-REST-Python-Client | dbadaddb14e7bad199499c898cdef1ada9694fc9 | [
"MIT"
] | null | null | null | from office365.directory.identities.userflows.language_page import UserFlowLanguagePage
from office365.entity import Entity
from office365.entity_collection import EntityCollection
from office365.runtime.resource_path import ResourcePath
class UserFlowLanguageConfiguration(Entity):
    """A language supported by a user flow.

    Azure Active Directory user flows can present the journey in multiple
    (built-in) languages; this entity exposes the page collections holding
    the strings shown to users in a given language -- both Microsoft's
    defaults and any organization-specific overrides.
    """

    @property
    def display_name(self):
        """
        The language name to display.
        :rtype: str or None
        """
        return self.properties.get("displayName", None)

    @property
    def default_pages(self):
        """Pages holding the default content for this language.
        :rtype: EntityCollection
        """
        fallback = EntityCollection(
            self.context,
            UserFlowLanguagePage,
            ResourcePath("defaultPages", self.resource_path),
        )
        return self.get_property("defaultPages", fallback)

    @property
    def overrides_pages(self):
        """Pages holding the overridden content for this language.
        :rtype: EntityCollection
        """
        fallback = EntityCollection(
            self.context,
            UserFlowLanguagePage,
            ResourcePath("overridesPages", self.resource_path),
        )
        return self.get_property("overridesPages", fallback)

    def get_property(self, name, default_value=None):
        # Translate the camel-cased payload names onto the snake-cased
        # Python properties when the caller supplied no fallback.
        if default_value is None:
            if name == "defaultPages":
                default_value = self.default_pages
            elif name == "overridesPages":
                default_value = self.overrides_pages
        return super(UserFlowLanguageConfiguration, self).get_property(name, default_value)
| 41.333333 | 113 | 0.65797 | from office365.directory.identities.userflows.language_page import UserFlowLanguagePage
from office365.entity import Entity
from office365.entity_collection import EntityCollection
from office365.runtime.resource_path import ResourcePath
class UserFlowLanguageConfiguration(Entity):
@property
def display_name(self):
return self.properties.get("displayName", None)
@property
def default_pages(self):
return self.get_property('defaultPages',
EntityCollection(self.context, UserFlowLanguagePage,
ResourcePath("defaultPages", self.resource_path)))
@property
def overrides_pages(self):
return self.get_property('overridesPages',
EntityCollection(self.context, UserFlowLanguagePage,
ResourcePath("overridesPages", self.resource_path)))
def get_property(self, name, default_value=None):
if default_value is None:
property_mapping = {
"defaultPages": self.default_pages,
"overridesPages": self.overrides_pages
}
default_value = property_mapping.get(name, None)
return super(UserFlowLanguageConfiguration, self).get_property(name, default_value)
| true | true |
f71a821c13b082b4b5b3c1c3dd849583b82123e0 | 4,830 | py | Python | detect_secrets/core/baseline.py | paulo-sampaio/detect-secrets | 73ffbc35a72cb316d9e1842cc131b6098cf3c36a | [
"Apache-2.0"
] | 2,212 | 2018-04-03T20:58:42.000Z | 2022-03-31T17:58:38.000Z | detect_secrets/core/baseline.py | paulo-sampaio/detect-secrets | 73ffbc35a72cb316d9e1842cc131b6098cf3c36a | [
"Apache-2.0"
] | 354 | 2018-04-03T16:29:55.000Z | 2022-03-31T18:26:26.000Z | detect_secrets/core/baseline.py | paulo-sampaio/detect-secrets | 73ffbc35a72cb316d9e1842cc131b6098cf3c36a | [
"Apache-2.0"
] | 298 | 2018-04-02T19:35:15.000Z | 2022-03-28T04:52:14.000Z | import json
import time
from typing import Any
from typing import Callable
from typing import cast
from typing import Dict
from typing import List
from typing import Optional
from typing import Union
from . import upgrades
from ..__version__ import VERSION
from ..exceptions import UnableToReadBaselineError
from ..settings import configure_settings_from_baseline
from ..settings import get_settings
from ..util.importlib import import_modules_from_package
from ..util.semver import Version
from .scan import get_files_to_scan
from .secrets_collection import SecretsCollection
def create(
    *paths: str,
    should_scan_all_files: bool = False,
    root: str = '',
    num_processors: Optional[int] = None,
) -> SecretsCollection:
    """Scan all files recursively under *paths* to initialize a baseline.

    :param paths: roots to scan
    :param should_scan_all_files: scan untracked files too
    :param root: repository root the collection is relative to
    :param num_processors: worker count; falls back to the scanner default
        when not supplied
    """
    scan_kwargs = {}
    if num_processors:
        scan_kwargs['num_processors'] = num_processors

    files = get_files_to_scan(
        *paths,
        should_scan_all_files=should_scan_all_files,
        root=root,
    )

    collection = SecretsCollection(root=root)
    collection.scan_files(*files, **scan_kwargs)
    return collection
def load(baseline: Dict[str, Any], filename: str = '') -> SecretsCollection:
    """
    With a given baseline file, load all settings and discovered secrets from it.

    :param baseline: parsed baseline payload (see :func:`load_from_file`)
    :param filename: path the baseline came from, recorded in settings
    :raises: KeyError
    """
    # This is required for backwards compatibility, and supporting upgrades from older versions.
    baseline = upgrade(baseline)

    configure_settings_from_baseline(baseline, filename=filename)
    return SecretsCollection.load_from_baseline(baseline)
def load_from_file(filename: str) -> Dict[str, Any]:
    """Read *filename* and parse it as a JSON baseline payload.

    :raises: UnableToReadBaselineError
    :raises: InvalidBaselineError
    """
    try:
        with open(filename) as handle:
            raw = handle.read()
        payload = json.loads(raw)
    except (FileNotFoundError, IOError, json.decoder.JSONDecodeError) as e:
        raise UnableToReadBaselineError from e

    return cast(Dict[str, Any], payload)
def format_for_output(secrets: SecretsCollection, is_slim_mode: bool = False) -> Dict[str, Any]:
    """Serialize a collection (plus current settings) into baseline form.

    :param is_slim_mode: when True, omit the timestamp and per-secret line
        numbers to minimize spurious diffs.
    """
    output: Dict[str, Any] = {'version': VERSION}
    # Populates the configured filters and plugins.
    output.update(get_settings().json())
    output['results'] = secrets.json()

    if is_slim_mode:
        # Dropping line numbers has the nice side effect that entries stay
        # ordered by line number even though the number itself is gone.
        results = cast(Dict[str, List[Dict[str, Any]]], output['results'])
        for secret_list in results.values():
            for secret_dict in secret_list:
                secret_dict.pop('line_number')
    else:
        output['generated_at'] = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())

    return output
def save_to_file(
    secrets: Union[SecretsCollection, Dict[str, Any]],
    filename: str,
) -> None:  # pragma: no cover
    """Write a baseline to *filename*.

    :param secrets: pass a SecretsCollection to emit the latest baseline
        format; pass a dictionary to control the exact format written.
        Use the dictionary form only for shape changes that do not affect
        scan results.
    """
    # TODO: consider registering detect_secrets.filters.common.is_baseline_file
    # here, since the filename is already known -- left out so this function
    # keeps doing exactly one thing.
    if isinstance(secrets, SecretsCollection):
        payload: Union[SecretsCollection, Dict[str, Any]] = format_for_output(secrets)
    else:
        payload = secrets

    with open(filename, 'w') as f:
        f.write(json.dumps(payload, indent=2) + '\n')
def upgrade(baseline: Dict[str, Any]) -> Dict[str, Any]:
    """
    Baselines will eventually require format changes. This function is responsible for upgrading
    an older baseline to the latest version.

    Returns the input unchanged when it is already at (or beyond) the
    current version; otherwise returns an upgraded shallow copy.
    """
    baseline_version = Version(baseline['version'])
    if baseline_version >= Version(VERSION):
        # Already current (or newer): nothing to upgrade.
        return baseline

    # NOTE(review): the lambda is True for modules that are NOT relevant
    # upgrades -- presumably `filter` here *excludes* modules for which it
    # returns True; confirm against import_modules_from_package's contract.
    modules = import_modules_from_package(
        upgrades,
        filter=lambda x: not _is_relevant_upgrade_module(baseline_version)(x),
    )

    # Shallow copy: individual upgrade steps mutate the dict in place.
    new_baseline = {**baseline}
    for module in modules:
        module.upgrade(new_baseline)    # type: ignore

    new_baseline['version'] = VERSION
    return new_baseline
def _is_relevant_upgrade_module(current_version: Version) -> Callable:
    """Build a predicate matching upgrade modules newer than *current_version*."""
    def wrapped(module_path: str) -> bool:
        # Upgrade modules are named like ``...upgrades.v1_0``; turn the
        # trailing ``v1_0`` token into the dotted version ``1.0``.
        version_token = module_path.rsplit('.', 1)[-1]
        dotted = version_token.lstrip('v').replace('_', '.')

        # Patch releases never require baseline bumps, so compare at X.Y.0.
        return current_version < Version(dotted + '.0')

    return wrapped
| 32.635135 | 97 | 0.69089 | import json
import time
from typing import Any
from typing import Callable
from typing import cast
from typing import Dict
from typing import List
from typing import Optional
from typing import Union
from . import upgrades
from ..__version__ import VERSION
from ..exceptions import UnableToReadBaselineError
from ..settings import configure_settings_from_baseline
from ..settings import get_settings
from ..util.importlib import import_modules_from_package
from ..util.semver import Version
from .scan import get_files_to_scan
from .secrets_collection import SecretsCollection
def create(
*paths: str,
should_scan_all_files: bool = False,
root: str = '',
num_processors: Optional[int] = None,
) -> SecretsCollection:
kwargs = {}
if num_processors:
kwargs['num_processors'] = num_processors
secrets = SecretsCollection(root=root)
secrets.scan_files(
*get_files_to_scan(*paths, should_scan_all_files=should_scan_all_files, root=root),
**kwargs,
)
return secrets
def load(baseline: Dict[str, Any], filename: str = '') -> SecretsCollection:
baseline = upgrade(baseline)
configure_settings_from_baseline(baseline, filename=filename)
return SecretsCollection.load_from_baseline(baseline)
def load_from_file(filename: str) -> Dict[str, Any]:
try:
with open(filename) as f:
return cast(Dict[str, Any], json.loads(f.read()))
except (FileNotFoundError, IOError, json.decoder.JSONDecodeError) as e:
raise UnableToReadBaselineError from e
def format_for_output(secrets: SecretsCollection, is_slim_mode: bool = False) -> Dict[str, Any]:
output = {
'version': VERSION,
**get_settings().json(),
'results': secrets.json(),
}
if not is_slim_mode:
output['generated_at'] = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())
else:
for filename, secret_list in cast(
Dict[str, List[Dict[str, Any]]],
output['results'],
).items():
for secret_dict in secret_list:
secret_dict.pop('line_number')
return output
def save_to_file(
secrets: Union[SecretsCollection, Dict[str, Any]],
filename: str,
) -> None: # pragma: no cover
# TODO: I wonder whether this should add the `detect_secrets.filters.common.is_baseline_file`
# filter, since we know the filename already. However, one could argue that it would cause
# this function to "do more than one thing".
output = secrets
if isinstance(secrets, SecretsCollection):
output = format_for_output(secrets)
with open(filename, 'w') as f:
f.write(json.dumps(output, indent=2) + '\n')
def upgrade(baseline: Dict[str, Any]) -> Dict[str, Any]:
baseline_version = Version(baseline['version'])
if baseline_version >= Version(VERSION):
return baseline
modules = import_modules_from_package(
upgrades,
filter=lambda x: not _is_relevant_upgrade_module(baseline_version)(x),
)
new_baseline = {**baseline}
for module in modules:
module.upgrade(new_baseline) # type: ignore
new_baseline['version'] = VERSION
return new_baseline
def _is_relevant_upgrade_module(current_version: Version) -> Callable:
def wrapped(module_path: str) -> bool:
# This converts `v1_0` to `1.0`
affected_version_string = module_path.rsplit('.', 1)[-1].lstrip('v').replace('_', '.')
# Patch version doesn't matter, because patches should not require baseline bumps.
affected_version = Version(f'{affected_version_string}.0')
return current_version < affected_version
return wrapped
| true | true |
f71a825a050d1c91553d9c34bfd983037a77f740 | 1,367 | py | Python | app/core/tests/test_admin.py | tarcisioLima/recipe-app-api | 62e5dfa49c0b4bd400454e78d0c0bea673f32f58 | [
"MIT"
] | null | null | null | app/core/tests/test_admin.py | tarcisioLima/recipe-app-api | 62e5dfa49c0b4bd400454e78d0c0bea673f32f58 | [
"MIT"
] | null | null | null | app/core/tests/test_admin.py | tarcisioLima/recipe-app-api | 62e5dfa49c0b4bd400454e78d0c0bea673f32f58 | [
"MIT"
] | null | null | null | from django.test import TestCase, Client
from django.contrib.auth import get_user_model
from django.urls import reverse
class AdminSiteTests(TestCase):
    """Tests for the Django admin pages of the custom user model."""
    def setUp(self):
        """Create a logged-in superuser plus a regular user to inspect.

        Runs before each test method.
        """
        self.client = Client()
        self.admin_user = get_user_model().objects.create_superuser(
            email='test2@test.com',
            password='password123'
        )
        # Authenticate every subsequent client request as the superuser.
        self.client.force_login(self.admin_user)
        self.user = get_user_model().objects.create_user(
            email='test3@test.com',
            password='password123',
            name='Test user full name'
        )
    def test_users_listed(self):
        """Test that users are listed on user page"""
        url = reverse('admin:core_user_changelist')
        res = self.client.get(url)
        # assertContains also verifies the response status is 200.
        self.assertContains(res, self.user.name)
        self.assertContains(res, self.user.email)
    def test_user_change_page(self):
        """Test that the user edit page works"""
        url = reverse('admin:core_user_change', args=[self.user.id])
        res = self.client.get(url)
        self.assertEqual(res.status_code, 200)
    def test_create_user_page(self):
        """Test that the create user page works"""
        url = reverse('admin:core_user_add')
        res = self.client.get(url)
        self.assertEqual(res.status_code, 200)
| 31.790698 | 68 | 0.640088 | from django.test import TestCase, Client
from django.contrib.auth import get_user_model
from django.urls import reverse
class AdminSiteTests(TestCase):
def setUp(self):
self.client = Client()
self.admin_user = get_user_model().objects.create_superuser(
email='test2@test.com',
password='password123'
)
self.client.force_login(self.admin_user)
self.user = get_user_model().objects.create_user(
email='test3@test.com',
password='password123',
name='Test user full name'
)
def test_users_listed(self):
url = reverse('admin:core_user_changelist')
res = self.client.get(url)
self.assertContains(res, self.user.name)
self.assertContains(res, self.user.email)
def test_user_change_page(self):
url = reverse('admin:core_user_change', args=[self.user.id])
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
def test_create_user_page(self):
url = reverse('admin:core_user_add')
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
| true | true |
f71a82a3be56b6be5477fc4c0651779ed08eab77 | 3,249 | py | Python | src/state.py | Amayas29/pyautomate | ea966348ea9d7ec153274bd7e2266bd50131cd3d | [
"MIT"
] | 1 | 2021-03-24T21:12:56.000Z | 2021-03-24T21:12:56.000Z | src/state.py | Amayas29/pyautomate | ea966348ea9d7ec153274bd7e2266bd50131cd3d | [
"MIT"
] | null | null | null | src/state.py | Amayas29/pyautomate | ea966348ea9d7ec153274bd7e2266bd50131cd3d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
class State :
"""
Classe définissant un état caractérisée par :
- un identifiant
- un booleen pour savoir si c'est un état initial
- un booleen pour savoir si c'est un état final
- un label utilisé pour les constructions
ou il faut memoriser d'ou vient l'etat construit
"""
def __init__ (self, id, init, fin, label=None) :
""" int x bool x bool x str -> State
constructeur d'état
"""
self.id = id
self.init = init
self.fin = fin
if label == None :
self.label = str(self.id)
else :
self.label =label
def __repr__(self) :
""" -> str
renvoie une description de l'état sous la forme d'une chaîne
de caractères contenant son label puis (init) si c'est un état
initial puis (fin) si c'est un état final
elle permet d'utiliser print pour les états
"""
# ret : str
ret = str(self.label)
if self.init :
ret = ret + "(init)"
if self.fin :
ret = ret+ "(fin)"
return ret
def insertPrefix(self, prefid, preflab=None):
""" int x str ->
met à jour l'identifiant et le label de l'état en les
préfixant avec la chaîne de caractères pref
"""
if self.id < 0 :
tempid = - self.id
else :
tempid = self.id
tempid2 = 10**len(str(tempid))*prefid + tempid
if self.id < 0 :
self.id = - tempid2
else :
self.id = tempid2
if preflab == None :
self.label = str(prefid) + "_" + str(self.label)
else :
self.label = str(preflab) + "_" + str(self.label)
def __eq__(self, other) :
""" Val -> bool
rend le booléen vrai si l'état est égal à other, faux sinon
elle permet que == fonctionne pour les états
"""
return type(self) == type(other) and self.id == other.id
def __ne__(self, other) :
""" Val -> bool
rend le booléen vrai si l'état est différent de other, faux sinon
elle permet que != fonctionne pour les états
"""
return not self.__eq__(other)
def __hash__(self) :
""" -> int
rend un identifiant unique (utile pour les tables de hachage)
elle permet que les états puissent appartenir à des ensembles
"""
if type(self.id)== int :
return self.id
# s : str
s=str(self.id)
# res : str
res=''.join(str(ord(c)) for c in s)
return int(res)
@staticmethod
def isInitialIn(list) :
""" list[State] -> bool
rend vrai si l'un des états de list est un état initial, faux sinon
"""
# s : State
for s in list :
if s.init :
return True
return False
@staticmethod
def isFinalIn(list) :
""" list[State] -> bool
rend vrai si l'un des états de list est un état final, faux sinon
"""
for s in list :
if s.fin :
return True
return False
| 29.536364 | 75 | 0.515851 |
class State :
def __init__ (self, id, init, fin, label=None) :
self.id = id
self.init = init
self.fin = fin
if label == None :
self.label = str(self.id)
else :
self.label =label
def __repr__(self) :
ret = str(self.label)
if self.init :
ret = ret + "(init)"
if self.fin :
ret = ret+ "(fin)"
return ret
def insertPrefix(self, prefid, preflab=None):
if self.id < 0 :
tempid = - self.id
else :
tempid = self.id
tempid2 = 10**len(str(tempid))*prefid + tempid
if self.id < 0 :
self.id = - tempid2
else :
self.id = tempid2
if preflab == None :
self.label = str(prefid) + "_" + str(self.label)
else :
self.label = str(preflab) + "_" + str(self.label)
def __eq__(self, other) :
return type(self) == type(other) and self.id == other.id
def __ne__(self, other) :
return not self.__eq__(other)
def __hash__(self) :
if type(self.id)== int :
return self.id
s=str(self.id)
res=''.join(str(ord(c)) for c in s)
return int(res)
@staticmethod
def isInitialIn(list) :
for s in list :
if s.init :
return True
return False
@staticmethod
def isFinalIn(list) :
for s in list :
if s.fin :
return True
return False
| true | true |
f71a82c5e1e63b262c2a25dc6c75f427f5c4eea8 | 8,829 | py | Python | deep-clustering-conv-autoencoder/main.py | positivevaib/semi-supervised-imagenet-classification | 4fb6427f5a72951c1b866a1ddbc2599811bb5770 | [
"MIT"
] | null | null | null | deep-clustering-conv-autoencoder/main.py | positivevaib/semi-supervised-imagenet-classification | 4fb6427f5a72951c1b866a1ddbc2599811bb5770 | [
"MIT"
] | null | null | null | deep-clustering-conv-autoencoder/main.py | positivevaib/semi-supervised-imagenet-classification | 4fb6427f5a72951c1b866a1ddbc2599811bb5770 | [
"MIT"
] | null | null | null | # import
import numpy as np
import sklearn as skl
import sklearn.cluster as cluster
import sklearn.metrics as metrics
import torch
import torch.distributions.kl as kl
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.utils.data as data
import torchvision
import torchvision.datasets as datasets
import torchvision.transforms as transforms
import tqdm
# model
class CAE_ENC(nn.Module):
    """Convolutional encoder: (N, 3, 96, 96) images -> (N, 1000) codes.

    Four stride-2 convolutions halve the spatial size at each stage
    (96 -> 48 -> 24 -> 12 -> 6, given the kernel/padding choices below),
    then a linear layer projects the flattened 256x6x6 map to 1000-d.
    """

    def __init__(self):
        super().__init__()
        # Attribute names (conv1..conv4, fc1) are kept stable so that any
        # saved state_dict remains loadable.
        self.conv1 = nn.Conv2d(3, 32, kernel_size=5, padding=2, stride=2)
        self.conv2 = nn.Conv2d(32, 64, kernel_size=3, padding=1, stride=2)
        self.conv3 = nn.Conv2d(64, 128, kernel_size=3, padding=1, stride=2)
        self.conv4 = nn.Conv2d(128, 256, kernel_size=3, padding=1, stride=2)
        self.fc1 = nn.Linear(256 * 6 * 6, 1000)

    def forward(self, x):
        # ReLU after every conv stage, then flatten and project.
        for stage in (self.conv1, self.conv2, self.conv3, self.conv4):
            x = F.relu(stage(x))
        return self.fc1(x.view(-1, 256 * 6 * 6))
class CAE_DEC(nn.Module):
    """Deconvolutional decoder: (N, 1000) codes -> (N, 3, 96, 96) images.

    Mirrors CAE_ENC: a linear layer expands the code back to a 256x6x6
    feature map, then four stride-2 transposed convolutions double the
    spatial size at each stage (6 -> 12 -> 24 -> 48 -> 96).  The output
    is squashed to [0, 1] by a sigmoid.
    """

    def __init__(self):
        super().__init__()
        self.fc2 = nn.Linear(1000, 256 * 6 * 6)
        self.deconv1 = nn.ConvTranspose2d(256, 128, 2, stride=2)
        self.deconv2 = nn.ConvTranspose2d(128, 64, 2, stride=2)
        self.deconv3 = nn.ConvTranspose2d(64, 32, 2, stride=2)
        self.deconv4 = nn.ConvTranspose2d(32, 3, 2, stride=2)
        # 1x1 output convolution ahead of the sigmoid; the original author
        # marked it "might have to remove", but dropping it would break
        # saved checkpoints, so it stays.
        self.conv5 = nn.Conv2d(3, 3, kernel_size=1)

    def forward(self, x):
        x = F.relu(self.fc2(x))
        # BUG FIX: the batch dimension was hard-coded as view(128, ...),
        # which crashed for any batch of a different size (e.g. the final
        # partial batch of an epoch).  Infer it from the input instead.
        x = x.view(-1, 256, 6, 6)
        x = F.relu(self.deconv1(x))
        x = F.relu(self.deconv2(x))
        x = F.relu(self.deconv3(x))
        x = F.relu(self.deconv4(x))
        x = torch.sigmoid(self.conv5(x))
        return x
class ClusteringLayer(nn.Module):
    """Soft cluster assignment layer (DEC, Xie et al. 2016).

    Maps an embedding x_i to a soft assignment q_i over cluster centroids
    mu_j with a Student's t kernel:

        q_ij  propto  (1 + ||x_i - mu_j||^2 / alpha) ^ -((alpha + 1) / 2)

    normalised so that each row of q sums to 1.

    Args:
        weights: optional (n_clusters, embed_dim) centroid tensor; when
            omitted a 1000x1000 Xavier-initialised tensor is created.
        alpha: degrees of freedom of the Student's t kernel.
    """

    def __init__(self, weights=None, alpha=1.0):
        super().__init__()
        # BUG FIX: ``if weights:`` raises "Boolean value of Tensor with
        # more than one element is ambiguous" for any real centroid
        # tensor; compare against None instead.
        if weights is not None:
            self.weights = weights
        else:
            self.weights = torch.empty(1000, 1000)
            nn.init.xavier_uniform_(self.weights)
        self.alpha = alpha

    def forward(self, x):
        # Squared distance of every sample to every centroid: (batch, k).
        q = 1.0 / (1.0 + (torch.sum(
            (x.unsqueeze(1) - self.weights) ** 2, dim=2) / self.alpha))
        q **= (self.alpha + 1.0) / 2.0
        # BUG FIX: q is 2-D, so the old transpose(q, 1, 2) raised a
        # dimension error; row-normalise directly instead.
        q = q / torch.sum(q, dim=1, keepdim=True)
        return q
def set_weights(module, weights):
    """Install ``weights`` as the centroids of ``module`` when it is a
    ClusteringLayer; leave every other module untouched (visitor-style
    signature, suitable e.g. for ``nn.Module.apply``)."""
    if not isinstance(module, ClusteringLayer):
        return
    module.weights = weights
class CAE(nn.Module):
    """Convolutional autoencoder with a clustering head.

    ``forward(x)`` returns the triple ``(h, q, o)``: the encoder
    embedding, the soft cluster assignment, and the reconstruction.
    """

    def __init__(self):
        super().__init__()
        self.enc = CAE_ENC()
        self.dec = CAE_DEC()
        self.clus = ClusteringLayer()

    def forward(self, x):
        embedding = self.enc(x)
        return (embedding, self.clus(embedding), self.dec(embedding))
def loss(q, p, o, gamma=0.1, target=None):
    """Joint DCEC objective: reconstruction MSE plus a gamma-weighted
    clustering term KL(p || q).

    Args:
        q: soft cluster assignments, shape (batch, n_clusters).
        p: auxiliary target distribution, same shape as q.
        o: reconstructed output of the autoencoder.
        gamma: weight of the clustering term.
        target: tensor the reconstruction is compared against (the input
            images).  Backward-compatible addition: the original
            three-argument form had nothing to compare ``o`` with and
            crashed (it called ``nn.MSELoss(o)``, i.e. passed the output
            as a constructor argument); with ``target`` omitted the
            reconstruction term is 0.

    Returns:
        Scalar loss tensor.
    """
    # BUG FIX: use the functional forms.  ``nn.MSELoss(o)`` built a
    # criterion module instead of computing a loss, and
    # ``kl.kl_divergence`` expects Distribution objects, not tensors.
    if target is None:
        mse = o.new_zeros(())
    else:
        mse = F.mse_loss(o, target)
    kld = gamma * F.kl_div(q.log(), p, reduction="batchmean")
    return mse + kld
def target_distribution(q):
    """Auxiliary target distribution of DEC (Xie et al. 2016, eq. 3):

        p_ij = (q_ij^2 / f_j) / sum_j' (q_ij'^2 / f_j'),   f_j = sum_i q_ij

    i.e. sharpen the current soft assignments while normalising by the
    soft cluster frequencies; every row of the result sums to 1.
    """
    # BUG FIX: ``torch.transpose`` was called without its two mandatory
    # dim arguments (TypeError), and the numerator transposed ``q``
    # instead of ``weight``.  Row-normalise ``weight`` directly.
    weight = q ** 2 / torch.sum(q, dim=0)
    return weight / torch.sum(weight, dim=1, keepdim=True)
# data
# Pipeline: ImageNet-style normalisation over the supervised train split
# plus the unlabelled split, concatenated and randomly split 90/10 into
# train/val loaders (batch size 128).
transformations = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225),
                         inplace=True)
])
dataset1 = datasets.ImageFolder('/beegfs/vag273/ssl_data_96/supervised/train/',
                                transform=transformations)
dataset2 = datasets.ImageFolder('/beegfs/vag273/ssl_data_96/unsupervised/',
                                transform=transformations)
dataset = data.ConcatDataset((dataset1, dataset2))
train_ratio = 0.9
train_set_size = int(train_ratio * len(dataset))
val_set_size = len(dataset) - train_set_size
train_data, val_data = data.random_split(dataset,
                                         (train_set_size, val_set_size))
# NOTE(review): the loop variable ``data`` in the training loops below
# shadows the ``torch.utils.data`` module imported as ``data`` here.
train_loader = data.DataLoader(train_data, batch_size=128, shuffle=True)
val_loader = data.DataLoader(val_data, batch_size=128, shuffle=False)
# training
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
model = CAE().to(device)
# criterion = nn.MSELoss()
optimizer = optim.Adam(model.parameters())
# pretrain
# NOTE(review): best_val_loss is never updated after this, so the
# "best checkpoint" condition below is always True.
best_val_loss = float('inf')
tot_epochs = 200  # maybe lower it on one of the runs
print('pretrain')
for epoch in range(tot_epochs):
    model.train()
    print('epoch {} of {}'.format(epoch + 1, tot_epochs))
    desc = "ITERATION - loss: {:.2f}"
    pbar = tqdm.tqdm(desc=desc.format(0),
                     total=len(train_loader),
                     leave=False,
                     file=None,
                     initial=0)
    running_loss = 0
    for batch_idx, data in enumerate(train_loader):
        img, _ = data
        img = img.to(device)
        optimizer.zero_grad()
        _, _, out = model(img)
        # NOTE(review): nn.MSELoss(out, img) constructs a criterion module
        # with bogus arguments instead of computing a loss; this should be
        # F.mse_loss(out, img) (or criterion = nn.MSELoss() once, above).
        loss = nn.MSELoss(out, img)
        running_loss += loss.item()
        loss.backward()
        optimizer.step()
        pbar.desc = desc.format(loss.item())
        pbar.update()
    print('loss: {}'.format(running_loss / len(train_loader)))
    model.eval()
    with torch.no_grad():
        val_running_loss = 0
        for val_batch_idx, val_data in enumerate(val_loader):
            val_img, _ = val_data
            val_img = val_img.to(device)
            _, _, val_out = model(val_img)
            val_loss = nn.MSELoss(val_out, val_img)
            val_running_loss += val_loss.item()
            # NOTE(review): checkpointing inside the per-batch loop (and
            # against a partial running mean) saves every batch; intended
            # placement is after the loop, with best_val_loss updated.
            if val_running_loss / len(val_loader) < best_val_loss:
                torch.save(model.state_dict(), 'weights.pth')
    print('val loss: {}'.format(val_running_loss / len(val_loader)))
    pbar.close()
# first cluster
features = None
for batch_idx, data in enumerate(train_loader):
    img, _ = data
    img = img.to(device)
    # NOTE(review): ``if not features`` raises on a multi-element tensor;
    # model(img) returns the (h, q, o) tuple, not the embedding alone;
    # and the torch.cat result is discarded (cat is not in-place).
    if not features:
        features = model(img)
    else:
        torch.cat((features, model(img)), 0)
# NOTE(review): sklearn's class is ``cluster.KMeans`` — ``kMeans`` raises
# AttributeError; also ``features.view(-1)`` flattens away the sample
# dimension, so fit_predict would see a single 1-D vector.
kmeans = cluster.kMeans(n_clusters=1000, n_init=20)
features = features.view(-1)
pred_last = kmeans.fit_predict(features)
q = kmeans.cluster_centers_
# deep cluster
print('deep cklustering')
update_interval = 140  # maybe reduce this for sake of time
maxiter = 20000  # maybe reduce this for sake of time
for ite in range(int(maxiter)):
    model.train()
    if ite % update_interval == 0:
        q = None
        # NOTE(review): ``features`` here still refers to the flattened
        # k-means input above, so this condition does not do what the
        # per-batch accumulation of q seems to intend; torch.cat's result
        # is again discarded.
        for batch_idx, data in enumerate(train_loader):
            img, _ = data
            img = img.to(device)
            if not features:
                _, q, _ = model(img)
            else:
                _, new_q, _ = model(img)
                torch.cat((q, new_q), 0)
        p = target_distribution(
            q)  # update the auxiliary target distribution p
        # evaluate the clustering performance
        pred = q.argmax(1)
        # check stop criterion
        delta_label = np.sum(pred != pred_last).astype(
            np.float32) / pred.shape[0]
        pred_last = np.copy(pred)
        if ite > 0 and delta_label < 0.001:  # 0.001 is the tolerance
            print('delta_label ', delta_label, '< tol ', 0.001)  # tol
            print('Reached tolerance threshold. Stopping training.')
            break
    # NOTE(review): ``epoch`` is stale here — it is the last value left
    # over from the pretraining loop above.
    print('epoch {} of {}'.format(epoch + 1, tot_epochs))
    desc = "ITERATION - loss: {:.2f}"
    pbar = tqdm.tqdm(desc=desc.format(0),
                     total=len(train_loader),
                     leave=False,
                     file=None,
                     initial=0)
    running_loss = 0
    for batch_idx, data in enumerate(train_loader):
        img, _ = data
        img = img.to(device)
        optimizer.zero_grad()
        _, q, out = model(img)
        # NOTE(review): ``loss = loss(...)`` rebinds the module-level
        # ``loss`` function to a tensor, so the second iteration raises
        # "Tensor object is not callable" — use a different name.
        loss = loss(q,
                    p[batch_idx * 128:batch_idx * 128 + 128, :],
                    out,
                    gamma=0.1)
        running_loss += loss.item()
        loss.backward()
        optimizer.step()
        pbar.desc = desc.format(loss.item())
        pbar.update()
    print('loss: {}'.format(running_loss / len(train_loader)))
    model.eval()
    with torch.no_grad():
        val_running_loss = 0
        for val_batch_idx, val_data in enumerate(val_loader):
            val_img, _ = val_data
            val_img = val_img.to(device)
            _, val_q, val_out = model(val_img)
            # NOTE(review): same rebinding problem as above.
            val_loss = loss(val_q,
                            p[val_batch_idx * 128:val_batch_idx * 128 +
                              128, :],
                            val_out,
                            gamma=0.1)
            val_running_loss += val_loss.item()
            if val_running_loss / len(val_loader) < best_val_loss:
                torch.save(model.state_dict(), 'overall_weights.pth')
    print('val loss: {}'.format(val_running_loss / len(val_loader)))
    pbar.close()
| 29.727273 | 79 | 0.581493 |
import numpy as np
import sklearn as skl
import sklearn.cluster as cluster
import sklearn.metrics as metrics
import torch
import torch.distributions.kl as kl
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.utils.data as data
import torchvision
import torchvision.datasets as datasets
import torchvision.transforms as transforms
import tqdm
class CAE_ENC(nn.Module):
    """Convolutional encoder: maps a (N, 3, 96, 96) batch to (N, 1000)."""
    def __init__(self):
        super().__init__()
        # Four stride-2 convolutions halve the spatial size at each stage
        # (96 -> 48 -> 24 -> 12 -> 6); fc1 projects the flattened 256x6x6
        # feature map to a 1000-d code.
        self.conv1 = nn.Conv2d(3, 32, kernel_size=5, padding=2, stride=2)
        self.conv2 = nn.Conv2d(32, 64, kernel_size=3, padding=1, stride=2)
        self.conv3 = nn.Conv2d(64, 128, kernel_size=3, padding=1, stride=2)
        self.conv4 = nn.Conv2d(128, 256, kernel_size=3, padding=1, stride=2)
        self.fc1 = nn.Linear(256 * 6 * 6, 1000)
    def forward(self, x):
        # ReLU after every conv stage, then flatten and project.
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv2(x))
        x = F.relu(self.conv3(x))
        x = F.relu(self.conv4(x))
        x = x.view(-1, 256 * 6 * 6)
        x = self.fc1(x)
        return x
class CAE_DEC(nn.Module):
    """Deconvolutional decoder: (N, 1000) codes -> (N, 3, 96, 96) images.

    A linear layer expands the code back to a 256x6x6 map, four stride-2
    transposed convolutions double the spatial size at each stage
    (6 -> 12 -> 24 -> 48 -> 96), and a sigmoid squashes to [0, 1].
    """
    def __init__(self):
        super().__init__()
        self.fc2 = nn.Linear(1000, 256 * 6 * 6)
        self.deconv1 = nn.ConvTranspose2d(256, 128, 2, stride=2)
        self.deconv2 = nn.ConvTranspose2d(128, 64, 2, stride=2)
        self.deconv3 = nn.ConvTranspose2d(64, 32, 2, stride=2)
        self.deconv4 = nn.ConvTranspose2d(32, 3, 2, stride=2)
        # 1x1 output convolution ahead of the sigmoid; kept so that saved
        # state_dicts remain loadable.
        self.conv5 = nn.Conv2d(3, 3, kernel_size=1)
    def forward(self, x):
        x = F.relu(self.fc2(x))
        # BUG FIX: the batch dimension was hard-coded as view(128, ...),
        # crashing for any batch size other than 128; infer it instead.
        x = x.view(-1, 256, 6, 6)
        x = F.relu(self.deconv1(x))
        x = F.relu(self.deconv2(x))
        x = F.relu(self.deconv3(x))
        x = F.relu(self.deconv4(x))
        x = torch.sigmoid(self.conv5(x))
        return x
class ClusteringLayer(nn.Module):
    """Soft cluster assignment layer (DEC): Student's t kernel between
    embeddings and centroids, row-normalised so each row of q sums to 1.
    """
    def __init__(self, weights=None, alpha=1.0):
        super().__init__()
        # BUG FIX: ``if weights:`` raises "Boolean value of Tensor with
        # more than one element is ambiguous"; compare against None.
        if weights is not None:
            self.weights = weights
        else:
            self.weights = torch.empty(1000, 1000)
            nn.init.xavier_uniform_(self.weights)
        self.alpha = alpha
    def forward(self, x):
        # Squared distance of every sample to every centroid: (batch, k).
        q = 1.0 / (1.0 + (torch.sum(
            (x.unsqueeze(1) - self.weights)**2, dim=2) / self.alpha))
        q **= (self.alpha + 1.0) / 2.0
        # BUG FIX: q is 2-D, so transpose(q, 1, 2) raised a dimension
        # error; row-normalise directly.
        q = q / torch.sum(q, dim=1, keepdim=True)
        return q
def set_weights(module, weights):
    """Replace the centroid tensor of ``module`` when it is a
    ClusteringLayer; other module types are left untouched."""
    if isinstance(module, ClusteringLayer):
        module.weights = weights
class CAE(nn.Module):
    """Convolutional autoencoder plus clustering head; forward returns
    (h, q, o) = (embedding, soft assignment, reconstruction)."""
    def __init__(self):
        super().__init__()
        self.enc = CAE_ENC()
        self.dec = CAE_DEC()
        self.clus = ClusteringLayer()
    def forward(self, x):
        h = self.enc(x)
        q = self.clus(h)
        o = self.dec(h)
        return (h, q, o)
def loss(q, p, o, gamma=0.1, target=None):
    """DCEC objective: reconstruction MSE + gamma * KL(p || q).

    ``target`` (the input images) is a backward-compatible addition: the
    original three-argument form had nothing to compare ``o`` with and
    crashed by calling ``nn.MSELoss(o)``; with ``target`` omitted the
    reconstruction term is 0.
    """
    # BUG FIX: use the functional forms — nn.MSELoss(o) built a module
    # instead of a loss, and kl.kl_divergence expects Distribution
    # objects, not tensors.
    if target is None:
        mse = o.new_zeros(())
    else:
        mse = F.mse_loss(o, target)
    kld = gamma * F.kl_div(q.log(), p, reduction="batchmean")
    l = mse + kld
    return l
def target_distribution(q):
    """DEC auxiliary target: p_ij = (q_ij^2 / f_j) / sum_j'(q_ij'^2 / f_j')
    with soft frequencies f_j = sum_i q_ij; rows of p sum to 1.
    """
    # BUG FIX: torch.transpose was called without its mandatory dim
    # arguments, and the numerator transposed q instead of weight.
    weight = q**2 / torch.sum(q, dim=0)
    return weight / torch.sum(weight, dim=1, keepdim=True)
transformations = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225),
inplace=True)
])
dataset1 = datasets.ImageFolder('/beegfs/vag273/ssl_data_96/supervised/train/',
transform=transformations)
dataset2 = datasets.ImageFolder('/beegfs/vag273/ssl_data_96/unsupervised/',
transform=transformations)
dataset = data.ConcatDataset((dataset1, dataset2))
train_ratio = 0.9
train_set_size = int(train_ratio * len(dataset))
val_set_size = len(dataset) - train_set_size
train_data, val_data = data.random_split(dataset,
(train_set_size, val_set_size))
train_loader = data.DataLoader(train_data, batch_size=128, shuffle=True)
val_loader = data.DataLoader(val_data, batch_size=128, shuffle=False)
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
model = CAE().to(device)
optimizer = optim.Adam(model.parameters())
best_val_loss = float('inf')
tot_epochs = 200
print('pretrain')
for epoch in range(tot_epochs):
model.train()
print('epoch {} of {}'.format(epoch + 1, tot_epochs))
desc = "ITERATION - loss: {:.2f}"
pbar = tqdm.tqdm(desc=desc.format(0),
total=len(train_loader),
leave=False,
file=None,
initial=0)
running_loss = 0
for batch_idx, data in enumerate(train_loader):
img, _ = data
img = img.to(device)
optimizer.zero_grad()
_, _, out = model(img)
loss = nn.MSELoss(out, img)
running_loss += loss.item()
loss.backward()
optimizer.step()
pbar.desc = desc.format(loss.item())
pbar.update()
print('loss: {}'.format(running_loss / len(train_loader)))
model.eval()
with torch.no_grad():
val_running_loss = 0
for val_batch_idx, val_data in enumerate(val_loader):
val_img, _ = val_data
val_img = val_img.to(device)
_, _, val_out = model(val_img)
val_loss = nn.MSELoss(val_out, val_img)
val_running_loss += val_loss.item()
if val_running_loss / len(val_loader) < best_val_loss:
torch.save(model.state_dict(), 'weights.pth')
print('val loss: {}'.format(val_running_loss / len(val_loader)))
pbar.close()
features = None
for batch_idx, data in enumerate(train_loader):
img, _ = data
img = img.to(device)
if not features:
features = model(img)
else:
torch.cat((features, model(img)), 0)
kmeans = cluster.kMeans(n_clusters=1000, n_init=20)
features = features.view(-1)
pred_last = kmeans.fit_predict(features)
q = kmeans.cluster_centers_
print('deep cklustering')
update_interval = 140
maxiter = 20000
for ite in range(int(maxiter)):
model.train()
if ite % update_interval == 0:
q = None
for batch_idx, data in enumerate(train_loader):
img, _ = data
img = img.to(device)
if not features:
_, q, _ = model(img)
else:
_, new_q, _ = model(img)
torch.cat((q, new_q), 0)
p = target_distribution(
q)
pred = q.argmax(1)
delta_label = np.sum(pred != pred_last).astype(
np.float32) / pred.shape[0]
pred_last = np.copy(pred)
if ite > 0 and delta_label < 0.001:
print('delta_label ', delta_label, '< tol ', 0.001)
print('Reached tolerance threshold. Stopping training.')
break
print('epoch {} of {}'.format(epoch + 1, tot_epochs))
desc = "ITERATION - loss: {:.2f}"
pbar = tqdm.tqdm(desc=desc.format(0),
total=len(train_loader),
leave=False,
file=None,
initial=0)
running_loss = 0
for batch_idx, data in enumerate(train_loader):
img, _ = data
img = img.to(device)
optimizer.zero_grad()
_, q, out = model(img)
loss = loss(q,
p[batch_idx * 128:batch_idx * 128 + 128, :],
out,
gamma=0.1)
running_loss += loss.item()
loss.backward()
optimizer.step()
pbar.desc = desc.format(loss.item())
pbar.update()
print('loss: {}'.format(running_loss / len(train_loader)))
model.eval()
with torch.no_grad():
val_running_loss = 0
for val_batch_idx, val_data in enumerate(val_loader):
val_img, _ = val_data
val_img = val_img.to(device)
_, val_q, val_out = model(val_img)
val_loss = loss(val_q,
p[val_batch_idx * 128:val_batch_idx * 128 +
128, :],
val_out,
gamma=0.1)
val_running_loss += val_loss.item()
if val_running_loss / len(val_loader) < best_val_loss:
torch.save(model.state_dict(), 'overall_weights.pth')
print('val loss: {}'.format(val_running_loss / len(val_loader)))
pbar.close()
| true | true |
f71a8324690c575db20b997daa92e561d98a87c5 | 21,037 | py | Python | wsgidav/samples/mysql_dav_provider.py | KnoooW/wsgidav | 2cf357f72d2c835f376f2c1295897cb879ef6bc1 | [
"MIT"
] | 1 | 2021-12-29T08:27:04.000Z | 2021-12-29T08:27:04.000Z | wsgidav/samples/mysql_dav_provider.py | KnoooW/wsgidav | 2cf357f72d2c835f376f2c1295897cb879ef6bc1 | [
"MIT"
] | null | null | null | wsgidav/samples/mysql_dav_provider.py | KnoooW/wsgidav | 2cf357f72d2c835f376f2c1295897cb879ef6bc1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# (c) 2009-2021 Martin Wendt and contributors; see WsgiDAV https://github.com/mar10/wsgidav
# Original PyFileServer (c) 2005 Ho Chun Wei.
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
"""
Implementation of a WebDAV provider that provides a very basic, read-only
resource layer emulation of a MySQL database.
This module is specific to the WsgiDAV application. It provides a
classes ``MySQLBrowserProvider``.
Usage::
(see docs/sample_wsgidav.yaml)
MySQLBrowserProvider(host, user, passwd, db)
   host - hostname of the database server
   user - user name used to access the database
   passwd - password used to access the database
   db - name of the database on the database server
The ``MySQLBrowserProvider`` provides a very basic, read-only
resource layer emulation of a MySQL database.
It provides the following interface:
- the root collection shared consists of collections that correspond to
table names
- in each table collection, there is a resource called "_ENTIRE_CONTENTS".
This is a non-collection resource that returns a csv representation of the
entire table
- if the table has a single primary key, each table record will also appear
as a non-collection resource in the table collection using the primary key
value as its name. This resource returns a csv representation of the record
and will also include the record attributes as live properties with
attribute name as property name and table name suffixed with colon as the
property namespace
This is a very basic interface and below is a by no means thorough summary of
its limitations:
- Really only supports having numbers or strings as primary keys. The code uses
a numeric or string comparison that may not hold up if the primary key is
a date or some other datatype.
- There is no handling for cases like BLOBs as primary keys or such. Well, there is
no handling for BLOBs in general.
- When returning contents, it buffers the entire contents! A bad way to return
large tables. Ideally you would have a FileMixin that reads the database even
as the application reads the file object....
- It takes too many database queries to return information.
Ideally there should be some sort of caching for metadata at least, to avoid
unnecessary queries to the database.
"""
import csv
import hashlib
import time
from io import StringIO
import MySQLdb # @UnresolvedImport
from wsgidav import util
from wsgidav.dav_error import (
HTTP_FORBIDDEN,
DAVError,
PRECONDITION_CODE_ProtectedProperty,
)
from wsgidav.dav_provider import DAVProvider, _DAVResource
__docformat__ = "reStructuredText"
_logger = util.get_module_logger(__name__)
class MySQLBrowserResource(_DAVResource):
    """Represents a single existing DAV resource instance.

    See also DAVResource and MySQLBrowserProvider.
    """

    def __init__(self, provider, path, is_collection, environ):
        super().__init__(path, is_collection, environ)
        # Lazily filled by _init() on first metadata access.
        self._cache = None

    def _init(self):
        """Read resource information into self._cache, for cached access.

        See DAVResource._init()
        """
        # TODO: recalc self.path from <self._file_path>, to fix correct file system case
        # On windows this would lead to correct URLs
        self.provider._count_get_resource_inst_init += 1
        tableName, primKey = self.provider._split_path(self.path)

        display_type = "Unknown"
        displayTypeComment = ""
        contentType = "text/html"

        if tableName is None:
            display_type = "Database"
        elif primKey is None:  # "database" and table name
            display_type = "Database Table"
        else:
            contentType = "text/csv"
            if primKey == "_ENTIRE_CONTENTS":
                display_type = "Database Table Contents"
                displayTypeComment = "CSV Representation of Table Contents"
            else:
                display_type = "Database Record"
                displayTypeComment = "Attributes available as properties"

        # Avoid calling is_collection, since it would call isExisting -> _init_connection
        is_collection = primKey is None

        self._cache = {
            "content_length": None,
            "contentType": contentType,
            "created": time.time(),
            "display_name": self.name,
            # BUG FIX: ``hashlib.md5().update(x)`` returns None (update()
            # mutates the hash object in place), so chaining .hexdigest()
            # raised AttributeError; update() also requires bytes, not str.
            "etag": hashlib.md5(self.path.encode("utf-8")).hexdigest(),
            "modified": None,
            "support_ranges": False,
            "display_info": {"type": display_type, "typeComment": displayTypeComment},
        }

        # Some resource-only infos:
        if not is_collection:
            self._cache["modified"] = time.time()
        # Lazy %-style args: formatting is skipped when DEBUG is disabled.
        _logger.debug("---> _init, nc=%s", self.provider._count_initConnection)

    def _get_info(self, info):
        """Return one cached metadata value, initialising the cache lazily."""
        if self._cache is None:
            self._init()
        return self._cache.get(info)

    # Getter methods for standard live properties

    def get_content_length(self):
        return self._get_info("content_length")

    def get_content_type(self):
        return self._get_info("contentType")

    def get_creation_date(self):
        return self._get_info("created")

    def get_display_name(self):
        return self.name

    def get_display_info(self):
        return self._get_info("display_info")

    def get_etag(self):
        return self._get_info("etag")

    def get_last_modified(self):
        return self._get_info("modified")

    def get_member_list(self):
        """Return list of (direct) collection members.

        Root collection: one collection per database table.
        Table collection: one resource per row (when the table has a
        single-column primary key) plus the ``_ENTIRE_CONTENTS`` CSV dump.

        See DAVResource.get_member_list()
        """
        members = []
        conn = self.provider._init_connection()
        try:
            tableName, primKey = self.provider._split_path(self.path)
            if tableName is None:
                retlist = self.provider._list_tables(conn)
                for name in retlist:
                    members.append(
                        MySQLBrowserResource(
                            self.provider,
                            util.join_uri(self.path, name),
                            True,
                            self.environ,
                        )
                    )
            elif primKey is None:
                pri_key = self.provider._find_primary_key(conn, tableName)
                if pri_key is not None:
                    retlist = self.provider._list_fields(conn, tableName, pri_key)
                    for name in retlist:
                        members.append(
                            MySQLBrowserResource(
                                self.provider,
                                util.join_uri(self.path, name),
                                False,
                                self.environ,
                            )
                        )
                members.insert(
                    0,
                    MySQLBrowserResource(
                        self.provider,
                        util.join_uri(self.path, "_ENTIRE_CONTENTS"),
                        False,
                        self.environ,
                    ),
                )
        finally:
            conn.close()
        return members

    def get_content(self):
        """Open content as a stream for reading.

        Returns a CSV rendering of either the entire table
        (``_ENTIRE_CONTENTS``) or the single addressed record.

        See DAVResource.get_content()
        """
        filestream = StringIO()

        tableName, primKey = self.provider._split_path(self.path)
        if primKey is not None:
            conn = self.provider._init_connection()
            listFields = self.provider._get_field_list(conn, tableName)
            csvwriter = csv.DictWriter(filestream, listFields, extrasaction="ignore")
            # Header row mapping each field name to itself.
            dictFields = {}
            for field_name in listFields:
                dictFields[field_name] = field_name
            csvwriter.writerow(dictFields)

            if primKey == "_ENTIRE_CONTENTS":
                cursor = conn.cursor(MySQLdb.cursors.DictCursor)
                cursor.execute("SELECT * from " + self.provider._db + "." + tableName)
                result_set = cursor.fetchall()
                for row in result_set:
                    csvwriter.writerow(row)
                cursor.close()
            else:
                row = self.provider._get_record_by_primary_key(conn, tableName, primKey)
                if row is not None:
                    csvwriter.writerow(row)
            conn.close()

        # this suffices for small dbs, but
        # for a production big database, I imagine you would have a FileMixin that
        # does the retrieving and population even as the file object is being read
        filestream.seek(0)
        return filestream

    def get_property_names(self, *, is_allprop):
        """Return list of supported property names in Clark Notation.

        Return supported live and dead properties; in addition, all table
        field names are returned as properties (namespace = table name).
        """
        # Let default implementation return supported live and dead properties
        propNames = super().get_property_names(is_allprop=is_allprop)

        # Add fieldnames as properties
        tableName, primKey = self.provider._split_path(self.path)
        if primKey is not None:
            conn = self.provider._init_connection()
            fieldlist = self.provider._get_field_list(conn, tableName)
            for fieldname in fieldlist:
                propNames.append("{%s:}%s" % (tableName, fieldname))
            conn.close()
        return propNames

    def get_property_value(self, name):
        """Return the value of a property.

        Record resources expose their table fields as properties in the
        ``{<tableName>:}`` namespace; everything else is delegated to the
        base class (live properties, then the property manager).
        """
        # Return table field as property
        tableName, primKey = self.provider._split_path(self.path)
        if primKey is not None:
            ns, localName = util.split_namespace(name)
            if ns == (tableName + ":"):
                conn = self.provider._init_connection()
                fieldlist = self.provider._get_field_list(conn, tableName)
                if localName in fieldlist:
                    val = self.provider._get_field_by_primary_key(
                        conn, tableName, primKey, localName
                    )
                    conn.close()
                    return val
                conn.close()
        # else, let default implementation return supported live and dead properties
        return super().get_property_value(name)

    def set_property_value(self, name, value, dry_run=False):
        """Set or remove property value.

        This provider is read-only, so every attempt is rejected.

        See DAVResource.set_property_value()
        """
        raise DAVError(
            HTTP_FORBIDDEN, err_condition=PRECONDITION_CODE_ProtectedProperty
        )
# ============================================================================
# MySQLBrowserProvider
# ============================================================================
class MySQLBrowserProvider(DAVProvider):
    """Read-only DAV provider mapping a MySQL database to resources.

    Layout: one collection per table; inside each table collection, a
    ``_ENTIRE_CONTENTS`` CSV resource plus (for tables with a
    single-column primary key) one resource per row, named by key value.
    """

    def __init__(self, host, user, passwd, db):
        super().__init__()
        self._host = host
        self._user = user
        self._passwd = passwd
        self._db = db
        self._count_initConnection = 0

    def __repr__(self):
        return "%s for db '%s' on '%s' (user: '%s')'" % (
            self.__class__.__name__,
            self._db,
            self._host,
            self._user,
        )

    def _split_path(self, path):
        """Return (tableName, primaryKey) tuple for a request path."""
        if path.strip() in (None, "", "/"):
            return (None, None)
        tableName, primKey = util.save_split(path.strip("/"), "/", 1)
        return (tableName, primKey)

    def _init_connection(self):
        """Open a new connection to the configured database."""
        self._count_initConnection += 1
        return MySQLdb.connect(
            host=self._host, user=self._user, passwd=self._passwd, db=self._db
        )

    def _get_field_list(self, conn, table_name):
        """Return the column names of *table_name* (via DESCRIBE)."""
        retlist = []
        cursor = conn.cursor(MySQLdb.cursors.DictCursor)
        cursor.execute("DESCRIBE " + table_name)
        for row in cursor.fetchall():
            retlist.append(row["Field"])
        cursor.close()
        return retlist

    def _is_data_type_numeric(self, datatype):
        """Return True when the MySQL column type string is numeric.

        No longer needed for SQL quoting (queries are parameterized now),
        but kept as part of the class interface.
        """
        if datatype is None:
            return False
        # BUG FIX: this list contained the typo "INTT", so plain INT
        # columns were classified as non-numeric.
        numerictypes = [
            "BIGINT",
            "INT",
            "MEDIUMINT",
            "SMALLINT",
            "TINYINT",
            "BIT",
            "DEC",
            "DECIMAL",
            "DOUBLE",
            "FLOAT",
            "REAL",
            "DOUBLE PRECISION",
            "INTEGER",
            "NUMERIC",
        ]
        datatype = datatype.upper()
        return any(datatype.startswith(numtype) for numtype in numerictypes)

    def _exists_record_by_primary_key(self, conn, table_name, pri_key_value):
        """Return True when a row with the given primary-key value exists."""
        pri_key = self._find_primary_key(conn, table_name)
        if pri_key is None:
            # No primary key, or a multi-part key: records not addressable.
            return False
        cursor = conn.cursor(MySQLdb.cursors.DictCursor)
        # SECURITY FIX: the key value comes from the request URL; bind it
        # as a query parameter instead of concatenating it into the SQL
        # (the old code was injectable).  Identifiers cannot be bound;
        # db/table/column names used here come from the configuration and
        # from SHOW TABLES / DESCRIBE metadata (exists() validates the
        # URL table name against SHOW TABLES before queries run).
        cursor.execute(
            "SELECT %s FROM %s.%s WHERE %s = %%s"
            % (pri_key, self._db, table_name, pri_key),
            (pri_key_value,),
        )
        row = cursor.fetchone()
        cursor.close()
        return row is not None

    def _get_field_by_primary_key(self, conn, table_name, pri_key_value, field_name):
        """Return one field of the row addressed by *pri_key_value* as a
        string, or None when the row (or a usable primary key) is missing."""
        pri_key = self._find_primary_key(conn, table_name)
        if pri_key is None:
            return None
        cursor = conn.cursor(MySQLdb.cursors.DictCursor)
        # SECURITY FIX: bound parameter for the untrusted key value
        # (see _exists_record_by_primary_key).
        cursor.execute(
            "SELECT %s FROM %s.%s WHERE %s = %%s"
            % (field_name, self._db, table_name, pri_key),
            (pri_key_value,),
        )
        row = cursor.fetchone()
        cursor.close()
        if row is None:
            return None
        return util.to_str(row[field_name])

    def _get_record_by_primary_key(self, conn, table_name, pri_key_value):
        """Return the whole row addressed by *pri_key_value* as a
        {field: str} dict, or None when it does not exist."""
        pri_key = self._find_primary_key(conn, table_name)
        if pri_key is None:
            return None
        cursor = conn.cursor(MySQLdb.cursors.DictCursor)
        # SECURITY FIX: bound parameter for the untrusted key value
        # (see _exists_record_by_primary_key).
        cursor.execute(
            "SELECT * FROM %s.%s WHERE %s = %%s"
            % (self._db, table_name, pri_key),
            (pri_key_value,),
        )
        row = cursor.fetchone()
        cursor.close()
        if row is None:
            return None
        return {fname: util.to_str(row[fname]) for fname in row.keys()}

    def _find_primary_key(self, conn, table_name):
        """Return the table's single primary-key column name, or None when
        the table has no primary key or a composite (multi-column) one."""
        pri_key = None
        cursor = conn.cursor(MySQLdb.cursors.DictCursor)
        cursor.execute("DESCRIBE " + table_name)
        result_set = cursor.fetchall()
        for row in result_set:
            if row["Key"] == "PRI":
                if pri_key is not None:
                    # More than one PRI column: multipart key, unsupported.
                    # (Also fixes a cursor leak on this early return.)
                    cursor.close()
                    return None
                pri_key = row["Field"]
        cursor.close()
        return pri_key

    def _list_fields(self, conn, table_name, field_name):
        """Return the values of *field_name* for every row, as strings."""
        cursor = conn.cursor(MySQLdb.cursors.DictCursor)
        cursor.execute("SELECT " + field_name + " FROM " + self._db + "." + table_name)
        retlist = [util.to_str(row[field_name]) for row in cursor.fetchall()]
        cursor.close()
        return retlist

    def _list_tables(self, conn):
        """Return the names of all tables in the database."""
        cursor = conn.cursor()
        cursor.execute("SHOW TABLES")
        retlist = ["%s" % (row[0],) for row in cursor.fetchall()]
        cursor.close()
        return retlist

    def get_resource_inst(self, path, environ):
        """Return a MySQLBrowserResource for *path*, or None if it does
        not exist.

        See DAVProvider.get_resource_inst()
        """
        # TODO: calling exists() makes directory browsing VERY slow.
        # At least compared to PyFileServer, which simply used string
        # functions to get display_type and displayTypeComment
        self._count_get_resource_inst += 1
        if not self.exists(path, environ):
            return None
        _tableName, primKey = self._split_path(path)
        is_collection = primKey is None
        return MySQLBrowserResource(self, path, is_collection, environ)

    def exists(self, path, environ):
        """Return True when *path* maps to the root, an existing table, or
        an existing record / the _ENTIRE_CONTENTS pseudo-resource."""
        tableName, primKey = self._split_path(path)
        if tableName is None:
            return True
        conn = None
        try:
            conn = self._init_connection()
            # Check table existence.  This also serves as validation of
            # the raw URL table name before it is ever interpolated into
            # later queries.
            if tableName not in self._list_tables(conn):
                return False
            # Check table key existence:
            if primKey and primKey != "_ENTIRE_CONTENTS":
                return self._exists_record_by_primary_key(conn, tableName, primKey)
            return True
        finally:
            if conn:
                conn.close()

    def is_collection(self, path, environ):
        """Collections are the root and the per-table containers."""
        _tableName, primKey = self._split_path(path)
        return self.exists(path, environ) and primKey is None
| 34.657331 | 95 | 0.553216 |
import csv
import hashlib
import time
from io import StringIO
import MySQLdb
from wsgidav import util
from wsgidav.dav_error import (
HTTP_FORBIDDEN,
DAVError,
PRECONDITION_CODE_ProtectedProperty,
)
from wsgidav.dav_provider import DAVProvider, _DAVResource
__docformat__ = "reStructuredText"
_logger = util.get_module_logger(__name__)
class MySQLBrowserResource(_DAVResource):
    """DAV resource backed by a MySQL database object.

    Depending on the path this represents the database root, a table,
    a single record, or a table's virtual ``_ENTIRE_CONTENTS`` CSV file.
    """

    def __init__(self, provider, path, is_collection, environ):
        # NOTE(review): self.provider is presumably set by the _DAVResource
        # base class (from environ) — it is read by the methods below but
        # never stored here; confirm against wsgidav's _DAVResource.
        super().__init__(path, is_collection, environ)
        self._cache = None  # lazily filled by _init()

    def _init(self):
        """Compute and cache display/metadata information for this resource."""
        self.provider._count_get_resource_inst_init += 1
        tableName, primKey = self.provider._split_path(self.path)
        display_type = "Unknown"
        displayTypeComment = ""
        contentType = "text/html"
        if tableName is None:
            display_type = "Database"
        elif primKey is None:
            display_type = "Database Table"
        else:
            # Record-level resources are served as CSV.
            contentType = "text/csv"
            if primKey == "_ENTIRE_CONTENTS":
                display_type = "Database Table Contents"
                displayTypeComment = "CSV Representation of Table Contents"
            else:
                display_type = "Database Record"
                displayTypeComment = "Attributes available as properties"
        is_collection = primKey is None
        self._cache = {
            "content_length": None,
            "contentType": contentType,
            "created": time.time(),
            "display_name": self.name,
            # FIX: md5().update() returns None, so the original
            # ``hashlib.md5().update(self.path).hexdigest()`` raised
            # AttributeError (and update() needs bytes). Hash the encoded
            # path in a single step instead.
            "etag": hashlib.md5(self.path.encode()).hexdigest(),
            "modified": None,
            "support_ranges": False,
            "display_info": {"type": display_type, "typeComment": displayTypeComment},
        }
        # Non-collections get a fresh modification time stamp.
        if not is_collection:
            self._cache["modified"] = time.time()
        _logger.debug("---> _init, nc=%s" % self.provider._count_initConnection)

    def _get_info(self, info):
        """Lazy accessor for the cached metadata dict."""
        if self._cache is None:
            self._init()
        return self._cache.get(info)

    # --- Getters required by the _DAVResource interface ------------------

    def get_content_length(self):
        return self._get_info("content_length")

    def get_content_type(self):
        return self._get_info("contentType")

    def get_creation_date(self):
        return self._get_info("created")

    def get_display_name(self):
        return self.name

    def get_display_info(self):
        return self._get_info("display_info")

    def get_etag(self):
        return self._get_info("etag")

    def get_last_modified(self):
        return self._get_info("modified")

    def get_member_list(self):
        """Return direct members: tables for the root, records for a table."""
        members = []
        conn = self.provider._init_connection()
        try:
            tableName, primKey = self.provider._split_path(self.path)
            if tableName is None:
                # Root: every table is a member collection.
                retlist = self.provider._list_tables(conn)
                for name in retlist:
                    members.append(
                        MySQLBrowserResource(
                            self.provider,
                            util.join_uri(self.path, name),
                            True,
                            self.environ,
                        )
                    )
            elif primKey is None:
                # Table: one member per record (named by primary key), plus
                # the virtual _ENTIRE_CONTENTS CSV file in front.
                pri_key = self.provider._find_primary_key(conn, tableName)
                if pri_key is not None:
                    retlist = self.provider._list_fields(conn, tableName, pri_key)
                    for name in retlist:
                        members.append(
                            MySQLBrowserResource(
                                self.provider,
                                util.join_uri(self.path, name),
                                False,
                                self.environ,
                            )
                        )
                members.insert(
                    0,
                    MySQLBrowserResource(
                        self.provider,
                        util.join_uri(self.path, "_ENTIRE_CONTENTS"),
                        False,
                        self.environ,
                    ),
                )
        finally:
            conn.close()
        return members

    def get_content(self):
        """Return a seekable file-like object with this resource's CSV content.

        Collections (no primary key in the path) yield an empty stream.
        """
        filestream = StringIO()
        tableName, primKey = self.provider._split_path(self.path)
        if primKey is not None:
            conn = self.provider._init_connection()
            listFields = self.provider._get_field_list(conn, tableName)
            csvwriter = csv.DictWriter(filestream, listFields, extrasaction="ignore")
            # Header row: each field name maps to itself.
            dictFields = {}
            for field_name in listFields:
                dictFields[field_name] = field_name
            csvwriter.writerow(dictFields)
            if primKey == "_ENTIRE_CONTENTS":
                cursor = conn.cursor(MySQLdb.cursors.DictCursor)
                cursor.execute("SELECT * from " + self.provider._db + "." + tableName)
                result_set = cursor.fetchall()
                for row in result_set:
                    csvwriter.writerow(row)
                cursor.close()
            else:
                row = self.provider._get_record_by_primary_key(conn, tableName, primKey)
                if row is not None:
                    csvwriter.writerow(row)
            conn.close()
        filestream.seek(0)
        return filestream

    def get_property_names(self, *, is_allprop):
        """Return property names, adding one '{<table>:}<column>' live
        property per table column for record resources."""
        propNames = super().get_property_names(is_allprop=is_allprop)
        tableName, primKey = self.provider._split_path(self.path)
        if primKey is not None:
            conn = self.provider._init_connection()
            fieldlist = self.provider._get_field_list(conn, tableName)
            for fieldname in fieldlist:
                propNames.append("{%s:}%s" % (tableName, fieldname))
            conn.close()
        return propNames

    def get_property_value(self, name):
        """Resolve '{<table>:}<column>' properties from the record itself;
        delegate everything else to the base class."""
        tableName, primKey = self.provider._split_path(self.path)
        if primKey is not None:
            ns, localName = util.split_namespace(name)
            if ns == (tableName + ":"):
                conn = self.provider._init_connection()
                fieldlist = self.provider._get_field_list(conn, tableName)
                if localName in fieldlist:
                    val = self.provider._get_field_by_primary_key(
                        conn, tableName, primKey, localName
                    )
                    conn.close()
                    return val
                conn.close()
        return super().get_property_value(name)

    def set_property_value(self, name, value, dry_run=False):
        """All properties are read-only; always raise 403 Forbidden."""
        raise DAVError(
            HTTP_FORBIDDEN, err_condition=PRECONDITION_CODE_ProtectedProperty
        )
class MySQLBrowserProvider(DAVProvider):
def __init__(self, host, user, passwd, db):
super().__init__()
self._host = host
self._user = user
self._passwd = passwd
self._db = db
self._count_initConnection = 0
def __repr__(self):
return "%s for db '%s' on '%s' (user: '%s')'" % (
self.__class__.__name__,
self._db,
self._host,
self._user,
)
def _split_path(self, path):
if path.strip() in (None, "", "/"):
return (None, None)
tableName, primKey = util.save_split(path.strip("/"), "/", 1)
# _logger.debug("'%s' -> ('%s', '%s')" % (path, tableName, primKey))
return (tableName, primKey)
def _init_connection(self):
self._count_initConnection += 1
return MySQLdb.connect(
host=self._host, user=self._user, passwd=self._passwd, db=self._db
)
def _get_field_list(self, conn, table_name):
retlist = []
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
cursor.execute("DESCRIBE " + table_name)
result_set = cursor.fetchall()
for row in result_set:
retlist.append(row["Field"])
cursor.close()
return retlist
def _is_data_type_numeric(self, datatype):
if datatype is None:
return False
# how many MySQL datatypes does it take to change a lig... I mean, store numbers
numerictypes = [
"BIGINT",
"INTT",
"MEDIUMINT",
"SMALLINT",
"TINYINT",
"BIT",
"DEC",
"DECIMAL",
"DOUBLE",
"FLOAT",
"REAL",
"DOUBLE PRECISION",
"INTEGER",
"NUMERIC",
]
datatype = datatype.upper()
for numtype in numerictypes:
if datatype.startswith(numtype):
return True
return False
def _exists_record_by_primary_key(self, conn, table_name, pri_key_value):
pri_key = None
pri_field_type = None
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
cursor.execute("DESCRIBE " + table_name)
result_set = cursor.fetchall()
for row in result_set:
if row["Key"] == "PRI":
if pri_key is None:
pri_key = row["Field"]
pri_field_type = row["Type"]
else:
return False # more than one primary key - multipart key?
cursor.close()
isNumType = self._is_data_type_numeric(pri_field_type)
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
if isNumType:
cursor.execute(
"SELECT "
+ pri_key
+ " FROM "
+ self._db
+ "."
+ table_name
+ " WHERE "
+ pri_key
+ " = "
+ pri_key_value
)
else:
cursor.execute(
"SELECT "
+ pri_key
+ " FROM "
+ self._db
+ "."
+ table_name
+ " WHERE "
+ pri_key
+ " = '"
+ pri_key_value
+ "'"
)
row = cursor.fetchone()
if row is None:
cursor.close()
return False
cursor.close()
return True
def _get_field_by_primary_key(self, conn, table_name, pri_key_value, field_name):
pri_key = None
pri_field_type = None
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
cursor.execute("DESCRIBE " + table_name)
result_set = cursor.fetchall()
for row in result_set:
if row["Key"] == "PRI":
if pri_key is None:
pri_key = row["Field"]
pri_field_type = row["Type"]
else:
return None # more than one primary key - multipart key?
cursor.close()
isNumType = self._is_data_type_numeric(pri_field_type)
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
if isNumType:
cursor.execute(
"SELECT "
+ field_name
+ " FROM "
+ self._db
+ "."
+ table_name
+ " WHERE "
+ pri_key
+ " = "
+ pri_key_value
)
else:
cursor.execute(
"SELECT "
+ field_name
+ " FROM "
+ self._db
+ "."
+ table_name
+ " WHERE "
+ pri_key
+ " = '"
+ pri_key_value
+ "'"
)
row = cursor.fetchone()
if row is None:
cursor.close()
return None
val = util.to_str(row[field_name])
cursor.close()
return val
def _get_record_by_primary_key(self, conn, table_name, pri_key_value):
dictRet = {}
pri_key = None
pri_field_type = None
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
cursor.execute("DESCRIBE " + table_name)
result_set = cursor.fetchall()
for row in result_set:
if row["Key"] == "PRI":
if pri_key is None:
pri_key = row["Field"]
pri_field_type = row["Type"]
else:
return None # more than one primary key - multipart key?
cursor.close()
isNumType = self._is_data_type_numeric(pri_field_type)
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
if isNumType:
cursor.execute(
"SELECT * FROM "
+ self._db
+ "."
+ table_name
+ " WHERE "
+ pri_key
+ " = "
+ pri_key_value
)
else:
cursor.execute(
"SELECT * FROM "
+ self._db
+ "."
+ table_name
+ " WHERE "
+ pri_key
+ " = '"
+ pri_key_value
+ "'"
)
row = cursor.fetchone()
if row is None:
cursor.close()
return None
for fname in row.keys():
dictRet[fname] = util.to_str(row[fname])
cursor.close()
return dictRet
def _find_primary_key(self, conn, table_name):
pri_key = None
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
cursor.execute("DESCRIBE " + table_name)
result_set = cursor.fetchall()
for row in result_set:
fieldname = row["Field"]
keyvalue = row["Key"]
if keyvalue == "PRI":
if pri_key is None:
pri_key = fieldname
else:
return None # more than one primary key - multipart key?
cursor.close()
return pri_key
def _list_fields(self, conn, table_name, field_name):
retlist = []
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
cursor.execute("SELECT " + field_name + " FROM " + self._db + "." + table_name)
result_set = cursor.fetchall()
for row in result_set:
retlist.append(util.to_str(row[field_name]))
cursor.close()
return retlist
def _list_tables(self, conn):
retlist = []
cursor = conn.cursor()
cursor.execute("SHOW TABLES")
result_set = cursor.fetchall()
for row in result_set:
retlist.append("%s" % (row[0]))
cursor.close()
return retlist
def get_resource_inst(self, path, environ):
# TODO: calling exists() makes directory browsing VERY slow.
# At least compared to PyFileServer, which simply used string
# functions to get display_type and displayTypeComment
self._count_get_resource_inst += 1
if not self.exists(path, environ):
return None
_tableName, primKey = self._split_path(path)
is_collection = primKey is None
return MySQLBrowserResource(self, path, is_collection, environ)
def exists(self, path, environ):
tableName, primKey = self._split_path(path)
if tableName is None:
return True
try:
conn = None
conn = self._init_connection()
# Check table existence:
tbllist = self._list_tables(conn)
if tableName not in tbllist:
return False
# Check table key existence:
if primKey and primKey != "_ENTIRE_CONTENTS":
return self._exists_record_by_primary_key(conn, tableName, primKey)
return True
finally:
if conn:
conn.close()
def is_collection(self, path, environ):
_tableName, primKey = self._split_path(path)
return self.exists(path, environ) and primKey is None
| true | true |
f71a833507215096556a8d151f20ec58347e380e | 1,804 | py | Python | meraki_sdk/models/switch_profile_port_model.py | meraki/meraki-python-sdk | 9894089eb013318243ae48869cc5130eb37f80c0 | [
"MIT"
] | 37 | 2019-04-24T14:01:33.000Z | 2022-01-28T01:37:21.000Z | meraki_sdk/models/switch_profile_port_model.py | ankita66666666/meraki-python-sdk | 9894089eb013318243ae48869cc5130eb37f80c0 | [
"MIT"
] | 10 | 2019-07-09T16:35:11.000Z | 2021-12-07T03:47:53.000Z | meraki_sdk/models/switch_profile_port_model.py | ankita66666666/meraki-python-sdk | 9894089eb013318243ae48869cc5130eb37f80c0 | [
"MIT"
] | 17 | 2019-04-30T23:53:21.000Z | 2022-02-07T22:57:44.000Z | # -*- coding: utf-8 -*-
"""
meraki_sdk
This file was automatically generated for meraki by APIMATIC v2.0 ( https://apimatic.io ).
"""
class SwitchProfilePortModel(object):

    """Implementation of the 'SwitchProfilePort' model.

    Attributes:
        profile (string): Profile identifier.
        port_id (string): Port identifier of switch port. For modules, the
            identifier is "SlotNumber_ModuleType_PortNumber" (Ex:
            “1_8X10G_1”), otherwise it is just the port number (Ex: "8").

    """

    # Maps Python attribute names to their JSON/API property names.
    _names = {
        "profile": 'profile',
        "port_id": 'portId'
    }

    def __init__(self, profile=None, port_id=None):
        """Constructor for the SwitchProfilePortModel class"""
        self.profile = profile
        self.port_id = port_id

    @classmethod
    def from_dictionary(cls, dictionary):
        """Create an instance from a deserialized API response dictionary.

        Args:
            dictionary (dict): Keys MUST match the API property names.

        Returns:
            SwitchProfilePortModel: A populated instance, or None when
            *dictionary* is None.

        """
        if dictionary is None:
            return None
        return cls(dictionary.get('profile'),
                   dictionary.get('portId'))
| 27.333333 | 95 | 0.583703 |
class SwitchProfilePortModel(object):
    """Implementation of the 'SwitchProfilePort' model.

    Attributes:
        profile (string): Profile identifier.
        port_id (string): Port identifier of switch port.

    """

    # Maps Python attribute names to their JSON/API property names.
    _names = {
        "profile":'profile',
        "port_id":'portId'
    }
    def __init__(self,
                 profile=None,
                 port_id=None):
        """Constructor for the SwitchProfilePortModel class"""
        self.profile = profile
        self.port_id = port_id
    @classmethod
    def from_dictionary(cls,
                        dictionary):
        """Create an instance from a dictionary (API response).

        Returns None when *dictionary* is None; missing keys become None.
        """
        if dictionary is None:
            return None
        # Extract variables from the dictionary.
        profile = dictionary.get('profile')
        port_id = dictionary.get('portId')
        # Return an object of this model.
        return cls(profile,
                   port_id)
| true | true |
f71a8426ac96ef7e52fb2b1c74212768cb00e3c5 | 497 | py | Python | sprites/blocker.py | ErezOr18/pygame-space-invaders | f2e129bb2e2e18470599573910e6cad34f501df8 | [
"MIT"
] | null | null | null | sprites/blocker.py | ErezOr18/pygame-space-invaders | f2e129bb2e2e18470599573910e6cad34f501df8 | [
"MIT"
] | null | null | null | sprites/blocker.py | ErezOr18/pygame-space-invaders | f2e129bb2e2e18470599573910e6cad34f501df8 | [
"MIT"
] | null | null | null | from pygame import *
class Blocker(sprite.Sprite):
    """A square barrier sprite that is redrawn onto the game screen."""

    def __init__(self, size, color, row, column):
        sprite.Sprite.__init__(self)
        # Blockers are square, so one size value covers both dimensions.
        self.width = self.height = size
        self.color = color
        self.image = Surface((self.width, self.height))
        self.image.fill(self.color)
        self.rect = self.image.get_rect()
        self.row = row
        self.column = column

    def update(self, game, keys, *args):
        """Blit this blocker onto the game screen (keys/args unused)."""
        game.screen.blit(self.image, self.rect)
| 27.611111 | 55 | 0.607646 | from pygame import *
class Blocker(sprite.Sprite):
def __init__(self, size, color, row, column):
sprite.Sprite.__init__(self)
self.height = size
self.width = size
self.color = color
self.image = Surface((self.width, self.height))
self.image.fill(self.color)
self.rect = self.image.get_rect()
self.row = row
self.column = column
def update(self, game, keys, *args):
game.screen .blit(self.image, self.rect)
| true | true |
f71a84f7b27fecc7c26682e691d6999bc0138353 | 245 | py | Python | chaptertwo/famousquote2.py | cmotek/python_crashcourse | 29cbdd6699cd17192bb599d235852d547630d110 | [
"Apache-2.0"
] | null | null | null | chaptertwo/famousquote2.py | cmotek/python_crashcourse | 29cbdd6699cd17192bb599d235852d547630d110 | [
"Apache-2.0"
] | null | null | null | chaptertwo/famousquote2.py | cmotek/python_crashcourse | 29cbdd6699cd17192bb599d235852d547630d110 | [
"Apache-2.0"
] | null | null | null | famousauthor = "Herman Melville"
print(famousauthor + ' wrote in Moby Dick, "Now then, thought I, unconsciously rolling up the sleeves of my frock, here goes a cool, collected dive at death and destruction, and the devil fetch the hindmost."')
# Print a famous Moby Dick quote attributed to its author.
famousauthor = "Herman Melville"
print(famousauthor + ' wrote in Moby Dick, "Now then, thought I, unconsciously rolling up the sleeves of my frock, here goes a cool, collected dive at death and destruction, and the devil fetch the hindmost."')
| true | true |
f71a8535015df8c0b0c3d55332640c315d8527a4 | 162,189 | py | Python | jp.atcoder/abc081/arc086_b/17664033.py | kagemeka/atcoder-submissions | 91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e | [
"MIT"
] | 1 | 2022-02-09T03:06:25.000Z | 2022-02-09T03:06:25.000Z | jp.atcoder/abc081/arc086_b/17664033.py | kagemeka/atcoder-submissions | 91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e | [
"MIT"
] | 1 | 2022-02-05T22:53:18.000Z | 2022-02-09T01:29:30.000Z | jp.atcoder/abc081/arc086_b/17664033.py | kagemeka/atcoder-submissions | 91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e | [
"MIT"
] | null | null | null | import itertools
import math
import string
import sys
from bisect import bisect_left as bi_l
from bisect import bisect_right as bi_r
from collections import Counter, defaultdict, deque
from functools import lru_cache, reduce
from heapq import heapify, heappop, heappush
from operator import or_, xor
sys.setrecursionlimit(10**7)
inf = float("inf")
MOD = 10**9 + 7
# MOD = 998244353
using_numpy = 1
import networkx as nx
import numpy as np
from numba import i8, njit
from scipy import optimize
from scipy.ndimage import distance_transform_cdt
from scipy.sparse import csr_matrix
from scipy.sparse.csgraph import (
connected_components,
csgraph_to_dense,
maximum_flow,
minimum_spanning_tree,
shortest_path,
)
from scipy.spatial import ConvexHull
from scipy.special import comb
class Algebra:
class Modular(int):
def __init__(self, n, mod=MOD):
self.value = n
self.mod = mod
def __str__(self):
return f"{self.value}"
def __add__(self, other):
return self.__class__((self.value + other.value) % self.mod)
def __sub__(self, x):
return self.__class__((self.value - x.value) % self.mod)
def __mul__(self, x):
return self.__class__((self.value * x.value) % self.mod)
def __pow__(self, x):
return self.__class__(pow(self.value, x.value, self.mod))
def __lt__(self, x):
return self.value < x.value
def __le__(self, x):
return self.value <= x.value
def __eq__(self, x):
return self.value == x.value
def __ne__(self, x):
return self.value != x.value
def __gt__(self, x):
return self.value > x.value
def __ge__(self, x):
return self.value >= x.value
class SemiGroup:
pass
class Monoid:
pass
class Group:
pass
class SemiRing:
pass
class Ring:
pass
@staticmethod
def identity(n):
if using_numpy:
return np.identity(n, dtype=np.int64)
else:
a = [[0] * n for _ in range(n)]
for i in range(n):
a[i][i] = 1
return a
@staticmethod
def dot(a, b):
if using_numpy:
return np.dot(a, b)
else:
h, w, l = len(a), len(b[0]), len(b)
assert len(a[0]) == l
c = [[0] * w for _ in range(h)]
for i in range(h):
for j in range(w):
for k in range(l):
c[i][j] += a[i][k] * b[k][j]
return c
@classmethod
def matrix_pow(cls, a, n, mod=10**9 + 7):
m = len(a)
b = cls.identity(m)
while n:
if n & 1:
b = cls.dot(b, a)
n >>= 1
a = cls.dot(a, a)
if using_numpy:
a %= mod
b %= mod
else:
for i in range(m):
for j in range(m):
a[i][j] %= mod
b[i][j] %= mod
return b
@staticmethod
def bitwise_dot(a, b):
if using_numpy:
return np.bitwise_xor.reduce(
a[:, None, :] & b.T[None, :, :], axis=-1
)
else:
h, w, l = len(a), len(b[0]), len(b)
assert len(a[0]) == l
c = [[0] * w for _ in range(h)]
for i in range(h):
for j in range(w):
for k in range(l):
c[i][j] ^= a[i][k] & b[k][j]
return c
@classmethod
def bitwise_mat_pow(cls, a, n):
if n == 0:
return np.eye(len(a), dtype=np.uint32) * ((1 << 32) - 1)
res = cls.bitwise_mat_pow(a, n // 2)
res = cls.bitwise_dot(res, res)
return cls.bitwise_dot(res, a) if n & 1 else res
@staticmethod
def cumprod(a, mod):
l = len(a)
sql = int(np.sqrt(l) + 1)
a = np.resize(a, sql**2).reshape(sql, sql)
for i in range(sql - 1):
a[:, i + 1] *= a[:, i]
a[:, i + 1] %= mod
for i in range(sql - 1):
a[i + 1] *= a[i, -1]
a[i + 1] %= mod
return np.ravel(a)[:l]
@classmethod
def generate_fac_ifac(cls, n, p=MOD):
if using_numpy:
fac = np.arange(n + 1)
fac[0] = 1
fac = cls.cumprod(fac, p)
ifac = np.arange(n + 1, 0, -1)
ifac[0] = pow(int(fac[-1]), p - 2, p)
ifac = cls.cumprod(ifac, p)[n::-1]
else:
fac = [None] * (n + 1)
fac[0] = 1
for i in range(n):
fac[i + 1] = fac[i] * (i + 1) % p
ifac = [None] * (n + 1)
ifac[n] = pow(fac[n], p - 2, p)
for i in range(n, 0, -1):
ifac[i - 1] = ifac[i] * i % p
return fac, ifac
class Kitamasa:
pass
mint = Algebra.Modular
class NumberTheory:
    """Prime sieves, factorization and elementary number-theory helpers."""

    class PrimeNumbers:  # pn
        """Sieve of Eratosthenes wrapper: primality test + prime list."""

        def __init__(self, n=2 * 10**6):
            self.is_prime, self.prime_nums = self.find(n)

        def __call__(self, n):
            """Return True if n is prime (n must be <= the sieve bound)."""
            return self.is_prime[n]

        def __iter__(self):
            return iter(self.prime_nums)

        def __getitem__(self, key):
            return self.prime_nums[key]

        @staticmethod
        def find(n):  # Sieve of eratosthenes
            """Return (is_prime flags for 0..n, list of primes <= n)."""
            if using_numpy:
                # FIX: np.bool was a deprecated alias for the builtin bool
                # and was removed in NumPy 1.24; use bool directly.
                is_prime = np.ones(n + 1, dtype=bool)
                is_prime[:2] = 0
                for i in range(2, int(n**0.5) + 1):
                    if is_prime[i]:
                        is_prime[i * 2 :: i] = 0
                prime_nums = np.flatnonzero(is_prime)
            else:
                is_prime = [True] * (n + 1)
                is_prime[0] = is_prime[1] = 0
                for i in range(2, int(n**0.5) + 1):
                    if not is_prime[i]:
                        continue
                    for j in range(i * 2, n + 1, i):
                        is_prime[j] = 0
                prime_nums = [i for i in range(2, n + 1) if is_prime[i]]
            return is_prime, prime_nums

        # NOTE(review): lru_cache on an instance method keeps the instance
        # alive for the cache's lifetime; acceptable here since a single
        # sieve object is typically reused for the whole program.
        @lru_cache(maxsize=None)
        def factorize(self, n):
            """Return {prime: exponent} for n, using the sieved primes."""
            res = defaultdict(int)
            if n < 2:
                return res
            for p in self:
                if p * p > n:
                    break
                while n % p == 0:
                    res[p] += 1
                    n //= p
                if n == 1:
                    return res
            # Whatever remains is a prime factor larger than sqrt(n).
            res[n] = 1
            return res

        def factorize_factorial(self, n):
            """Return the prime factorization of n! as {prime: exponent}."""
            res = defaultdict(int)
            for i in range(2, n + 1):
                for p, c in self.factorize(i).items():
                    res[p] += c
            return res

    @classmethod
    @lru_cache(maxsize=None)
    def gcd(cls, a, b):
        """Greatest common divisor (Euclid's algorithm, memoized)."""
        return cls.gcd(b, a % b) if b else abs(a)

    @classmethod
    def lcm(cls, a, b):
        """Least common multiple of a and b."""
        return abs(a // cls.gcd(a, b) * b)

    @staticmethod
    def find_divisors(n):
        """Return all divisors of n in ascending order (O(sqrt n))."""
        divisors = []
        for i in range(1, int(n**0.5) + 1):
            if n % i:
                continue
            divisors.append(i)
            j = n // i
            if j != i:
                divisors.append(j)
        return sorted(divisors)

    @staticmethod
    def base_convert(n, b):
        """Return the digits of n in base b, least-significant digit first.

        Negative bases are supported; remainders are normalized into
        [0, |b|).
        """
        if not n:
            return [0]
        res = []
        while n:
            n, r = divmod(n, b)
            if r < 0:
                # Normalize the remainder for negative bases.
                n += 1
                r -= b
            res.append(r)
        return res
class Combinatorics:
    """Binomial coefficients and combinatorial generators."""

    @classmethod
    @lru_cache(maxsize=None)
    def choose(cls, n, r, mod=None):
        """n choose r via Pascal's rule (memoized), optionally modulo *mod*."""
        if r > n or r < 0:
            return 0
        if r == 0:
            return 1
        res = cls.choose(n - 1, r, mod) + cls.choose(n - 1, r - 1, mod)
        if mod:
            res %= mod
        return res

    class CombinationsMod:
        """O(1) nCr queries modulo a prime, via precomputed factorials."""

        def __init__(self, n=2 * 10**6, mod=MOD):
            self.__mod = mod
            # fac[i] = i! mod p, ifac[i] = (i!)^-1 mod p
            self.fac, self.ifac = Algebra.generate_fac_ifac(n, mod)

        def __call__(self, n, r):
            return self.__choose(n, r)

        def __choose(self, n, r):
            # bl is 0/1: the result is forced to 0 outside 0 <= r <= n.
            bl = (0 <= r) & (r <= n)
            p = self.__mod
            return bl * self.fac[n] * self.ifac[r] % p * self.ifac[n - r] % p

        def make_nchoose_table(self, n):
            """Return [nC0, nC1, ..., nCr] mod p, where r is the bound of
            the precomputed factorial tables."""
            p = self.__mod
            # FIX: the factorial table is stored as self.fac; the original
            # read self.__fac, which was never set -> AttributeError.
            r = len(self.fac) - 1
            if using_numpy:
                n_choose = np.arange(n + 1, n - r, -1)
                n_choose[0] = 1
                n_choose = Algebra.cumprod(n_choose, p) * self.ifac % p
            else:
                n_choose = [None] * (r + 1)
                n_choose[0] = 1
                for i in range(r):
                    n_choose[i + 1] = n_choose[i] * (n - i) % p
                for i in range(1, r + 1):
                    n_choose[i] = n_choose[i] * self.ifac[i] % p
            return n_choose

    @classmethod
    def permutations(cls, a, r=None, i=0):
        """Return all length-r permutations of *a* (recursive swap scheme;
        result order is not lexicographic)."""
        a = list(a)
        n = len(a)
        if r is None:
            r = n
        res = []
        if r > n or i > r:
            return res
        if i == r:
            return [tuple(a[:r])]
        for j in range(i, n):
            a[i], a[j] = a[j], a[i]
            res += cls.permutations(a, r, i + 1)
        return res

    @staticmethod
    def combinations(a, r):
        """Yield all length-r combinations of *a* in index order
        (mirrors itertools.combinations)."""
        a = tuple(a)
        n = len(a)
        if r > n:
            return
        indices = list(range(r))
        yield a[:r]
        while True:
            # Find the rightmost index that can still be advanced.
            for i in range(r - 1, -1, -1):
                if indices[i] != i + n - r:
                    break
            else:
                return
            indices[i] += 1
            for j in range(i + 1, r):
                indices[j] = indices[j - 1] + 1
            yield tuple(a[i] for i in indices)
class DP:
    """Dynamic-programming helpers."""

    @staticmethod
    def LIS(a):
        """Patience-sorting table for the longest strictly increasing
        subsequence of *a*.

        Returns a list of len(a) slots; the count of non-inf entries is
        the LIS length.
        """
        table = [float("inf")] * len(a)
        for value in a:
            table[bi_l(table, value)] = value
        return table
class String:
    @staticmethod
    def z_algorithm(s):
        """Z-algorithm in O(n): a[i] = length of the longest common prefix
        of s and s[i:]; a[0] is defined as n (the whole string)."""
        n = len(s)
        a = [0] * n
        a[0] = n
        l = r = -1  # [l, r) is the rightmost matched "Z-box" found so far
        for i in range(1, n):
            if r >= i:
                # i lies inside the current Z-box: reuse a[i - l], capped
                # at the box's right edge.
                a[i] = min(a[i - l], r - i)
            # Extend the match naively past whatever was reused.
            while i + a[i] < n and s[i + a[i]] == s[a[i]]:
                a[i] += 1
            if i + a[i] >= r:
                # This match reaches further right: adopt it as the Z-box.
                l, r = i, i + a[i]
        return a
class GeometryTopology:
class Graph:
class __Edge:
def __init__(self, weight=1, capacity=1, **args):
self.weight = weight
self.capacity = capacity
def __str__(self):
return f"weight: {self.weight}, cap: {self.capacity}"
class __Node:
def __init__(self, **args):
pass
def __init__(self, n=0):
self.__N = n
self.nodes = [None] * n
self.edges = [{} for _ in range(n)]
def add_node_info(self, v, **args):
self.nodes[v] = self.__Node(**args)
def add_edge(self, u, v, update=False, **args):
if not update and v in self.edges[u]:
return
self.edges[u][v] = self.__Edge(**args)
def get_size(self):
return self.__N
def bfs(self, src=0):
n = self.__N
self.depth = self.lv = lv = [None] * n
lv[src] = 0 # depth in tree, or level in general graph.
self.dist = dist = [inf] * n
dist[src] = 0 # dist for only tree.
self.parent = par = [None] * n
par[src] = src
q = deque([src])
while q:
u = q.popleft()
for v, e in self.edges[u].items():
if e.capacity == 0 or lv[v] is not None:
continue
lv[v], dist[v], par[v] = lv[u] + 1, dist[u] + e.weight, u
q.append(v)
return dist
def dinic(self, src, sink):
def flow_to_sink(u, flow_in):
if u == sink:
return flow_in
flow = 0
for v, e in self.edges[u].items():
if e.capacity == 0 or self.lv[v] <= self.lv[u]:
continue
f = flow_to_sink(v, min(flow_in, e.capacity))
if not f:
continue
self.edges[u][v].capacity -= f
if u in self.edges[v]:
self.edges[v][u].capacity += f
else:
self.add_edge(v, u, capacity=f)
flow_in -= f
flow += f
return flow
flow = 0
while True:
self.bfs(src)
if self.lv[sink] is None:
return flow
flow += flow_to_sink(src, inf)
def ford_fulkerson(self):
pass
def push_relabel(self):
pass
def floyd_warshall(self):
n = self.__N
d = [[inf] * n for _ in range(n)]
for u in range(n):
d[u][u] = 0
for v, e in self.edges[u].items():
d[u][v] = e.weight
for w in range(n):
for u in range(n):
for v in range(n):
d[u][v] = min(d[u][v], d[u][w] + d[w][v])
return d
def dijkstra(self, src, paths_cnt=False, mod=None):
dist = [inf] * self.__N
dist[src] = 0
visited = [False] * self.__N
paths = [0] * self.__N
paths[src] = 1
q = [(0, src)]
while q:
d, u = heappop(q)
if visited[u]:
continue
visited[u] = True
for v, e in self.edges[u].items():
dv = d + e.weight
if dv > dist[v]:
continue
elif dv == dist[v]:
paths[v] += paths[u]
if mod:
paths[v] %= mod
continue
paths[v], dist[v] = paths[u], dv
heappush(q, (dv, v))
if paths_cnt:
return dist, paths
else:
return dist
def astar(self, src, tgt, heuristic_func):
cost = [inf] * self.__N
q = [(heuristic_func(src, tgt), 0, src)]
while q:
_, c, u = heappop(q)
if u == tgt:
return c
if cost[u] != inf:
continue
cost[u] = c
for v, e in self.edges[u].items():
if cost[v] != inf:
continue
h = heuristic_func(v, tgt)
nc = c + e.weight
heappush(q, (h + nc, nc, v))
return inf
def bellman_ford(self, src):
n = self.__N
d = [inf] * n
d[src] = 0
for _ in range(n - 1):
for u in range(n):
for v, e in self.edges[u].items():
d[v] = min(d[v], d[u] + e.weight)
for u in range(n):
for v, e in self.edges[u].items():
if d[u] + e.weight < d[v]:
raise Exception("found negative cycle.")
return d
def bfs01(self, src=0):
d = [inf] * self.__N
d[src] = 0
q = deque([src])
while q:
u = q.popleft()
for v, e in self.edges[u].items():
dv = d[u] + e.weight
if d[v] <= dv:
continue
d[v] = dv
if e.weight:
q.append(v)
else:
q.appendleft(v)
return d
def find_ancestors(self): # tree doubling.
self.__ancestors = ancestors = [self.parent]
for _ in range(max(self.depth).bit_length()):
ancestors.append([ancestors[-1][u] for u in ancestors[-1]])
def find_dist(self, u, v):
return (
self.dist[u]
+ self.dist[v]
- 2 * self.dist[self.__find_lca(u, v)]
)
def __find_lca(self, u, v):
du, dv = self.depth[u], self.depth[v]
if du > dv:
u, v = v, u
du, dv = dv, du
d = dv - du
for i in range(d.bit_length()): # up-stream
if d >> i & 1:
v = self.__ancestors[i][v]
if v == u:
return v
for i in range(
du.bit_length() - 1, -1, -1
): # find direct child of LCA.
nu, nv = self.__ancestors[i][u], self.__ancestors[i][v]
if nu == nv:
continue
u, v = nu, nv
return self.__ancestors[0][u]
def init_dsu(self): # disjoint set union (union-find)
n = self.__N
self.parent = list(range(n))
self.rank = [0] * n
self.size = [1] * n
def find(self, u):
if self.parent[u] == u:
return u
self.parent[u] = self.find(self.parent[u])
return self.parent[u]
def unite(self, u, v):
u, v = self.find(u), self.find(v)
if u == v:
return
if self.rank[u] < self.rank[v]:
u, v = v, u
self.parent[v] = u
self.size[u] += self.size[v]
self.rank[u] = max(self.rank[u], self.rank[v] + 1)
def same(self, u, v):
return self.find(u) == self.find(v)
def scc(self): # strongly connected components
n = self.__N
visited, q, root, r = [False] * n, [], [None] * n, 0
gg = self.__class__(n)
for u in range(n):
for v in self.edges[u]:
gg.add_edge(v, u)
def dfs(u):
if visited[u]:
return
visited[u] = True
for v in self.edges[u]:
dfs(v)
q.append(u)
def rev_dfs(u, r):
if root[u] is not None:
return
root[u] = r
for v in gg.edges[u]:
rev_dfs(v, r)
for u in range(n):
dfs(u)
for u in q[::-1]:
rev_dfs(u, r)
r += 1
return root
def kruskal(self): # minimum spanning tree
n = self.__N
uf = self.__class__(n)
uf.init_dsu()
edges = sorted(
[
(u, v, e.weight)
for u in range(n)
for v, e in self.edges[u].items()
],
key=lambda x: x[2],
)
g = self.__class__(n)
d = 0
for u, v, w in edges:
if uf.same(u, v):
continue
uf.unite(u, v)
g.add_edge(u, v, weight=w)
d += w
return g, d
def prim(self, src=0, return_parent=False): # minimum spanning tree
n = self.__N
g = self.__class__(n)
parent, visited, dist = [None] * n, [False] * n, 0
q = [(0, (src, src))]
while q:
d, (w, u) = heappop(q)
if visited[u]:
continue
visited[u], parent[u] = True, w
dist += d
g.add_edge(w, u, weight=d)
for v, e in self.edges[u].items():
if not visited[v]:
heappush(q, (e.weight, (u, v)))
if return_parent:
return g, dist, parent
return g, dist
def boruvka(self): # minimum spanning tree
n = self.__N
uf = self.__class__(n)
uf.init_dsu()
g = self.__class__(n)
d = 0
def dfs(u):
if visited[u]:
return (inf, (None, None))
visited[u] = True
cand = []
for v, e in self.edges[u].items():
if uf.same(u, v):
cand.append(dfs(v))
continue
cand.append((e.weight, (u, v)))
return sorted(cand)[0]
while len(set(uf.parent)) != 1:
edges, visited = [], [False] * n
for u in range(n):
if visited[u]:
continue
edges.append(dfs(u))
for w, (u, v) in edges:
if uf.same(u, v):
continue
g.add_edge(u, v, weight=w)
uf.unite(u, v)
d += w
for u in range(n):
uf.find(u)
return g, d
def tsp(self): # traveling salesperson problem
pass
@staticmethod
def triangle_area(p0, p1, p2, signed=False):
x1, y1, x2, y2 = (
p1[0] - p0[0],
p1[1] - p0[1],
p2[0] - p0[0],
p2[1] - p0[1],
)
return (
(x1 * y2 - x2 * y1) / 2 if signed else abs(x1 * y2 - x2 * y1) / 2
)
@classmethod
def intersect(cls, seg1, seg2):
(p1, p2), (p3, p4) = seg1, seg2
t1 = cls.triangle_area(p1, p2, p3, signed=True)
t2 = cls.triangle_area(p1, p2, p4, signed=True)
t3 = cls.triangle_area(p3, p4, p1, signed=True)
t4 = cls.triangle_area(p3, p4, p2, signed=True)
return (t1 * t2 < 0) & (t3 * t4 < 0)
def cumxor(a):
    """XOR-fold all elements of *a*; returns 0 for an empty iterable."""
    acc = 0
    for x in a:
        acc ^= x
    return acc
def cumor(a):
    """Bitwise-OR-fold all elements of *a*; returns 0 for an empty iterable."""
    acc = 0
    for x in a:
        acc |= x
    return acc
def bit_count(n):
    """Return the number of set bits (popcount) of non-negative integer *n*.

    Raises:
        ValueError: if n is negative. (The original shift-loop never
        terminated for negative inputs, since ``n >>= 1`` converges to -1;
        failing fast is strictly better than hanging.)
    """
    if n < 0:
        raise ValueError("bit_count() requires a non-negative integer")
    # bin() + count runs in C and is equivalent to the manual shift loop.
    return bin(n).count("1")
class AtCoder:
class ABC001:
    """AtCoder Beginner Contest 001 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Difference of two readings.
        h1, h2 = map(int, sys.stdin.read().split())
        print(h1 - h2)

    @staticmethod
    def d():
        # Merge "HHMM-HHMM" intervals, rounded outward to 5-minute marks,
        # using an imos (difference) array over minutes of the day.
        def to_minuites(x):
            # HHMM integer -> minutes past midnight.
            q, r = divmod(x, 100)
            return 60 * q + r

        def to_hmform(x):
            # minutes past midnight -> HHMM integer.
            q, r = divmod(x, 60)
            return 100 * q + r

        n = int(sys.stdin.readline().rstrip())
        term = [0] * 2001  # difference array, one slot per minute
        for _ in range(n):
            s, e = map(
                to_minuites,
                map(int, sys.stdin.readline().rstrip().split("-")),
            )
            s = s // 5 * 5  # round start down to a multiple of 5
            e = (e + 4) // 5 * 5  # round end up to a multiple of 5
            term[s] += 1
            term[e + 1] -= 1
        for i in range(2000):
            term[i + 1] += term[i]  # prefix sums -> coverage counts
        res = []
        raining = False
        for i in range(2001):
            if term[i]:
                if not raining:
                    s = i
                    raining = True
            elif raining:
                res.append((s, i - 1))
                raining = False
        for s, e in res:
            print(f"{to_hmform(s):04}-{to_hmform(e):04}")
class ABC002:
    """AtCoder Beginner Contest 002 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Larger of two integers.
        print(max(map(int, sys.stdin.readline().split())))

    @staticmethod
    def b():
        # Strip vowels from the word.
        vowels = set("aeiou")
        print(
            "".join(
                [
                    c
                    for c in sys.stdin.readline().rstrip()
                    if c not in vowels
                ]
            )
        )

    @staticmethod
    def c():
        # Triangle area from "x1 y1 x2 y2 x3 y3".
        # Bug fix: triangle_area takes three 2-D points (plus `signed`);
        # the original splatted six scalars into it, a TypeError.
        x1, y1, x2, y2, x3, y3 = map(int, sys.stdin.readline().split())
        print(
            GeometryTopology.triangle_area((x1, y1), (x2, y2), (x3, y3))
        )

    @staticmethod
    def d():
        # Largest clique by brute force over all 2^n vertex subsets.
        n, m = map(int, sys.stdin.readline().split())
        edges = set(
            (x - 1, y - 1)
            for x, y in zip(*[map(int, sys.stdin.read().split())] * 2)
        )
        print(
            max(
                len(s)
                for i in range(1, 1 << n)
                for s in [[j for j in range(n) if i >> j & 1]]
                if all(
                    (x, y) in edges
                    for x, y in itertools.combinations(s, 2)
                )
            )
        )

    @staticmethod
    def d_2():
        # Bitmask clique check: relations[v] = neighbour mask of vertex v
        # (each vertex starts related to itself).
        n, m = map(int, sys.stdin.readline().split())
        relations = [1 << i for i in range(n)]
        for x, y in zip(*[map(int, sys.stdin.read().split())] * 2):
            # Bug fix: input vertices are 1-indexed, so shift both array
            # indices (the original wrote relations[x]/relations[y],
            # which is out of range for the last vertex).
            relations[x - 1] |= 1 << (y - 1)
            relations[y - 1] |= 1 << (x - 1)
        res = 0
        for i in range(1 << n):
            s, cnt = (1 << n) - 1, 0
            for j in range(n):
                if i >> j & 1:
                    # Bug fix: accumulate into s — the original wrote to
                    # an undefined name t, raising NameError at runtime.
                    s &= relations[j] | 1 << j
                    cnt += 1
            if s & i == i:  # every chosen vertex is adjacent to all others
                res = max(res, cnt)
        print(res)
class ABC003:
    """AtCoder Beginner Contest 003 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Average of 10000, 20000, ..., n*10000 -> (n+1)*5000.
        print((int(sys.stdin.readline().rstrip()) + 1) * 5000)

    @staticmethod
    def b():
        # Positions must match directly, or via '@' standing for any
        # character of "atcoder".
        atcoder = set("atcoder")
        s, t = sys.stdin.read().split()
        print(
            all(
                s[i] == t[i]
                or s[i] == "@"
                and t[i] in atcoder
                or t[i] == "@"
                and s[i] in atcoder
                for i in range(len(s))
            )
            and "You can win"
            or "You will lose"
        )

    @staticmethod
    def c():
        # Fold the k largest ratings in ascending order: x -> (x + r) / 2.
        n, k, *r = map(int, sys.stdin.read().split())
        print(reduce(lambda x, y: (x + y) / 2, sorted(r)[-k:], 0))
class ABC004:
    """AtCoder Beginner Contest 004 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        print(int(sys.stdin.readline().rstrip()) * 2)

    @staticmethod
    def b():
        # Rotate the 4x4 board 180°: reverse the rows and each row.
        for l in [sys.stdin.readline().rstrip() for _ in range(4)][::-1]:
            print(l[::-1])

    @staticmethod
    def c():
        # Cards 1..6 with one adjacent swap per step; the permutation
        # repeats with period 30, and the swap position cycles mod 5.
        n = int(sys.stdin.readline().rstrip()) % 30
        res = list(range(1, 7))
        for i in range(n):
            i %= 5
            res[i], res[i + 1] = res[i + 1], res[i]
        print(*res, sep="")
class ABC005:
    """AtCoder Beginner Contest 005 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        x, y = map(int, sys.stdin.readline().split())
        print(y // x)

    @staticmethod
    def b():
        n, *t = map(int, sys.stdin.read().split())
        print(min(t))

    @staticmethod
    def c():
        # Greedy matching: each arrival (b) consumes the oldest item (a)
        # produced at most t time units earlier and not after the arrival.
        t = int(sys.stdin.readline().rstrip())
        n = int(sys.stdin.readline().rstrip())
        a = [int(x) for x in sys.stdin.readline().split()]
        m = int(sys.stdin.readline().rstrip())
        b = [int(x) for x in sys.stdin.readline().split()]
        i = 0
        for p in b:
            if i == n:
                print("no")
                return
            while p - a[i] > t:  # too old: discard and advance
                i += 1
                if i == n:
                    print("no")
                    return
            if a[i] > p:  # next item appears only after this arrival
                print("no")
                return
            i += 1
        print("yes")

    @staticmethod
    def d():
        # For each query p: best sum over sub-rectangles of area <= p,
        # precomputed via 2-D prefix sums.
        n = int(sys.stdin.readline().rstrip())
        d = np.array(
            [sys.stdin.readline().split() for _ in range(n)], np.int64
        )
        s = d.cumsum(axis=0).cumsum(axis=1)  # 2-D prefix sums
        s = np.pad(s, 1)
        max_del = np.zeros((n + 1, n + 1), dtype=np.int64)
        for y in range(1, n + 1):
            for x in range(1, n + 1):
                # Best sum over all y-by-x windows (inclusion-exclusion).
                max_del[y, x] = np.amax(
                    s[y : n + 1, x : n + 1]
                    - s[0 : n - y + 1, x : n + 1]
                    - s[y : n + 1, 0 : n - x + 1]
                    + s[0 : n - y + 1, 0 : n - x + 1]
                )
        res = np.arange(n**2 + 1)[:, None]
        i = np.arange(1, n + 1)
        # res[p] = max over heights i of the best i-by-(p // i) window.
        res = max_del[i, np.minimum(res // i, n)].max(axis=1)
        q = int(sys.stdin.readline().rstrip())
        p = np.array(sys.stdin.read().split(), dtype=np.int64)
        print(*res[p], sep="\n")
class ABC006:
    """AtCoder Beginner Contest 006 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # "YES" if n contains the digit 3 or is divisible by 3.
        n = sys.stdin.readline().rstrip()
        if "3" in n:
            print("YES")
        elif int(n) % 3 == 0:
            print("YES")
        else:
            print("NO")

    @staticmethod
    def b():
        # Tribonacci term modulo 10007 via companion-matrix power.
        mod = 10007
        a = np.eye(N=3, k=-1, dtype=np.int64)
        a[0] = 1
        n = int(sys.stdin.readline().rstrip())
        a = Algebra.matrix_pow(a, n - 1, mod)
        print(a[2][0])

    @staticmethod
    def c():
        # Find non-negative counts (2-, 3-, 4-legged) with n heads, m legs.
        n, m = map(int, sys.stdin.readline().split())
        cnt = [0, 0, 0]
        if m == 1:
            cnt = [-1, -1, -1]
        else:
            if m & 1:  # odd total legs forces one 3-legged member
                m -= 3
                cnt[1] += 1
                n -= 1
            cnt[2] = m // 2 - n
            cnt[0] = n - cnt[2]
        if cnt[0] < 0 or cnt[1] < 0 or cnt[2] < 0:
            print(-1, -1, -1)
        else:
            print(*cnt, sep=" ")

    @staticmethod
    def d():
        # Minimum moves = n - length of the longest increasing subsequence.
        n, *c = map(int, sys.stdin.read().split())
        lis = [inf] * n
        for x in c:
            lis[bi_l(lis, x)] = x
        print(n - bi_l(lis, inf))
class ABC007:
    """AtCoder Beginner Contest 007 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        n = int(sys.stdin.readline().rstrip())
        print(n - 1)

    @staticmethod
    def b():
        # Any string lexicographically smaller than s; only "a" has none.
        s = sys.stdin.readline().rstrip()
        if s == "a":
            print(-1)
        else:
            print("a")

    @staticmethod
    def c():
        # Grid BFS from start to goal; no bounds checks, so the maze
        # border is assumed to be walls ('#').
        r, c = map(int, sys.stdin.readline().split())
        sy, sx = map(int, sys.stdin.readline().split())
        gy, gx = map(int, sys.stdin.readline().split())
        sy -= 1
        sx -= 1
        gy -= 1
        gx -= 1
        maze = [sys.stdin.readline().rstrip() for _ in range(r)]
        queue = deque([(sy, sx)])
        dist = np.full((r, c), np.inf)
        dist[sy, sx] = 0
        while queue:
            y, x = queue.popleft()
            for i, j in [(-1, 0), (1, 0), (0, -1), (0, 1)]:
                i += y
                j += x
                if maze[i][j] == "#" or dist[i, j] != np.inf:
                    continue
                dist[i, j] = dist[y, x] + 1
                queue.append((i, j))
        print(int(dist[gy, gx]))

    @staticmethod
    def d():
        # Count numbers in [a, b] containing a digit 4 or 9 (digit DP).
        ng = set([4, 9])

        def count(d):
            # Digits strictly below d excluding 4 and 9.
            return d if d <= 4 else d - 1

        def f(n):
            # Numbers in [1, n] containing a forbidden digit.
            x = [int(d) for d in str(n)]
            flg = True  # prefix so far is free of 4 and 9
            dp = 0  # counts numbers below the prefix with no forbidden digit
            for d in x:
                dp = dp * 8 + flg * count(d)
                if d in ng:
                    flg = False
            return n - (dp + flg)

        a, b = map(int, sys.stdin.readline().split())
        print(f(b) - f(a - 1))
class ABC008:
    """AtCoder Beginner Contest 008 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        s, t = map(int, sys.stdin.readline().split())
        print(t - s + 1)

    @staticmethod
    def b():
        # Most frequent name (ties broken by stable sort order).
        n, *s = sys.stdin.read().split()
        res = defaultdict(int)
        for name in s:
            res[name] += 1
        print(sorted(res.items(), key=lambda x: x[1])[-1][0])

    @staticmethod
    def c():
        # c[i] = how many values in a divide a[i] (itself included);
        # answer accumulates ((c+1)//2) / c per element.
        n, *a = map(int, sys.stdin.read().split())
        a = np.array(a)
        c = n - np.count_nonzero(a[:, None] % a, axis=1)
        print(np.sum((c + 1) // 2 / c))

    @staticmethod
    def d():
        # Memoised recursion over sub-rectangles: pick a point, collect its
        # cross, recurse into the four remaining quadrants.
        w, h, n, *xy = map(int, sys.stdin.read().split())
        (*xy,) = zip(*([iter(xy)] * 2))

        @lru_cache(maxsize=None)
        def count(x1, y1, x2, y2):
            res = 0
            for x, y in xy:
                if not (x1 <= x <= x2 and y1 <= y <= y2):
                    continue
                cnt = (x2 - x1) + (y2 - y1) + 1  # cells of the cross at (x, y)
                cnt += count(x1, y1, x - 1, y - 1)
                cnt += count(x1, y + 1, x - 1, y2)
                cnt += count(x + 1, y1, x2, y - 1)
                cnt += count(x + 1, y + 1, x2, y2)
                res = max(res, cnt)
            return res

        print(count(1, 1, w, h))
class ABC009:
    """AtCoder Beginner Contest 009 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        n = int(sys.stdin.readline().rstrip())
        print((n + 1) // 2)

    @staticmethod
    def b():
        # Second-largest distinct value.
        n, *a = map(int, sys.stdin.read().split())
        print(sorted(set(a))[-2])

    @staticmethod
    def c():
        # Lexicographically smallest string reachable with total swap
        # cost <= k; each position is swapped at most once (cost 0 after).
        n, k = map(int, sys.stdin.readline().split())
        s = list(sys.stdin.readline().rstrip())
        cost = [1] * n
        r = k  # remaining budget
        for i in range(n - 1):
            q = []
            for j in range(i + 1, n):
                if s[j] < s[i] and cost[i] + cost[j] <= r:
                    heappush(q, (s[j], cost[i] + cost[j], -j))
            if not q:
                continue
            _, c, j = heappop(q)  # smallest char, cheapest, rightmost
            j = -j
            s[i], s[j] = s[j], s[i]
            r -= c
            cost[i] = cost[j] = 0
        print("".join(s))

    @staticmethod
    def d():
        # Linear recurrence where AND plays * and XOR plays +, solved by
        # powering the companion matrix with bitwise matrix products.
        k, m = map(int, sys.stdin.readline().split())
        a = np.array([int(x) for x in sys.stdin.readline().split()])
        c = np.array([int(x) for x in sys.stdin.readline().split()])
        mask = (1 << 32) - 1
        d = np.eye(k, k, -1, dtype=np.uint32) * mask  # companion matrix
        d[0] = c
        if m <= k:
            print(a[m - 1])
            return
        # print(Algebra.bitwise_mat_pow(d, m-k))
        # print(Algebra.bitwise_dot(Algebra.bitwise_mat_pow(d, m-k), a[::-1].reshape(-1, 1))[0].item())
        print(
            Algebra.bitwise_dot(
                Algebra.bitwise_mat_pow(d, m - k), a[::-1].reshape(-1, 1)
            )[0][0]
        )
class ABC010:
    """AtCoder Beginner Contest 010 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        print(sys.stdin.readline().rstrip() + "pp")

    @staticmethod
    def b():
        # For each x, count decrements until x is odd and x % 3 != 2.
        n, *a = map(int, sys.stdin.read().split())
        tot = 0
        for x in a:
            c = 0
            while x % 2 == 0 or x % 3 == 2:
                x -= 1
                c += 1
            tot += c
        print(tot)

    @staticmethod
    def c():
        # Feasible iff some waypoint keeps total distance within v * t.
        sx, sy, gx, gy, t, v, n, *xy = map(int, sys.stdin.read().split())
        x, y = np.array(xy).reshape(-1, 2).T

        def dist(x1, y1, x2, y2):
            return np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2)

        ans = (
            "YES"
            if (dist(sx, sy, x, y) + dist(x, y, gx, gy) <= v * t).any()
            else "NO"
        )
        print(ans)

    @staticmethod
    def d():
        # Min cut from 0 to a virtual sink n via scipy max-flow.
        n, g, e = map(int, sys.stdin.readline().split())
        p = [int(x) for x in sys.stdin.readline().split()]
        x, y = [], []
        for _ in range(e):
            a, b = map(int, sys.stdin.readline().split())
            x.append(a)
            y.append(b)
            x.append(b)
            y.append(a)
        for a in p:
            x.append(a)
            y.append(n)  # designated nodes feed the virtual sink n
        if not x:
            print(0)
            return
        c = [1] * len(x)
        min_cut = maximum_flow(
            csr_matrix((c, (x, y)), (n + 1, n + 1)), source=0, sink=n
        ).flow_value
        print(min_cut)

    @staticmethod
    def d_2():
        # Same as d() but using networkx.
        n, g, e = map(int, sys.stdin.readline().split())
        graph = nx.DiGraph()
        graph.add_nodes_from(range(n + 1))
        for p in [int(x) for x in sys.stdin.readline().split()]:
            graph.add_edge(p, n, capacity=1)
        for _ in range(e):
            a, b = map(int, sys.stdin.readline().split())
            graph.add_edge(a, b, capacity=1)
            graph.add_edge(b, a, capacity=1)
        print(nx.minimum_cut_value(graph, 0, n))

    @staticmethod
    def d_3():
        # Same as d() but using the local Dinic implementation.
        n, q, m = map(int, sys.stdin.readline().split())
        g = GeometryTopology.Graph(n + 1)
        # for i in range(n+1): g.add_node(i)
        for p in [int(x) for x in sys.stdin.readline().split()]:
            g.add_edge(p, n, capacity=1)
        for a, b in zip(*[map(int, sys.stdin.read().split())] * 2):
            g.add_edge(a, b, capacity=1)
            g.add_edge(b, a, capacity=1)
        print(g.dinic(0, n))
class ABC011:
    """AtCoder Beginner Contest 011 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Next month (1..12, wrapping).
        n = int(sys.stdin.readline().rstrip())
        print(n % 12 + 1)

    @staticmethod
    def b():
        # Capitalise: first letter upper, rest lower.
        s = sys.stdin.readline().rstrip()
        print(s[0].upper() + s[1:].lower())

    @staticmethod
    def c():
        # Greedily subtract 3, 2 or 1 while avoiding NG values; at most
        # 100 moves are allowed.
        n, *ng = map(int, sys.stdin.read().split())
        ng = set(ng)
        if n in ng:
            print("NO")
        else:
            r = 100
            while n > 0:
                if r == 0:
                    print("NO")
                    return
                for i in range(3, 0, -1):
                    if (n - i) in ng:
                        continue
                    n -= i
                    r -= 1
                    break
                else:  # every step lands on an NG value
                    print("NO")
                    return
            print("YES")

    @staticmethod
    def d():
        # Probability of ending at (x, y) after n steps of length d,
        # summed over split of leftover moves, with binomial coefficients.
        n, d, x, y = map(int, sys.stdin.read().split())
        x, y = abs(x), abs(y)
        if x % d or y % d:
            print(0)
            return
        x, y = x // d, y // d
        r = n - (x + y)  # leftover steps must cancel pairwise
        if r < 0 or r & 1:
            print(0)
            return
        res = 0
        half_p = pow(1 / 2, n)
        for d in range(r // 2 + 1):  # 0 <= d <= r//2, south
            south, north = d, y + d
            west = (r - 2 * d) // 2
            res += (
                half_p
                * comb(n, south, exact=True)
                * comb(n - south, north, exact=True)
                * comb(n - south - north, west, exact=True)
                * half_p
            )
        print(res)
class ABC012:
    """AtCoder Beginner Contest 012 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Swap and print.
        a, b = map(int, sys.stdin.readline().split())
        print(b, a)

    @staticmethod
    def b():
        # Seconds -> HH:MM:SS.
        n = int(sys.stdin.readline().rstrip())
        h, n = divmod(n, 3600)
        m, s = divmod(n, 60)
        print(f"{h:02}:{m:02}:{s:02}")

    @staticmethod
    def c():
        # All single-digit pairs i x j with i * j == 2025 - n.
        n = 2025 - int(sys.stdin.readline().rstrip())
        res = []
        for i in range(1, 10):
            if n % i != 0 or n // i > 9:
                continue
            res.append(f"{i} x {n//i}")
        print(*sorted(res), sep="\n")

    @staticmethod
    def d():
        # Bus stop minimising its farthest shortest-path distance
        # (all-pairs Floyd-Warshall via scipy).
        n, m, *abt = map(int, sys.stdin.read().split())
        a, b, t = np.array(abt).reshape(m, 3).T
        res = shortest_path(
            csr_matrix((t, (a - 1, b - 1)), (n, n)),
            method="FW",
            directed=False,
        )
        print(res.max(axis=-1).min().astype(np.int64))

    @staticmethod
    def d_2():
        # Same as d() but with the local Floyd-Warshall implementation.
        n, m, *abt = map(int, sys.stdin.read().split())
        g = GeometryTopology.Graph(n)
        for a, b, t in zip(*[iter(abt)] * 3):
            a -= 1
            b -= 1
            g.add_edge(a, b, weight=t)
            g.add_edge(b, a, weight=t)
        print(min(max(d) for d in g.floyd_warshall()))
class ABC013:
    """AtCoder Beginner Contest 013 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Letter A..E -> 1..5.
        print(ord(sys.stdin.readline().rstrip()) - ord("A") + 1)

    @staticmethod
    def b():
        # Wrap-around distance between two digits 0..9.
        a, b = map(int, sys.stdin.read().split())
        d = abs(a - b)
        print(min(d, 10 - d))

    @staticmethod
    def c():
        # Vectorised minimisation of a*x + c*y over all y, with the
        # required x derived from the survival condition and clamped
        # to [0, n - y].
        n, h, a, b, c, d, e = map(int, sys.stdin.read().split())
        y = np.arange(n + 1)
        x = (n * e - h - (d + e) * y) // (b + e) + 1
        np.maximum(x, 0, out=x)
        np.minimum(x, n - y, out=x)
        print(np.amin(a * x + c * y))

    @staticmethod
    def d():
        # Apply the amidakuji permutation d times via binary exponentiation.
        n, m, d, *a = map(int, sys.stdin.read().split())
        res = list(range(n))

        def swap(i, j):
            res[i], res[j] = res[j], res[i]

        for i in a[::-1]:  # build the one-pass permutation
            swap(i - 1, i)
        res = np.array(res)

        def binary_method(a, p):
            # Compose the permutation a with itself p times.
            b = np.arange(n)
            while p:
                if p & 1:
                    b = a[b]
                p >>= 1
                a = a[a]
            return b

        print(*(binary_method(res, d) + 1), sep="\n")
class ABC014:
    """AtCoder Beginner Contest 014 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Round a up to a multiple of b, then take the shortfall.
        a, b = map(int, sys.stdin.read().split())
        print((a + b - 1) // b * b - a)

    @staticmethod
    def b():
        # Sum of values selected by bitmask x.
        n, x, *a = map(int, sys.stdin.read().split())
        print(sum(a[i] for i in range(n) if x >> i & 1))

    @staticmethod
    def c():
        # Max interval overlap via an imos (difference) array.
        n, *ab = map(int, sys.stdin.read().split())
        a, b = np.array(ab).reshape(n, 2).T
        res = np.zeros(10**6 + 2, dtype=np.int64)
        np.add.at(res, a, 1)
        np.subtract.at(res, b + 1, 1)
        np.cumsum(res, out=res)
        print(res.max())

    @staticmethod
    def d():
        # Cycle created by adding edge (a, b) to the tree has length
        # dist(a, b) + 1; distances via ancestor tables (LCA).
        n = int(sys.stdin.readline().rstrip())
        g = GeometryTopology.Graph(n)
        for _ in range(n - 1):
            x, y = map(int, sys.stdin.readline().split())
            x -= 1
            y -= 1
            g.add_edge(x, y, weight=1)
            g.add_edge(y, x, weight=1)
        g.bfs(0)
        g.find_ancestors()
        q, *ab = map(int, sys.stdin.read().split())
        for a, b in zip(*[iter(ab)] * 2):
            a -= 1
            b -= 1
            print(g.find_dist(a, b) + 1)
class ABC015:
    """AtCoder Beginner Contest 015 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Longer of the two strings.
        a, b = sys.stdin.read().split()
        print(a if len(a) > len(b) else b)

    @staticmethod
    def b():
        # Ceil of the mean of the non-zero entries.
        # NOTE(review): the int8 cast assumes the result fits in [-128, 127]
        # — confirm against the problem's constraints.
        n, *a = map(int, sys.stdin.read().split())
        a = np.array(a)
        print(
            np.ceil(
                a[np.nonzero(a)[0]].sum() / np.count_nonzero(a)
            ).astype(np.int8)
        )

    @staticmethod
    def c():
        # Can one value per row be chosen so the XOR of all is zero?
        # Brute force by expanding the XOR combinations row by row.
        n, k, *t = map(int, sys.stdin.read().split())
        t = np.array(t).reshape(n, k)
        x = np.zeros((1, 1), dtype=np.int8)
        for i in range(n):
            x = x.reshape(-1, 1) ^ t[i]
        print("Found" if np.count_nonzero(x == 0) > 0 else "Nothing")

    @staticmethod
    def d():
        # Knapsack capped at k items (2-D DP over item count and width).
        w, n, k, *ab = map(int, sys.stdin.read().split())
        dp = np.zeros((k + 1, w + 1), dtype=np.int32)
        for a, b in zip(*[iter(ab)] * 2):
            np.maximum(dp[1:, a:], dp[:-1, :-a] + b, out=dp[1:, a:])
        print(dp[k][w])
class ABC016:
    """AtCoder Beginner Contest 016 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        m, d = map(int, sys.stdin.readline().split())
        print("YES" if m % d == 0 else "NO")

    @staticmethod
    def b():
        # Decide whether c equals a+b, a-b, both, or neither.
        a, b, c = map(int, sys.stdin.readline().split())
        f1, f2 = a + b == c, a - b == c
        if f1 & f2:
            print("?")
        elif f1 & (~f2):
            print("+")
        elif (~f1) & f2:
            print("-")
        else:
            print("!")

    @staticmethod
    def c():
        # Friends-of-friends per person with adjacency bitmasks:
        # OR the masks of direct friends, then mask out self and friends.
        n, _, *ab = map(int, sys.stdin.read().split())
        f = [0] * n
        for a, b in zip(*[iter(ab)] * 2):
            a -= 1
            b -= 1
            f[a] |= 1 << b
            f[b] |= 1 << a
        res = [
            bit_count(
                cumor(f[j] for j in range(n) if f[i] >> j & 1)
                & ~(f[i] | 1 << i)
            )
            for i in range(n)
        ]
        print(*res, sep="\n")

    @staticmethod
    def d():
        # Pieces after cutting = (crossings with the polygon boundary) / 2 + 1;
        # crossings computed vectorised over all edges at once.
        sx, sy, gx, gy = map(int, sys.stdin.readline().split())
        seg1 = ((sx, sy), (gx, gy))
        n = int(sys.stdin.readline().rstrip())
        p1 = (
            np.array(sys.stdin.read().split(), dtype=np.int64)
            .reshape(n, 2)
            .T
        )
        p2 = np.hstack((p1[:, 1:], p1[:, :1]))  # next vertex of each edge
        seg2 = (p1, p2)
        print(
            np.count_nonzero(GeometryTopology.intersect(seg1, seg2)) // 2
            + 1
        )
class ABC017:
    """AtCoder Beginner Contest 017 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Sum of score // 10 * weight over the three (score, weight) rows.
        s, e = (
            np.array(sys.stdin.read().split(), dtype=np.int16)
            .reshape(3, 2)
            .T
        )
        print((s // 10 * e).sum())

    @staticmethod
    def b():
        # Can s be segmented into the tokens "ch", "o", "k", "u"?
        choku_tail = set("ch, o, k, u".split(", "))

        def is_choku(s):
            # Recursive match from the right end, 1 or 2 chars at a time.
            if s == "":
                return True
            if len(s) >= 1 and (s[-1] in choku_tail) and is_choku(s[:-1]):
                return True
            if len(s) >= 2 and (s[-2:] in choku_tail) and is_choku(s[:-2]):
                return True
            return False

        print("YES" if is_choku(sys.stdin.readline().rstrip()) else "NO")

    @staticmethod
    def c():
        # Total score minus the minimum column coverage: score[j] is the
        # summed value of entries covering column j (imos array).
        n, m, *lrs = map(int, sys.stdin.read().split())
        l, r, s = np.array(lrs).reshape(n, 3).T
        score = np.zeros((m + 1,), dtype=np.int32)
        np.add.at(score, l - 1, s)
        np.subtract.at(score, r, s)
        np.cumsum(score, out=score)
        print(s.sum() - score[:m].min())

    @staticmethod
    def d():
        # Count partitions of the sequence into runs without repeated
        # values: dp[i] = sum of dp over valid run starts (sliding window).
        n, m, *f = map(int, sys.stdin.read().split())
        prev = [0] * (n + 1)
        tmp = defaultdict(int)
        for i in range(n):
            prev[i + 1] = tmp[f[i]]  # last position where f[i] appeared
            tmp[f[i]] = i + 1
        dp = [0] * (n + 1)
        dp[0] = 1
        l, s = 0, dp[0]  # window start and windowed dp sum
        for i in range(1, n + 1):
            while l < prev[i]:
                s = (s - dp[l]) % MOD
                l += 1
            dp[i] = s
            s = (s + dp[i]) % MOD
        print(dp[n])
class ABC018:
    """AtCoder Beginner Contest 018 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Rank the three scores (1 = highest), printed in input order.
        (*a,) = map(int, sys.stdin.read().split())
        a = sorted(enumerate(a), key=lambda x: -x[1])
        res = [None] * 3
        for i in range(3):
            res[a[i][0]] = i + 1
        print(*res, sep="\n")

    @staticmethod
    def b():
        # Reverse substring [l, r] for each query, in order.
        s = sys.stdin.readline().rstrip()
        n, *lr = map(int, sys.stdin.read().split())
        for l, r in zip(*[iter(lr)] * 2):
            l -= 1
            r -= 1
            s = s[:l] + s[l : r + 1][::-1] + s[r + 1 :]
        print(s)

    @staticmethod
    def c():
        # Count cells whose taxicab distance to the nearest 'x' is >= k,
        # via four directional relaxation sweeps over the padded grid.
        r, c, k = map(int, sys.stdin.readline().split())
        s = np.array([list(s) for s in sys.stdin.read().split()])
        s = np.pad(s, 1, constant_values="x")
        a = np.zeros_like(s, dtype=np.float64)
        a[s == "o"] = np.inf  # 'x' cells stay 0, 'o' cells start unbounded
        for i in range(1, r + 1):
            np.minimum(a[i - 1, :] + 1, a[i, :], out=a[i, :])
        for i in range(r, 0, -1):
            np.minimum(a[i + 1, :] + 1, a[i, :], out=a[i, :])
        for j in range(1, c + 1):
            np.minimum(a[:, j - 1] + 1, a[:, j], out=a[:, j])
        for j in range(c, 0, -1):
            np.minimum(a[:, j + 1] + 1, a[:, j], out=a[:, j])
        print(np.count_nonzero(a >= k))

    @staticmethod
    def c_2():
        # Same as c() using scipy's taxicab distance transform.
        r, c, k = map(int, sys.stdin.readline().split())
        s = np.array([list(s) for s in sys.stdin.read().split()])
        s = np.pad(s, 1, constant_values="x")
        a = (s == "o").astype(np.int16)
        a = distance_transform_cdt(a, metric="taxicab")
        print(np.count_nonzero(a >= k))

    @staticmethod
    def d():
        # Brute-force the p-subset of one side; for each, take the best q
        # scores on the other side of the happiness matrix.
        n, m, p, q, r, *xyz = map(int, sys.stdin.read().split())
        x, y, z = np.array(xyz).reshape(r, 3).T
        h = np.zeros((n, m), dtype=np.int32)  # pairwise happiness
        h[x - 1, y - 1] = z
        g = np.array([*itertools.combinations(range(n), p)])
        print(np.sort(h[g].sum(axis=1), axis=1)[:, -q:].sum(axis=1).max())
class ABC019:
    """AtCoder Beginner Contest 019 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Median of three values.
        (*a,) = map(int, sys.stdin.readline().split())
        print(sorted(a)[1])

    @staticmethod
    def b():
        # Run-length encode the string.
        s = sys.stdin.readline().rstrip() + "$"  # sentinel flushes the last run
        cnt = 0
        prev = "$"
        t = ""
        for c in s:
            if c == prev:
                cnt += 1
                continue
            t += prev + str(cnt)
            prev = c
            cnt = 1
        print(t[2:])  # drop the leading "$0" produced by the sentinel prev

    @staticmethod
    def c():
        # Number of distinct odd cores (values after stripping factors of 2).
        n, *a = map(int, sys.stdin.read().split())
        res = set()
        for x in a:
            while not x & 1:
                x >>= 1
            res.add(x)
        print(len(res))

    @staticmethod
    def d():
        # Interactive tree diameter: farthest vertex from 1, then the
        # farthest distance from that vertex.
        def inquire(u, v):
            # Fix: the f-string is already fully formatted — the original
            # chained a redundant no-op .format(u, v) onto it.
            print(f"? {u} {v}", flush=True)
            return int(sys.stdin.readline().rstrip())

        n = int(sys.stdin.readline().rstrip())
        u = sorted([(inquire(1, v), v) for v in range(2, n + 1)])[-1][1]
        d = max((inquire(u, v)) for v in range(1, n + 1) if u != v)
        print(f"! {d}")
class ABC020:
    """AtCoder Beginner Contest 020 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        print(
            "ABC"
            if int(sys.stdin.readline().rstrip()) == 1
            else "chokudai"
        )

    @staticmethod
    def b():
        # Concatenate the two decimal strings, then double.
        a, b = sys.stdin.readline().split()
        print(int(a + b) * 2)

    @staticmethod
    def c():
        # Largest black-cell cost x such that S -> G is still reachable
        # within t: binary search on x, Dijkstra per candidate.
        h, w, t = map(int, sys.stdin.readline().split())
        s = [list(s) for s in sys.stdin.read().split()]
        for i in range(h):
            for j in range(w):
                if s[i][j] == "S":
                    sy, sx = i, j
                if s[i][j] == "G":
                    gy, gx = i, j
        s[sy][sx] = s[gy][gx] = "."
        source, target = sy * w + sx, gy * w + gx

        def min_time(x):
            # Shortest S -> G time when entering a black cell costs x.
            # Fix: removed an unreachable second `return` (an A* variant)
            # that followed this function's return, together with its
            # now-unused heuristic helper and stale commented-out code.
            g = GeometryTopology.Graph(h * w)
            for i in range(h):
                for j in range(w):
                    u = i * w + j
                    if i > 0:
                        g.add_edge(
                            u,
                            (i - 1) * w + j,
                            weight=(1 if s[i - 1][j] == "." else x),
                        )
                    if i < h - 1:
                        g.add_edge(
                            u,
                            (i + 1) * w + j,
                            weight=(1 if s[i + 1][j] == "." else x),
                        )
                    if j > 0:
                        g.add_edge(
                            u,
                            i * w + j - 1,
                            weight=(1 if s[i][j - 1] == "." else x),
                        )
                    if j < w - 1:
                        g.add_edge(
                            u,
                            i * w + j + 1,
                            weight=(1 if s[i][j + 1] == "." else x),
                        )
            return g.dijkstra(source)[target]

        def binary_search():
            # Largest x whose travel time still fits within t.
            lo, hi = 1, t + 1
            while lo + 1 < hi:
                x = (lo + hi) // 2
                if min_time(x) > t:
                    hi = x
                else:
                    lo = x
            return lo

        print(binary_search())

    @staticmethod
    def d():
        # Sum of LCM(i, k) for i = 1..n, modulo MOD: per divisor d of k,
        # sum multiples of d, then subtract counts of larger divisors so
        # each i is attributed to gcd(i, k) exactly.
        n, k = map(int, sys.stdin.readline().split())
        div = sorted(NumberTheory.find_divisors(k))
        l = len(div)
        s = [0] * l
        for i, d in enumerate(div):
            # Sum of multiples of d up to n, times d, modulo MOD.
            s[i] = (1 + n // d) * (n // d) // 2 * d % MOD
        for i in range(l - 1, -1, -1):
            for j in range(i + 1, l):
                if div[j] % div[i]:
                    continue
                s[i] = (s[i] - s[j]) % MOD
        print(
            sum(s[i] * k // div[i] % MOD for i in range(l)) % MOD
        )  # ans is LCM.
class ABC021:
    """AtCoder Beginner Contest 021 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Express n (< 32) as a sum of distinct powers of two.
        n = int(sys.stdin.readline().rstrip())
        s = [1 << i for i in range(5) if n >> i & 1]
        print(len(s), *s, sep="\n")

    @staticmethod
    def b():
        # The route is valid iff no vertex (endpoints included) repeats.
        n, a, b, k, *p = map(int, sys.stdin.read().split())
        print("YES" if len(set(p) | set([a, b])) == k + 2 else "NO")

    @staticmethod
    def c():
        # Count shortest a -> b paths by repeated adjacency-matrix
        # multiplication until b is first reached.
        n, a, b, m, *xy = map(int, sys.stdin.read().split())
        x, y = np.array(xy).reshape(m, 2).T - 1
        a -= 1
        b -= 1
        g = csgraph_to_dense(
            csr_matrix((np.ones(m), (x, y)), (n, n), dtype=np.int8)
        )
        g = np.logical_or(g, g.T)  # make undirected
        paths = np.zeros(n, dtype=np.int64).reshape(-1, 1)
        paths[a, 0] = 1
        while not paths[b, 0]:
            paths = np.dot(g, paths) % MOD
        print(paths[b, 0])

    @staticmethod
    def c_2():
        # Same as c() via Dijkstra with shortest-path counting.
        n, a, b, m, *xy = map(int, sys.stdin.read().split())
        a -= 1
        b -= 1
        g = GeometryTopology.Graph()
        for x, y in zip(*[iter(xy)] * 2):
            x -= 1
            y -= 1
            g.add_edge(x, y, weight=1)
            g.add_edge(y, x, weight=1)
        dist, paths = g.dijkstra(a, paths_cnt=True, mod=MOD)
        print(paths[b])

    @staticmethod
    def d():
        # Non-decreasing length-k sequences from 1..n: C(n + k - 1, k).
        n, k = map(int, sys.stdin.read().split())
        cn = Combinatorics.CombinationsMod()
        print(cn(n + k - 1, k))
class ABC022:
    """AtCoder Beginner Contest 022 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Days whose prefix-sum weight lies within [s, t].
        n, s, t, *a = map(int, sys.stdin.read().split())
        a = np.array(a)
        np.cumsum(a, out=a)
        print(((s <= a) & (a <= t)).sum())

    @staticmethod
    def b():
        # Repeats of already-seen kinds: total count minus distinct kinds.
        n, *a = map(int, sys.stdin.read().split())
        c = Counter(a)
        print(sum(c.values()) - len(c))

    @staticmethod
    def c():
        # Shortest cycle through vertex 0: direct edges 0-u and 0-v plus
        # the best u-v path avoiding vertex 0. Zeroing row/column 0 removes
        # the vertex, since scipy treats zero entries of a dense graph as
        # absent edges.
        n, m, *uvl = map(int, sys.stdin.read().split())
        u, v, l = np.array(uvl).reshape(m, 3).T
        u -= 1
        v -= 1
        g = csgraph_to_dense(csr_matrix((l, (u, v)), (n, n)))
        g += g.T
        g[g == 0] = np.inf
        dist0 = g[0].copy()  # direct edge weights from vertex 0
        g[0] = 0
        g[:, 0] = 0
        dist = shortest_path(g, method="FW", directed=False)
        u, v = np.array([*itertools.combinations(range(1, n), 2)]).T
        res = (dist0[u] + dist[u, v] + dist0[v]).min()
        print(-1 if res == np.inf else int(res))

    @staticmethod
    def d():
        # Scale factor between two point sets: ratio of their summed
        # distances to their respective centroids.
        n, *ab = map(int, sys.stdin.read().split())
        c = np.array(ab).reshape(2, n, 2)
        g = c.mean(axis=1)  # centroid of each set
        d = np.sqrt(((c - g[:, None, :]) ** 2).sum(axis=-1)).sum(axis=1)
        print(d[1] / d[0])
class ABC023:
    """AtCoder Beginner Contest 023 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Digit sum of a two-digit-ish number via divmod.
        print(sum(divmod(int(sys.stdin.readline().rstrip()), 10)))

    @staticmethod
    def b():
        # Build the canonical string of length n outward from "b", then
        # compare with s.
        n, s = sys.stdin.read().split()
        n = int(n)
        t = "b"
        for i in range(n // 2):
            if i % 3 == 0:
                t = "a" + t + "c"
            elif i % 3 == 1:
                t = "c" + t + "a"
            else:
                t = "b" + t + "b"
        print(n // 2 if t == s else -1)

    @staticmethod
    def b_2():
        # Same as b() but checks characters in place without building t.
        n, s = sys.stdin.read().split()
        n = int(n)
        if n & 1 ^ 1:  # even length can never match
            print(-1)
            return
        a = list("abc")
        i = (1 - n // 2) % 3  # expected letter index at position 0
        for c in s:
            if c != a[i]:
                print(-1)
                return
            i = (i + 1) % 3
        print(n // 2)

    @staticmethod
    def c():
        # Cells with exactly k candies on their row + column, corrected
        # for cells that themselves hold a candy (counted once too many).
        h, w, k, n, *rc = map(int, sys.stdin.read().split())
        r, c = np.array(rc).reshape(n, 2).T - 1
        rb = np.bincount(r, minlength=h)  # candies per row
        cb = np.bincount(c, minlength=w)  # candies per column
        rbb = np.bincount(rb, minlength=k + 1)  # rows grouped by count
        cbb = np.bincount(cb, minlength=k + 1)  # columns grouped by count
        tot = (rbb[: k + 1] * cbb[k::-1]).sum()  # pairs summing to k
        real = np.bincount(rb[r] + cb[c] - 1, minlength=k + 1)
        print(tot - real[k - 1] + real[k])

    @staticmethod
    def d():
        # Minimise the maximum popped height: binary search on the answer.
        n, *hs = map(int, sys.stdin.read().split())
        h, s = np.array(hs).reshape(n, 2).T
        t = np.arange(n)

        def is_ok(x):
            # Feasible iff sorted pop deadlines admit one pop per second.
            return np.all(np.sort((x - h) // s) >= t)

        def binary_search():
            lo, hi = 0, 10**14
            while lo + 1 < hi:
                x = (lo + hi) // 2
                if is_ok(x):
                    hi = x
                else:
                    lo = x
            return hi

        print(binary_search())
class ABC024:
    """AtCoder Beginner Contest 024 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Total price with discount c per person when s + t >= k.
        a, b, c, k, s, t = map(int, sys.stdin.read().split())
        print(a * s + b * t - c * (s + t) * (s + t >= k))

    @staticmethod
    def b():
        # Door stays open t seconds after each visitor; overlaps clipped.
        n, t, *a = map(int, sys.stdin.read().split())
        a = np.array(a)
        print(np.minimum(a[1:] - a[:-1], t).sum() + t)

    @staticmethod
    def c():
        # Per day, move every active traveller as far toward the target
        # as the day's allowed range [l, r] permits (vectorised).
        n, d, k, *lrst = map(int, sys.stdin.read().split())
        lrst = np.array(lrst)
        lr = lrst[: 2 * d].reshape(d, 2)
        s, t = lrst[2 * d :].reshape(k, 2).T
        day = np.zeros((k,), dtype=np.int32)
        for i in range(d):
            l, r = lr[i]
            move = (l <= s) & (s <= r) & (s != t)
            reach = move & (l <= t) & (t <= r)
            s[move & (s < t)] = r  # jump to the range edge toward t
            s[move & (s > t)] = l
            s[reach] = t[reach]
            day[reach] = i + 1
        print(*day, sep="\n")

    @staticmethod
    def d():
        # Solve for h and w modulo MOD using a Fermat modular inverse.
        a, b, c = map(int, sys.stdin.read().split())
        p = MOD
        denom = pow(a * b % p - b * c % p + c * a % p, p - 2, p)
        w = (b * c - a * b) % p * denom % p
        h = (b * c - a * c) % p * denom % p
        print(h, w)
class ABC025:
    """AtCoder Beginner Contest 025 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # n-th two-letter word over the 5-character alphabet s.
        s, n = sys.stdin.read().split()
        n = int(n)
        i, j = divmod(n - 1, 5)
        print(s[i] + s[j])

    @staticmethod
    def b():
        # Net East/West displacement with each step clamped to [a, b].
        n, a, b = map(int, sys.stdin.readline().split())
        res = defaultdict(int)
        for _ in range(n):
            s, d = sys.stdin.readline().split()
            d = int(d)
            res[s] += min(max(d, a), b)
        res = res["East"] - res["West"]
        if res == 0:
            ans = 0
        elif res > 0:
            ans = f"East {res}"
        else:
            ans = f"West {-res}"
        print(ans)

    @staticmethod
    def c():
        # Two-player board game solved by memoised minimax over all
        # 3^9-ish fill orders; b scores vertical pairs, c horizontal.
        b = [0] * 6
        for i in range(2):
            (*row,) = map(int, sys.stdin.readline().split())
            for j in range(3):
                b[i * 3 + j] = row[j]
        c = [0] * 8
        for i in range(3):
            (*row,) = map(int, sys.stdin.readline().split())
            for j in range(2):
                c[i * 3 + j] = row[j]
        tot = sum(b) + sum(c)

        @lru_cache(maxsize=None)
        def f(s=tuple(0 for _ in range(9))):
            # First player's score for board state s, both playing optimally.
            if all(s):
                res = 0
                for i in range(6):
                    res += (s[i] == s[i + 3]) * b[i]
                for i in range(8):
                    res += (s[i] == s[i + 1]) * c[i]
                return res
            cand = [i for i in range(9) if not s[i]]
            flg = len(cand) & 1  # whose move, from the parity of empty cells
            s = list(s)
            res = []
            for i in cand:
                s[i] = (flg ^ 1) + 1
                res.append(f(tuple(s)))
                s[i] = 0
            return sorted(res, reverse=flg)[0]

        a = f()
        b = tot - a  # second player takes the remainder
        print(a)
        print(b)
class ABC026:
    """AtCoder Beginner Contest 026 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Max product of two integers summing to a.
        a = int(sys.stdin.readline().rstrip())
        print(a // 2 * (a - a // 2))

    @staticmethod
    def b():
        # Area of alternating concentric rings, largest ring counted in.
        n, *r = map(int, sys.stdin.read().split())
        s = np.pi * np.array([0] + r) ** 2
        s.sort()
        res = s[n::-2].sum() - s[n - 1 :: -2].sum()
        print(res)

    @staticmethod
    def c():
        # Salary = max(children) + min(children) + 1, leaves earn 1;
        # computed by recursion over the supervision tree rooted at 0.
        n, *b = map(int, sys.stdin.read().split())
        g = GeometryTopology.Graph()
        for i in range(1, n):
            g.add_edge(b[i - 1] - 1, i, weight=1)

        def f(u=0):
            if not g.edges[u]:
                return 1
            s = [f(v) for v in g.edges[u]]
            return max(s) + min(s) + 1

        print(f())

    @staticmethod
    def d():
        # Root of f(t) = 0 on [0, 200] via a bracketing root finder.
        a, b, c = map(int, sys.stdin.readline().split())

        def f(t):
            return a * t + b * np.sin(c * t * np.pi) - 100

        print(optimize.brenth(f, 0, 200))
class ABC027:
    """AtCoder Beginner Contest 027 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # The length that differs from the other two (any if all equal).
        l = [int(l) for l in sys.stdin.readline().split()]
        l.sort()
        print(l[2] if l[0] == l[1] else l[0])

    @staticmethod
    def b():
        # Count prefix groups whose running average misses the global
        # mean m; impossible (-1) if the total is not divisible by n.
        n, *a = map(int, sys.stdin.read().split())
        m, r = divmod(sum(a), n)
        if r:
            print(-1)
            return
        population = 0
        towns = 0
        cnt = 0
        for x in a:
            population += x
            towns += 1
            if population / towns != m:
                cnt += 1
                continue
            population, towns = 0, 0
        print(cnt)

    @staticmethod
    def c():
        # Game on the infinite binary tree: simulate the forced walk and
        # decide the winner from the parity of the step count.
        n = int(sys.stdin.readline().rstrip())
        flg = n.bit_length() & 1 ^ 1
        t = 0
        x = 1
        while x <= n:
            t += 1
            x = 2 * x + 1 if t & 1 ^ flg else 2 * x
        print("Aoki" if t & 1 else "Takahashi")
class ABC028:
    """AtCoder Beginner Contest 028 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Grade the score into one of four verdict buckets.
        score = int(sys.stdin.readline().rstrip())
        if score < 60:
            verdict = "Bad"
        elif score < 90:
            verdict = "Good"
        elif score < 100:
            verdict = "Great"
        else:
            verdict = "Perfect"
        print(verdict)

    @staticmethod
    def b():
        # Frequency of each of the letters A-F in the input line.
        text = sys.stdin.readline().rstrip()
        tally = Counter(text)
        counts = [tally.get(letter, 0) for letter in "ABCDEF"]
        print(*counts)

    @staticmethod
    def c():
        # Third-largest pairwise-distinct sum is one of these two.
        a, b, c, d, e = map(int, sys.stdin.readline().split())
        candidates = (b + c + e, a + d + e)
        print(max(candidates))

    @staticmethod
    def d():
        # Probability that the median of three dice equals k.
        n, k = map(int, sys.stdin.readline().split())
        favorable = 3 * 2 * (n - k) * (k - 1) + 3 * (n - 1) + 1
        print(favorable / n**3)
class ABC029:
    """AtCoder Beginner Contest 029 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Pluralise by appending "s".
        print(sys.stdin.readline().rstrip() + "s")

    @staticmethod
    def b():
        # Words containing the letter "r".
        print(sum("r" in s for s in sys.stdin.read().split()))

    @staticmethod
    def c():
        # All strings of the given length over {a, b, c}, in lexicographic
        # order (itertools.product yields them sorted).
        print(
            *[
                "".join(s)
                for s in itertools.product(
                    "abc", repeat=int(sys.stdin.readline().rstrip())
                )
            ],
            sep="\n",
        )

    @staticmethod
    def d():
        # Occurrences of digit '1' in 1..n, one decimal place at a time.
        n = int(sys.stdin.readline().rstrip())
        print(
            sum(
                n // 10 ** (i + 1) * 10**i
                + min(max((n % 10 ** (i + 1) - 10**i + 1), 0), 10**i)
                for i in range(9)
            )
        )
class ABC030:
    """AtCoder Beginner Contest 030 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Compare rates b/a vs d/c by cross-multiplication.
        a, b, c, d = map(int, sys.stdin.readline().split())
        e, f = b * c, d * a
        print("TAKAHASHI" if e > f else "AOKI" if f > e else "DRAW")

    @staticmethod
    def b():
        # Angle between the clock hands at n hours m minutes.
        n, m = map(int, sys.stdin.readline().split())
        n = (n % 12 + m / 60) * 30  # hour hand, degrees
        m *= 6  # minute hand, degrees
        d = abs(n - m)
        print(min(d, 360 - d))

    @staticmethod
    def c():
        # Alternate between the two jump-time lists, always taking the
        # earliest time >= t (bisect), counting completed jumps.
        n, m = map(int, sys.stdin.readline().split())
        x, y = map(int, sys.stdin.readline().split())
        a = [int(x) for x in sys.stdin.readline().split()]
        b = [int(x) for x in sys.stdin.readline().split()]
        t = 0
        p = 1  # which side jumps next
        cnt = 0
        while True:
            if p:
                i = bi_l(a, t)
                if i == n:
                    break
                t = a[i] + x
            else:
                i = bi_l(b, t)
                if i == m:
                    break
                t = b[i] + y
            cnt += 1
            p ^= 1
        print(cnt)

    @staticmethod
    def d():
        # Position after k steps (k is a huge decimal string): walk until
        # the rho cycle is found, then reduce k modulo the cycle length
        # digit by digit.
        n, a = map(int, sys.stdin.readline().split())
        a -= 1
        k = sys.stdin.readline().rstrip()
        b = [int(x) - 1 for x in sys.stdin.readline().split()]
        c = [None] * n  # first-visit step index per node
        for i in range(n + 1):
            if str(i) == k:
                print(a + 1)
                return
            if c[a] is not None:
                l, d = i - c[a], c[a]  # cycle length, tail length
                break
            c[a] = i
            a = b[a]
        r = [None] * len(k)
        r[0] = 1
        for i in range(len(k) - 1):
            r[i + 1] = r[i] * 10 % l  # powers of ten mod cycle length
        k = [int(c) for c in k][::-1]
        d = (sum(r[i] * k[i] for i in range(len(k))) - d) % l
        for _ in range(d):
            a = b[a]
        print(a + 1)

    @staticmethod
    def d_2():
        # Same as d() but with k already parsed as an int.
        n, a, k, *b = map(int, sys.stdin.read().split())
        a -= 1
        b = [x - 1 for x in b]
        c = [None] * n
        for i in range(n + 1):
            if i == k:
                print(a + 1)
                return
            if c[a] is not None:
                for _ in range((k - c[a]) % (i - c[a])):
                    a = b[a]
                print(a + 1)
                return
            c[a] = i
            a = b[a]
class ABC031:
    """AtCoder Beginner Contest 031 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Add one to the smaller dimension for the larger area.
        a, d = map(int, sys.stdin.readline().split())
        if a > d:
            a, d = d, a
        print((a + 1) * d)

    @staticmethod
    def b():
        # Top each reading up to l; -1 for readings above h.
        l, h, n, *a = map(int, sys.stdin.read().split())
        a = np.array(a)
        res = np.maximum(l - a, 0)
        res[a > h] = -1
        print(*res, sep="\n")

    @staticmethod
    def c():
        # Takahashi picks start i to maximise his score, knowing Aoki
        # responds with the j maximising Aoki's own score. Alternating
        # sums come from parity-separated prefix sums.
        n, *a = map(int, sys.stdin.read().split())
        a = np.array(a)
        np.cumsum(a[::2], out=a[::2])  # prefix sums over even indices
        np.cumsum(a[1::2], out=a[1::2])  # prefix sums over odd indices
        a = list(a) + [0] * 2

        def score(i, j):
            # (Takahashi, Aoki) totals for the segment between i and j.
            if i > j:
                i, j = j, i
            if (j - i) & 1:
                x, y = a[j - 1] - a[i - 2], a[j] - a[i - 1]
            else:
                x, y = a[j] - a[i - 2], a[j - 1] - a[i - 1]
            return x, y

        res = -inf
        for i in range(n):
            s = -inf
            for j in range(n):
                if i == j:
                    continue
                x, y = score(i, j)
                if y > s:
                    s, t = y, x
            res = max(res, t)
        print(res)

    @staticmethod
    def d():
        # Try every assignment of code-word lengths 1..3 per symbol, then
        # check the induced dictionary is consistent with all samples.
        k, m = map(int, sys.stdin.readline().split())
        (*vw,) = zip(*[iter(sys.stdin.read().split())] * 2)
        for l in itertools.product((1, 2, 3), repeat=k):
            s = dict()
            for v, w in vw:
                i = 0
                for d in v:
                    d = int(d) - 1
                    j = i + l[d]
                    if j > len(w):
                        break
                    t = w[i:j]
                    if d in s and s[d] != t:
                        break  # contradicts an earlier assignment
                    s[d] = t
                    i = j
                else:
                    if i == len(w):
                        continue  # sample fully consumed: consistent
                    break
            else:  # every sample matched this length assignment
                for i in range(k):
                    print(s[i])
                return
class ABC032:
    """AtCoder Beginner Contest 032 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # Smallest common multiple of a and b that is >= n.
        a, b, n = map(int, sys.stdin.read().split())
        l = NumberTheory.lcm(a, b)
        print((n + l - 1) // l * l)

    @staticmethod
    def b():
        # Number of distinct substrings of length k.
        s, k = sys.stdin.read().split()
        k = int(k)
        res = set()
        for i in range(len(s) - k + 1):
            res.add(s[i : i + k])
        print(len(res))

    @staticmethod
    def c():
        # Longest window with product <= k (two pointers).
        n, k, *s = map(int, sys.stdin.read().split())
        if 0 in s:  # any window containing 0 has product 0 <= k
            print(n)
            return
        if k == 0:  # no zero present, so no window qualifies
            print(0)
            return
        res, tmp, l = 0, 1, 0
        for r in range(n):
            tmp *= s[r]
            while tmp > k:
                tmp //= s[l]
                l += 1
            res = max(res, r - l + 1)
        print(res)
class ABC033:
    """AtCoder Beginner Contest 033 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        # All digits identical?
        print(
            "SAME"
            if len(set(sys.stdin.readline().rstrip())) == 1
            else "DIFFERENT"
        )

    @staticmethod
    def b():
        # City holding a strict majority of the population, else "atcoder".
        n = int(sys.stdin.readline().rstrip())
        res = dict()
        for _ in range(n):
            s, p = sys.stdin.readline().split()
            res[s] = int(p)
        tot = sum(res.values())
        for s, p in res.items():
            if p > tot / 2:
                print(s)
                return
        print("atcoder")

    @staticmethod
    def c():
        # A product term is non-zero iff none of its factors is "0";
        # count the non-zero terms of the "+"-separated expression.
        s = sys.stdin.readline().rstrip()
        print(sum(not "0" in f for f in s.split("+")))
class ABC034:
    """AtCoder Beginner Contest 034 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        x, y = map(int, sys.stdin.readline().split())
        print("Better" if y > x else "Worse")

    @staticmethod
    def b():
        # Neighbouring number of opposite parity.
        n = int(sys.stdin.readline().rstrip())
        print(n + 1 if n & 1 else n - 1)

    @staticmethod
    def c():
        # Lattice paths across an h x w grid: C(h + w - 2, h - 1) mod MOD.
        h, w = map(int, sys.stdin.read().split())
        choose = Combinatorics.CombinationsMod()
        print(choose(h + w - 2, h - 1))

    @staticmethod
    def d():
        # Maximise the combined concentration: binary search on the answer
        # x; f(x) >= 0 iff some k-subset achieves concentration >= x.
        n, k, *wp = map(int, sys.stdin.read().split())
        w, p = np.array(wp).reshape(-1, 2).T

        def f(x):
            return np.sort(w * (p - x))[-k:].sum()

        print(optimize.bisect(f, 0, 100))
class ABC035:
    """AtCoder Beginner Contest 035 solutions (read stdin, print answers)."""

    @staticmethod
    def a():
        w, h = map(int, sys.stdin.readline().split())
        print("4:3" if 4 * h == 3 * w else "16:9")

    @staticmethod
    def b():
        # Final distance from the origin after moves in s; '?' moves are
        # free: t == "1" maximises the distance, otherwise minimises it.
        s, t = sys.stdin.read().split()
        y = x = z = 0
        for c in s:
            if c == "?":
                z += 1
            elif c == "L":
                x -= 1
            elif c == "R":
                x += 1
            elif c == "D":
                y -= 1
            elif c == "U":
                y += 1
        d = abs(y) + abs(x)
        print(d + z if t == "1" else max(d - z, (d - z) & 1))

    @staticmethod
    def c():
        # Parity of flip counts per position via an imos array.
        n, q, *lr = map(int, sys.stdin.read().split())
        l, r = np.array(lr).reshape(q, 2).T
        res = np.zeros(n + 1, dtype=int)
        np.add.at(res, l - 1, 1)
        np.subtract.at(res, r, 1)
        np.cumsum(res, out=res)
        res = res & 1
        print("".join(map(str, res[:-1])))

    @staticmethod
    def d():
        # Best town to idle in: maximise (t - time there - time back) * point
        # via Dijkstra on the graph and on its reverse.
        n, m, t = map(int, sys.stdin.readline().split())
        point = np.array(sys.stdin.readline().split(), dtype=int)
        a, b, c = (
            np.array(sys.stdin.read().split(), dtype=np.int64)
            .reshape(m, 3)
            .T
        )
        a -= 1
        b -= 1
        d_1 = shortest_path(
            csr_matrix((c, (a, b)), (n, n)),
            method="D",
            directed=True,
            indices=0,
        )
        d_2 = shortest_path(
            csr_matrix((c, (b, a)), (n, n)),  # reversed edges: return trips
            method="D",
            directed=True,
            indices=0,
        )
        print(int(np.amax((t - (d_1 + d_2)) * point)))
class ABC036:
    """Solutions for AtCoder Beginner Contest 036."""

    @staticmethod
    def a():
        # A: ceiling division b / a.
        a, b = map(int, sys.stdin.readline().split())
        print((b + a - 1) // a)

    @staticmethod
    def b():
        # B: rotate the n x n character grid 90 degrees.
        n, *s = sys.stdin.read().split()
        n = int(n)
        for j in range(n):
            row = ""
            for i in range(n - 1, -1, -1):
                row += s[i][j]
            print(row)

    @staticmethod
    def c():
        # C: coordinate compression -- replace each value by its rank.
        n, *a = map(int, sys.stdin.read().split())
        b = [None] * n
        prev = None
        j = -1
        for i, x in sorted(enumerate(a), key=lambda x: x[1]):
            if x != prev:
                j += 1
            b[i] = j
            prev = x
        print(*b, sep="\n")

    @staticmethod
    def d():
        # D: count tree colourings where no two black vertices are adjacent:
        # a black node multiplies only the white counts of its children.
        n, *ab = map(int, sys.stdin.read().split())
        edges = [[] for _ in range(n)]
        for a, b in zip(*[iter(ab)] * 2):
            a -= 1
            b -= 1
            edges[a].append(b)
            edges[b].append(a)
        parent = [None] * n

        def count(u):
            # returns (#colourings with u black, #with u white), mod MOD
            black, white = 1, 1
            for v in edges[u]:
                if v == parent[u]:
                    continue
                parent[v] = u
                b, w = count(v)
                black *= w
                black %= MOD
                white *= (b + w) % MOD
                white %= MOD
            return black, white

        print(sum(count(0)) % MOD)
class ABC037:
    """Solutions for AtCoder Beginner Contest 037."""

    @staticmethod
    def a():
        # A: buy as many of the cheaper item as the budget c allows.
        a, b, c = map(int, sys.stdin.readline().split())
        print(c // min(a, b))

    @staticmethod
    def b():
        # B: apply each range-assignment in order; later queries overwrite.
        n, q, *lrt = map(int, sys.stdin.read().split())
        a = np.zeros(n, dtype=int)
        for l, r, t in zip(*[iter(lrt)] * 3):
            a[l - 1 : r] = t
        print(*a, sep="\n")

    @staticmethod
    def c():
        # C: total of all length-k window sums via prefix sums.
        n, k, *a = map(int, sys.stdin.read().split())
        a = np.array([0] + a)
        np.cumsum(a, out=a)
        s = (a[k:] - a[:-k]).sum()
        print(s)

    @staticmethod
    def d():
        # D: number of strictly-increasing paths starting at each cell of the
        # grid DAG, memoized; sum over all start cells, mod MOD.
        h, w, *a = map(int, sys.stdin.read().split())
        p = [None] * (h * w)

        def paths(k):
            # memoized count of increasing paths starting at flat index k
            if p[k]:
                return p[k]
            p[k] = 1
            i, j = divmod(k, w)
            if j > 0 and a[k] > a[k - 1]:
                p[k] += paths(k - 1)
            if j < w - 1 and a[k] > a[k + 1]:
                p[k] += paths(k + 1)
            if i > 0 and a[k] > a[k - w]:
                p[k] += paths(k - w)
            if i < h - 1 and a[k] > a[k + w]:
                p[k] += paths(k + w)
            p[k] %= MOD
            return p[k]

        print(sum(paths(i) for i in range(h * w)) % MOD)
class ABC038:
    """Solutions for AtCoder Beginner Contest 038."""

    @staticmethod
    def a():
        # A: answer depends on the string ending in 'T'.
        s = sys.stdin.readline().rstrip()
        print("YES" if s[-1] == "T" else "NO")

    @staticmethod
    def b():
        # B: the two rectangles can share an edge iff some side lengths match.
        a, b, c, d = map(int, sys.stdin.read().split())
        print("YES" if a == c or b == c or a == d or b == d else "NO")

    @staticmethod
    def c():
        # C: count strictly-increasing contiguous intervals; each maximal run
        # of length L adds L*(L-1)/2 on top of the n singletons.
        n, *a = map(int, sys.stdin.read().split())
        a += [-1]
        cnt = n
        tmp = 1
        for i in range(n):
            if a[i + 1] > a[i]:
                tmp += 1
            else:
                cnt += tmp * (tmp - 1) // 2
                tmp = 1
        print(cnt)

    @staticmethod
    def d():
        # D: maximum nesting chain: sort by (width asc, height desc) and take
        # the LIS of heights.  bi_l against inf presumably reads the chain
        # length off an inf-padded DP table -- see DP.LIS (TODO confirm).
        n, *wh = map(int, sys.stdin.read().split())
        a = [
            x[1]
            for x in sorted(
                zip(*[iter(wh)] * 2), key=lambda x: (x[0], -x[1])
            )
        ]
        print(bi_l(DP.LIS(a), inf))
class ABC039:
    """Solutions for AtCoder Beginner Contest 039."""

    @staticmethod
    def a():
        # A: surface area of an a x b x c box.
        a, b, c = map(int, sys.stdin.readline().split())
        print((a * b + b * c + c * a) * 2)

    @staticmethod
    def b():
        # B: integer fourth root of x by trial over n <= sqrt(x).
        x = int(sys.stdin.readline().rstrip())
        for n in range(1, int(x**0.5) + 1):
            if pow(n, 4) == x:
                print(n)
                return

    @staticmethod
    def c():
        # C: locate the given 12-key window inside the repeating keyboard
        # pattern; the offset identifies the starting note.
        board = "WBWBWWBWBWBW" * 3
        convert = "Do, *, Re, *, Mi, Fa, *, So, *, La, *, Si".split(", ")
        s = sys.stdin.readline().rstrip()
        print(convert[board.index(s)])

    @staticmethod
    def d():
        # D: invert a dilation.  Cells adjacent (8-neighbourhood) to any white
        # cell cannot have been black; re-dilate the surviving black set and
        # check it reproduces the target image.
        h, w = map(int, sys.stdin.readline().split())
        s = "".join(sys.stdin.read().split())
        white = set()
        for i in range(h * w):
            if s[i] == "#":
                continue
            # l/r clamp the horizontal neighbourhood at the row borders
            l = 0 if i % w == 0 else -1
            r = 0 if (i + 1) % w == 0 else 1
            white |= {
                i + dy + dx
                for dy in range(-w, w + 1, w)
                for dx in range(l, r + 1)
            }
        black_before = set(range(h * w)) - white
        black_after = set()
        for i in black_before:
            l = 0 if i % w == 0 else -1
            r = 0 if (i + 1) % w == 0 else 1
            black_after |= {
                i + dy + dx
                for dy in range(-w, w + 1, w)
                for dx in range(l, r + 1)
            }
        black_after &= set(range(h * w))
        for i in range(h * w):
            if s[i] == "#" and not i in black_after:
                print("impossible")
                return
        print("possible")
        for i in range(h):
            print(
                "".join(
                    [
                        "#" if i * w + j in black_before else "."
                        for j in range(w)
                    ]
                )
            )
class ABC040:
    """Solutions for AtCoder Beginner Contest 040."""

    @staticmethod
    def a():
        # A: walk around the ring the shorter way.
        n, x = map(int, sys.stdin.readline().split())
        print(min(x - 1, n - x))

    @staticmethod
    def b():
        # B: minimize (n // i - i) + (n % i) over i <= sqrt(n).
        n = int(sys.stdin.readline().rstrip())
        res = inf
        for i in range(1, int(n**0.5) + 1):
            res = min(res, n // i - i + n % i)
        print(res)

    @staticmethod
    def c():
        # C: frog-jump DP (1 or 2 steps, cost = |height difference|).
        n, *h = map(int, sys.stdin.read().split())
        h = [h[0]] + h
        cost = [None] * (n + 1)
        cost[0] = cost[1] = 0
        for i in range(2, n + 1):
            cost[i] = min(
                cost[i - 2] + abs(h[i] - h[i - 2]),
                cost[i - 1] + abs(h[i] - h[i - 1]),
            )
        print(cost[n])

    @staticmethod
    def d():
        # D: offline DSU.  One heap holds edges keyed -(2y) and queries keyed
        # -(2y + 1); popping in increasing key order processes items by
        # decreasing year, and an edge of year w is merged before a query of
        # year v exactly when w > v.  (y & 1 works for the negated keys
        # because Python's & uses two's-complement semantics.)
        n, m = map(int, sys.stdin.readline().split())
        uf = GeometryTopology.Graph(n)
        uf.init_dsu()
        queue = []
        for _ in range(m):
            a, b, y = map(int, sys.stdin.readline().split())
            heappush(queue, (-(2 * y), a - 1, b - 1))
        q = int(sys.stdin.readline().rstrip())
        for i in range(q):
            v, y = map(int, sys.stdin.readline().split())
            heappush(queue, (-(2 * y + 1), v - 1, i))
        res = [None] * q
        while queue:
            y, i, j = heappop(queue)
            if y & 1:
                # query: record the size of i's current component
                res[j] = uf.size[uf.find(i)]
            else:
                uf.unite(i, j)
        print(*res, sep="\n")
class ABC041:
    """Solutions for AtCoder Beginner Contest 041."""

    @staticmethod
    def a():
        # A: i-th character (1-based) of s.
        s, i = sys.stdin.read().split()
        i = int(i)
        print(s[i - 1])

    @staticmethod
    def b():
        # B: product of the three values mod MOD.
        a, b, c = map(int, sys.stdin.readline().split())
        ans = a * b % MOD * c % MOD
        print(ans)

    @staticmethod
    def c():
        # C: indices (1-based) ordered by value, descending.
        n, *a = map(int, sys.stdin.read().split())
        for i, h in sorted(enumerate(a), key=lambda x: -x[1]):
            print(i + 1)

    @staticmethod
    def d():
        # D: count orderings consistent with the constraints x-before-y:
        # bitmask DP where res[mask] is the number of valid orderings of the
        # subset, extendable by j only if all of j's predecessors are in mask.
        n, _, *xy = map(int, sys.stdin.read().split())
        g = [0] * n
        for x, y in zip(*[iter(xy)] * 2):
            g[x - 1] |= 1 << (y - 1)
        res = [0] * (1 << n)
        res[0] = 1
        for i in range(1 << n):
            for j in range(n):
                if i >> j & 1 ^ 1:
                    continue
                if not (g[j] & i):
                    res[i] += res[i & ~(1 << j)]
        print(res[-1])
class ABC042:
    """Solutions for AtCoder Beginner Contest 042."""

    @staticmethod
    def a():
        # A: haiku check -- the multiset must be exactly {5, 5, 7}.
        a = [int(x) for x in sys.stdin.readline().split()]
        c = Counter(a)
        print("YES" if c[5] == 2 and c[7] == 1 else "NO")

    @staticmethod
    def b():
        # B: lexicographically smallest concatenation = sorted join.
        n, l, *s = sys.stdin.read().split()
        print("".join(sorted(s)))

    @staticmethod
    def c():
        # C: cheapest price >= n using only non-banned digits: enumerate all
        # same-length candidates plus the smallest (l + 1)-digit one, then
        # bisect for the first candidate >= n.
        n, k, *d = sys.stdin.read().split()
        l = len(n)
        ok = sorted(set(string.digits) - set(d))
        cand = [
            int("".join(p)) for p in itertools.product(ok, repeat=l)
        ] + [int(min(x for x in ok if x > "0") + min(ok) * l)]
        print(cand[bi_l(cand, int(n))])

    @staticmethod
    def d():
        # D: lattice paths avoiding the bottom-left a x b block, by
        # inclusion-exclusion over the row where the path crosses column b.
        h, w, a, b = map(int, sys.stdin.read().split())
        combinations = Combinatorics.CombinationsMod(
            n=2 * 10**5, mod=MOD
        )
        i = np.arange(h - a, h)
        ng = np.sum(
            combinations(i + b - 1, i)
            * combinations(h - i + w - b - 2, h - 1 - i)
            % MOD
        )
        print((combinations(h + w - 2, h - 1) - ng) % MOD)
class ABC043:
    """Solutions for AtCoder Beginner Contest 043."""

    @staticmethod
    def a():
        # A: 1 + 2 + ... + n.
        n = int(sys.stdin.readline().rstrip())
        print((1 + n) * n // 2)

    @staticmethod
    def b():
        # B: simulate typing where 'B' is backspace.
        s = sys.stdin.readline().rstrip()
        t = ""
        for c in s:
            if c == "B":
                t = t[:-1]
            else:
                t += c
        print(t)

    @staticmethod
    def c():
        # C: make all values equal; the optimum target is the rounded mean
        # (the cost is symmetric around the exact mean, so .5 ties cost the
        # same either way and np.around's rounding mode is safe).
        n, *a = map(int, sys.stdin.read().split())
        a = np.array(a)
        x = np.around(a.sum() / n).astype(int)
        print(np.sum((a - x) ** 2))

    @staticmethod
    def d():
        # D: an unbalanced substring exists iff two equal characters are at
        # distance 1 or 2; report the first such witness or -1 -1.
        s = sys.stdin.readline().rstrip()
        n = len(s)
        for i in range(n - 1):
            if s[i] == s[i + 1]:
                print(i + 1, i + 2)
                return
        for i in range(n - 2):
            if s[i] == s[i + 2]:
                print(i + 1, i + 3)
                return
        print(-1, -1)
class ABC044:
    """Solutions for AtCoder Beginner Contest 044."""

    @staticmethod
    def a():
        # A: first k nights at rate x, remaining nights at rate y.
        n, k, x, y = map(int, sys.stdin.read().split())
        print(min(n, k) * x + max(0, n - k) * y)

    @staticmethod
    def b():
        # B: every letter count must be even.
        res = set(
            c & 1 for c in Counter(sys.stdin.readline().rstrip()).values()
        )
        print("Yes" if len(res) == 1 and res.pop() == 0 else "No")

    @staticmethod
    def c():
        # C: count non-empty subsets with mean a; 2-D DP over (size, sum),
        # reading off dp[i, i * a] for every size i.
        n, a, *x = map(int, sys.stdin.read().split())
        dp = np.zeros((n + 1, 2501), dtype=np.int64)
        dp[0, 0] = 1
        for v in x:
            dp[1:, v:] += dp[:-1, :-v]
        i = np.arange(1, n + 1)
        print(dp[i, i * a].sum())

    @staticmethod
    def c_2():
        # C (alternative): shift every value by -a so the target sum becomes
        # 0, then a hash-map subset-sum DP; subtract 1 for the empty subset.
        n, a, *x = map(int, sys.stdin.read().split())
        for i in range(n):
            x[i] -= a
        s = defaultdict(int)
        s[0] = 1
        for i in range(n):
            ns = s.copy()
            for k, v in s.items():
                ns[k + x[i]] += v
            s = ns
        print(s[0] - 1)

    @staticmethod
    def d():
        pass
class ABC045:
    """Solutions for AtCoder Beginner Contest 045."""

    @staticmethod
    def a():
        # A: trapezoid area.
        a, b, h = map(int, sys.stdin.read().split())
        print((a + b) * h // 2)

    @staticmethod
    def b():
        # B: card game; each pile is consumed from the back and the drawn
        # card's letter selects the next player.  Empty pile -> that player wins.
        a, b, c = sys.stdin.read().split()
        d = {"a": a[::-1], "b": b[::-1], "c": c[::-1]}
        nx = "a"
        while 1:
            if not d[nx]:
                print(nx.upper())
                return
            d[nx], nx = d[nx][:-1], d[nx][-1]

    @staticmethod
    def c():
        # C: sum over all ways of inserting '+' signs: substring s[i..j]
        # appears weighted by the number of '+' placements on each side
        # (2^(gaps - 1) per side).
        def c(l):
            return pow(2, max(0, l - 1))

        s = sys.stdin.readline().rstrip()
        n = len(s)
        print(
            sum(
                int(s[i : j + 1]) * c(i) * c(n - 1 - j)
                for i in range(n)
                for j in range(i, n)
            )
        )

    @staticmethod
    def d():
        # D: each painted cell contributes to at most nine 3x3 windows (only
        # windows fully inside the grid count); tally per-window black counts
        # sparsely, then derive the count distribution for 0..9.
        h, w, n, *ab = map(int, sys.stdin.read().split())
        c = defaultdict(int)
        for y, x in zip(*[iter(ab)] * 2):
            y -= 1
            x -= 1
            for dy, dx in itertools.product(range(-1, 2), repeat=2):
                i, j = y + dy, x + dx
                if not (0 < i < h - 1 and 0 < j < w - 1):
                    continue
                c[(i, j)] += 1
        c = Counter(c.values())
        c[0] = (h - 2) * (w - 2) - sum(c.values())
        for i in range(10):
            print(c[i])
class ABC046:
    """Solutions for AtCoder Beginner Contest 046."""

    @staticmethod
    def a():
        # A: number of distinct colours among the three tokens.
        tokens = sys.stdin.readline().split()
        print(len({*tokens}))

    @staticmethod
    def b():
        # B: k choices for the first ball, k - 1 for each of the rest.
        n, k = map(int, sys.stdin.readline().split())
        print(k * (k - 1) ** (n - 1))

    @staticmethod
    def c():
        # C: scale the running totals up to the smallest multiple of each
        # reported ratio that covers both of them.
        first, *ratios = map(int, sys.stdin.read().split())
        t = a = 1
        for rt, ra in zip(*[iter(ratios)] * 2):
            scale = max(-(-t // rt), -(-a // ra))  # ceil divisions
            t, a = scale * rt, scale * ra
        print(t + a)

    @staticmethod
    def d():
        # D: net wins are (count of 'g' - count of 'p') // 2.
        tally = Counter(sys.stdin.readline().rstrip())
        print((tally["g"] - tally["p"]) // 2)
class ABC047:
    """Solutions for AtCoder Beginner Contest 047."""

    @staticmethod
    def a():
        # A: can one pack equal the other two combined?
        c = sorted(map(int, sys.stdin.readline().split()))
        print("Yes" if c[0] + c[1] == c[2] else "No")

    @staticmethod
    def b():
        # B: each fold erases one half-plane; track the surviving window.
        w, h, n, *xyf = map(int, sys.stdin.read().split())
        l, r, d, u = 0, w, 0, h
        for x, y, f in zip(*[iter(xyf)] * 3):
            if f == 1:
                l = max(l, x)
            if f == 2:
                r = min(r, x)
            if f == 3:
                d = max(d, y)
            if f == 4:
                u = min(u, y)
        print(max(0, r - l) * max(0, u - d))

    @staticmethod
    def c():
        # C: number of adjacent colour changes.
        s = sys.stdin.readline().rstrip()
        print(sum(s[i] != s[i + 1] for i in range(len(s) - 1)))

    @staticmethod
    def d():
        # D: best single buy-low/sell-high profit and how many index pairs
        # achieve it (mn = min price so far, mx = best profit, c = count).
        mn, mx, c = inf, -1, 0
        n, t, *a = map(int, sys.stdin.read().split())
        for p in a:
            if p - mn == mx:
                c += 1
            elif p - mn > mx:
                mx, c = p - mn, 1
            mn = min(mn, p)
        print(c)
class ABC048:
    """Solutions for AtCoder Beginner Contest 048."""

    @staticmethod
    def a():
        # A: uppercase initials of the three words.
        def initial(s):
            return s[0].upper()

        print("".join(map(initial, sys.stdin.readline().split())))

    @staticmethod
    def b():
        # B: multiples of x in [a, b] via prefix counts.
        a, b, x = map(int, sys.stdin.readline().split())
        print(
            b // x - (a - 1) // x
        )  # if a=0, (a-1)/x is rounded down to -1.

    @staticmethod
    def c():
        # C: greedily eat candies so every adjacent pair sums to at most x;
        # eating from the right-hand box of each pair is always optimal.
        n, x, *a = map(int, sys.stdin.read().split())
        cnt = prev = 0
        for i in range(n):
            d = prev + a[i] - x  # excess of the current adjacent pair
            prev = a[i]
            if d <= 0:
                continue
            cnt += d
            prev -= d
        print(cnt)

    @staticmethod
    def d():
        # D: game result depends only on the length parity and whether the
        # end characters match.
        s = sys.stdin.readline().rstrip()
        print("First" if len(s) & 1 ^ (s[0] == s[-1]) else "Second")
class ABC049:
    """Solutions for AtCoder Beginner Contest 049."""

    @staticmethod
    def a():
        # A: vowel or consonant.
        vowels = set("aeiou")
        print(
            "vowel"
            if sys.stdin.readline().rstrip() in vowels
            else "consonant"
        )

    @staticmethod
    def b():
        # B: stretch the image vertically by printing each row twice.
        h, w, *s = sys.stdin.read().split()
        for l in s:
            for _ in range(2):
                print(l)

    @staticmethod
    def c():
        # C: can s be segmented into the four words?  Matching from the end
        # makes the choice unambiguous (no word is a suffix of another's
        # extension when scanned backwards).
        t = set("dream, dreamer, erase, eraser".split(", "))

        def obtainable(s):
            while True:
                for i in range(5, 8):
                    if s[-i:] in t:
                        s = s[:-i]
                        if not s:
                            return True
                        break
                else:
                    return False

        s = sys.stdin.readline().rstrip()
        print("YES" if obtainable(s) else "NO")

    @staticmethod
    def d():
        # D: group vertices by their (road component, rail component) pair;
        # the answer per vertex is the size of its group.
        n, k, l = map(int, sys.stdin.readline().split())
        uf1 = GeometryTopology.Graph(n)
        uf1.init_dsu()
        uf2 = GeometryTopology.Graph(n)
        uf2.init_dsu()

        def add_edges(uf, m):
            # read m undirected edges and union their endpoints
            for _ in range(m):
                x, y = map(int, sys.stdin.readline().split())
                x -= 1
                y -= 1
                uf.unite(x, y)

        add_edges(uf1, k)
        add_edges(uf2, l)
        g = defaultdict(list)
        for i in range(n):
            g[(uf1.find(i), uf2.find(i))].append(i)
        res = [None] * n
        for a in g:
            for i in g[a]:
                res[i] = len(g[a])
        print(*res, sep=" ")
class ABC050:
    """Solutions for AtCoder Beginner Contest 050."""

    @staticmethod
    def a():
        # A: the input line is an arithmetic expression such as "1 + 2".
        # NOTE(review): eval on raw stdin is fine for judged contest input
        # but unsafe on untrusted data.
        print(eval(sys.stdin.readline().rstrip()))

    @staticmethod
    def b():
        # B: per query, total time with problem p replaced by time x.
        n = int(sys.stdin.readline().rstrip())
        t = np.array(sys.stdin.readline().split(), dtype=np.int64)
        m, *px = map(int, sys.stdin.read().split())
        p, x = np.array(px).reshape(m, 2).T
        p -= 1
        print(*(t.sum() + x - t[p]), sep="\n")

    @staticmethod
    def c():
        # C: count permutations consistent with the reported absolute-rank
        # values; each valid multiset pattern leaves a free +/- choice per
        # pair, giving 2^(n // 2) mod MOD, otherwise 0.
        n, *a = map(int, sys.stdin.read().split())
        a = Counter(a)
        if n & 1 and not (
            a[0] == 1 and all(a[i] == 2 for i in range(2, n, 2))
        ):
            print(0)
            return
        if ~n & 1 and any(a[i] != 2 for i in range(1, n, 2)):
            print(0)
            return
        print(pow(2, n // 2, MOD))

    @staticmethod
    def d():
        pass
class ABC051:
    """Solutions for AtCoder Beginner Contest 051."""

    @staticmethod
    def a():
        # A: replace the commas with spaces.
        print(" ".join(sys.stdin.readline().rstrip().split(",")))

    @staticmethod
    def b():
        # B: count (x, y, z) in [0, k]^3 with x + y + z = s; for each x the
        # number of (y, z) completions is a triangular count.
        k, s = map(int, sys.stdin.readline().split())
        tot = 0
        for x in range(k + 1):
            if s - x < 0:
                break
            if s - x > 2 * k:
                continue
            tot += s - x + 1 if s - x <= k else 2 * k - (s - x) + 1
        print(tot)

    @staticmethod
    def c():
        # C: constructive move string doing two corner-to-corner round trips
        # along slightly different rectangles.
        x1, y1, x2, y2 = map(int, sys.stdin.readline().split())
        dx, dy = x2 - x1, y2 - y1
        print(
            "U" * dy
            + "R" * (dx + 1)
            + "D" * (dy + 1)
            + "L" * (dx + 1)
            + "U"
            + "L"
            + "U" * (dy + 1)
            + "R" * (dx + 1)
            + "D" * (dy + 1)
            + "L" * dx
        )

    @staticmethod
    def d():
        # D: an edge (a, b, c) is used iff some vertex x satisfies
        # d(x, a) + c == d(x, b); count the edges for which no x does.
        n, m, *abc = map(int, sys.stdin.read().split())
        x = np.arange(n)
        a, b, c = np.array(abc).reshape(m, 3).T
        a -= 1
        b -= 1
        d = shortest_path(
            csr_matrix((c, (a, b)), shape=(n, n)),
            method="FW",
            directed=False,
        ).astype(np.int64)
        print(
            m
            - np.any(
                d[x, a[:, None]] + c[:, None] == d[x, b[:, None]], axis=1
            ).sum()
        )
class ABC052:
    """Solutions for AtCoder Beginner Contest 052."""

    @staticmethod
    def a():
        # A: larger of the two rectangle areas.
        a, b, c, d = map(int, sys.stdin.readline().split())
        print(max(a * b, c * d))

    @staticmethod
    def b():
        # B: maximum prefix value while applying I (+1) / D (-1) moves.
        n, s = sys.stdin.read().split()
        n = int(n)
        a = [0] * (n + 1)
        for i in range(n):
            a[i + 1] = a[i] + (1 if s[i] == "I" else -1)
        print(max(a))

    @staticmethod
    def c():
        # C: number of divisors of n! from its prime factorization, mod MOD.
        n = int(sys.stdin.readline().rstrip())
        pn = NumberTheory.PrimeNumbers(n)
        s = 1
        for c in pn.factorize_factorial(n).values():
            s = s * (c + 1) % MOD
        print(s)

    @staticmethod
    def d():
        # D: per gap, walk (a per unit distance) or teleport (flat b).
        n, a, b, *x = map(int, sys.stdin.read().split())
        x = np.array(x)
        print(np.minimum((x[1:] - x[:-1]) * a, b).sum())
class ABC053:
    """Solutions for AtCoder Beginner Contest 053."""

    @staticmethod
    def a():
        # A: contest classification by the 1200 rating threshold.
        rating = int(sys.stdin.readline().rstrip())
        print("ABC" if rating < 1200 else "ARC")

    @staticmethod
    def b():
        # B: length of the span from the first 'A' to the last 'Z'.
        s = sys.stdin.readline().rstrip()
        first_a = s.find("A")
        last_z_from_end = s[::-1].find("Z")
        print(len(s) - first_a - last_z_from_end)

    @staticmethod
    def c():
        # C: two points per full 11-unit cycle, plus the remainder's worth.
        full_cycles, leftover = divmod(int(sys.stdin.readline().rstrip()), 11)
        print(full_cycles * 2 + (leftover + 5) // 6)

    @staticmethod
    def d():
        # D: keep one card per value; duplicates are discarded in pairs.
        n, *cards = map(int, sys.stdin.read().split())
        duplicates = n - len(set(cards))
        print(n - (duplicates + 1) // 2 * 2)
class ABC054:
    """Solutions for AtCoder Beginner Contest 054."""

    @staticmethod
    def a():
        # A: card game where 1 beats 13; f maps cards to a modular rank.
        def f(x):
            return (x + 11) % 13

        a, b = map(int, sys.stdin.readline().split())
        print("Alice" if f(a) > f(b) else "Bob" if f(a) < f(b) else "Draw")

    @staticmethod
    def b():
        # B: naive 2-D pattern matching.  The for/else ladder breaks out of
        # the inner scans on the first mismatch; the y-loop's else fires only
        # when every cell of the m x m window matched.
        n, m = map(int, sys.stdin.readline().split())
        a = [sys.stdin.readline().rstrip() for _ in range(n)]
        b = [sys.stdin.readline().rstrip() for _ in range(m)]
        for i in range(n - m + 1):
            for j in range(n - m + 1):
                for y in range(m):
                    for x in range(m):
                        if a[i + y][j + x] == b[y][x]:
                            continue
                        break
                    else:
                        continue
                    break
                else:
                    print("Yes")
                    return
        print("No")

    @staticmethod
    def c():
        # C: count Hamiltonian paths starting at vertex 0 (bitmask DFS).
        n, m, *ab = map(int, sys.stdin.read().split())
        g = GeometryTopology.Graph(n)
        for a, b in zip(*[iter(ab)] * 2):
            a -= 1
            b -= 1
            g.add_edge(a, b)
            g.add_edge(b, a)
        cnt = 0
        stack = [(0, 1)]
        while stack:
            u, s = stack.pop()
            if s == (1 << n) - 1:
                cnt += 1
                continue
            for v in g.edges[u]:
                if s >> v & 1:
                    continue
                stack.append((v, s | 1 << v))
        print(cnt)

    @staticmethod
    def d():
        # D: cheapest package mix hitting the exact ratio ma:mb
        # (2-D knapsack over accumulated grams of A and B).
        n, ma, mb, *abc = map(int, sys.stdin.read().split())
        dp = np.full((401, 401), np.inf)
        dp[0, 0] = 0
        for a, b, c in zip(*[iter(abc)] * 3):
            np.minimum(dp[a:, b:], dp[:-a, :-b] + c, out=dp[a:, b:])
        i = np.arange(1, 400 // max(ma, mb) + 1)
        res = dp[i * ma, i * mb].min()
        print(int(res) if res != np.inf else -1)
class ABC055:
    """Solutions for AtCoder Beginner Contest 055."""

    @staticmethod
    def a():
        # A: 800 yen per meal, 200 yen back for every 15 meals.
        n = int(sys.stdin.readline().rstrip())
        print(800 * n - 200 * (n // 15))

    @staticmethod
    def b():
        # B: n! mod MOD via a precomputed factorial table.
        n = int(sys.stdin.readline().rstrip())
        fac, _ = Algebra.generate_fac_ifac(n, MOD)
        print(fac[-1])

    @staticmethod
    def c():
        # C: pair up to 2 c-pieces with each of the n S-pieces, then every
        # 4 spare c-pieces form another unit (formula reading of the code --
        # see the ABC055 C editorial for the derivation).
        n, m = map(int, sys.stdin.readline().split())
        print(m // 2 if m <= 2 * n else n + (m - 2 * n) // 4)

    @staticmethod
    def d():
        # D: circular S/W deduction.  Seeding the first two seats forces all
        # later ones via t[i+1] = t[i-1] ^ t[i] ^ s[i]; possible() then
        # checks both wrap-around constraints.
        n, s = sys.stdin.read().split()
        n = int(n)
        s = [1 if c == "o" else 0 for c in s]

        def possible(t):
            for i in range(1, n - 1):
                t[i + 1] = t[i - 1] ^ t[i] ^ s[i]
            return (
                (t[0] ^ s[0] ^ t[1] ^ t[-1])
                | (t[-1] ^ s[-1] ^ t[-2] ^ t[0])
            ) ^ 1

        for fst in [(1, 0), (0, 1), (1, 1), (0, 0)]:
            t = [None] * n
            t[0], t[1] = fst[0], fst[1]
            if possible(t):
                print("".join("S" if x == 1 else "W" for x in t))
                return
        print(-1)
class ABC056:
    """Solutions for AtCoder Beginner Contest 056."""

    @staticmethod
    def a():
        """A: the boat ends on the far side iff exactly one day is 'H'."""

        def to_i(c):
            return 1 if c == "H" else 0

        a, b = map(to_i, sys.stdin.readline().split())
        print("D" if a ^ b else "H")

    @staticmethod
    def b():
        """B: overlap needed between [a, a + w] and b (order-insensitive)."""
        w, a, b = map(int, sys.stdin.readline().split())
        if a > b:
            a, b = b, a
        print(max(b - (a + w), 0))

    @staticmethod
    def c():
        """C: smallest t with t(t+1)/2 >= x, via the quadratic formula."""
        x = int(sys.stdin.readline().rstrip())
        print(int(math.ceil(math.sqrt(2 * x + 1 / 4) - 0.5)))

    @staticmethod
    def d():
        """D: count "unnecessary" cards.

        Cards are clamped to k and sorted; in sorted order the unnecessary
        cards form a prefix, so binary-search the first necessary index.
        A card i is necessary iff some subset of the remaining cards sums
        into [k - a[i], k - 1] (it is "good" only together with card i).

        Fix: ``dtype=np.bool`` relied on the deprecated NumPy alias that was
        removed in NumPy 1.24; the builtin ``bool`` is the supported spelling.
        """
        n, k, *a = map(int, sys.stdin.read().split())
        a = sorted(min(x, k) for x in a)

        def necessary(i):
            # subset-sum over the other cards, truncated below k
            dp = np.zeros(k, dtype=bool)  # was np.bool (removed in NumPy 1.24)
            dp[0] = True
            for j in range(n):
                if j == i:
                    continue
                dp[a[j] :] += dp[: -a[j]]  # boolean OR via +=
            return np.any(dp[k - a[i] :])

        def binary_search():
            # first sorted index whose card is necessary
            lo, hi = -1, n
            while hi - lo > 1:
                i = (lo + hi) // 2
                if necessary(i):
                    hi = i
                else:
                    lo = i
            return hi

        print(binary_search())
class ABC057:
    """Solutions for AtCoder Beginner Contest 057."""

    @staticmethod
    def a():
        # A: clock arithmetic mod 24.
        a, b = map(int, sys.stdin.readline().split())
        print((a + b) % 24)

    @staticmethod
    def b():
        # B: nearest checkpoint by Manhattan distance (argmin keeps the
        # smallest index on ties), printed 1-based.
        n, m, *I = map(int, sys.stdin.read().split())
        I = np.array(I).reshape(-1, 2)
        ab, cd = I[:n], I[n:]
        print(
            *(
                np.argmin(
                    np.absolute(ab[:, None] - cd).sum(axis=-1), axis=-1
                )
                + 1
            ),
            sep="\n",
        )

    @staticmethod
    def c():
        # C: minimize digits of max(A, B) over factor pairs A * B = n; the
        # first divisor >= sqrt(n) is the best "larger half".
        n = int(sys.stdin.readline().rstrip())
        divs = NumberTheory.find_divisors(n)
        print(len(str(divs[bi_l(divs, math.sqrt(n))])))

    @staticmethod
    def d():
        # D: take the a largest values for the best mean, then count the
        # ways to pick the tied boundary value (combinations over the
        # multiplicity window [l, r) of the a-th largest value).
        c = Combinatorics.choose
        n, a, b, *v = map(int, sys.stdin.read().split())
        v.sort()
        print(sum(v[-a:]) / a)
        l, r = bi_l(v, v[-a]), bi_r(v, v[-a])
        print(
            sum(
                c(r - l, i)
                for i in range(r - n + a, r - max(l, n - b) + 1)
            )
            if r == n
            else c(r - l, r - n + a)
        )
class ABC058:
    """Solutions for AtCoder Beginner Contest 058."""

    @staticmethod
    def a():
        # A: are the three poles evenly spaced?
        a, b, c = map(int, sys.stdin.readline().split())
        print("YES" if c - b == b - a else "NO")

    @staticmethod
    def b():
        # B: interleave s and t (s may be one character longer).
        s, t = sys.stdin.read().split()
        a = ""
        for i in range(len(t)):
            a += s[i] + t[i]
        if len(s) > len(t):
            a += s[-1]
        print(a)

    @staticmethod
    def c():
        # C: longest word buildable from every headline: per-letter minimum
        # count across all strings, emitted in alphabetical order.
        n, *s = sys.stdin.read().split()
        res = {c: 100 for c in string.ascii_lowercase}
        for counter in map(Counter, s):
            for (
                c,
                x,
            ) in res.items():
                res[c] = min(x, counter[c])
        t = ""
        for c, x in sorted(res.items()):
            t += c * x
        print(t)

    @staticmethod
    def d():
        # D: total area over all rectangles factors into independent sums of
        # pairwise coordinate differences per axis, each computed with a
        # prefix-sum identity, combined mod MOD.
        n, m, *xy = map(int, sys.stdin.read().split())
        x, y = np.array(xy[:n]), np.array(xy[n:])
        print(
            (x * (np.arange(n) + 1) - np.cumsum(x)).sum()
            % MOD
            * ((y * (np.arange(m) + 1) - np.cumsum(y)).sum() % MOD)
            % MOD
        )
class ABC059:
    """Solutions for AtCoder Beginner Contest 059."""

    @staticmethod
    def a():
        # A: uppercase initials.
        def initial(s):
            return s[0].upper()

        print("".join(map(initial, sys.stdin.readline().split())))

    @staticmethod
    def b():
        # B: compare two big decimal strings: by length first, then
        # lexicographically (valid for equal-length numerals).
        a, b = sys.stdin.read().split()
        la, lb = len(a), len(b)
        print(
            "GREATER"
            if la > lb
            else "LESS"
            if la < lb
            else "GREATER"
            if a > b
            else "LESS"
            if a < b
            else "EQUAL"
        )

    @staticmethod
    def c():
        # C: minimum adjustments so prefix sums strictly alternate sign;
        # try both phase patterns and keep the cheaper.
        n, *a = map(int, sys.stdin.read().split())
        c = s = 0
        for i in range(n):
            s += a[i]
            if i & 1 and s >= 0:
                c += s + 1
                s = -1
            elif i & 1 ^ 1 and s <= 0:
                c += 1 - s
                s = 1
        c1 = c
        c = s = 0
        for i in range(n):
            s += a[i]
            if i & 1 and s <= 0:
                c += 1 - s
                s = 1
            elif i & 1 ^ 1 and s >= 0:
                c += s + 1
                s = -1
        c2 = c
        print(min(c1, c2))

    @staticmethod
    def d():
        # D: the game outcome depends only on |x - y|.
        x, y = map(int, sys.stdin.readline().split())
        print("Brown" if abs(x - y) <= 1 else "Alice")
class ABC060:
    """Solutions for AtCoder Beginner Contest 060."""

    @staticmethod
    def a():
        # A: the three words chain when each ends with the next one's initial.
        first, second, third = sys.stdin.readline().split()
        chains = first[-1] == second[0] and second[-1] == third[0]
        print("YES" if chains else "NO")

    @staticmethod
    def b():
        # B: c is reachable iff it is a multiple of gcd(a, b).
        a, b, c = map(int, sys.stdin.readline().split())
        print("YES" if c % NumberTheory.gcd(a, b) == 0 else "NO")

    @staticmethod
    def c():
        # C: every press waters for at most t seconds, cut off by the next press.
        n, t, *presses = map(int, sys.stdin.read().split())
        total = t  # the final press always runs for its full t seconds
        for prev, cur in zip(presses, presses[1:]):
            total += min(cur - prev, t)
        print(total)

    @staticmethod
    def d():
        pass
class ABC061:
    """Solutions for AtCoder Beginner Contest 061."""

    @staticmethod
    def a():
        """A: is c within [a, b]?"""
        a, b, c = map(int, sys.stdin.readline().split())
        print("Yes" if a <= c <= b else "No")

    @staticmethod
    def b():
        """B: degree of every vertex of an undirected multigraph."""
        n, m, *ab = map(int, sys.stdin.read().split())
        ab = np.array(ab) - 1
        g = np.zeros(n, dtype=np.int32)
        np.add.at(g, ab, 1)
        print(*g, sep="\n")

    @staticmethod
    def c():
        """C: k-th smallest value of a multiset given as (value, count) pairs."""
        n, k, *ab = map(int, sys.stdin.read().split())
        ab = np.transpose(np.array(ab).reshape(n, 2))
        a, b = ab[:, np.argsort(ab[0])]
        print(a[np.cumsum(b) >= k][0])

    @staticmethod
    def d():
        """D: maximum-score walk 1 -> n (scores negated into weights).

        Vertices are restricted to those strongly connected with vertex 1
        after adding a back-edge n -> 1 (i.e. on some 1 -> n walk), then
        Bellman-Ford runs; a surviving negative cycle means the score is
        unbounded ("inf").
        """
        n, m, *abc = map(int, sys.stdin.read().split())
        a, b, c = np.array(abc).reshape(m, 3).T
        a -= 1
        b -= 1
        c *= -1  # maximize score == minimize negated weight
        g = csr_matrix(
            ([1] * (m + 1), (np.append(a, n - 1), np.append(b, 0))), (n, n)
        )
        _, labels = connected_components(g, connection="strong")
        bl = (labels[a] == labels[0]) & (labels[b] == labels[0])
        g = csr_matrix((c[bl], (a[bl], b[bl])), (n, n))
        try:
            print(
                -shortest_path(g, method="BF", directed=True, indices=0)[
                    -1
                ].astype(int)
            )
        except Exception:  # SciPy raises NegativeCycleError -> unbounded
            print("inf")

    @staticmethod
    def d_2():
        """D (manual Bellman-Ford variant of :meth:`d`)."""
        n, m, *abc = map(int, sys.stdin.read().split())
        a, b, c = np.array(abc).reshape(m, 3).T
        a -= 1
        b -= 1
        c *= -1
        d = np.full(n, np.inf)
        d[0] = 0
        for _ in range(n - 1):
            np.minimum.at(d, b, d[a] + c)
        # n more rounds: anything still relaxing is reachable from a negative cycle
        neg_cycle = np.zeros(n, dtype=bool)  # was np.bool (removed in NumPy 1.24)
        for _ in range(n):
            np.logical_or.at(neg_cycle, b, d[a] + c < d[b])
            np.minimum.at(d, b, d[a] + c)
        print(inf if neg_cycle[-1] else -d[-1].astype(int))
class ABC062:
    """Solutions for AtCoder Beginner Contest 062."""

    @staticmethod
    def a():
        # A: month-group lookup table; same group -> "Yes".
        g = [0, 2, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0]
        x, y = map(int, sys.stdin.readline().split())
        print("Yes" if g[x - 1] == g[y - 1] else "No")

    @staticmethod
    def b():
        # B: frame the grid with '#' via np.pad.
        h, w = map(int, sys.stdin.readline().split())
        a = np.array(
            [list(s) for s in sys.stdin.read().split()], dtype="U1"
        )
        a = np.pad(a, pad_width=1, constant_values="#")
        for s in a:
            print("".join(s))

    @staticmethod
    def c():
        # C: cut the h x w bar into three rectangles minimizing
        # (largest - smallest) area; try a full-width first cut of x columns
        # with the rest split horizontally, both orientations.
        h, w = map(int, sys.stdin.readline().split())
        if h * w % 3 == 0:
            print(0)
            return

        def minimize(h, w):
            return min(
                h,
                *(
                    s[-1] - s[0]
                    for x in range(w // 3, w // 3 + 2)
                    for s in (
                        sorted(
                            [
                                h * x,
                                h // 2 * (w - x),
                                (h + 1) // 2 * (w - x),
                            ]
                        ),
                    )
                ),
            )

        print(min(minimize(h, w), minimize(w, h)))

    @staticmethod
    def d():
        # D: pick n of the first 2n values maximizing a prefix sum (min-heap
        # keeps the best n) and, via the same routine on the negated reversed
        # array, n of the last 2n minimizing a suffix sum; maximize the
        # difference over the split point.
        n, *a = map(int, sys.stdin.read().split())
        a = np.array(a)

        def optimize(a):
            # s[i]: best sum of n values chosen from the first n + i entries
            a = list(a)
            l, r = a[:n], a[n:]
            heapify(l)
            s = [None] * (n + 1)
            s[0] = sum(l)
            for i in range(n):
                x = heappop(l)
                heappush(l, max(x, r[i]))
                s[i + 1] = s[i] + max(0, r[i] - x)
            return np.array(s)

        print(
            (
                optimize(a[: 2 * n]) + optimize(-a[-1 : n - 1 : -1])[::-1]
            ).max()
        )
class ABC063:
    """Solutions for AtCoder Beginner Contest 063."""

    @staticmethod
    def a():
        # A: sum, but two-digit results are an "error".
        a = sum(map(int, sys.stdin.readline().split()))
        print("error" if a >= 10 else a)

    @staticmethod
    def b():
        # B: are all characters distinct?
        s = sys.stdin.readline().rstrip()
        print("yes" if len(set(s)) == len(s) else "no")

    @staticmethod
    def c():
        # C: largest total not divisible by 10; if the full sum is, drop the
        # smallest value that is not a multiple of 10 (if any exists).
        n, *a = map(int, sys.stdin.read().split())
        a = np.array(a)
        s = a.sum()
        if s % 10:
            print(s)
        elif not np.count_nonzero(a % 10):
            print(0)
        else:
            print(s - a[a % 10 != 0].min())

    @staticmethod
    def d():
        # D: binary-search the number of attacks; each attack hits everyone
        # for b and one chosen target for an extra d = a - b.
        n, a, b, *h = map(int, sys.stdin.read().split())
        h = np.array(h)
        d = a - b

        def possible(c):
            # after c area hits, targeted hits needed must fit within c
            hh = h.copy()
            np.maximum(hh - b * c, 0, out=hh)
            return ((hh + d - 1) // d).sum() <= c

        def binary_search():
            lo, hi = 0, 10**9
            while hi - lo > 1:
                c = (lo + hi) // 2
                if possible(c):
                    hi = c
                else:
                    lo = c
            return hi

        print(binary_search())
class ABC064:
    """Solutions for AtCoder Beginner Contest 064."""

    @staticmethod
    def a():
        # A: divisibility of the two-digit number "gb" by 4.
        r, g, b = map(int, sys.stdin.readline().split())
        print("YES" if (g * 10 + b) % 4 == 0 else "NO")

    @staticmethod
    def b():
        # B: one pass from the leftmost to the rightmost coordinate suffices.
        n, *coords = map(int, sys.stdin.read().split())
        print(max(coords) - min(coords))

    @staticmethod
    def c():
        # C: bucket ratings into the 8 fixed colours plus the >= 3200 group,
        # whose members may pick any colour.
        n, *ratings = map(int, sys.stdin.read().split())
        buckets = np.bincount(
            np.minimum(np.array(ratings) // 400, 8), minlength=9
        )
        upper = np.count_nonzero(buckets[:-1]) + buckets[-1]
        lower = max(upper - buckets[-1], 1)
        print(lower, upper)

    @staticmethod
    def d():
        # D: prepend '(' for every unmatched ')' and append ')' for every
        # unmatched '('.
        n, s = sys.stdin.read().split()
        unmatched_close = unmatched_open = 0
        for ch in s:
            if ch == "(":
                unmatched_open += 1
            elif unmatched_open:
                unmatched_open -= 1
            else:
                unmatched_close += 1
        print("(" * unmatched_close + s + ")" * unmatched_open)
class ABC065:
    """Solutions for AtCoder Beginner Contest 065."""

    @staticmethod
    def a():
        # A: freshness windows relative to the best-by date.
        x, a, b = map(int, sys.stdin.readline().split())
        y = -a + b
        print("delicious" if y <= 0 else "safe" if y <= x else "dangerous")

    @staticmethod
    def b():
        # B: follow the button permutation until landing on button 2
        # (0-based index 1); -1 if it never happens within n presses.
        n, *a = [int(x) - 1 for x in sys.stdin.read().split()]
        i = 0
        for c in range(n):
            i = a[i]
            if i == 1:
                print(c + 1)
                return
        print(-1)

    @staticmethod
    def c():
        # C: alternate two groups of sizes n and m: n! * m!, doubled when
        # the sizes are equal; 0 when they differ by 2 or more.
        n, m = map(int, sys.stdin.readline().split())
        d = abs(n - m)
        if d >= 2:
            print(0)
            return
        fac, _ = Algebra.generate_fac_ifac(10**5)
        print(fac[n] * fac[m] * (1 if d else 2) % MOD)

    @staticmethod
    def d():
        # D: MST where it suffices to connect each point to its x- and
        # y-sorted neighbours; build both edge lists, keep the lightest of
        # any parallel pair (np.unique keeps the first, i.e. lightest, after
        # sorting by weight), then run scipy's MST.
        n, *xy = map(int, sys.stdin.read().split())
        x, y = np.array(xy).reshape(n, 2).T
        i = np.argsort(x)
        ax, bx, cx = (
            i[:-1],
            i[1:],
            x[
                i[1:],
            ]
            - x[i[:-1]],
        )
        i = np.argsort(y)
        ay, by, cy = (
            i[:-1],
            i[1:],
            y[
                i[1:],
            ]
            - y[i[:-1]],
        )
        e = np.vstack(
            [np.hstack([ax, ay]), np.hstack([bx, by]), np.hstack([cx, cy])]
        )
        e = e[:, np.argsort(e[-1])]
        _, i = np.unique(e[:-1], return_index=True, axis=1)
        a, b, c = e[:, i]
        print(
            minimum_spanning_tree(csr_matrix((c, (a, b)), (n, n)))
            .astype(np.int64)
            .sum()
        )

    @staticmethod
    def d_2():
        # D: same adjacency idea using the project Graph and Kruskal.
        n, *xy = map(int, sys.stdin.read().split())
        x, y = xy[::2], xy[1::2]
        g = GeometryTopology.Graph(n)

        def make(a):
            # connect sorted neighbours along one axis, keeping the lightest
            # edge when a parallel edge already exists
            b = sorted(enumerate(a), key=lambda x: x[1])
            for i in range(n - 1):
                u, v, w = b[i][0], b[i + 1][0], b[i + 1][1] - b[i][1]
                for u, v in [(v, u), (u, v)]:
                    if not v in g.edges[u]:
                        g.add_edge(u, v, weight=w)
                    else:
                        g.edges[u][v].weight = min(g.edges[u][v].weight, w)

        make(x)
        make(y)
        _, d = g.kruskal()
        # _, d = g.prim()
        # _, d = g.boruvka()
        print(d)
class ABC066:
    """Solutions for AtCoder Beginner Contest 066."""

    @staticmethod
    def a():
        # A: sum of the two smaller scores.
        print(sum(sorted(map(int, sys.stdin.readline().split()))[:-1]))

    @staticmethod
    def b():
        # B: longest proper even-length prefix that is a doubled string.
        s = sys.stdin.readline().rstrip()

        def f(s):
            n = len(s) // 2
            return s[:n] == s[n:]

        for i in range(len(s) - 2, 0, -2):
            if f(s[:i]):
                print(i)
                return

    @staticmethod
    def c():
        # C: simulate append-then-reverse with a deque, reversing lazily once.
        n, *a = map(int, sys.stdin.read().split())
        b = deque()
        for i in range(n):
            if i & 1:
                b.appendleft(a[i])
            else:
                b.append(a[i])
        if n & 1:
            b.reverse()
        print(*b)

    @staticmethod
    def d():
        # D: n + 1 values containing exactly one duplicate; per length k the
        # count is C(n + 1, k) - C(d, k - 1), where d spans the elements
        # outside the duplicate pair (double-counted subsequences).
        n, *a = map(int, sys.stdin.read().split())
        tmp = [None] * (n + 1)
        for i in range(n + 1):
            if tmp[a[i]] is not None:
                d = tmp[a[i]] + n - i
                break
            tmp[a[i]] = i
        k = np.arange(1, n + 2)
        c = Combinatorics.CombinationsMod(n + 1, MOD)
        print(*((c(n + 1, k) - c(d, k - 1)) % MOD), sep="\n")
class ABC067:
    """Solutions for AtCoder Beginner Contest 067 (only D implemented)."""

    @staticmethod
    def a():
        pass

    @staticmethod
    def b():
        pass

    @staticmethod
    def c():
        pass

    @staticmethod
    def d():
        # D: Fennec (from vertex 0) wins iff she claims more than half of the
        # tree cells; compare BFS distances from the two starting vertices.
        n, *ab = map(int, sys.stdin.read().split())
        g = GeometryTopology.Graph(n)
        for a, b in zip(*[iter(ab)] * 2):
            a -= 1
            b -= 1
            g.add_edge(a, b)
            g.add_edge(b, a)
        d1, d2 = g.bfs(0), g.bfs(n - 1)
        print(
            "Fennec"
            if sum(d1[i] <= d2[i] for i in range(n)) > n // 2
            else "Snuke"
        )
class ABC068:
    """Solutions for AtCoder Beginner Contest 068 (only D implemented)."""

    @staticmethod
    def a():
        pass

    @staticmethod
    def b():
        pass

    @staticmethod
    def c():
        pass

    @staticmethod
    def d():
        # D: constructive answer with a fixed n = 50: start from the
        # descending base sequence, add the quotient q everywhere and +1 to
        # the first r entries so the process runs exactly k steps.
        k = int(sys.stdin.readline().rstrip())
        n = 50
        print(n)
        q, r = divmod(k, n)
        a = np.arange(n - 1, -1, -1) + q
        a[:r] += 1
        print(*a)
class ABC069:
    """Solutions for AtCoder Beginner Contest 069 (only D implemented)."""

    @staticmethod
    def a():
        pass

    @staticmethod
    def b():
        pass

    @staticmethod
    def c():
        pass

    @staticmethod
    def d():
        # D: fill the grid in boustrophedon (snake) order with a_i cells of
        # colour i; contiguity in snake order keeps every colour connected.
        h, w, n, *a = map(int, sys.stdin.read().split())
        c = [i + 1 for i in range(n) for j in range(a[i])]
        for i in range(h):
            row = c[i * w : (i + 1) * w]
            if i & 1:
                row = row[::-1]
            print(*row)
class ABC070:
    """Solutions for AtCoder Beginner Contest 070 (only D implemented)."""

    @staticmethod
    def d():
        # D: in a tree every x -> y path through waypoint k has length
        # d[x] + d[y] with d the distances from k.  (g.bfs presumably
        # accumulates edge weights -- see GeometryTopology.Graph.)
        n = int(sys.stdin.readline().rstrip())
        g = GeometryTopology.Graph(n)
        for _ in range(n - 1):
            a, b, c = map(int, sys.stdin.readline().split())
            a -= 1
            b -= 1
            g.add_edge(a, b, weight=c)
            g.add_edge(b, a, weight=c)
        q, k = map(int, sys.stdin.readline().split())
        d = g.bfs(k - 1)
        for _ in range(q):
            x, y = map(int, sys.stdin.readline().split())
            x -= 1
            y -= 1
            print(d[x] + d[y])
class ABC071:
    """Solutions for AtCoder Beginner Contest 071 (only D implemented)."""

    @staticmethod
    def d():
        # D: colour a 2 x n domino tiling with no adjacent equal colours.
        # DP over columns: 3 or 6 starting choices depending on whether the
        # first column is one vertical domino, then a x1 / x2 / x3 multiplier
        # per transition between vertical and horizontal dominoes, mod MOD.
        n, *s = sys.stdin.read().split()
        n = int(n)
        s = list(zip(*s))
        dp = [0] * n
        dp[0] = 3 if s[0][0] == s[0][1] else 6
        for i in range(1, n):
            dp[i] = dp[i - 1]
            if s[i][0] == s[i - 1][0]:
                continue
            dp[i] *= (
                2
                if s[i - 1][0] == s[i - 1][1]
                else 3
                if s[i][0] != s[i][1]
                else 1
            )
            dp[i] %= MOD
        print(dp[-1])
class ABC072:
    """Solutions for AtCoder Beginner Contest 072 (only D implemented)."""

    @staticmethod
    def d():
        """D - Derangement: minimum adjacent swaps so no p[i] == i + 1.

        Greedy scan: each fixed point costs one swap with its right
        neighbour, and when two consecutive fixed points occur, one swap
        resolves both.  (The original incremented via the redundant
        ``cnt += p[i] == i + 1`` inside the identical condition; this is
        simply ``cnt += 1``.)
        """
        n, *p = map(int, sys.stdin.read().split())
        p.append(-1)  # sentinel so p[i + 1] is always readable
        swaps = 0
        i = 0
        while i < n:
            if p[i] == i + 1:  # fixed point -> one swap needed
                swaps += 1
                if p[i + 1] == i + 2:  # the swap also fixes the next one
                    i += 1
            i += 1
        print(swaps)
class ABC073:
    """Solutions for AtCoder Beginner Contest 073 (only D implemented)."""

    @staticmethod
    def a():
        pass

    @staticmethod
    def b():
        pass

    @staticmethod
    def c():
        pass

    @staticmethod
    def d():
        # D: all-pairs shortest paths (Floyd-Warshall), then brute-force every
        # visiting order of the required towns.  Note: r is rebound from the
        # town count to the permutation array halfway through.
        n, m, r, *I = map(int, sys.stdin.read().split())
        I = np.array(I)
        a, b, c = I[r:].reshape(m, 3).T
        d = shortest_path(
            csr_matrix((c, (a - 1, b - 1)), (n, n)),
            method="FW",
            directed=False,
        ).astype(np.int32)
        r = np.array([*itertools.permutations(I[:r] - 1)])
        print((d[r[:, :-1], r[:, 1:]].sum(axis=1)).min())
class ABC074:
    """Solutions for AtCoder Beginner Contest 074 (only D implemented)."""

    @staticmethod
    def a():
        pass

    @staticmethod
    def b():
        pass

    @staticmethod
    def c():
        pass

    @staticmethod
    def d():
        # D: given a claimed distance matrix, check it is realizable and find
        # the minimum total road length.
        n, *a = map(int, sys.stdin.read().split())
        a = np.array(a, dtype=np.int32).reshape(n, n)
        b = shortest_path(a, method="FW").astype(np.int32)
        if (b < a).any():
            # some pair has a shortcut shorter than its claimed distance
            print(-1)
            return
        np.fill_diagonal(b, 10**9)
        # zero out edges implied by a path through some intermediate vertex
        a[np.any(b[:, None] + b <= a[:, :, None], axis=2)] = 0
        print(a.sum() // 2)
class ABC075:
    """Solutions for AtCoder Beginner Contest 075 (only D implemented)."""

    @staticmethod
    def a():
        pass

    @staticmethod
    def b():
        pass

    @staticmethod
    def c():
        pass

    @staticmethod
    def d():
        # D: smallest axis-aligned rectangle (edges through points) covering
        # at least k points: enumerate y-pairs x x-pairs, counting contained
        # points with a 2-D prefix-count table over the sorted coordinates.
        n, k, *xy = map(int, sys.stdin.read().split())
        xy = np.array(xy).reshape(n, 2)
        x_y = xy.copy()[np.argsort(xy[:, 0])]
        y_x = xy.copy()[np.argsort(xy[:, 1])]
        comb = np.array([*itertools.combinations(range(n), 2)])
        i1, i2 = comb.T
        j1, j2 = comb[None, :].T
        # candidate areas: (y-extent) * (x-extent) for every pair of pairs
        s = (y_x[:, 1][i2] - y_x[:, 1][i1]) * (
            x_y[:, 0][j2] - x_y[:, 0][j1]
        )
        c = np.zeros((n + 1, n + 1), dtype=np.int64)
        for i in range(n):
            c[i + 1, 1:] += c[i, 1:] + (y_x[i, 0] <= x_y[:, 0])
        # inclusion-exclusion on the prefix table -> points inside each rect
        a = c[i2 + 1, j2 + 1] - c[i2 + 1, j1] - c[i1, j2 + 1] + c[i1, j1]
        print(s[a >= k].min())
class ABC076:
    """Solutions for AtCoder Beginner Contest 076 (only D implemented)."""

    @staticmethod
    def d():
        # D: maximum distance under per-segment speed caps.  At each
        # half-second tick the feasible speed is the minimum over segments of
        # (cap + distance-to-segment ramp); integrate the resulting
        # piecewise-linear speed profile with the trapezoid rule.
        n, *tv = map(int, sys.stdin.read().split())
        t, v = np.array(tv).reshape(2, n)
        t = np.pad(t, pad_width=[2, 1], constant_values=0)
        np.cumsum(t, out=t)
        l, r = t[:-1], t[1:]  # segment boundaries (cumulative times)
        v = np.pad(v, pad_width=[1, 1], constant_values=0)
        x = np.arange(0, r[-1] + 0.1, 0.5, dtype=np.float32)[:, None]
        # y = np.stack([v-(x-l), np.zeros(r[-1]*2+1, dtype=np.float32)[:,None]+v, v+(x-r)]).max(axis=0).min(axis=1)
        mx = v - (x - l)
        np.maximum(mx, v, out=mx)
        np.maximum(mx, v + (x - r), out=mx)
        y = mx.min(axis=1)
        print(((y[:-1] + y[1:]) / 4).sum())
class ABC077:
    """Solutions for AtCoder Beginner Contest 077 (only D implemented)."""

    @staticmethod
    def d():
        # D: smallest digit sum over positive multiples of k: 0-1 BFS on
        # remainders, where appending a 0 (x10) costs 0 and adding 1 costs 1;
        # the leading digit 1 contributes the final +1.
        k = int(sys.stdin.readline().rstrip())
        g = GeometryTopology.Graph(k)
        for i in range(k):
            g.add_edge(i, i * 10 % k, weight=0)
            g.add_edge(i, (i + 1) % k, update=False, weight=1)
        print(1 + g.bfs01(1)[0])
class ABC078:
    """Solutions for AtCoder Beginner Contest 078 (only D implemented)."""

    @staticmethod
    def d():
        # D: with a single card the score is |a[0] - w|; otherwise the game
        # value is the larger of ending on w or on the second-to-last card.
        n, z, w, *a = map(int, sys.stdin.read().split())
        if n == 1:
            score = abs(a[0] - w)
        else:
            score = max(abs(a[-1] - w), abs(a[-1] - a[-2]))
        print(score)
class ABC079:
    """Solutions for AtCoder Beginner Contest 079 (only D implemented)."""

    @staticmethod
    def d():
        # D: cheapest rewrite of every digit into 1: Dijkstra over the 10x10
        # digit-rewrite cost matrix (transposed so paths lead *to* digit 1),
        # then sum per-cell costs, skipping empty (-1) cells.
        h, w, *I = map(int, sys.stdin.read().split())
        I = np.array(I)
        c = I[:100].reshape(10, 10)
        a = I[100:].reshape(h, w)
        c = shortest_path(c.T, method="D", indices=1).astype(np.int32)
        print(c[a[a != -1]].sum())
class ABC080:
    """Solutions for AtCoder Beginner Contest 080 (only D implemented)."""

    @staticmethod
    def d():
        # D: recorders needed = peak number of simultaneously busy channels;
        # per-channel difference arrays over time, then count channels active
        # at each instant and take the maximum.  (Note c is rebound from the
        # channel count to the per-program channel array.)
        n, c, *stc = map(int, sys.stdin.read().split())
        using = np.zeros((c, 10**5 + 2), dtype=np.int8)
        s, t, c = np.array(stc).reshape(n, 3).T
        np.add.at(using, (c - 1, s), 1)
        np.subtract.at(using, (c - 1, t + 1), 1)
        np.cumsum(using, axis=1, out=using)
        print(np.count_nonzero(using, axis=0).max())
class ABC081:
    """Solutions for AtCoder Beginner Contest 081 (only D implemented)."""

    @staticmethod
    def d():
        """D: make the sequence non-decreasing in at most 2n operations.

        Add the element of largest |value| onto every element (n operations),
        which forces a uniform sign; then one accumulation pass (n - 1
        operations) sorts the array: left-to-right for non-negative values,
        right-to-left for non-positive ones.

        Fix: exactly n + (n - 1) = 2n - 1 operation lines are emitted, so
        the announced count must be 2n - 1 (the original printed 2n, which
        mismatches the number of lines that follow -- a format error for
        this problem's output specification).
        """
        n, *a = map(int, sys.stdin.read().split())
        a = np.array(a)
        i = np.argmax(np.absolute(a))
        print(2 * n - 1)
        for j in range(n):
            # spread the dominant element onto everything (fixes the sign)
            print(i + 1, j + 1)
        if a[i] >= 0:
            # all values now non-negative: accumulate left to right
            for j in range(n - 1):
                print(j + 1, j + 2)
        else:
            # all values now non-positive: accumulate right to left
            for j in range(n - 1, 0, -1):
                print(j + 1, j)
# Placeholder solution classes for contests ABC082-ABC169 (not implemented yet).
class ABC082:
    pass
class ABC083:
    pass
class ABC084:
    pass
class ABC085:
    pass
class ABC086:
    pass
class ABC087:
    pass
class ABC088:
    pass
class ABC089:
    pass
class ABC090:
    pass
class ABC091:
    pass
class ABC092:
    pass
class ABC093:
    pass
class ABC094:
    pass
class ABC095:
    pass
class ABC096:
    pass
class ABC097:
    pass
class ABC098:
    pass
class ABC099:
    pass
class ABC100:
    pass
class ABC101:
    pass
class ABC102:
    pass
class ABC103:
    pass
class ABC104:
    pass
class ABC105:
    pass
class ABC106:
    pass
class ABC107:
    pass
class ABC108:
    pass
class ABC109:
    pass
class ABC110:
    pass
class ABC111:
    pass
class ABC112:
    pass
class ABC113:
    pass
class ABC114:
    pass
class ABC115:
    pass
class ABC116:
    pass
class ABC117:
    pass
class ABC118:
    pass
class ABC119:
    pass
class ABC120:
    pass
class ABC121:
    pass
class ABC122:
    pass
class ABC123:
    pass
class ABC124:
    pass
class ABC125:
    pass
class ABC126:
    pass
class ABC127:
    pass
class ABC128:
    pass
class ABC129:
    pass
class ABC130:
    pass
class ABC131:
    pass
class ABC132:
    pass
class ABC133:
    pass
class ABC134:
    pass
class ABC135:
    pass
class ABC136:
    pass
class ABC137:
    pass
class ABC138:
    pass
class ABC139:
    pass
class ABC140:
    pass
class ABC141:
    pass
class ABC142:
    pass
class ABC143:
    pass
class ABC144:
    pass
class ABC145:
    pass
class ABC146:
    pass
class ABC147:
    pass
class ABC148:
    pass
class ABC149:
    pass
class ABC150:
    pass
class ABC151:
    pass
class ABC152:
    pass
class ABC153:
    pass
class ABC154:
    pass
class ABC155:
    pass
class ABC156:
    pass
class ABC157:
    pass
class ABC158:
    pass
class ABC159:
    pass
class ABC160:
    pass
class ABC161:
    pass
class ABC162:
    pass
class ABC163:
    pass
class ABC164:
    pass
class ABC165:
    pass
class ABC166:
    pass
class ABC167:
    pass
class ABC168:
    pass
class ABC169:
    pass
class ABC170:
@staticmethod
def a():
x = [int(x) for x in sys.stdin.readline().split()]
for i in range(5):
if x[i] != i + 1:
print(i + 1)
break
@staticmethod
def b():
x, y = map(int, sys.stdin.readline().split())
print("Yes" if 2 * x <= y <= 4 * x and y % 2 == 0 else "No")
@staticmethod
def c():
x, n, *p = map(int, sys.stdin.read().split())
a = list(set(range(102)) - set(p))
a = [(abs(y - x), y) for y in a]
print(sorted(a)[0][1])
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
cand = set(a)
cnt = 0
for x, c in sorted(Counter(a).items()):
cnt += c == 1 and x in cand
cand -= set(range(x * 2, 10**6 + 1, x))
print(cnt)
@staticmethod
def e():
n, q = map(int, sys.stdin.readline().split())
queue = []
m = 2 * 10**5
infants = [[] for _ in range(m)]
highest_rate = [None] * m
where = [None] * n
rate = [None] * n
def entry(i, k):
where[i] = k
while infants[k]:
r, j = heappop(infants[k])
if where[j] != k or j == i:
continue
if rate[i] >= -r:
highest_rate[k] = rate[i]
heappush(queue, (rate[i], k, i))
heappush(infants[k], (r, j))
break
else:
highest_rate[k] = rate[i]
heappush(queue, (rate[i], k, i))
heappush(infants[k], (-rate[i], i))
def transfer(i, k):
now = where[i]
while infants[now]:
r, j = heappop(infants[now])
if where[j] != now or j == i:
continue
if highest_rate[now] != -r:
highest_rate[now] = -r
heappush(queue, (-r, now, j))
heappush(infants[now], (r, j))
break
else:
highest_rate[now] = None
entry(i, k)
def inquire():
while True:
r, k, i = heappop(queue)
if where[i] != k or r != highest_rate[k]:
continue
heappush(queue, (r, k, i))
return r
for i in range(n):
a, b = map(int, sys.stdin.readline().split())
rate[i] = a
entry(i, b - 1)
for _ in range(q):
c, d = map(int, sys.stdin.readline().split())
transfer(c - 1, d - 1)
print(inquire())
class ABC171:
@staticmethod
def a():
c = sys.stdin.readline().rstrip()
print("A" if c < "a" else "a")
@staticmethod
def b():
n, k, *p = map(int, sys.stdin.read().split())
print(sum(sorted(p)[:k]))
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
n -= 1
l = 1
while True:
if n < pow(26, l):
break
n -= pow(26, l)
l += 1
res = "".join(
[chr(ord("a") + d) for d in NumberTheory.base_convert(n, 26)][
::-1
]
)
res = "a" * (l - len(res)) + res
print(res)
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
a = [int(x) for x in sys.stdin.readline().split()]
s = sum(a)
cnt = Counter(a)
q = int(sys.stdin.readline().rstrip())
for _ in range(q):
b, c = map(int, sys.stdin.readline().split())
s += (c - b) * cnt[b]
print(s)
cnt[c] += cnt[b]
cnt[b] = 0
@staticmethod
def e():
n, *a = map(int, sys.stdin.read().split())
s = 0
for x in a:
s ^= x
b = map(lambda x: x ^ s, a)
print(*b, sep=" ")
class ABC172:
@staticmethod
def a():
a = int(sys.stdin.readline().rstrip())
print(a * (1 + a + a**2))
@staticmethod
def b():
s, t = sys.stdin.read().split()
print(sum(s[i] != t[i] for i in range(len(s))))
@staticmethod
def c():
n, m, k = map(int, sys.stdin.readline().split())
a = [0] + [int(x) for x in sys.stdin.readline().split()]
b = [int(x) for x in sys.stdin.readline().split()]
(*sa,) = itertools.accumulate(a)
(*sb,) = itertools.accumulate(b)
res = 0
for i in range(n + 1):
r = k - sa[i]
if r < 0:
break
res = max(res, i + bi_r(sb, r))
print(res)
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
f = np.zeros(n + 1, dtype=np.int64)
for i in range(1, n + 1):
f[i::i] += 1
print((np.arange(1, n + 1) * f[1:]).sum())
class ABC173:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
charge = (n + 999) // 1000 * 1000 - n
print(charge)
@staticmethod
def b():
n, *s = sys.stdin.read().split()
c = Counter(s)
for v in "AC, WA, TLE, RE".split(", "):
print(f"{v} x {c[v]}")
@staticmethod
def c():
h, w, k = map(int, sys.stdin.readline().split())
c = [sys.stdin.readline().rstrip() for _ in range(h)]
tot = 0
for i in range(1 << h):
for j in range(1 << w):
cnt = 0
for y in range(h):
for x in range(w):
if i >> y & 1 or j >> x & 1:
continue
cnt += c[y][x] == "#"
tot += cnt == k
print(tot)
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
a.sort(reverse=True)
res = (
a[0]
+ sum(a[1 : 1 + (n - 2) // 2]) * 2
+ a[1 + (n - 2) // 2] * (n & 1)
)
print(res)
@staticmethod
def e():
MOD = 10**9 + 7
n, k, *a = map(int, sys.stdin.read().split())
minus = [x for x in a if x < 0]
plus = [x for x in a if x > 0]
if len(plus) + len(minus) // 2 * 2 >= k: # plus
(*minus,) = map(abs, minus)
minus.sort(reverse=True)
plus.sort(reverse=True)
cand = []
if len(minus) & 1:
minus = minus[:-1]
for i in range(0, len(minus) - 1, 2):
cand.append(minus[i] * minus[i + 1] % MOD)
if k & 1:
res = plus[0]
plus = plus[1:]
else:
res = 1
if len(plus) & 1:
plus = plus[:-1]
for i in range(0, len(plus) - 1, 2):
cand.append(plus[i] * plus[i + 1] % MOD)
cand.sort(reverse=True)
for x in cand[: k // 2]:
res *= x
res %= MOD
print(res)
elif 0 in a:
print(0)
else:
cand = sorted(map(abs, a))
res = 1
for i in range(k):
res *= cand[i]
res %= MOD
res = MOD - res
print(res)
pass
class ABC174:
    """ABC174 solutions (read stdin, print the answer)."""

    @staticmethod
    def a():
        """A - Air Conditioner: turn it on iff the temperature is >= 30."""
        temperature = int(sys.stdin.readline())
        answer = "Yes" if temperature >= 30 else "No"
        print(answer)
class ABC178:
    # ABC178 solutions; a()-c() are unsolved placeholders.
    @staticmethod
    def a():
        pass
    @staticmethod
    def b():
        pass
    @staticmethod
    def c():
        pass
    @staticmethod
    def d():
        # D - Redistribution: counts decompositions of s governed by the
        # recurrence f(k+1) = f(k) + f(k-2), evaluated by fast matrix
        # exponentiation.  Algebra.matrix_pow reduces modulo its default
        # modulus 10**9 + 7.
        s = int(sys.stdin.readline().rstrip())
        if s == 0:
            # NOTE(review): s == 0 is reported as 1 (empty-sum convention)
            # -- confirm the judge expects this.
            print(1)
            return
        elif s == 1:
            print(0)
            return
        # Companion matrix of the recurrence; multiplying the state vector
        # [f(k), f(k-1), f(k-2)] by c advances it one step
        # (row 0 is [1, 0, 1] -> f(k+1) = f(k) + f(k-2)).
        c = np.eye(3, k=-1, dtype=np.int64)
        c[0, 0] = c[0, 2] = 1
        a = np.array([0, 0, 1])
        print(Algebra.dot(Algebra.matrix_pow(c, s - 2), a)[0])
class ABC179:
@staticmethod
def a():
s = sys.stdin.readline().rstrip()
print(s + "s" if s[-1] != "s" else s + "es")
@staticmethod
def b():
n, *d = map(int, sys.stdin.read().split())
d = np.array(d).reshape(n, 2).T
d = np.equal(d[0], d[1]).astype(int)
dd = d.copy()
dd[1:] += d[:-1]
dd[:-1] += d[1:]
print("Yes" if (dd >= 3).any() else "No")
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
res = (n // np.arange(1, n + 1)).sum() - len(
NumberTheory.find_divisors(n)
)
print(res)
@staticmethod
def d():
mod = 998244353
n, k, *lr = map(int, sys.stdin.read().split())
l, r = np.array(lr).reshape(k, -1).T
@njit((i8, i8[:], i8[:]), cache=True)
def solve(n, l, r):
res = np.zeros(n * 2, dtype=np.int64)
res[0], res[1] = 1, -1
for i in range(n - 1):
res[i + 1] = (res[i + 1] + res[i]) % mod
res[i + l] = (res[i + l] + res[i]) % mod
res[i + r + 1] = (res[i + r + 1] - res[i]) % mod
print(res[n - 1])
solve(n, l, r)
@staticmethod
def e():
n, x, m = map(int, sys.stdin.readline().split())
res = [-1 for _ in range(m)]
s = 0
loop = np.zeros(m, dtype=np.int64)
for i in range(m + 1):
if i == n:
print(s)
return
if res[x] != -1:
l, loop = i - res[x], loop[res[x] : i]
q, r = divmod(n - i, l)
print(s + q * loop.sum() + loop[:r].sum())
return
res[x], loop[i] = i, x
s += x
x = x**2 % m
class ABC180:
@staticmethod
def a():
n, a, b = map(int, sys.stdin.readline().split())
print(n - a + b)
@staticmethod
def b():
n, *x = map(int, sys.stdin.read().split())
x = np.absolute(np.array(x))
print(x.sum())
print(np.sqrt((x**2).sum()))
print(x.max())
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
div = NumberTheory.find_divisors(n)
print(*div, sep="\n")
@staticmethod
def d():
x, y, a, b = map(int, sys.stdin.readline().split())
cnt = 0
while x * a <= x + b:
x *= a
if x >= y:
print(cnt)
return
cnt += 1
cnt += (y - x - 1) // b
print(cnt)
@staticmethod
def e():
n, *xyz = map(int, sys.stdin.read().split())
xyz = list(zip(*[iter(xyz)] * 3))
dist = [[0] * n for _ in range(n)]
for i in range(n):
a, b, c = xyz[i]
for j in range(n):
p, q, r = xyz[j]
dist[i][j] = abs(p - a) + abs(q - b) + max(0, r - c)
dp = [[inf] * n for _ in range(1 << n)]
dp[0][0] = 0
for s in range(1 << n):
for i in range(n):
t = s | (1 << i)
for j in range(n):
dp[t][i] = min(dp[t][i], dp[s][j] + dist[j][i])
print(dp[-1][0])
@staticmethod
def f(): # rewrite with jit compiling later.
n, m, l = map(int, sys.stdin.readline().split())
c = Combinatorics.CombinationsMod(n, MOD)
path = np.zeros(n + 1, dtype=np.int64)
path[1] = path[2] = 1
for i in range(3, n + 1):
path[i] = path[i - 1] * i % MOD
cycle = np.zeros(n + 1, dtype=np.int64)
cycle[1:] = path[:-1]
dp = np.zeros((n + 1, m + 1), dtype=np.int64)
def f(l):
dp[:, :] = 0
dp[0, 0] = 1
for i in range(n):
for j in range(m + 1):
k = np.arange(1, min(l, n - i, m - j + 1) + 1)
dp[i + k, j + k - 1] += (
dp[i, j]
* c(n - i - 1, k - 1)
% MOD
* path[k]
% MOD
)
dp[i + k, j + k - 1] %= MOD
k = np.arange(2, min(l, n - i, m - j) + 1)
dp[i + k, j + k] += (
dp[i, j]
* c(n - i - 1, k - 1)
% MOD
* cycle[k]
% MOD
)
dp[i + k, j + k] %= MOD
return dp[n, m]
print((f(l) - f(l - 1)) % MOD)
@staticmethod
def f_2(): # PyPy
n, m, l = map(int, sys.stdin.readline().split())
c = Combinatorics.CombinationsMod(n, MOD)
path = [0] * (n + 1)
path[1] = path[2] = 1
for i in range(3, n + 1):
path[i] = path[i - 1] * i % MOD
cycle = [0] + path[:-1]
def f(l):
dp = [[0] * (m + 1) for _ in range(n + 1)]
dp[0][0] = 1
for i in range(n):
for j in range(m + 1):
for k in range(1, min(l, n - i, m - j + 1) + 1):
dp[i + k][j + k - 1] += (
dp[i][j]
* c(n - i - 1, k - 1)
% MOD
* path[k]
% MOD
)
dp[i + k][j + k - 1] %= MOD
for k in range(1, min(l, n - i, m - j) + 1):
dp[i + k][j + k] += (
dp[i][j]
* c(n - i - 1, k - 1)
% MOD
* cycle[k]
% MOD
)
dp[i + k][j + k] %= MOD
return dp[n][m]
print((f(l) - f(l - 1)) % MOD)
class ARC106:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
a = 1
while pow(3, a) <= n:
m = n - pow(3, a)
b = 1
while pow(5, b) <= m:
if pow(5, b) == m:
print(a, b)
return
b += 1
a += 1
print(-1)
@staticmethod
def b():
n, m = map(int, sys.stdin.readline().split())
a = [int(x) for x in sys.stdin.readline().split()]
b = [int(x) for x in sys.stdin.readline().split()]
uf = GeometryTopology.Graph(n)
uf.init_dsu()
for _ in range(m):
c, d = map(int, sys.stdin.readline().split())
c -= 1
d -= 1
uf.unite(c, d)
visited = [False] * n
ga = [[] for _ in range(n)]
gb = [[] for _ in range(n)]
for i in range(n):
r = uf.find(i)
ga[r].append(a[i])
gb[r].append(b[i])
print(
"Yes"
if all(sum(ga[i]) == sum(gb[i]) for i in range(n))
else "No"
)
@staticmethod
def c():
n, m = map(int, sys.stdin.readline().split())
if m < 0:
print(-1)
return
if n == 1:
if m != 0:
print(-1)
return
print(1, 2)
return
if m >= n - 1:
print(-1)
return
l, r = 1, 10**9
print(l, r)
for _ in range(n - 2 - m):
l += 1
r -= 1
print(l, r)
r = l
for _ in range(m + 1):
l, r = r + 1, r + 2
print(l, r)
@staticmethod
def d():
mod = 998244353
n, k, *a = map(int, sys.stdin.read().split())
a = np.array(a)
b = np.zeros((k + 1, n), dtype=np.int64)
b[0] = 1
for i in range(k):
b[i + 1] = b[i] * a % mod
s = b.sum(axis=1) % mod
inv_2 = pow(2, mod - 2, mod)
c = Combinatorics.CombinationsMod(mod=mod)
for x in range(1, k + 1):
l = np.arange(x + 1)
print(
(
(c(x, l) * s[l] % mod * s[l][::-1] % mod).sum() % mod
- pow(2, x, mod) * s[x]
)
% mod
* inv_2
% mod
)
@staticmethod
def e():
pass
@staticmethod
def f():
pass
class ACL001:
    # ACL Beginner Contest.
    @staticmethod
    def a():
        # A (unfinished): currently only parses the coordinate pairs and
        # echoes them back for inspection.
        n, *xy = map(int, sys.stdin.read().split())
        (*xy,) = zip(*[iter(xy)] * 2)
        print(xy)
        pass
class TDPC:
    # Typical DP Contest: placeholder, nothing solved yet.
    @staticmethod
    def t():
        pass
class MSolutions2020:
    """M-SOLUTIONS Programming Contest 2020 (stdin -> stdout)."""

    @staticmethod
    def a():
        """A - Kyu in AtCoder: map a rating in [400, 1999] to its kyu."""
        rating = int(sys.stdin.readline())
        print(8 - (rating - 400) // 200)

    @staticmethod
    def b():
        """B - Magic 2: can r < g < b be reached with at most k doublings?

        Greedy: raise g above r first, then b above g; spare doublings
        are simply left unused.
        """
        r, g, b, k = map(int, sys.stdin.read().split())
        for _ in range(k):
            if g <= r:
                g *= 2
            elif b <= g:
                b *= 2
        print("Yes" if r < g < b else "No")

    @staticmethod
    def c():
        """C - Marks: compare k-term rolling products; the ratio test
        reduces to comparing a[i] with a[i-k]."""
        n, k, *grades = map(int, sys.stdin.read().split())
        for prev, cur in zip(grades, grades[k:]):
            print("Yes" if cur > prev else "No")

    @staticmethod
    def d():
        """D - Road to Millionaire: greedy trading — buy everything just
        before each price rise, sell everything just before each fall."""
        n, *prices = map(int, sys.stdin.read().split())
        prices.append(-1)  # sentinel below any price: forces a final sell-off
        money, stock = 1000, 0
        for today, tomorrow in zip(prices, prices[1:]):
            if tomorrow > today:
                bought, money = divmod(money, today)
                stock += bought
            elif tomorrow < today:
                money += today * stock
                stock = 0
        print(money)
class Codeforces:
class CR676div2:
@staticmethod
def a():
t = int(sys.stdin.readline().rstrip())
for _ in range(t):
a, b = map(int, sys.stdin.readline().split())
print(a ^ b)
@staticmethod
def b():
t = int(sys.stdin.readline().rstrip())
for _ in range(t):
n = int(sys.stdin.readline().rstrip())
s = [list(sys.stdin.readline().rstrip()) for _ in range(n)]
s[0][0] = s[-1][-1] = "0"
for i in range(n):
for j in range(n):
s[i][j] = int(s[i][j])
def can_goal(g, c=0):
visited = [0] * n
stack = [(0, 0)]
visited[0] |= 1 << 0
while stack:
y, x = stack.pop()
for dy, dx in [(-1, 0), (0, -1), (1, 0), (0, 1)]:
i, j = y + dy, x + dx
if i < 0 or i >= n or j < 0 or j >= n:
continue
if i == j == n - 1:
return True
if visited[i] >> j & 1:
continue
visited[i] |= 1 << j
if g[i][j] != c:
continue
stack.append((i, j))
return False
if not (can_goal(s, 0) or can_goal(s, 1)):
print(0)
continue
flg = 0
for i in range(n):
for j in range(n):
if i == j == 0 or i == j == n - 1:
continue
s[i][j] ^= 1
if not (can_goal(s, 0) or can_goal(s, 1)):
print(1)
print(i + 1, j + 1)
flg = 1
break
s[i][j] ^= 1
if flg:
break
if flg:
continue
print(2)
if s[0][1] == s[1][0]:
print(n, n - 1)
print(n - 1, n)
continue
if s[0][1] == s[-1][-2]:
print(1, 2)
print(n - 1, n)
else:
print(1, 2)
print(n, n - 1)
@staticmethod
def c():
pass
class ProjectEuler:
    """Project Euler solutions; each method prints its answer.

    p3/p5/p7/p10 rely on the module-level NumberTheory helper class.
    """

    @staticmethod
    def p1():
        """Problem 1: sum of multiples of 3 or 5 below 1000."""

        def multiples_sum(n, x):
            # Arithmetic-series sum of x, 2x, ... not exceeding n.
            return (x + n // x * x) * (n // x) // 2

        n = 1000
        # Inclusion-exclusion over multiples of 3, 5 and 15.
        print(
            multiples_sum(n - 1, 3)
            + multiples_sum(n - 1, 5)
            - multiples_sum(n - 1, 15)
        )

    @staticmethod
    def p2():
        """Problem 2: sum of even Fibonacci numbers not exceeding 4e6."""
        a, b = 1, 2
        total = 0
        while b <= 4 * 10**6:
            if b % 2 == 0:
                total += b
            a, b = b, a + b
        print(total)

    @staticmethod
    def p3():
        """Problem 3: largest prime factor of 600851475143."""
        factors = NumberTheory.PrimeNumbers().factorize(600851475143)
        print(max(factors.keys()))

    @staticmethod
    def p4():
        """Problem 4: largest palindrome made from two 3-digit factors."""
        best = 0
        for a in range(100, 1000):
            for b in range(a, 1000):
                p = a * b
                s = str(p)
                if s == s[::-1] and p > best:
                    best = p
        print(best)

    @staticmethod
    def p5():
        """Problem 5: smallest number divisible by each of 1..20 (lcm)."""
        pn = NumberTheory.PrimeNumbers()
        exponents = defaultdict(int)
        for i in range(1, 21):
            for p, c in pn.factorize(i).items():
                exponents[p] = max(exponents[p], c)
        lcm = 1
        for p, c in exponents.items():
            lcm *= p**c
        print(lcm)

    @staticmethod
    def p6():
        """Problem 6: square of the sum minus sum of squares for 1..100."""
        k = np.arange(101)
        print(k.sum() ** 2 - (k**2).sum())

    @staticmethod
    def p7():
        """Problem 7: the 10001st prime number."""
        primes = NumberTheory.PrimeNumbers()
        print(sorted(primes)[10000])

    @staticmethod
    def p8():
        """Problem 8: greatest product of 13 adjacent digits below."""
        n = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"
        digits = [int(ch) for ch in n]
        best = 0
        for i in range(988):
            window = 1
            for d in digits[i : i + 13]:
                window *= d
            best = max(best, window)
        print(best)

    @staticmethod
    def p9():
        """Problem 9: the Pythagorean triple with a+b+c = 1000; print abc."""
        for a in range(1, 997):
            for b in range(a, 998 - a):
                c = 1000 - a - b
                if a * a + b * b == c * c:
                    print(a * b * c)
                    return

    @staticmethod
    def p10():
        """Problem 10: sum of all primes below two million."""
        print(sum(NumberTheory.PrimeNumbers(2 * 10**6 + 1)))

    @staticmethod
    def p11():
        """Problem 11 (unfinished): the 20x20 grid is only echoed back."""
        grid = "08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08 49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00 81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65 52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91 22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80 24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50 32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70 67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21 24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72 21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95 78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92 16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57 86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58 19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40 04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66 88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69 04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36 20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16 20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54 01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48"
        print(grid)
class Yukicoder:
    """Placeholder for yukicoder solutions."""

    def __init__(self):
        """No state to set up yet."""

    def __call__(self):
        """Calling an instance just prints 1 (stub)."""
        print(1)
class AOJ:
    # Aizu Online Judge solutions (uses the module-level GeometryTopology).
    @staticmethod
    def ALDS1_12_A():
        # Minimum spanning tree of a dense graph given as an n x n adjacency
        # matrix (-1 marks a missing edge); prints the total MST weight.
        n, *a = map(int, sys.stdin.read().split())
        g = GeometryTopology.Graph(n)
        for i in range(n - 1):
            for j in range(i + 1, n):
                if a[i * n + j] == -1:
                    continue
                # Undirected edge: add both directions.
                g.add_edge(i, j, weight=a[i * n + j])
                g.add_edge(j, i, weight=a[i * n + j])
        _, d = g.kruskal()
        # Alternative MST implementations, kept for reference:
        # _, d = g.prim()
        # _, d = g.boruvka()
        print(d)
    @staticmethod
    def GRL_3_C():  # strongly connected components
        # For each query pair (u, v), print 1 if they lie in the same
        # strongly connected component, else 0.
        n, m = map(int, sys.stdin.readline().split())
        g = GeometryTopology.Graph(n)
        for _ in range(m):
            g.add_edge(*map(int, sys.stdin.readline().split()))
        r = g.scc()  # r[v] = component id of vertex v
        q, *uv = map(int, sys.stdin.read().split())
        for u, v in zip(*[iter(uv)] * 2):
            print(int(r[u] == r[v]))
class YosupoJudge:
    # Library Checker (judge.yosupo.jp) problems.
    @staticmethod
    def Directed_MST():
        # NOTE(review): the answer is built with Prim from src, i.e. an
        # undirected-style MST; a directed MST normally requires
        # Chu-Liu/Edmonds -- confirm correctness before relying on this.
        n, m, s, *abc = map(int, sys.stdin.read().split())
        g = GeometryTopology.Graph(n)
        for a, b, c in zip(*[iter(abc)] * 3):
            g.add_edge(a, b, weight=c)
        _, d, p = g.prim(src=s, return_parent=True)
        print(d)
        print(*p)
    @staticmethod
    def Manhattan_MST():
        # Unfinished stub: reads the points but builds no edges yet.
        n, *xy = map(int, sys.stdin.read().split())
        g = GeometryTopology.Graph(n)
if __name__ == "__main__":
    # Ad-hoc test driver: (un)comment the solver you want to run.
    # AtCoder.ABC179.f()
    # AtCoder.ABC060.d()
    AtCoder.ABC081.d()
    # AtCoder.ARC106.d()
    # YosupoJudge.Directed_MST()
    pass
import itertools
import math
import string
import sys
from bisect import bisect_left as bi_l
from bisect import bisect_right as bi_r
from collections import Counter, defaultdict, deque
from functools import lru_cache, reduce
from heapq import heapify, heappop, heappush
from operator import or_, xor
sys.setrecursionlimit(10**7)
inf = float("inf")
MOD = 10**9 + 7
using_numpy = 1
import networkx as nx
import numpy as np
from numba import i8, njit
from scipy import optimize
from scipy.ndimage import distance_transform_cdt
from scipy.sparse import csr_matrix
from scipy.sparse.csgraph import (
connected_components,
csgraph_to_dense,
maximum_flow,
minimum_spanning_tree,
shortest_path,
)
from scipy.spatial import ConvexHull
from scipy.special import comb
class Algebra:
class Modular(int):
def __init__(self, n, mod=MOD):
self.value = n
self.mod = mod
def __str__(self):
return f"{self.value}"
def __add__(self, other):
return self.__class__((self.value + other.value) % self.mod)
def __sub__(self, x):
return self.__class__((self.value - x.value) % self.mod)
def __mul__(self, x):
return self.__class__((self.value * x.value) % self.mod)
def __pow__(self, x):
return self.__class__(pow(self.value, x.value, self.mod))
def __lt__(self, x):
return self.value < x.value
def __le__(self, x):
return self.value <= x.value
def __eq__(self, x):
return self.value == x.value
def __ne__(self, x):
return self.value != x.value
def __gt__(self, x):
return self.value > x.value
def __ge__(self, x):
return self.value >= x.value
class SemiGroup:
pass
class Monoid:
pass
class Group:
pass
class SemiRing:
pass
class Ring:
pass
@staticmethod
def identity(n):
if using_numpy:
return np.identity(n, dtype=np.int64)
else:
a = [[0] * n for _ in range(n)]
for i in range(n):
a[i][i] = 1
return a
@staticmethod
def dot(a, b):
if using_numpy:
return np.dot(a, b)
else:
h, w, l = len(a), len(b[0]), len(b)
assert len(a[0]) == l
c = [[0] * w for _ in range(h)]
for i in range(h):
for j in range(w):
for k in range(l):
c[i][j] += a[i][k] * b[k][j]
return c
@classmethod
def matrix_pow(cls, a, n, mod=10**9 + 7):
m = len(a)
b = cls.identity(m)
while n:
if n & 1:
b = cls.dot(b, a)
n >>= 1
a = cls.dot(a, a)
if using_numpy:
a %= mod
b %= mod
else:
for i in range(m):
for j in range(m):
a[i][j] %= mod
b[i][j] %= mod
return b
@staticmethod
def bitwise_dot(a, b):
if using_numpy:
return np.bitwise_xor.reduce(
a[:, None, :] & b.T[None, :, :], axis=-1
)
else:
h, w, l = len(a), len(b[0]), len(b)
assert len(a[0]) == l
c = [[0] * w for _ in range(h)]
for i in range(h):
for j in range(w):
for k in range(l):
c[i][j] ^= a[i][k] & b[k][j]
return c
@classmethod
def bitwise_mat_pow(cls, a, n):
if n == 0:
return np.eye(len(a), dtype=np.uint32) * ((1 << 32) - 1)
res = cls.bitwise_mat_pow(a, n // 2)
res = cls.bitwise_dot(res, res)
return cls.bitwise_dot(res, a) if n & 1 else res
@staticmethod
def cumprod(a, mod):
l = len(a)
sql = int(np.sqrt(l) + 1)
a = np.resize(a, sql**2).reshape(sql, sql)
for i in range(sql - 1):
a[:, i + 1] *= a[:, i]
a[:, i + 1] %= mod
for i in range(sql - 1):
a[i + 1] *= a[i, -1]
a[i + 1] %= mod
return np.ravel(a)[:l]
@classmethod
def generate_fac_ifac(cls, n, p=MOD):
if using_numpy:
fac = np.arange(n + 1)
fac[0] = 1
fac = cls.cumprod(fac, p)
ifac = np.arange(n + 1, 0, -1)
ifac[0] = pow(int(fac[-1]), p - 2, p)
ifac = cls.cumprod(ifac, p)[n::-1]
else:
fac = [None] * (n + 1)
fac[0] = 1
for i in range(n):
fac[i + 1] = fac[i] * (i + 1) % p
ifac = [None] * (n + 1)
ifac[n] = pow(fac[n], p - 2, p)
for i in range(n, 0, -1):
ifac[i - 1] = ifac[i] * i % p
return fac, ifac
class Kitamasa:
pass
mint = Algebra.Modular
class NumberTheory:
class PrimeNumbers:
def __init__(self, n=2 * 10**6):
self.is_prime, self.prime_nums = self.find(n)
def __call__(self, n):
return self.is_prime[n]
def __iter__(self):
return iter(self.prime_nums)
def __getitem__(self, key):
return self.prime_nums[key]
@staticmethod
def find(n):
if using_numpy:
is_prime = np.ones(n + 1, dtype=np.bool)
is_prime[:2] = 0
for i in range(2, int(n**0.5) + 1):
if is_prime[i]:
is_prime[i * 2 :: i] = 0
prime_nums = np.flatnonzero(is_prime)
else:
is_prime = [True] * (n + 1)
is_prime[0] = is_prime[1] = 0
for i in range(2, int(n**0.5) + 1):
if not is_prime[i]:
continue
for j in range(i * 2, n + 1, i):
is_prime[j] = 0
prime_nums = [i for i in range(2, n + 1) if is_prime[i]]
return is_prime, prime_nums
@lru_cache(maxsize=None)
def factorize(self, n):
res = defaultdict(int)
if n < 2:
return res
for p in self:
if p * p > n:
break
while n % p == 0:
res[p] += 1
n //= p
if n == 1:
return res
res[n] = 1
return res
def factorize_factorial(self, n):
res = defaultdict(int)
for i in range(2, n + 1):
for p, c in self.factorize(i).items():
res[p] += c
return res
@classmethod
@lru_cache(maxsize=None)
def gcd(cls, a, b):
return cls.gcd(b, a % b) if b else abs(a)
@classmethod
def lcm(cls, a, b):
return abs(a // cls.gcd(a, b) * b)
@staticmethod
def find_divisors(n):
divisors = []
for i in range(1, int(n**0.5) + 1):
if n % i:
continue
divisors.append(i)
j = n // i
if j != i:
divisors.append(j)
return sorted(divisors)
@staticmethod
def base_convert(n, b):
if not n:
return [0]
res = []
while n:
n, r = divmod(n, b)
if r < 0:
n += 1
r -= b
res.append(r)
return res
class Combinatorics:
@classmethod
@lru_cache(maxsize=None)
def choose(cls, n, r, mod=None):
if r > n or r < 0:
return 0
if r == 0:
return 1
res = cls.choose(n - 1, r, mod) + cls.choose(n - 1, r - 1, mod)
if mod:
res %= mod
return res
class CombinationsMod:
def __init__(self, n=2 * 10**6, mod=MOD):
self.__mod = mod
self.fac, self.ifac = Algebra.generate_fac_ifac(n, mod)
def __call__(self, n, r):
return self.__choose(n, r)
def __choose(self, n, r):
bl = (0 <= r) & (r <= n)
p = self.__mod
return bl * self.fac[n] * self.ifac[r] % p * self.ifac[n - r] % p
def make_nchoose_table(self, n):
p = self.__mod
r = len(self.__fac) - 1
if using_numpy:
n_choose = np.arange(n + 1, n - r, -1)
n_choose[0] = 1
n_choose = Algebra.cumprod(n_choose, p) * self.ifac % p
else:
n_choose = [None] * (r + 1)
n_choose[0] = 1
for i in range(r):
n_choose[i + 1] = n_choose[i] * (n - i) % p
for i in range(1, r + 1):
n_choose[i] = n_choose[i] * self.ifac[i] % p
return n_choose
@classmethod
def permutations(cls, a, r=None, i=0):
a = list(a)
n = len(a)
if r is None:
r = n
res = []
if r > n or i > r:
return res
if i == r:
return [tuple(a[:r])]
for j in range(i, n):
a[i], a[j] = a[j], a[i]
res += cls.permutations(a, r, i + 1)
return res
@staticmethod
def combinations(a, r):
a = tuple(a)
n = len(a)
if r > n:
return
indices = list(range(r))
yield a[:r]
while True:
for i in range(r - 1, -1, -1):
if indices[i] != i + n - r:
break
else:
return
indices[i] += 1
for j in range(i + 1, r):
indices[j] = indices[j - 1] + 1
yield tuple(a[i] for i in indices)
class DP:
    """Dynamic-programming helpers."""

    @staticmethod
    def LIS(a):
        """Patience-sorting tails array for the longest strictly
        increasing subsequence of ``a``.

        Returns a list the same length as ``a``: entry i is the smallest
        possible tail of an increasing subsequence of length i + 1;
        unused slots stay ``inf``.
        """
        tails = [inf] * len(a)
        for value in a:
            tails[bi_l(tails, value)] = value
        return tails
class String:
    """String algorithms."""

    @staticmethod
    def z_algorithm(s):
        """Return the Z-array of ``s``.

        z[i] is the length of the longest common prefix of ``s`` and
        ``s[i:]``; z[0] is defined as len(s).  Runs in O(n) with the
        standard Z-box (l, r) maintenance.

        Fix: the original did ``a[0] = n`` unconditionally, which raised
        IndexError for an empty string; an empty input now returns [].
        """
        n = len(s)
        a = [0] * n
        if n == 0:
            return a
        a[0] = n
        l = r = -1
        for i in range(1, n):
            if r >= i:
                # Reuse the mirrored value inside the current Z-box.
                a[i] = min(a[i - l], r - i)
            # Extend the match beyond what the box guarantees.
            while i + a[i] < n and s[i + a[i]] == s[a[i]]:
                a[i] += 1
            if i + a[i] >= r:
                l, r = i, i + a[i]
        return a
class GeometryTopology:
class Graph:
class __Edge:
def __init__(self, weight=1, capacity=1, **args):
self.weight = weight
self.capacity = capacity
def __str__(self):
return f"weight: {self.weight}, cap: {self.capacity}"
class __Node:
def __init__(self, **args):
pass
def __init__(self, n=0):
self.__N = n
self.nodes = [None] * n
self.edges = [{} for _ in range(n)]
def add_node_info(self, v, **args):
self.nodes[v] = self.__Node(**args)
def add_edge(self, u, v, update=False, **args):
if not update and v in self.edges[u]:
return
self.edges[u][v] = self.__Edge(**args)
def get_size(self):
return self.__N
def bfs(self, src=0):
n = self.__N
self.depth = self.lv = lv = [None] * n
lv[src] = 0
self.dist = dist = [inf] * n
dist[src] = 0
self.parent = par = [None] * n
par[src] = src
q = deque([src])
while q:
u = q.popleft()
for v, e in self.edges[u].items():
if e.capacity == 0 or lv[v] is not None:
continue
lv[v], dist[v], par[v] = lv[u] + 1, dist[u] + e.weight, u
q.append(v)
return dist
def dinic(self, src, sink):
def flow_to_sink(u, flow_in):
if u == sink:
return flow_in
flow = 0
for v, e in self.edges[u].items():
if e.capacity == 0 or self.lv[v] <= self.lv[u]:
continue
f = flow_to_sink(v, min(flow_in, e.capacity))
if not f:
continue
self.edges[u][v].capacity -= f
if u in self.edges[v]:
self.edges[v][u].capacity += f
else:
self.add_edge(v, u, capacity=f)
flow_in -= f
flow += f
return flow
flow = 0
while True:
self.bfs(src)
if self.lv[sink] is None:
return flow
flow += flow_to_sink(src, inf)
def ford_fulkerson(self):
pass
def push_relabel(self):
pass
def floyd_warshall(self):
n = self.__N
d = [[inf] * n for _ in range(n)]
for u in range(n):
d[u][u] = 0
for v, e in self.edges[u].items():
d[u][v] = e.weight
for w in range(n):
for u in range(n):
for v in range(n):
d[u][v] = min(d[u][v], d[u][w] + d[w][v])
return d
def dijkstra(self, src, paths_cnt=False, mod=None):
dist = [inf] * self.__N
dist[src] = 0
visited = [False] * self.__N
paths = [0] * self.__N
paths[src] = 1
q = [(0, src)]
while q:
d, u = heappop(q)
if visited[u]:
continue
visited[u] = True
for v, e in self.edges[u].items():
dv = d + e.weight
if dv > dist[v]:
continue
elif dv == dist[v]:
paths[v] += paths[u]
if mod:
paths[v] %= mod
continue
paths[v], dist[v] = paths[u], dv
heappush(q, (dv, v))
if paths_cnt:
return dist, paths
else:
return dist
def astar(self, src, tgt, heuristic_func):
cost = [inf] * self.__N
q = [(heuristic_func(src, tgt), 0, src)]
while q:
_, c, u = heappop(q)
if u == tgt:
return c
if cost[u] != inf:
continue
cost[u] = c
for v, e in self.edges[u].items():
if cost[v] != inf:
continue
h = heuristic_func(v, tgt)
nc = c + e.weight
heappush(q, (h + nc, nc, v))
return inf
def bellman_ford(self, src):
n = self.__N
d = [inf] * n
d[src] = 0
for _ in range(n - 1):
for u in range(n):
for v, e in self.edges[u].items():
d[v] = min(d[v], d[u] + e.weight)
for u in range(n):
for v, e in self.edges[u].items():
if d[u] + e.weight < d[v]:
raise Exception("found negative cycle.")
return d
def bfs01(self, src=0):
d = [inf] * self.__N
d[src] = 0
q = deque([src])
while q:
u = q.popleft()
for v, e in self.edges[u].items():
dv = d[u] + e.weight
if d[v] <= dv:
continue
d[v] = dv
if e.weight:
q.append(v)
else:
q.appendleft(v)
return d
def find_ancestors(self):
self.__ancestors = ancestors = [self.parent]
for _ in range(max(self.depth).bit_length()):
ancestors.append([ancestors[-1][u] for u in ancestors[-1]])
def find_dist(self, u, v):
return (
self.dist[u]
+ self.dist[v]
- 2 * self.dist[self.__find_lca(u, v)]
)
def __find_lca(self, u, v):
du, dv = self.depth[u], self.depth[v]
if du > dv:
u, v = v, u
du, dv = dv, du
d = dv - du
for i in range(d.bit_length()):
if d >> i & 1:
v = self.__ancestors[i][v]
if v == u:
return v
for i in range(
du.bit_length() - 1, -1, -1
):
nu, nv = self.__ancestors[i][u], self.__ancestors[i][v]
if nu == nv:
continue
u, v = nu, nv
return self.__ancestors[0][u]
def init_dsu(self):
n = self.__N
self.parent = list(range(n))
self.rank = [0] * n
self.size = [1] * n
def find(self, u):
if self.parent[u] == u:
return u
self.parent[u] = self.find(self.parent[u])
return self.parent[u]
def unite(self, u, v):
u, v = self.find(u), self.find(v)
if u == v:
return
if self.rank[u] < self.rank[v]:
u, v = v, u
self.parent[v] = u
self.size[u] += self.size[v]
self.rank[u] = max(self.rank[u], self.rank[v] + 1)
def same(self, u, v):
return self.find(u) == self.find(v)
def scc(self):
n = self.__N
visited, q, root, r = [False] * n, [], [None] * n, 0
gg = self.__class__(n)
for u in range(n):
for v in self.edges[u]:
gg.add_edge(v, u)
def dfs(u):
if visited[u]:
return
visited[u] = True
for v in self.edges[u]:
dfs(v)
q.append(u)
def rev_dfs(u, r):
if root[u] is not None:
return
root[u] = r
for v in gg.edges[u]:
rev_dfs(v, r)
for u in range(n):
dfs(u)
for u in q[::-1]:
rev_dfs(u, r)
r += 1
return root
def kruskal(self):
n = self.__N
uf = self.__class__(n)
uf.init_dsu()
edges = sorted(
[
(u, v, e.weight)
for u in range(n)
for v, e in self.edges[u].items()
],
key=lambda x: x[2],
)
g = self.__class__(n)
d = 0
for u, v, w in edges:
if uf.same(u, v):
continue
uf.unite(u, v)
g.add_edge(u, v, weight=w)
d += w
return g, d
def prim(self, src=0, return_parent=False):
n = self.__N
g = self.__class__(n)
parent, visited, dist = [None] * n, [False] * n, 0
q = [(0, (src, src))]
while q:
d, (w, u) = heappop(q)
if visited[u]:
continue
visited[u], parent[u] = True, w
dist += d
g.add_edge(w, u, weight=d)
for v, e in self.edges[u].items():
if not visited[v]:
heappush(q, (e.weight, (u, v)))
if return_parent:
return g, dist, parent
return g, dist
def boruvka(self):
n = self.__N
uf = self.__class__(n)
uf.init_dsu()
g = self.__class__(n)
d = 0
def dfs(u):
if visited[u]:
return (inf, (None, None))
visited[u] = True
cand = []
for v, e in self.edges[u].items():
if uf.same(u, v):
cand.append(dfs(v))
continue
cand.append((e.weight, (u, v)))
return sorted(cand)[0]
while len(set(uf.parent)) != 1:
edges, visited = [], [False] * n
for u in range(n):
if visited[u]:
continue
edges.append(dfs(u))
for w, (u, v) in edges:
if uf.same(u, v):
continue
g.add_edge(u, v, weight=w)
uf.unite(u, v)
d += w
for u in range(n):
uf.find(u)
return g, d
def tsp(self):
pass
@staticmethod
def triangle_area(p0, p1, p2, signed=False):
x1, y1, x2, y2 = (
p1[0] - p0[0],
p1[1] - p0[1],
p2[0] - p0[0],
p2[1] - p0[1],
)
return (
(x1 * y2 - x2 * y1) / 2 if signed else abs(x1 * y2 - x2 * y1) / 2
)
    @classmethod
    def intersect(cls, seg1, seg2):
        """Return whether two segments properly cross each other.

        Each segment is a pair of 2-d points.  Uses signed triangle
        areas: the segments cross iff the endpoints of each segment lie
        strictly on opposite sides of the other segment's line.  The
        strict ``< 0`` comparisons mean touching endpoints or collinear
        overlap count as no intersection.

        Uses ``&`` (not ``and``) so the test also works element-wise when
        the coordinates are numpy arrays, as callers in this file do.
        """
        (p1, p2), (p3, p4) = seg1, seg2
        # Signed area is positive iff the third point is left of the
        # directed line through the first two.
        t1 = cls.triangle_area(p1, p2, p3, signed=True)
        t2 = cls.triangle_area(p1, p2, p4, signed=True)
        t3 = cls.triangle_area(p3, p4, p1, signed=True)
        t4 = cls.triangle_area(p3, p4, p2, signed=True)
        return (t1 * t2 < 0) & (t3 * t4 < 0)
def cumxor(a):
    """XOR of all elements of iterable *a* (0 for an empty iterable)."""
    total = 0
    for value in a:
        total = xor(total, value)
    return total
def cumor(a):
    """Bitwise OR of all elements of iterable *a* (0 when empty)."""
    total = 0
    for value in a:
        total |= value
    return total
def bit_count(n):
    """Return the number of set bits (popcount) of non-negative int *n*."""
    count = 0
    while n:
        n &= n - 1  # Kernighan's trick: clear the lowest set bit
        count += 1
    return count
class AtCoder:
    class ABC001:
        """AtCoder Beginner Contest 001 solutions (read stdin, print stdout)."""
        @staticmethod
        def a():
            # A: difference of the two snow depths.
            h1, h2 = map(int, sys.stdin.read().split())
            print(h1 - h2)
        @staticmethod
        def d():
            # D: merge rain intervals, rounded outward to 5-minute marks.
            def to_minuites(x):  # (sic) HHMM integer -> minutes since 00:00
                q, r = divmod(x, 100)
                return 60 * q + r
            def to_hmform(x):  # minutes since 00:00 -> HHMM integer
                q, r = divmod(x, 60)
                return 100 * q + r
            n = int(sys.stdin.readline().rstrip())
            # Difference array over minutes 0..2000 (imos method).
            term = [0] * 2001
            for _ in range(n):
                s, e = map(
                    to_minuites,
                    map(int, sys.stdin.readline().rstrip().split("-")),
                )
                s = s // 5 * 5  # round start down to a multiple of 5
                e = (e + 4) // 5 * 5  # round end up to a multiple of 5
                term[s] += 1
                term[e + 1] -= 1
            for i in range(2000):
                term[i + 1] += term[i]
            # Extract maximal raining intervals from the prefix sums.
            res = []
            raining = False
            for i in range(2001):
                if term[i]:
                    if not raining:
                        s = i
                        raining = True
                elif raining:
                    res.append((s, i - 1))
                    raining = False
            for s, e in res:
                print(f"{to_hmform(s):04}-{to_hmform(e):04}")
class ABC002:
@staticmethod
def a():
print(max(map(int, sys.stdin.readline().split())))
@staticmethod
def b():
vowels = set("aeiou")
print(
"".join(
[
c
for c in sys.stdin.readline().rstrip()
if c not in vowels
]
)
)
@staticmethod
def c():
print(
GeometryTopology.triangle_area(
*map(int, sys.stdin.readline().split())
)
)
@staticmethod
def d():
n, m = map(int, sys.stdin.readline().split())
edges = set(
(x - 1, y - 1)
for x, y in zip(*[map(int, sys.stdin.read().split())] * 2)
)
print(
max(
len(s)
for i in range(1, 1 << n)
for s in [[j for j in range(n) if i >> j & 1]]
if all(
(x, y) in edges
for x, y in itertools.combinations(s, 2)
)
)
)
@staticmethod
def d_2():
n, m = map(int, sys.stdin.readline().split())
relations = [1 << i for i in range(n)]
for x, y in zip(*[map(int, sys.stdin.read().split())] * 2):
relations[x] |= 1 << (y - 1)
relations[y] |= 1 << (x - 1)
res = 0
for i in range(1 << n):
s, cnt = (1 << n) - 1, 0
for j in range(n):
if i >> j & 1:
t &= relations[j] | 1 << j
cnt += 1
if s & i == i:
res = max(res, cnt)
print(res)
    class ABC003:
        """AtCoder Beginner Contest 003 solutions (read stdin, print stdout)."""
        @staticmethod
        def a():
            # A: expected prize = average of 10000*1..10000*n = (n+1)*5000.
            print((int(sys.stdin.readline().rstrip()) + 1) * 5000)
        @staticmethod
        def b():
            # B: strings can match if chars are equal, or one side is "@"
            # and the other is one of the letters of "atcoder".
            atcoder = set("atcoder")
            s, t = sys.stdin.read().split()
            print(
                all(
                    s[i] == t[i]
                    or s[i] == "@"
                    and t[i] in atcoder
                    or t[i] == "@"
                    and s[i] in atcoder
                    for i in range(len(s))
                )
                and "You can win"
                or "You will lose"
            )
        @staticmethod
        def c():
            # C: take the k largest ratings in ascending order, folding
            # x -> (x + y) / 2 starting from 0.
            n, k, *r = map(int, sys.stdin.read().split())
            print(reduce(lambda x, y: (x + y) / 2, sorted(r)[-k:], 0))
    class ABC004:
        """AtCoder Beginner Contest 004 solutions (read stdin, print stdout)."""
        @staticmethod
        def a():
            # A: twice the input.
            print(int(sys.stdin.readline().rstrip()) * 2)
        @staticmethod
        def b():
            # B: rotate the 4x4 board 180 degrees (reverse rows and columns).
            for l in [sys.stdin.readline().rstrip() for _ in range(4)][::-1]:
                print(l[::-1])
        @staticmethod
        def c():
            # C: positions of cards 1..6 after n adjacent swaps; the swap
            # pattern repeats with period 30.
            n = int(sys.stdin.readline().rstrip()) % 30
            res = list(range(1, 7))
            for i in range(n):
                i %= 5  # swap index cycles through positions 0..4
                res[i], res[i + 1] = res[i + 1], res[i]
            print(*res, sep="")
class ABC005:
@staticmethod
def a():
x, y = map(int, sys.stdin.readline().split())
print(y // x)
@staticmethod
def b():
n, *t = map(int, sys.stdin.read().split())
print(min(t))
@staticmethod
def c():
t = int(sys.stdin.readline().rstrip())
n = int(sys.stdin.readline().rstrip())
a = [int(x) for x in sys.stdin.readline().split()]
m = int(sys.stdin.readline().rstrip())
b = [int(x) for x in sys.stdin.readline().split()]
i = 0
for p in b:
if i == n:
print("no")
return
while p - a[i] > t:
i += 1
if i == n:
print("no")
return
if a[i] > p:
print("no")
return
i += 1
print("yes")
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
d = np.array(
[sys.stdin.readline().split() for _ in range(n)], np.int64
)
s = d.cumsum(axis=0).cumsum(axis=1)
s = np.pad(s, 1)
max_del = np.zeros((n + 1, n + 1), dtype=np.int64)
for y in range(1, n + 1):
for x in range(1, n + 1):
max_del[y, x] = np.amax(
s[y : n + 1, x : n + 1]
- s[0 : n - y + 1, x : n + 1]
- s[y : n + 1, 0 : n - x + 1]
+ s[0 : n - y + 1, 0 : n - x + 1]
)
res = np.arange(n**2 + 1)[:, None]
i = np.arange(1, n + 1)
res = max_del[i, np.minimum(res // i, n)].max(axis=1)
q = int(sys.stdin.readline().rstrip())
p = np.array(sys.stdin.read().split(), dtype=np.int64)
print(*res[p], sep="\n")
class ABC006:
@staticmethod
def a():
n = sys.stdin.readline().rstrip()
if "3" in n:
print("YES")
elif int(n) % 3 == 0:
print("YES")
else:
print("NO")
@staticmethod
def b():
mod = 10007
a = np.eye(N=3, k=-1, dtype=np.int64)
a[0] = 1
n = int(sys.stdin.readline().rstrip())
a = Algebra.matrix_pow(a, n - 1, mod)
print(a[2][0])
@staticmethod
def c():
n, m = map(int, sys.stdin.readline().split())
cnt = [0, 0, 0]
if m == 1:
cnt = [-1, -1, -1]
else:
if m & 1:
m -= 3
cnt[1] += 1
n -= 1
cnt[2] = m // 2 - n
cnt[0] = n - cnt[2]
if cnt[0] < 0 or cnt[1] < 0 or cnt[2] < 0:
print(-1, -1, -1)
else:
print(*cnt, sep=" ")
@staticmethod
def d():
n, *c = map(int, sys.stdin.read().split())
lis = [inf] * n
for x in c:
lis[bi_l(lis, x)] = x
print(n - bi_l(lis, inf))
class ABC007:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
print(n - 1)
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
if s == "a":
print(-1)
else:
print("a")
@staticmethod
def c():
r, c = map(int, sys.stdin.readline().split())
sy, sx = map(int, sys.stdin.readline().split())
gy, gx = map(int, sys.stdin.readline().split())
sy -= 1
sx -= 1
gy -= 1
gx -= 1
maze = [sys.stdin.readline().rstrip() for _ in range(r)]
queue = deque([(sy, sx)])
dist = np.full((r, c), np.inf)
dist[sy, sx] = 0
while queue:
y, x = queue.popleft()
for i, j in [(-1, 0), (1, 0), (0, -1), (0, 1)]:
i += y
j += x
if maze[i][j] == "#" or dist[i, j] != np.inf:
continue
dist[i, j] = dist[y, x] + 1
queue.append((i, j))
print(int(dist[gy, gx]))
@staticmethod
def d():
ng = set([4, 9])
def count(d):
return d if d <= 4 else d - 1
def f(n):
x = [int(d) for d in str(n)]
flg = True
dp = 0
for d in x:
dp = dp * 8 + flg * count(d)
if d in ng:
flg = False
return n - (dp + flg)
a, b = map(int, sys.stdin.readline().split())
print(f(b) - f(a - 1))
class ABC008:
@staticmethod
def a():
s, t = map(int, sys.stdin.readline().split())
print(t - s + 1)
@staticmethod
def b():
n, *s = sys.stdin.read().split()
res = defaultdict(int)
for name in s:
res[name] += 1
print(sorted(res.items(), key=lambda x: x[1])[-1][0])
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
c = n - np.count_nonzero(a[:, None] % a, axis=1)
print(np.sum((c + 1) // 2 / c))
@staticmethod
def d():
w, h, n, *xy = map(int, sys.stdin.read().split())
(*xy,) = zip(*([iter(xy)] * 2))
@lru_cache(maxsize=None)
def count(x1, y1, x2, y2):
res = 0
for x, y in xy:
if not (x1 <= x <= x2 and y1 <= y <= y2):
continue
cnt = (x2 - x1) + (y2 - y1) + 1
cnt += count(x1, y1, x - 1, y - 1)
cnt += count(x1, y + 1, x - 1, y2)
cnt += count(x + 1, y1, x2, y - 1)
cnt += count(x + 1, y + 1, x2, y2)
res = max(res, cnt)
return res
print(count(1, 1, w, h))
class ABC009:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
print((n + 1) // 2)
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
print(sorted(set(a))[-2])
@staticmethod
def c():
n, k = map(int, sys.stdin.readline().split())
s = list(sys.stdin.readline().rstrip())
cost = [1] * n
r = k
for i in range(n - 1):
q = []
for j in range(i + 1, n):
if s[j] < s[i] and cost[i] + cost[j] <= r:
heappush(q, (s[j], cost[i] + cost[j], -j))
if not q:
continue
_, c, j = heappop(q)
j = -j
s[i], s[j] = s[j], s[i]
r -= c
cost[i] = cost[j] = 0
print("".join(s))
@staticmethod
def d():
k, m = map(int, sys.stdin.readline().split())
a = np.array([int(x) for x in sys.stdin.readline().split()])
c = np.array([int(x) for x in sys.stdin.readline().split()])
mask = (1 << 32) - 1
d = np.eye(k, k, -1, dtype=np.uint32) * mask
d[0] = c
if m <= k:
print(a[m - 1])
return
print(
Algebra.bitwise_dot(
Algebra.bitwise_mat_pow(d, m - k), a[::-1].reshape(-1, 1)
)[0][0]
)
class ABC010:
@staticmethod
def a():
print(sys.stdin.readline().rstrip() + "pp")
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
tot = 0
for x in a:
c = 0
while x % 2 == 0 or x % 3 == 2:
x -= 1
c += 1
tot += c
print(tot)
@staticmethod
def c():
sx, sy, gx, gy, t, v, n, *xy = map(int, sys.stdin.read().split())
x, y = np.array(xy).reshape(-1, 2).T
def dist(x1, y1, x2, y2):
return np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2)
ans = (
"YES"
if (dist(sx, sy, x, y) + dist(x, y, gx, gy) <= v * t).any()
else "NO"
)
print(ans)
@staticmethod
def d():
n, g, e = map(int, sys.stdin.readline().split())
p = [int(x) for x in sys.stdin.readline().split()]
x, y = [], []
for _ in range(e):
a, b = map(int, sys.stdin.readline().split())
x.append(a)
y.append(b)
x.append(b)
y.append(a)
for a in p:
x.append(a)
y.append(n)
if not x:
print(0)
return
c = [1] * len(x)
min_cut = maximum_flow(
csr_matrix((c, (x, y)), (n + 1, n + 1)), source=0, sink=n
).flow_value
print(min_cut)
@staticmethod
def d_2():
n, g, e = map(int, sys.stdin.readline().split())
graph = nx.DiGraph()
graph.add_nodes_from(range(n + 1))
for p in [int(x) for x in sys.stdin.readline().split()]:
graph.add_edge(p, n, capacity=1)
for _ in range(e):
a, b = map(int, sys.stdin.readline().split())
graph.add_edge(a, b, capacity=1)
graph.add_edge(b, a, capacity=1)
print(nx.minimum_cut_value(graph, 0, n))
@staticmethod
def d_3():
n, q, m = map(int, sys.stdin.readline().split())
g = GeometryTopology.Graph(n + 1)
for p in [int(x) for x in sys.stdin.readline().split()]:
g.add_edge(p, n, capacity=1)
for a, b in zip(*[map(int, sys.stdin.read().split())] * 2):
g.add_edge(a, b, capacity=1)
g.add_edge(b, a, capacity=1)
print(g.dinic(0, n))
class ABC011:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
print(n % 12 + 1)
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
print(s[0].upper() + s[1:].lower())
@staticmethod
def c():
n, *ng = map(int, sys.stdin.read().split())
ng = set(ng)
if n in ng:
print("NO")
else:
r = 100
while n > 0:
if r == 0:
print("NO")
return
for i in range(3, 0, -1):
if (n - i) in ng:
continue
n -= i
r -= 1
break
else:
print("NO")
return
print("YES")
@staticmethod
def d():
n, d, x, y = map(int, sys.stdin.read().split())
x, y = abs(x), abs(y)
if x % d or y % d:
print(0)
return
x, y = x // d, y // d
r = n - (x + y)
if r < 0 or r & 1:
print(0)
return
res = 0
half_p = pow(1 / 2, n)
for d in range(r // 2 + 1):
south, north = d, y + d
west = (r - 2 * d) // 2
res += (
half_p
* comb(n, south, exact=True)
* comb(n - south, north, exact=True)
* comb(n - south - north, west, exact=True)
* half_p
)
print(res)
class ABC012:
@staticmethod
def a():
a, b = map(int, sys.stdin.readline().split())
print(b, a)
@staticmethod
def b():
n = int(sys.stdin.readline().rstrip())
h, n = divmod(n, 3600)
m, s = divmod(n, 60)
print(f"{h:02}:{m:02}:{s:02}")
@staticmethod
def c():
n = 2025 - int(sys.stdin.readline().rstrip())
res = []
for i in range(1, 10):
if n % i != 0 or n // i > 9:
continue
res.append(f"{i} x {n//i}")
print(*sorted(res), sep="\n")
@staticmethod
def d():
n, m, *abt = map(int, sys.stdin.read().split())
a, b, t = np.array(abt).reshape(m, 3).T
res = shortest_path(
csr_matrix((t, (a - 1, b - 1)), (n, n)),
method="FW",
directed=False,
)
print(res.max(axis=-1).min().astype(np.int64))
@staticmethod
def d_2():
n, m, *abt = map(int, sys.stdin.read().split())
g = GeometryTopology.Graph(n)
for a, b, t in zip(*[iter(abt)] * 3):
a -= 1
b -= 1
g.add_edge(a, b, weight=t)
g.add_edge(b, a, weight=t)
print(min(max(d) for d in g.floyd_warshall()))
class ABC013:
@staticmethod
def a():
print(ord(sys.stdin.readline().rstrip()) - ord("A") + 1)
@staticmethod
def b():
a, b = map(int, sys.stdin.read().split())
d = abs(a - b)
print(min(d, 10 - d))
@staticmethod
def c():
n, h, a, b, c, d, e = map(int, sys.stdin.read().split())
y = np.arange(n + 1)
x = (n * e - h - (d + e) * y) // (b + e) + 1
np.maximum(x, 0, out=x)
np.minimum(x, n - y, out=x)
print(np.amin(a * x + c * y))
@staticmethod
def d():
n, m, d, *a = map(int, sys.stdin.read().split())
res = list(range(n))
def swap(i, j):
res[i], res[j] = res[j], res[i]
for i in a[::-1]:
swap(i - 1, i)
res = np.array(res)
def binary_method(a, p):
b = np.arange(n)
while p:
if p & 1:
b = a[b]
p >>= 1
a = a[a]
return b
print(*(binary_method(res, d) + 1), sep="\n")
class ABC014:
@staticmethod
def a():
a, b = map(int, sys.stdin.read().split())
print((a + b - 1) // b * b - a)
@staticmethod
def b():
n, x, *a = map(int, sys.stdin.read().split())
print(sum(a[i] for i in range(n) if x >> i & 1))
@staticmethod
def c():
n, *ab = map(int, sys.stdin.read().split())
a, b = np.array(ab).reshape(n, 2).T
res = np.zeros(10**6 + 2, dtype=np.int64)
np.add.at(res, a, 1)
np.subtract.at(res, b + 1, 1)
np.cumsum(res, out=res)
print(res.max())
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
g = GeometryTopology.Graph(n)
for _ in range(n - 1):
x, y = map(int, sys.stdin.readline().split())
x -= 1
y -= 1
g.add_edge(x, y, weight=1)
g.add_edge(y, x, weight=1)
g.bfs(0)
g.find_ancestors()
q, *ab = map(int, sys.stdin.read().split())
for a, b in zip(*[iter(ab)] * 2):
a -= 1
b -= 1
print(g.find_dist(a, b) + 1)
class ABC015:
@staticmethod
def a():
a, b = sys.stdin.read().split()
print(a if len(a) > len(b) else b)
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
print(
np.ceil(
a[np.nonzero(a)[0]].sum() / np.count_nonzero(a)
).astype(np.int8)
)
@staticmethod
def c():
n, k, *t = map(int, sys.stdin.read().split())
t = np.array(t).reshape(n, k)
x = np.zeros((1, 1), dtype=np.int8)
for i in range(n):
x = x.reshape(-1, 1) ^ t[i]
print("Found" if np.count_nonzero(x == 0) > 0 else "Nothing")
@staticmethod
def d():
w, n, k, *ab = map(int, sys.stdin.read().split())
dp = np.zeros((k + 1, w + 1), dtype=np.int32)
for a, b in zip(*[iter(ab)] * 2):
np.maximum(dp[1:, a:], dp[:-1, :-a] + b, out=dp[1:, a:])
print(dp[k][w])
class ABC016:
@staticmethod
def a():
m, d = map(int, sys.stdin.readline().split())
print("YES" if m % d == 0 else "NO")
@staticmethod
def b():
a, b, c = map(int, sys.stdin.readline().split())
f1, f2 = a + b == c, a - b == c
if f1 & f2:
print("?")
elif f1 & (~f2):
print("+")
elif (~f1) & f2:
print("-")
else:
print("!")
@staticmethod
def c():
n, _, *ab = map(int, sys.stdin.read().split())
f = [0] * n
for a, b in zip(*[iter(ab)] * 2):
a -= 1
b -= 1
f[a] |= 1 << b
f[b] |= 1 << a
res = [
bit_count(
cumor(f[j] for j in range(n) if f[i] >> j & 1)
& ~(f[i] | 1 << i)
)
for i in range(n)
]
print(*res, sep="\n")
@staticmethod
def d():
sx, sy, gx, gy = map(int, sys.stdin.readline().split())
seg1 = ((sx, sy), (gx, gy))
n = int(sys.stdin.readline().rstrip())
p1 = (
np.array(sys.stdin.read().split(), dtype=np.int64)
.reshape(n, 2)
.T
)
p2 = np.hstack((p1[:, 1:], p1[:, :1]))
seg2 = (p1, p2)
print(
np.count_nonzero(GeometryTopology.intersect(seg1, seg2)) // 2
+ 1
)
class ABC017:
@staticmethod
def a():
s, e = (
np.array(sys.stdin.read().split(), dtype=np.int16)
.reshape(3, 2)
.T
)
print((s // 10 * e).sum())
@staticmethod
def b():
choku_tail = set("ch, o, k, u".split(", "))
def is_choku(s):
if s == "":
return True
if len(s) >= 1 and (s[-1] in choku_tail) and is_choku(s[:-1]):
return True
if len(s) >= 2 and (s[-2:] in choku_tail) and is_choku(s[:-2]):
return True
return False
print("YES" if is_choku(sys.stdin.readline().rstrip()) else "NO")
@staticmethod
def c():
n, m, *lrs = map(int, sys.stdin.read().split())
l, r, s = np.array(lrs).reshape(n, 3).T
score = np.zeros((m + 1,), dtype=np.int32)
np.add.at(score, l - 1, s)
np.subtract.at(score, r, s)
np.cumsum(score, out=score)
print(s.sum() - score[:m].min())
@staticmethod
def d():
n, m, *f = map(int, sys.stdin.read().split())
prev = [0] * (n + 1)
tmp = defaultdict(int)
for i in range(n):
prev[i + 1] = tmp[f[i]]
tmp[f[i]] = i + 1
dp = [0] * (n + 1)
dp[0] = 1
l, s = 0, dp[0]
for i in range(1, n + 1):
while l < prev[i]:
s = (s - dp[l]) % MOD
l += 1
dp[i] = s
s = (s + dp[i]) % MOD
print(dp[n])
class ABC018:
@staticmethod
def a():
(*a,) = map(int, sys.stdin.read().split())
a = sorted(enumerate(a), key=lambda x: -x[1])
res = [None] * 3
for i in range(3):
res[a[i][0]] = i + 1
print(*res, sep="\n")
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
n, *lr = map(int, sys.stdin.read().split())
for l, r in zip(*[iter(lr)] * 2):
l -= 1
r -= 1
s = s[:l] + s[l : r + 1][::-1] + s[r + 1 :]
print(s)
@staticmethod
def c():
r, c, k = map(int, sys.stdin.readline().split())
s = np.array([list(s) for s in sys.stdin.read().split()])
s = np.pad(s, 1, constant_values="x")
a = np.zeros_like(s, dtype=np.float64)
a[s == "o"] = np.inf
for i in range(1, r + 1):
np.minimum(a[i - 1, :] + 1, a[i, :], out=a[i, :])
for i in range(r, 0, -1):
np.minimum(a[i + 1, :] + 1, a[i, :], out=a[i, :])
for j in range(1, c + 1):
np.minimum(a[:, j - 1] + 1, a[:, j], out=a[:, j])
for j in range(c, 0, -1):
np.minimum(a[:, j + 1] + 1, a[:, j], out=a[:, j])
print(np.count_nonzero(a >= k))
@staticmethod
def c_2():
r, c, k = map(int, sys.stdin.readline().split())
s = np.array([list(s) for s in sys.stdin.read().split()])
s = np.pad(s, 1, constant_values="x")
a = (s == "o").astype(np.int16)
a = distance_transform_cdt(a, metric="taxicab")
print(np.count_nonzero(a >= k))
@staticmethod
def d():
n, m, p, q, r, *xyz = map(int, sys.stdin.read().split())
x, y, z = np.array(xyz).reshape(r, 3).T
h = np.zeros((n, m), dtype=np.int32)
h[x - 1, y - 1] = z
g = np.array([*itertools.combinations(range(n), p)])
print(np.sort(h[g].sum(axis=1), axis=1)[:, -q:].sum(axis=1).max())
class ABC019:
@staticmethod
def a():
(*a,) = map(int, sys.stdin.readline().split())
print(sorted(a)[1])
@staticmethod
def b():
s = sys.stdin.readline().rstrip() + "$"
cnt = 0
prev = "$"
t = ""
for c in s:
if c == prev:
cnt += 1
continue
t += prev + str(cnt)
prev = c
cnt = 1
print(t[2:])
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
res = set()
for x in a:
while not x & 1:
x >>= 1
res.add(x)
print(len(res))
@staticmethod
def d():
def inquire(u, v):
print(f"? {u} {v}".format(u, v), flush=True)
return int(sys.stdin.readline().rstrip())
n = int(sys.stdin.readline().rstrip())
u = sorted([(inquire(1, v), v) for v in range(2, n + 1)])[-1][1]
d = max((inquire(u, v)) for v in range(1, n + 1) if u != v)
print(f"! {d}")
class ABC020:
@staticmethod
def a():
print(
"ABC"
if int(sys.stdin.readline().rstrip()) == 1
else "chokudai"
)
@staticmethod
def b():
a, b = sys.stdin.readline().split()
print(int(a + b) * 2)
@staticmethod
def c():
h, w, t = map(int, sys.stdin.readline().split())
s = [list(s) for s in sys.stdin.read().split()]
for i in range(h):
for j in range(w):
if s[i][j] == "S":
sy, sx = i, j
if s[i][j] == "G":
gy, gx = i, j
s[sy][sx] = s[gy][gx] = "."
source, target = sy * w + sx, gy * w + gx
def heuristic_function(u, v=target):
uy, ux = divmod(u, w)
vy, vx = divmod(v, w)
return abs(vy - uy) + abs(ux - vx)
def min_time(x):
g = GeometryTopology.Graph(h * w)
for i in range(h):
for j in range(w):
u = i * w + j
if i > 0:
g.add_edge(
u,
(i - 1) * w + j,
weight=(1 if s[i - 1][j] == "." else x),
)
if i < h - 1:
g.add_edge(
u,
(i + 1) * w + j,
weight=(1 if s[i + 1][j] == "." else x),
)
if j > 0:
g.add_edge(
u,
i * w + j - 1,
weight=(1 if s[i][j - 1] == "." else x),
)
if j < w - 1:
g.add_edge(
u,
i * w + j + 1,
weight=(1 if s[i][j + 1] == "." else x),
)
return g.dijkstra(source)[target]
return g.astar(source, target, heuristic_function)
def binary_search():
lo, hi = 1, t + 1
while lo + 1 < hi:
x = (lo + hi) // 2
if min_time(x) > t:
hi = x
else:
lo = x
return lo
print(binary_search())
@staticmethod
def d():
n, k = map(int, sys.stdin.readline().split())
div = sorted(NumberTheory.find_divisors(k))
l = len(div)
s = [0] * l
for i, d in enumerate(div):
s[i] = (1 + n // d) * (n // d) // 2 * d % MOD
for i in range(l - 1, -1, -1):
for j in range(i + 1, l):
if div[j] % div[i]:
continue
s[i] = (s[i] - s[j]) % MOD
print(
sum(s[i] * k // div[i] % MOD for i in range(l)) % MOD
)
class ABC021:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
s = [1 << i for i in range(5) if n >> i & 1]
print(len(s), *s, sep="\n")
@staticmethod
def b():
n, a, b, k, *p = map(int, sys.stdin.read().split())
print("YES" if len(set(p) | set([a, b])) == k + 2 else "NO")
@staticmethod
def c():
n, a, b, m, *xy = map(int, sys.stdin.read().split())
x, y = np.array(xy).reshape(m, 2).T - 1
a -= 1
b -= 1
g = csgraph_to_dense(
csr_matrix((np.ones(m), (x, y)), (n, n), dtype=np.int8)
)
g = np.logical_or(g, g.T)
paths = np.zeros(n, dtype=np.int64).reshape(-1, 1)
paths[a, 0] = 1
while not paths[b, 0]:
paths = np.dot(g, paths) % MOD
print(paths[b, 0])
@staticmethod
def c_2():
n, a, b, m, *xy = map(int, sys.stdin.read().split())
a -= 1
b -= 1
g = GeometryTopology.Graph()
for x, y in zip(*[iter(xy)] * 2):
x -= 1
y -= 1
g.add_edge(x, y, weight=1)
g.add_edge(y, x, weight=1)
dist, paths = g.dijkstra(a, paths_cnt=True, mod=MOD)
print(paths[b])
@staticmethod
def d():
n, k = map(int, sys.stdin.read().split())
cn = Combinatorics.CombinationsMod()
print(cn(n + k - 1, k))
class ABC022:
@staticmethod
def a():
n, s, t, *a = map(int, sys.stdin.read().split())
a = np.array(a)
np.cumsum(a, out=a)
print(((s <= a) & (a <= t)).sum())
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
c = Counter(a)
print(sum(c.values()) - len(c))
@staticmethod
def c():
n, m, *uvl = map(int, sys.stdin.read().split())
u, v, l = np.array(uvl).reshape(m, 3).T
u -= 1
v -= 1
g = csgraph_to_dense(csr_matrix((l, (u, v)), (n, n)))
g += g.T
g[g == 0] = np.inf
dist0 = g[0].copy()
g[0] = 0
g[:, 0] = 0
dist = shortest_path(g, method="FW", directed=False)
u, v = np.array([*itertools.combinations(range(1, n), 2)]).T
res = (dist0[u] + dist[u, v] + dist0[v]).min()
print(-1 if res == np.inf else int(res))
@staticmethod
def d():
n, *ab = map(int, sys.stdin.read().split())
c = np.array(ab).reshape(2, n, 2)
g = c.mean(axis=1)
d = np.sqrt(((c - g[:, None, :]) ** 2).sum(axis=-1)).sum(axis=1)
print(d[1] / d[0])
class ABC023:
@staticmethod
def a():
print(sum(divmod(int(sys.stdin.readline().rstrip()), 10)))
@staticmethod
def b():
n, s = sys.stdin.read().split()
n = int(n)
t = "b"
for i in range(n // 2):
if i % 3 == 0:
t = "a" + t + "c"
elif i % 3 == 1:
t = "c" + t + "a"
else:
t = "b" + t + "b"
print(n // 2 if t == s else -1)
@staticmethod
def b_2():
n, s = sys.stdin.read().split()
n = int(n)
if n & 1 ^ 1:
print(-1)
return
a = list("abc")
i = (1 - n // 2) % 3
for c in s:
if c != a[i]:
print(-1)
return
i = (i + 1) % 3
print(n // 2)
@staticmethod
def c():
h, w, k, n, *rc = map(int, sys.stdin.read().split())
r, c = np.array(rc).reshape(n, 2).T - 1
rb = np.bincount(r, minlength=h)
cb = np.bincount(c, minlength=w)
rbb = np.bincount(rb, minlength=k + 1)
cbb = np.bincount(cb, minlength=k + 1)
tot = (rbb[: k + 1] * cbb[k::-1]).sum()
real = np.bincount(rb[r] + cb[c] - 1, minlength=k + 1)
print(tot - real[k - 1] + real[k])
@staticmethod
def d():
n, *hs = map(int, sys.stdin.read().split())
h, s = np.array(hs).reshape(n, 2).T
t = np.arange(n)
def is_ok(x):
return np.all(np.sort((x - h) // s) >= t)
def binary_search():
lo, hi = 0, 10**14
while lo + 1 < hi:
x = (lo + hi) // 2
if is_ok(x):
hi = x
else:
lo = x
return hi
print(binary_search())
class ABC024:
@staticmethod
def a():
a, b, c, k, s, t = map(int, sys.stdin.read().split())
print(a * s + b * t - c * (s + t) * (s + t >= k))
@staticmethod
def b():
n, t, *a = map(int, sys.stdin.read().split())
a = np.array(a)
print(np.minimum(a[1:] - a[:-1], t).sum() + t)
@staticmethod
def c():
n, d, k, *lrst = map(int, sys.stdin.read().split())
lrst = np.array(lrst)
lr = lrst[: 2 * d].reshape(d, 2)
s, t = lrst[2 * d :].reshape(k, 2).T
day = np.zeros((k,), dtype=np.int32)
for i in range(d):
l, r = lr[i]
move = (l <= s) & (s <= r) & (s != t)
reach = move & (l <= t) & (t <= r)
s[move & (s < t)] = r
s[move & (s > t)] = l
s[reach] = t[reach]
day[reach] = i + 1
print(*day, sep="\n")
@staticmethod
def d():
a, b, c = map(int, sys.stdin.read().split())
p = MOD
denom = pow(a * b % p - b * c % p + c * a % p, p - 2, p)
w = (b * c - a * b) % p * denom % p
h = (b * c - a * c) % p * denom % p
print(h, w)
class ABC025:
@staticmethod
def a():
s, n = sys.stdin.read().split()
n = int(n)
i, j = divmod(n - 1, 5)
print(s[i] + s[j])
@staticmethod
def b():
n, a, b = map(int, sys.stdin.readline().split())
res = defaultdict(int)
for _ in range(n):
s, d = sys.stdin.readline().split()
d = int(d)
res[s] += min(max(d, a), b)
res = res["East"] - res["West"]
if res == 0:
ans = 0
elif res > 0:
ans = f"East {res}"
else:
ans = f"West {-res}"
print(ans)
@staticmethod
def c():
b = [0] * 6
for i in range(2):
(*row,) = map(int, sys.stdin.readline().split())
for j in range(3):
b[i * 3 + j] = row[j]
c = [0] * 8
for i in range(3):
(*row,) = map(int, sys.stdin.readline().split())
for j in range(2):
c[i * 3 + j] = row[j]
tot = sum(b) + sum(c)
@lru_cache(maxsize=None)
def f(s=tuple(0 for _ in range(9))):
if all(s):
res = 0
for i in range(6):
res += (s[i] == s[i + 3]) * b[i]
for i in range(8):
res += (s[i] == s[i + 1]) * c[i]
return res
cand = [i for i in range(9) if not s[i]]
flg = len(cand) & 1
s = list(s)
res = []
for i in cand:
s[i] = (flg ^ 1) + 1
res.append(f(tuple(s)))
s[i] = 0
return sorted(res, reverse=flg)[0]
a = f()
b = tot - a
print(a)
print(b)
class ABC026:
@staticmethod
def a():
a = int(sys.stdin.readline().rstrip())
print(a // 2 * (a - a // 2))
@staticmethod
def b():
n, *r = map(int, sys.stdin.read().split())
s = np.pi * np.array([0] + r) ** 2
s.sort()
res = s[n::-2].sum() - s[n - 1 :: -2].sum()
print(res)
@staticmethod
def c():
n, *b = map(int, sys.stdin.read().split())
g = GeometryTopology.Graph()
for i in range(1, n):
g.add_edge(b[i - 1] - 1, i, weight=1)
def f(u=0):
if not g.edges[u]:
return 1
s = [f(v) for v in g.edges[u]]
return max(s) + min(s) + 1
print(f())
@staticmethod
def d():
a, b, c = map(int, sys.stdin.readline().split())
def f(t):
return a * t + b * np.sin(c * t * np.pi) - 100
print(optimize.brenth(f, 0, 200))
class ABC027:
@staticmethod
def a():
l = [int(l) for l in sys.stdin.readline().split()]
l.sort()
print(l[2] if l[0] == l[1] else l[0])
@staticmethod
def b():
n, *a = map(int, sys.stdin.read().split())
m, r = divmod(sum(a), n)
if r:
print(-1)
return
population = 0
towns = 0
cnt = 0
for x in a:
population += x
towns += 1
if population / towns != m:
cnt += 1
continue
population, towns = 0, 0
print(cnt)
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
flg = n.bit_length() & 1 ^ 1
t = 0
x = 1
while x <= n:
t += 1
x = 2 * x + 1 if t & 1 ^ flg else 2 * x
print("Aoki" if t & 1 else "Takahashi")
class ABC028:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
print(
"Bad"
if n < 60
else "Good"
if n < 90
else "Great"
if n < 100
else "Perfect"
)
@staticmethod
def b():
s = sys.stdin.readline().rstrip()
cnt = Counter(s)
print(*[cnt.get(c, 0) for c in "ABCDEF"])
@staticmethod
def c():
a, b, c, d, e = map(int, sys.stdin.readline().split())
print(max(b + c + e, a + d + e))
@staticmethod
def d():
n, k = map(int, sys.stdin.readline().split())
c = 3 * 2 * (n - k) * (k - 1) + 3 * (n - 1) + 1
print(c / n**3)
class ABC029:
@staticmethod
def a():
print(sys.stdin.readline().rstrip() + "s")
@staticmethod
def b():
print(sum("r" in s for s in sys.stdin.read().split()))
@staticmethod
def c():
print(
*[
"".join(s)
for s in itertools.product(
"abc", repeat=int(sys.stdin.readline().rstrip())
)
],
sep="\n",
)
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
print(
sum(
n // 10 ** (i + 1) * 10**i
+ min(max((n % 10 ** (i + 1) - 10**i + 1), 0), 10**i)
for i in range(9)
)
)
class ABC030:
@staticmethod
def a():
a, b, c, d = map(int, sys.stdin.readline().split())
e, f = b * c, d * a
print("TAKAHASHI" if e > f else "AOKI" if f > e else "DRAW")
@staticmethod
def b():
n, m = map(int, sys.stdin.readline().split())
n = (n % 12 + m / 60) * 30
m *= 6
d = abs(n - m)
print(min(d, 360 - d))
@staticmethod
def c():
n, m = map(int, sys.stdin.readline().split())
x, y = map(int, sys.stdin.readline().split())
a = [int(x) for x in sys.stdin.readline().split()]
b = [int(x) for x in sys.stdin.readline().split()]
t = 0
p = 1
cnt = 0
while True:
if p:
i = bi_l(a, t)
if i == n:
break
t = a[i] + x
else:
i = bi_l(b, t)
if i == m:
break
t = b[i] + y
cnt += 1
p ^= 1
print(cnt)
@staticmethod
def d():
n, a = map(int, sys.stdin.readline().split())
a -= 1
k = sys.stdin.readline().rstrip()
b = [int(x) - 1 for x in sys.stdin.readline().split()]
c = [None] * n
for i in range(n + 1):
if str(i) == k:
print(a + 1)
return
if c[a] is not None:
l, d = i - c[a], c[a]
break
c[a] = i
a = b[a]
r = [None] * len(k)
r[0] = 1
for i in range(len(k) - 1):
r[i + 1] = r[i] * 10 % l
k = [int(c) for c in k][::-1]
d = (sum(r[i] * k[i] for i in range(len(k))) - d) % l
for _ in range(d):
a = b[a]
print(a + 1)
@staticmethod
def d_2():
n, a, k, *b = map(int, sys.stdin.read().split())
a -= 1
b = [x - 1 for x in b]
c = [None] * n
for i in range(n + 1):
if i == k:
print(a + 1)
return
if c[a] is not None:
for _ in range((k - c[a]) % (i - c[a])):
a = b[a]
print(a + 1)
return
c[a] = i
a = b[a]
class ABC031:
@staticmethod
def a():
a, d = map(int, sys.stdin.readline().split())
if a > d:
a, d = d, a
print((a + 1) * d)
@staticmethod
def b():
l, h, n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
res = np.maximum(l - a, 0)
res[a > h] = -1
print(*res, sep="\n")
@staticmethod
def c():
n, *a = map(int, sys.stdin.read().split())
a = np.array(a)
np.cumsum(a[::2], out=a[::2])
np.cumsum(a[1::2], out=a[1::2])
a = list(a) + [0] * 2
def score(i, j):
if i > j:
i, j = j, i
if (j - i) & 1:
x, y = a[j - 1] - a[i - 2], a[j] - a[i - 1]
else:
x, y = a[j] - a[i - 2], a[j - 1] - a[i - 1]
return x, y
res = -inf
for i in range(n):
s = -inf
for j in range(n):
if i == j:
continue
x, y = score(i, j)
if y > s:
s, t = y, x
res = max(res, t)
print(res)
@staticmethod
def d():
k, m = map(int, sys.stdin.readline().split())
(*vw,) = zip(*[iter(sys.stdin.read().split())] * 2)
for l in itertools.product((1, 2, 3), repeat=k):
s = dict()
for v, w in vw:
i = 0
for d in v:
d = int(d) - 1
j = i + l[d]
if j > len(w):
break
t = w[i:j]
if d in s and s[d] != t:
break
s[d] = t
i = j
else:
if i == len(w):
continue
break
else:
for i in range(k):
print(s[i])
return
class ABC032:
@staticmethod
def a():
a, b, n = map(int, sys.stdin.read().split())
l = NumberTheory.lcm(a, b)
print((n + l - 1) // l * l)
@staticmethod
def b():
s, k = sys.stdin.read().split()
k = int(k)
res = set()
for i in range(len(s) - k + 1):
res.add(s[i : i + k])
print(len(res))
@staticmethod
def c():
n, k, *s = map(int, sys.stdin.read().split())
if 0 in s:
print(n)
return
if k == 0:
print(0)
return
res, tmp, l = 0, 1, 0
for r in range(n):
tmp *= s[r]
while tmp > k:
tmp //= s[l]
l += 1
res = max(res, r - l + 1)
print(res)
class ABC033:
@staticmethod
def a():
print(
"SAME"
if len(set(sys.stdin.readline().rstrip())) == 1
else "DIFFERENT"
)
@staticmethod
def b():
n = int(sys.stdin.readline().rstrip())
res = dict()
for _ in range(n):
s, p = sys.stdin.readline().split()
res[s] = int(p)
tot = sum(res.values())
for s, p in res.items():
if p > tot / 2:
print(s)
return
print("atcoder")
@staticmethod
def c():
s = sys.stdin.readline().rstrip()
print(sum(not "0" in f for f in s.split("+")))
class ABC034:
@staticmethod
def a():
x, y = map(int, sys.stdin.readline().split())
print("Better" if y > x else "Worse")
@staticmethod
def b():
n = int(sys.stdin.readline().rstrip())
print(n + 1 if n & 1 else n - 1)
@staticmethod
def c():
h, w = map(int, sys.stdin.read().split())
choose = Combinatorics.CombinationsMod()
print(choose(h + w - 2, h - 1))
@staticmethod
def d():
n, k, *wp = map(int, sys.stdin.read().split())
w, p = np.array(wp).reshape(-1, 2).T
def f(x):
return np.sort(w * (p - x))[-k:].sum()
print(optimize.bisect(f, 0, 100))
class ABC035:
    """ABC035 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        w, h = map(int, sys.stdin.readline().split())
        print("4:3" if 4 * h == 3 * w else "16:9")
    @staticmethod
    def b():
        # final Manhattan distance; '?' moves can extend (t=1) or parity-adjust (t=2)
        s, t = sys.stdin.read().split()
        y = x = z = 0
        for c in s:
            if c == "?":
                z += 1
            elif c == "L":
                x -= 1
            elif c == "R":
                x += 1
            elif c == "D":
                y -= 1
            elif c == "U":
                y += 1
        d = abs(y) + abs(x)
        print(d + z if t == "1" else max(d - z, (d - z) & 1))
    @staticmethod
    def c():
        # range-flip queries answered with a difference array, parity per cell
        n, q, *lr = map(int, sys.stdin.read().split())
        l, r = np.array(lr).reshape(q, 2).T
        res = np.zeros(n + 1, dtype=int)
        np.add.at(res, l - 1, 1)
        np.subtract.at(res, r, 1)
        np.cumsum(res, out=res)
        res = res & 1
        print("".join(map(str, res[:-1])))
    @staticmethod
    def d():
        # best score: wait at one town; needs shortest paths to (d_1) and from (d_2) town 0
        n, m, t = map(int, sys.stdin.readline().split())
        point = np.array(sys.stdin.readline().split(), dtype=int)
        a, b, c = (
            np.array(sys.stdin.read().split(), dtype=np.int64)
            .reshape(m, 3)
            .T
        )
        a -= 1
        b -= 1
        d_1 = shortest_path(
            csr_matrix((c, (a, b)), (n, n)),
            method="D",
            directed=True,
            indices=0,
        )
        d_2 = shortest_path(
            csr_matrix((c, (b, a)), (n, n)),
            method="D",
            directed=True,
            indices=0,
        )
        print(int(np.amax((t - (d_1 + d_2)) * point)))
class ABC036:
    """ABC036 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        a, b = map(int, sys.stdin.readline().split())
        print((b + a - 1) // a)
    @staticmethod
    def b():
        # rotate the n x n character grid 90 degrees clockwise
        n, *s = sys.stdin.read().split()
        n = int(n)
        for j in range(n):
            row = ""
            for i in range(n - 1, -1, -1):
                row += s[i][j]
            print(row)
    @staticmethod
    def c():
        # coordinate compression: rank of each value
        n, *a = map(int, sys.stdin.read().split())
        b = [None] * n
        prev = None
        j = -1
        for i, x in sorted(enumerate(a), key=lambda x: x[1]):
            if x != prev:
                j += 1
            b[i] = j
            prev = x
        print(*b, sep="\n")
    @staticmethod
    def d():
        # tree 2-coloring where black cannot neighbor black; count via tree DP
        # NOTE(review): recursive DFS — may need sys.setrecursionlimit for deep trees
        n, *ab = map(int, sys.stdin.read().split())
        edges = [[] for _ in range(n)]
        for a, b in zip(*[iter(ab)] * 2):
            a -= 1
            b -= 1
            edges[a].append(b)
            edges[b].append(a)
        parent = [None] * n
        def count(u):
            # returns (#colorings with u black, #colorings with u white) mod MOD
            black, white = 1, 1
            for v in edges[u]:
                if v == parent[u]:
                    continue
                parent[v] = u
                b, w = count(v)
                black *= w
                black %= MOD
                white *= (b + w) % MOD
                white %= MOD
            return black, white
        print(sum(count(0)) % MOD)
class ABC037:
    """ABC037 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        a, b, c = map(int, sys.stdin.readline().split())
        print(c // min(a, b))
    @staticmethod
    def b():
        # apply range-assignment queries in order, then print the array
        n, q, *lrt = map(int, sys.stdin.read().split())
        a = np.zeros(n, dtype=int)
        for l, r, t in zip(*[iter(lrt)] * 3):
            a[l - 1 : r] = t
        print(*a, sep="\n")
    @staticmethod
    def c():
        # sum of all length-k window sums via prefix sums
        n, k, *a = map(int, sys.stdin.read().split())
        a = np.array([0] + a)
        np.cumsum(a, out=a)
        s = (a[k:] - a[:-k]).sum()
        print(s)
    @staticmethod
    def d():
        # count strictly increasing paths in the grid, memoized per cell
        # NOTE(review): recursive — may need sys.setrecursionlimit for large grids
        h, w, *a = map(int, sys.stdin.read().split())
        p = [None] * (h * w)
        def paths(k):
            # number of increasing paths starting at flat index k (mod MOD)
            if p[k]:
                return p[k]
            p[k] = 1
            i, j = divmod(k, w)
            if j > 0 and a[k] > a[k - 1]:
                p[k] += paths(k - 1)
            if j < w - 1 and a[k] > a[k + 1]:
                p[k] += paths(k + 1)
            if i > 0 and a[k] > a[k - w]:
                p[k] += paths(k - w)
            if i < h - 1 and a[k] > a[k + w]:
                p[k] += paths(k + w)
            p[k] %= MOD
            return p[k]
        print(sum(paths(i) for i in range(h * w)) % MOD)
class ABC038:
    """ABC038 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        s = sys.stdin.readline().rstrip()
        print("YES" if s[-1] == "T" else "NO")
    @staticmethod
    def b():
        a, b, c, d = map(int, sys.stdin.read().split())
        print("YES" if a == c or b == c or a == d or b == d else "NO")
    @staticmethod
    def c():
        # count strictly increasing contiguous runs; each run of length t adds t*(t-1)/2
        n, *a = map(int, sys.stdin.read().split())
        a += [-1]
        cnt = n
        tmp = 1
        for i in range(n):
            if a[i + 1] > a[i]:
                tmp += 1
            else:
                cnt += tmp * (tmp - 1) // 2
                tmp = 1
        print(cnt)
    @staticmethod
    def d():
        # max nesting of boxes: sort by (w asc, h desc) then LIS on h
        n, *wh = map(int, sys.stdin.read().split())
        a = [
            x[1]
            for x in sorted(
                zip(*[iter(wh)] * 2), key=lambda x: (x[0], -x[1])
            )
        ]
        print(bi_l(DP.LIS(a), inf))
class ABC039:
    """ABC039 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        a, b, c = map(int, sys.stdin.readline().split())
        print((a * b + b * c + c * a) * 2)
    @staticmethod
    def b():
        x = int(sys.stdin.readline().rstrip())
        for n in range(1, int(x**0.5) + 1):
            if pow(n, 4) == x:
                print(n)
                return
    @staticmethod
    def c():
        # identify the note by matching the keyboard pattern offset
        board = "WBWBWWBWBWBW" * 3
        convert = "Do, *, Re, *, Mi, Fa, *, So, *, La, *, Si".split(", ")
        s = sys.stdin.readline().rstrip()
        print(convert[board.index(s)])
    @staticmethod
    def d():
        # invert an image erosion: white = cells adjacent to any '.', then re-dilate
        # the remaining black cells and check the original is reproduced
        h, w = map(int, sys.stdin.readline().split())
        s = "".join(sys.stdin.read().split())
        white = set()
        for i in range(h * w):
            if s[i] == "#":
                continue
            l = 0 if i % w == 0 else -1
            r = 0 if (i + 1) % w == 0 else 1
            white |= {
                i + dy + dx
                for dy in range(-w, w + 1, w)
                for dx in range(l, r + 1)
            }
        black_before = set(range(h * w)) - white
        black_after = set()
        for i in black_before:
            l = 0 if i % w == 0 else -1
            r = 0 if (i + 1) % w == 0 else 1
            black_after |= {
                i + dy + dx
                for dy in range(-w, w + 1, w)
                for dx in range(l, r + 1)
            }
        black_after &= set(range(h * w))
        for i in range(h * w):
            if s[i] == "#" and not i in black_after:
                print("impossible")
                return
        print("possible")
        for i in range(h):
            print(
                "".join(
                    [
                        "#" if i * w + j in black_before else "."
                        for j in range(w)
                    ]
                )
            )
class ABC040:
    """ABC040 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        n, x = map(int, sys.stdin.readline().split())
        print(min(x - 1, n - x))
    @staticmethod
    def b():
        # minimize |n - i*j| + leftover by trying every i up to sqrt(n)
        n = int(sys.stdin.readline().rstrip())
        res = inf
        for i in range(1, int(n**0.5) + 1):
            res = min(res, n // i - i + n % i)
        print(res)
    @staticmethod
    def c():
        # classic frog DP: jump from i-1 or i-2 with |height diff| cost
        n, *h = map(int, sys.stdin.read().split())
        h = [h[0]] + h
        cost = [None] * (n + 1)
        cost[0] = cost[1] = 0
        for i in range(2, n + 1):
            cost[i] = min(
                cost[i - 2] + abs(h[i] - h[i - 2]),
                cost[i - 1] + abs(h[i] - h[i - 1]),
            )
        print(cost[n])
    @staticmethod
    def d():
        # offline: process edges/queries by decreasing year; even keys = unions,
        # odd keys = queries (component size), merged in one heap
        n, m = map(int, sys.stdin.readline().split())
        uf = GeometryTopology.Graph(n)
        uf.init_dsu()
        queue = []
        for _ in range(m):
            a, b, y = map(int, sys.stdin.readline().split())
            heappush(queue, (-(2 * y), a - 1, b - 1))
        q = int(sys.stdin.readline().rstrip())
        for i in range(q):
            v, y = map(int, sys.stdin.readline().split())
            heappush(queue, (-(2 * y + 1), v - 1, i))
        res = [None] * q
        while queue:
            y, i, j = heappop(queue)
            if y & 1:
                res[j] = uf.size[uf.find(i)]
            else:
                uf.unite(i, j)
        print(*res, sep="\n")
class ABC041:
    """ABC041 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        s, i = sys.stdin.read().split()
        i = int(i)
        print(s[i - 1])
    @staticmethod
    def b():
        a, b, c = map(int, sys.stdin.readline().split())
        ans = a * b % MOD * c % MOD
        print(ans)
    @staticmethod
    def c():
        # print 1-based indices in decreasing order of height
        n, *a = map(int, sys.stdin.read().split())
        for i, h in sorted(enumerate(a), key=lambda x: -x[1]):
            print(i + 1)
    @staticmethod
    def d():
        # count topological orderings via bitmask DP; g[x] = mask of nodes that
        # must come before x
        n, _, *xy = map(int, sys.stdin.read().split())
        g = [0] * n
        for x, y in zip(*[iter(xy)] * 2):
            g[x - 1] |= 1 << (y - 1)
        res = [0] * (1 << n)
        res[0] = 1
        for i in range(1 << n):
            for j in range(n):
                if i >> j & 1 ^ 1:
                    continue
                if not (g[j] & i):
                    res[i] += res[i & ~(1 << j)]
        print(res[-1])
class ABC042:
    """ABC042 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        # haiku check: the three numbers must be {5, 5, 7}
        a = [int(x) for x in sys.stdin.readline().split()]
        c = Counter(a)
        print("YES" if c[5] == 2 and c[7] == 1 else "NO")
    @staticmethod
    def b():
        n, l, *s = sys.stdin.read().split()
        print("".join(sorted(s)))
    @staticmethod
    def c():
        # smallest number >= n using only allowed digits; the extra candidate
        # covers the carry into one more digit
        n, k, *d = sys.stdin.read().split()
        l = len(n)
        ok = sorted(set(string.digits) - set(d))
        cand = [
            int("".join(p)) for p in itertools.product(ok, repeat=l)
        ] + [int(min(x for x in ok if x > "0") + min(ok) * l)]
        print(cand[bi_l(cand, int(n))])
    @staticmethod
    def d():
        # grid paths avoiding the bottom-left a x b block, by inclusion-exclusion
        # over the first row where the path enters the forbidden columns
        h, w, a, b = map(int, sys.stdin.read().split())
        combinations = Combinatorics.CombinationsMod(
            n=2 * 10**5, mod=MOD
        )
        i = np.arange(h - a, h)
        ng = np.sum(
            combinations(i + b - 1, i)
            * combinations(h - i + w - b - 2, h - 1 - i)
            % MOD
        )
        print((combinations(h + w - 2, h - 1) - ng) % MOD)
class ABC043:
    """ABC043 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        n = int(sys.stdin.readline().rstrip())
        print((1 + n) * n // 2)
    @staticmethod
    def b():
        # simulate a keyboard where 'B' is backspace
        s = sys.stdin.readline().rstrip()
        t = ""
        for c in s:
            if c == "B":
                t = t[:-1]
            else:
                t += c
        print(t)
    @staticmethod
    def c():
        # minimize sum of squared differences: optimal target is the rounded mean
        n, *a = map(int, sys.stdin.read().split())
        a = np.array(a)
        x = np.around(a.sum() / n).astype(int)
        print(np.sum((a - x) ** 2))
    @staticmethod
    def d():
        # any unbalanced substring contains an unbalanced one of length 2 or 3
        s = sys.stdin.readline().rstrip()
        n = len(s)
        for i in range(n - 1):
            if s[i] == s[i + 1]:
                print(i + 1, i + 2)
                return
        for i in range(n - 2):
            if s[i] == s[i + 2]:
                print(i + 1, i + 3)
                return
        print(-1, -1)
class ABC044:
    """ABC044 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        n, k, x, y = map(int, sys.stdin.read().split())
        print(min(n, k) * x + max(0, n - k) * y)
    @staticmethod
    def b():
        # palindromic permutation exists iff every letter count is even
        res = set(
            c & 1 for c in Counter(sys.stdin.readline().rstrip()).values()
        )
        print("Yes" if len(res) == 1 and res.pop() == 0 else "No")
    @staticmethod
    def c():
        # dp[cnt][sum]: number of ways to pick cnt cards with the given sum
        n, a, *x = map(int, sys.stdin.read().split())
        dp = np.zeros((n + 1, 2501), dtype=np.int64)
        dp[0, 0] = 1
        for v in x:
            dp[1:, v:] += dp[:-1, :-v]
        i = np.arange(1, n + 1)
        print(dp[i, i * a].sum())
    @staticmethod
    def c_2():
        # same problem: shift values by -a, count non-empty subsets summing to 0
        n, a, *x = map(int, sys.stdin.read().split())
        for i in range(n):
            x[i] -= a
        s = defaultdict(int)
        s[0] = 1
        for i in range(n):
            ns = s.copy()
            for k, v in s.items():
                ns[k + x[i]] += v
            s = ns
        print(s[0] - 1)
    @staticmethod
    def d():
        # not implemented
        pass
class ABC045:
    """ABC045 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        a, b, h = map(int, sys.stdin.read().split())
        print((a + b) * h // 2)
    @staticmethod
    def b():
        # card game: each reversed deck is popped from the end; last card names
        # the next player, empty hand wins
        a, b, c = sys.stdin.read().split()
        d = {"a": a[::-1], "b": b[::-1], "c": c[::-1]}
        nx = "a"
        while 1:
            if not d[nx]:
                print(nx.upper())
                return
            d[nx], nx = d[nx][:-1], d[nx][-1]
    @staticmethod
    def c():
        # sum over all '+' insertions: each substring s[i..j] appears in
        # 2^(free slots left) * 2^(free slots right) formulas
        def c(l):
            return pow(2, max(0, l - 1))
        s = sys.stdin.readline().rstrip()
        n = len(s)
        print(
            sum(
                int(s[i : j + 1]) * c(i) * c(n - 1 - j)
                for i in range(n)
                for j in range(i, n)
            )
        )
    @staticmethod
    def d():
        # count 3x3 windows by black-cell count; only windows around painted
        # cells can be nonzero, the rest have count 0
        h, w, n, *ab = map(int, sys.stdin.read().split())
        c = defaultdict(int)
        for y, x in zip(*[iter(ab)] * 2):
            y -= 1
            x -= 1
            for dy, dx in itertools.product(range(-1, 2), repeat=2):
                i, j = y + dy, x + dx
                if not (0 < i < h - 1 and 0 < j < w - 1):
                    continue
                c[(i, j)] += 1
        c = Counter(c.values())
        c[0] = (h - 2) * (w - 2) - sum(c.values())
        for i in range(10):
            print(c[i])
class ABC046:
    """ABC046 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        print(len(set(sys.stdin.readline().split())))
    @staticmethod
    def b():
        n, k = map(int, sys.stdin.readline().split())
        print(k * pow(k - 1, n - 1))
    @staticmethod
    def c():
        # scale the running tally up to the smallest multiple matching each ratio
        n, *xy = map(int, sys.stdin.read().split())
        a, b = 1, 1
        for x, y in zip(*[iter(xy)] * 2):
            n = max((a + x - 1) // x, (b + y - 1) // y)
            a, b = n * x, n * y
        print(a + b)
    @staticmethod
    def d():
        c = Counter(sys.stdin.readline().rstrip())
        print((c["g"] - c["p"]) // 2)
class ABC047:
    """ABC047 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        c = sorted(map(int, sys.stdin.readline().split()))
        print("Yes" if c[0] + c[1] == c[2] else "No")
    @staticmethod
    def b():
        # intersect the half-planes that stay white; area of the remaining box
        w, h, n, *xyf = map(int, sys.stdin.read().split())
        l, r, d, u = 0, w, 0, h
        for x, y, f in zip(*[iter(xyf)] * 3):
            if f == 1:
                l = max(l, x)
            if f == 2:
                r = min(r, x)
            if f == 3:
                d = max(d, y)
            if f == 4:
                u = min(u, y)
        print(max(0, r - l) * max(0, u - d))
    @staticmethod
    def c():
        s = sys.stdin.readline().rstrip()
        print(sum(s[i] != s[i + 1] for i in range(len(s) - 1)))
    @staticmethod
    def d():
        # count buy/sell pairs achieving the maximum profit (single pass)
        mn, mx, c = inf, -1, 0
        n, t, *a = map(int, sys.stdin.read().split())
        for p in a:
            if p - mn == mx:
                c += 1
            elif p - mn > mx:
                mx, c = p - mn, 1
            mn = min(mn, p)
        print(c)
class ABC048:
    """ABC048 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        def initial(s):
            return s[0].upper()
        print("".join(map(initial, sys.stdin.readline().split())))
    @staticmethod
    def b():
        # count multiples of x in [a, b]; (a-1)//x is -1 when a == 0, which
        # correctly includes 0 as a multiple
        a, b, x = map(int, sys.stdin.readline().split())
        print(
            b // x - (a - 1) // x
        )
    @staticmethod
    def c():
        # greedily eat candies from the right box so adjacent sums <= x
        n, x, *a = map(int, sys.stdin.read().split())
        cnt = prev = 0
        for i in range(n):
            d = prev + a[i] - x
            prev = a[i]
            if d <= 0:
                continue
            cnt += d
            prev -= d
        print(cnt)
    @staticmethod
    def d():
        s = sys.stdin.readline().rstrip()
        print("First" if len(s) & 1 ^ (s[0] == s[-1]) else "Second")
class ABC049:
    """ABC049 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        vowels = set("aeiou")
        print(
            "vowel"
            if sys.stdin.readline().rstrip() in vowels
            else "consonant"
        )
    @staticmethod
    def b():
        # double every row of the image vertically
        h, w, *s = sys.stdin.read().split()
        for l in s:
            for _ in range(2):
                print(l)
    @staticmethod
    def c():
        t = set("dream, dreamer, erase, eraser".split(", "))
        def obtainable(s):
            # strip matching words from the right; suffix match is unambiguous
            while True:
                for i in range(5, 8):
                    if s[-i:] in t:
                        s = s[:-i]
                        if not s:
                            return True
                        break
                else:
                    return False
        s = sys.stdin.readline().rstrip()
        print("YES" if obtainable(s) else "NO")
    @staticmethod
    def d():
        # group by the pair (road component, railway component); answer is the
        # size of each joint group
        n, k, l = map(int, sys.stdin.readline().split())
        uf1 = GeometryTopology.Graph(n)
        uf1.init_dsu()
        uf2 = GeometryTopology.Graph(n)
        uf2.init_dsu()
        def add_edges(uf, m):
            for _ in range(m):
                x, y = map(int, sys.stdin.readline().split())
                x -= 1
                y -= 1
                uf.unite(x, y)
        add_edges(uf1, k)
        add_edges(uf2, l)
        g = defaultdict(list)
        for i in range(n):
            g[(uf1.find(i), uf2.find(i))].append(i)
        res = [None] * n
        for a in g:
            for i in g[a]:
                res[i] = len(g[a])
        print(*res, sep=" ")
class ABC050:
    """ABC050 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        # input is a literal arithmetic expression like "1+2"; trusted input only
        print(eval(sys.stdin.readline().rstrip()))
    @staticmethod
    def b():
        # total time with problem p replaced by time x, per query
        n = int(sys.stdin.readline().rstrip())
        t = np.array(sys.stdin.readline().split(), dtype=np.int64)
        m, *px = map(int, sys.stdin.read().split())
        p, x = np.array(px).reshape(m, 2).T
        p -= 1
        print(*(t.sum() + x - t[p]), sep="\n")
    @staticmethod
    def c():
        # valid distance multiset fixes every position up to 2^(n/2) sign choices
        n, *a = map(int, sys.stdin.read().split())
        a = Counter(a)
        if n & 1 and not (
            a[0] == 1 and all(a[i] == 2 for i in range(2, n, 2))
        ):
            print(0)
            return
        if ~n & 1 and any(a[i] != 2 for i in range(1, n, 2)):
            print(0)
            return
        print(pow(2, n // 2, MOD))
    @staticmethod
    def d():
        # not implemented
        pass
class ABC051:
    """ABC051 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        print(" ".join(sys.stdin.readline().rstrip().split(",")))
    @staticmethod
    def b():
        # count (x, y, z) in [0, k]^3 with x + y + z = s; enumerate x only
        k, s = map(int, sys.stdin.readline().split())
        tot = 0
        for x in range(k + 1):
            if s - x < 0:
                break
            if s - x > 2 * k:
                continue
            tot += s - x + 1 if s - x <= k else 2 * k - (s - x) + 1
        print(tot)
    @staticmethod
    def c():
        # two vertex-disjoint (except endpoints) rectangular loops between the points
        x1, y1, x2, y2 = map(int, sys.stdin.readline().split())
        dx, dy = x2 - x1, y2 - y1
        print(
            "U" * dy
            + "R" * (dx + 1)
            + "D" * (dy + 1)
            + "L" * (dx + 1)
            + "U"
            + "L"
            + "U" * (dy + 1)
            + "R" * (dx + 1)
            + "D" * (dy + 1)
            + "L" * dx
        )
    @staticmethod
    def d():
        # an edge is unused iff it lies on no shortest path between any pair
        n, m, *abc = map(int, sys.stdin.read().split())
        x = np.arange(n)
        a, b, c = np.array(abc).reshape(m, 3).T
        a -= 1
        b -= 1
        d = shortest_path(
            csr_matrix((c, (a, b)), shape=(n, n)),
            method="FW",
            directed=False,
        ).astype(np.int64)
        print(
            m
            - np.any(
                d[x, a[:, None]] + c[:, None] == d[x, b[:, None]], axis=1
            ).sum()
        )
class ABC052:
    """ABC052 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        a, b, c, d = map(int, sys.stdin.readline().split())
        print(max(a * b, c * d))
    @staticmethod
    def b():
        # max prefix balance of 'I' (+1) vs 'D' (-1)
        n, s = sys.stdin.read().split()
        n = int(n)
        a = [0] * (n + 1)
        for i in range(n):
            a[i + 1] = a[i] + (1 if s[i] == "I" else -1)
        print(max(a))
    @staticmethod
    def c():
        # number of divisors of n! = product of (exponent + 1) mod MOD
        n = int(sys.stdin.readline().rstrip())
        pn = NumberTheory.PrimeNumbers(n)
        s = 1
        for c in pn.factorize_factorial(n).values():
            s = s * (c + 1) % MOD
        print(s)
    @staticmethod
    def d():
        # per gap choose walking (gap * a) or teleporting (b)
        n, a, b, *x = map(int, sys.stdin.read().split())
        x = np.array(x)
        print(np.minimum((x[1:] - x[:-1]) * a, b).sum())
class ABC053:
    """ABC053 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        print(
            "ABC" if int(sys.stdin.readline().rstrip()) < 1200 else "ARC"
        )
    @staticmethod
    def b():
        # length from the first 'A' to the last 'Z'
        s = sys.stdin.readline().rstrip()
        print(len(s) - s.find("A") - s[::-1].find("Z"))
    @staticmethod
    def c():
        # maximize coins: 11-kicks give 2 points each, remainder needs ceil(r/6)
        x = int(sys.stdin.readline().rstrip())
        q, r = divmod(x, 11)
        print(2 * q + (r + 5) // 6)
    @staticmethod
    def d():
        # duplicates must be removed in pairs to keep all cards distinct
        n, *a = map(int, sys.stdin.read().split())
        print(n - ((n - len(set(a)) + 1) // 2 * 2))
class ABC054:
    """ABC054 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        def f(x):
            # card strength: 1 is strongest, otherwise face value
            return (x + 11) % 13
        a, b = map(int, sys.stdin.readline().split())
        print("Alice" if f(a) > f(b) else "Bob" if f(a) < f(b) else "Draw")
    @staticmethod
    def b():
        # search every offset for an exact m x m template match (for/else chains)
        n, m = map(int, sys.stdin.readline().split())
        a = [sys.stdin.readline().rstrip() for _ in range(n)]
        b = [sys.stdin.readline().rstrip() for _ in range(m)]
        for i in range(n - m + 1):
            for j in range(n - m + 1):
                for y in range(m):
                    for x in range(m):
                        if a[i + y][j + x] == b[y][x]:
                            continue
                        break
                    else:
                        continue
                    break
                else:
                    print("Yes")
                    return
        print("No")
    @staticmethod
    def c():
        # count Hamiltonian paths from vertex 0 via (vertex, visited-mask) DFS
        n, m, *ab = map(int, sys.stdin.read().split())
        g = GeometryTopology.Graph(n)
        for a, b in zip(*[iter(ab)] * 2):
            a -= 1
            b -= 1
            g.add_edge(a, b)
            g.add_edge(b, a)
        cnt = 0
        stack = [(0, 1)]
        while stack:
            u, s = stack.pop()
            if s == (1 << n) - 1:
                cnt += 1
                continue
            for v in g.edges[u]:
                if s >> v & 1:
                    continue
                stack.append((v, s | 1 << v))
        print(cnt)
    @staticmethod
    def d():
        # dp[gramsA][gramsB] = min cost; answer over exact ratio multiples
        n, ma, mb, *abc = map(int, sys.stdin.read().split())
        dp = np.full((401, 401), np.inf)
        dp[0, 0] = 0
        for a, b, c in zip(*[iter(abc)] * 3):
            np.minimum(dp[a:, b:], dp[:-a, :-b] + c, out=dp[a:, b:])
        i = np.arange(1, 400 // max(ma, mb) + 1)
        res = dp[i * ma, i * mb].min()
        print(int(res) if res != np.inf else -1)
class ABC055:
    """ABC055 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        n = int(sys.stdin.readline().rstrip())
        print(800 * n - 200 * (n // 15))
    @staticmethod
    def b():
        n = int(sys.stdin.readline().rstrip())
        fac, _ = Algebra.generate_fac_ifac(n, MOD)
        print(fac[-1])
    @staticmethod
    def c():
        n, m = map(int, sys.stdin.readline().split())
        print(m // 2 if m <= 2 * n else n + (m - 2 * n) // 4)
    @staticmethod
    def d():
        # animals in a circle: fix the first two species, propagate the rest,
        # then check the wrap-around constraints
        n, s = sys.stdin.read().split()
        n = int(n)
        s = [1 if c == "o" else 0 for c in s]
        def possible(t):
            # fill t from the fixed first pair; returns 1 iff the circle closes
            for i in range(1, n - 1):
                t[i + 1] = t[i - 1] ^ t[i] ^ s[i]
            return (
                (t[0] ^ s[0] ^ t[1] ^ t[-1])
                | (t[-1] ^ s[-1] ^ t[-2] ^ t[0])
            ) ^ 1
        for fst in [(1, 0), (0, 1), (1, 1), (0, 0)]:
            t = [None] * n
            t[0], t[1] = fst[0], fst[1]
            if possible(t):
                print("".join("S" if x == 1 else "W" for x in t))
                return
        print(-1)
class ABC056:
    """ABC056 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        def to_i(c):
            return 1 if c == "H" else 0
        a, b = map(to_i, sys.stdin.readline().split())
        print("D" if a ^ b else "H")
    @staticmethod
    def b():
        w, a, b = map(int, sys.stdin.readline().split())
        if a > b:
            a, b = b, a
        print(max(b - (a + w), 0))
    @staticmethod
    def c():
        # smallest t with t*(t+1)/2 >= x
        x = int(sys.stdin.readline().rstrip())
        print(int(math.ceil(math.sqrt(2 * x + 1 / 4) - 0.5)))
    @staticmethod
    def d():
        # binary-search the first "necessary" card (values sorted ascending, so
        # necessity is monotone); card i is necessary iff some subset of the
        # others sums into [k - a[i], k).
        n, k, *a = map(int, sys.stdin.read().split())
        a = sorted(min(x, k) for x in a)
        def necessary(i):
            # subset-sum reachability over all cards except i, capped below k.
            # dtype=bool: np.bool was removed in NumPy 1.24 (was a deprecated
            # alias for the builtin), so the old `np.bool` crashed here.
            dp = np.zeros(k, dtype=bool)
            dp[0] = True
            for j in range(n):
                if j == i:
                    continue
                dp[a[j] :] += dp[: -a[j]]
            return np.any(dp[k - a[i] :])
        def binary_search():
            lo, hi = -1, n
            while hi - lo > 1:
                i = (lo + hi) // 2
                if necessary(i):
                    hi = i
                else:
                    lo = i
            return hi
        print(binary_search())
class ABC057:
    """ABC057 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        a, b = map(int, sys.stdin.readline().split())
        print((a + b) % 24)
    @staticmethod
    def b():
        # nearest checkpoint by Manhattan distance for every student (vectorized)
        n, m, *I = map(int, sys.stdin.read().split())
        I = np.array(I).reshape(-1, 2)
        ab, cd = I[:n], I[n:]
        print(
            *(
                np.argmin(
                    np.absolute(ab[:, None] - cd).sum(axis=-1), axis=-1
                )
                + 1
            ),
            sep="\n",
        )
    @staticmethod
    def c():
        # minimize digits of max(f, n/f) over divisor pairs; pick the divisor
        # closest to sqrt(n) from above
        n = int(sys.stdin.readline().rstrip())
        divs = NumberTheory.find_divisors(n)
        print(len(str(divs[bi_l(divs, math.sqrt(n))])))
    @staticmethod
    def d():
        # max average of <= b items: take the a largest; count selections that
        # achieve it by choosing among ties of the smallest included value
        c = Combinatorics.choose
        n, a, b, *v = map(int, sys.stdin.read().split())
        v.sort()
        print(sum(v[-a:]) / a)
        l, r = bi_l(v, v[-a]), bi_r(v, v[-a])
        print(
            sum(
                c(r - l, i)
                for i in range(r - n + a, r - max(l, n - b) + 1)
            )
            if r == n
            else c(r - l, r - n + a)
        )
class ABC058:
    """ABC058 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        a, b, c = map(int, sys.stdin.readline().split())
        print("YES" if c - b == b - a else "NO")
    @staticmethod
    def b():
        # interleave s and t; s may be one char longer
        s, t = sys.stdin.read().split()
        a = ""
        for i in range(len(t)):
            a += s[i] + t[i]
        if len(s) > len(t):
            a += s[-1]
        print(a)
    @staticmethod
    def c():
        # per letter, keep the minimum count across all headlines
        n, *s = sys.stdin.read().split()
        res = {c: 100 for c in string.ascii_lowercase}
        for counter in map(Counter, s):
            for (
                c,
                x,
            ) in res.items():
                res[c] = min(x, counter[c])
        t = ""
        for c, x in sorted(res.items()):
            t += c * x
        print(t)
    @staticmethod
    def d():
        # sum of rectangle areas over all line pairs, split into x and y factors
        n, m, *xy = map(int, sys.stdin.read().split())
        x, y = np.array(xy[:n]), np.array(xy[n:])
        print(
            (x * (np.arange(n) + 1) - np.cumsum(x)).sum()
            % MOD
            * ((y * (np.arange(m) + 1) - np.cumsum(y)).sum() % MOD)
            % MOD
        )
class ABC059:
    """ABC059 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        def initial(s):
            return s[0].upper()
        print("".join(map(initial, sys.stdin.readline().split())))
    @staticmethod
    def b():
        # compare as numbers: longer digit string wins, then lexicographic
        a, b = sys.stdin.read().split()
        la, lb = len(a), len(b)
        print(
            "GREATER"
            if la > lb
            else "LESS"
            if la < lb
            else "GREATER"
            if a > b
            else "LESS"
            if a < b
            else "EQUAL"
        )
    @staticmethod
    def c():
        # min operations so prefix sums strictly alternate sign; try both
        # starting signs and take the cheaper
        n, *a = map(int, sys.stdin.read().split())
        c = s = 0
        for i in range(n):
            s += a[i]
            if i & 1 and s >= 0:
                c += s + 1
                s = -1
            elif i & 1 ^ 1 and s <= 0:
                c += 1 - s
                s = 1
        c1 = c
        c = s = 0
        for i in range(n):
            s += a[i]
            if i & 1 and s <= 0:
                c += 1 - s
                s = 1
            elif i & 1 ^ 1 and s >= 0:
                c += s + 1
                s = -1
        c2 = c
        print(min(c1, c2))
    @staticmethod
    def d():
        x, y = map(int, sys.stdin.readline().split())
        print("Brown" if abs(x - y) <= 1 else "Alice")
class ABC060:
    """ABC060 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        a, b, c = sys.stdin.readline().split()
        print("YES" if a[-1] == b[0] and b[-1] == c[0] else "NO")
    @staticmethod
    def b():
        # a*i mod b hits c iff gcd(a, b) divides c
        a, b, c = map(int, sys.stdin.readline().split())
        print("NO" if c % NumberTheory.gcd(a, b) else "YES")
    @staticmethod
    def c():
        # total shower time: each press overlaps with the next at most t seconds
        n, t, *a = map(int, sys.stdin.read().split())
        print(sum(min(a[i + 1] - a[i], t) for i in range(n - 1)) + t)
    @staticmethod
    def d():
        # not implemented
        pass
class ABC061:
    """ABC061 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        a, b, c = map(int, sys.stdin.readline().split())
        print("Yes" if a <= c <= b else "No")
    @staticmethod
    def b():
        # vertex degrees of an undirected graph (each endpoint counted once)
        n, m, *ab = map(int, sys.stdin.read().split())
        ab = np.array(ab) - 1
        g = np.zeros(n, dtype=np.int32)
        np.add.at(g, ab, 1)
        print(*g, sep="\n")
    @staticmethod
    def c():
        # k-th smallest value in the multiset {a repeated b times}
        n, k, *ab = map(int, sys.stdin.read().split())
        ab = np.transpose(np.array(ab).reshape(n, 2))
        a, b = ab[:, np.argsort(ab[0])]
        print(a[np.cumsum(b) >= k][0])
    @staticmethod
    def d():
        # longest path 0 -> n-1 with possible positive cycles: keep only vertices
        # strongly connected with both endpoints, negate weights, Bellman-Ford;
        # a remaining negative cycle means the score is unbounded.
        n, m, *abc = map(int, sys.stdin.read().split())
        a, b, c = np.array(abc).reshape(m, 3).T
        a -= 1
        b -= 1
        c *= -1
        g = csr_matrix(
            ([1] * (m + 1), (np.append(a, n - 1), np.append(b, 0))), (n, n)
        )
        _, labels = connected_components(g, connection="strong")
        bl = (labels[a] == labels[0]) & (labels[b] == labels[0])
        g = csr_matrix((c[bl], (a[bl], b[bl])), (n, n))
        try:
            print(
                -shortest_path(g, method="BF", directed=True, indices=0)[
                    -1
                ].astype(int)
            )
        except Exception:  # scipy raises NegativeCycleError: score is unbounded
            print("inf")
    @staticmethod
    def d_2():
        # same problem via explicit Bellman-Ford plus negative-cycle propagation
        n, m, *abc = map(int, sys.stdin.read().split())
        a, b, c = np.array(abc).reshape(m, 3).T
        a -= 1
        b -= 1
        c *= -1
        d = np.full(n, np.inf)
        d[0] = 0
        for _ in range(n - 1):
            np.minimum.at(d, b, d[a] + c)
        # dtype=bool: np.bool was removed in NumPy 1.24, so `np.bool` crashed here
        neg_cycle = np.zeros(n, dtype=bool)
        for _ in range(n):
            np.logical_or.at(neg_cycle, b, d[a] + c < d[b])
            np.minimum.at(d, b, d[a] + c)
        print(inf if neg_cycle[-1] else -d[-1].astype(int))
class ABC062:
    """ABC062 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        # month groups: g maps month-1 to its group id
        g = [0, 2, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0]
        x, y = map(int, sys.stdin.readline().split())
        print("Yes" if g[x - 1] == g[y - 1] else "No")
    @staticmethod
    def b():
        # frame the picture with a 1-cell '#' border
        h, w = map(int, sys.stdin.readline().split())
        a = np.array(
            [list(s) for s in sys.stdin.read().split()], dtype="U1"
        )
        a = np.pad(a, pad_width=1, constant_values="#")
        for s in a:
            print("".join(s))
    @staticmethod
    def c():
        # min difference of three-way chocolate split; try T-shaped and striped
        # cuts in both orientations
        h, w = map(int, sys.stdin.readline().split())
        if h * w % 3 == 0:
            print(0)
            return
        def minimize(h, w):
            return min(
                h,
                *(
                    s[-1] - s[0]
                    for x in range(w // 3, w // 3 + 2)
                    for s in (
                        sorted(
                            [
                                h * x,
                                h // 2 * (w - x),
                                (h + 1) // 2 * (w - x),
                            ]
                        ),
                    )
                ),
            )
        print(min(minimize(h, w), minimize(w, h)))
    @staticmethod
    def d():
        # best sum(front n picks) - sum(back n picks): prefix max-heap from the
        # left plus suffix (negated) from the right, combined at every split
        n, *a = map(int, sys.stdin.read().split())
        a = np.array(a)
        def optimize(a):
            # s[i] = best sum of n values from the first n+i elements
            a = list(a)
            l, r = a[:n], a[n:]
            heapify(l)
            s = [None] * (n + 1)
            s[0] = sum(l)
            for i in range(n):
                x = heappop(l)
                heappush(l, max(x, r[i]))
                s[i + 1] = s[i] + max(0, r[i] - x)
            return np.array(s)
        print(
            (
                optimize(a[: 2 * n]) + optimize(-a[-1 : n - 1 : -1])[::-1]
            ).max()
        )
class ABC063:
    """ABC063 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        a = sum(map(int, sys.stdin.readline().split()))
        print("error" if a >= 10 else a)
    @staticmethod
    def b():
        s = sys.stdin.readline().rstrip()
        print("yes" if len(set(s)) == len(s) else "no")
    @staticmethod
    def c():
        # max subset sum not divisible by 10: drop the smallest non-multiple-of-10
        n, *a = map(int, sys.stdin.read().split())
        a = np.array(a)
        s = a.sum()
        if s % 10:
            print(s)
        elif not np.count_nonzero(a % 10):
            print(0)
        else:
            print(s - a[a % 10 != 0].min())
    @staticmethod
    def d():
        # min explosions to kill all monsters: binary search on count c; one
        # monster takes a hits, the rest take b, per explosion
        n, a, b, *h = map(int, sys.stdin.read().split())
        h = np.array(h)
        d = a - b
        def possible(c):
            hh = h.copy()
            np.maximum(hh - b * c, 0, out=hh)
            return ((hh + d - 1) // d).sum() <= c
        def binary_search():
            lo, hi = 0, 10**9
            while hi - lo > 1:
                c = (lo + hi) // 2
                if possible(c):
                    hi = c
                else:
                    lo = c
            return hi
        print(binary_search())
class ABC064:
    """ABC064 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        r, g, b = map(int, sys.stdin.readline().split())
        print("NO" if (10 * g + b) % 4 else "YES")
    @staticmethod
    def b():
        n, *a = map(int, sys.stdin.read().split())
        a.sort()
        print(a[-1] - a[0])
    @staticmethod
    def c():
        # rating colors: buckets of 400 up to 3200+, free-choice above
        n, *a = map(int, sys.stdin.read().split())
        a = np.bincount(np.minimum(np.array(a) // 400, 8), minlength=9)
        mx = np.count_nonzero(a[:-1]) + a[-1]
        mn = max(mx - a[-1], 1)
        print(mn, mx)
    @staticmethod
    def d():
        # make a valid bracket string by prepending l '(' and appending r ')'
        n, s = sys.stdin.read().split()
        l = r = 0
        for c in s:
            if c == "(":
                r += 1
            else:
                if r == 0:
                    l += 1
                else:
                    r -= 1
        print("(" * l + s + ")" * r)
class ABC065:
    """ABC065 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        x, a, b = map(int, sys.stdin.readline().split())
        y = -a + b
        print("delicious" if y <= 0 else "safe" if y <= x else "dangerous")
    @staticmethod
    def b():
        # follow button links from 0 until button index 1 (button 2) is reached
        n, *a = [int(x) - 1 for x in sys.stdin.read().split()]
        i = 0
        for c in range(n):
            i = a[i]
            if i == 1:
                print(c + 1)
                return
        print(-1)
    @staticmethod
    def c():
        # alternate animals: impossible if counts differ by 2+, else n!*m!*(1 or 2)
        n, m = map(int, sys.stdin.readline().split())
        d = abs(n - m)
        if d >= 2:
            print(0)
            return
        fac, _ = Algebra.generate_fac_ifac(10**5)
        print(fac[n] * fac[m] * (1 if d else 2) % MOD)
    @staticmethod
    def d():
        # MST where edge cost is min(|dx|, |dy|): only consecutive points in each
        # sorted order are candidate edges; dedupe keeping the cheaper weight
        n, *xy = map(int, sys.stdin.read().split())
        x, y = np.array(xy).reshape(n, 2).T
        i = np.argsort(x)
        ax, bx, cx = (
            i[:-1],
            i[1:],
            x[
                i[1:],
            ]
            - x[i[:-1]],
        )
        i = np.argsort(y)
        ay, by, cy = (
            i[:-1],
            i[1:],
            y[
                i[1:],
            ]
            - y[i[:-1]],
        )
        e = np.vstack(
            [np.hstack([ax, ay]), np.hstack([bx, by]), np.hstack([cx, cy])]
        )
        e = e[:, np.argsort(e[-1])]
        _, i = np.unique(e[:-1], return_index=True, axis=1)
        a, b, c = e[:, i]
        print(
            minimum_spanning_tree(csr_matrix((c, (a, b)), (n, n)))
            .astype(np.int64)
            .sum()
        )
    @staticmethod
    def d_2():
        # same MST built explicitly with Kruskal on the project graph class
        n, *xy = map(int, sys.stdin.read().split())
        x, y = xy[::2], xy[1::2]
        g = GeometryTopology.Graph(n)
        def make(a):
            # add consecutive-in-sorted-order edges, keeping the minimum weight
            b = sorted(enumerate(a), key=lambda x: x[1])
            for i in range(n - 1):
                u, v, w = b[i][0], b[i + 1][0], b[i + 1][1] - b[i][1]
                for u, v in [(v, u), (u, v)]:
                    if not v in g.edges[u]:
                        g.add_edge(u, v, weight=w)
                    else:
                        g.edges[u][v].weight = min(g.edges[u][v].weight, w)
        make(x)
        make(y)
        _, d = g.kruskal()
        print(d)
class ABC066:
    """ABC066 solutions; each method reads sys.stdin and prints the answer."""
    @staticmethod
    def a():
        print(sum(sorted(map(int, sys.stdin.readline().split()))[:-1]))
    @staticmethod
    def b():
        # longest proper even prefix that is a doubled string
        s = sys.stdin.readline().rstrip()
        def f(s):
            n = len(s) // 2
            return s[:n] == s[n:]
        for i in range(len(s) - 2, 0, -2):
            if f(s[:i]):
                print(i)
                return
    @staticmethod
    def c():
        # simulate alternating front/back insertion with a deque
        n, *a = map(int, sys.stdin.read().split())
        b = deque()
        for i in range(n):
            if i & 1:
                b.appendleft(a[i])
            else:
                b.append(a[i])
        if n & 1:
            b.reverse()
        print(*b)
    @staticmethod
    def d():
        # sequences of length k from n+1 values with one duplicate: subtract the
        # overcount C(d, k-1) around the duplicated pair (distance d)
        n, *a = map(int, sys.stdin.read().split())
        tmp = [None] * (n + 1)
        for i in range(n + 1):
            if tmp[a[i]] is not None:
                d = tmp[a[i]] + n - i
                break
            tmp[a[i]] = i
        k = np.arange(1, n + 2)
        c = Combinatorics.CombinationsMod(n + 1, MOD)
        print(*((c(n + 1, k) - c(d, k - 1)) % MOD), sep="\n")
class ABC067:
    """ABC067 solutions (only d implemented); methods read sys.stdin and print."""
    @staticmethod
    def a():
        # not implemented
        pass
    @staticmethod
    def b():
        # not implemented
        pass
    @staticmethod
    def c():
        # not implemented
        pass
    @staticmethod
    def d():
        # Fennec vs Snuke: compare BFS distances from both ends of the tree;
        # Fennec wins if she can claim more than half of the vertices
        n, *ab = map(int, sys.stdin.read().split())
        g = GeometryTopology.Graph(n)
        for a, b in zip(*[iter(ab)] * 2):
            a -= 1
            b -= 1
            g.add_edge(a, b)
            g.add_edge(b, a)
        d1, d2 = g.bfs(0), g.bfs(n - 1)
        print(
            "Fennec"
            if sum(d1[i] <= d2[i] for i in range(n)) > n // 2
            else "Snuke"
        )
class ABC068:
    """ABC068 solutions (only d implemented); methods read sys.stdin and print."""
    @staticmethod
    def a():
        # not implemented
        pass
    @staticmethod
    def b():
        # not implemented
        pass
    @staticmethod
    def c():
        # not implemented
        pass
    @staticmethod
    def d():
        # construct a 50-element sequence whose "decrease" process runs exactly
        # k steps: base descending ramp plus the quotient, remainder spread
        k = int(sys.stdin.readline().rstrip())
        n = 50
        print(n)
        q, r = divmod(k, n)
        a = np.arange(n - 1, -1, -1) + q
        a[:r] += 1
        print(*a)
class ABC069:
    """ABC069 solutions (only d implemented); methods read sys.stdin and print."""
    @staticmethod
    def a():
        # not implemented
        pass
    @staticmethod
    def b():
        # not implemented
        pass
    @staticmethod
    def c():
        # not implemented
        pass
    @staticmethod
    def d():
        # paint the grid in boustrophedon (snake) order so each color is contiguous
        h, w, n, *a = map(int, sys.stdin.read().split())
        c = [i + 1 for i in range(n) for j in range(a[i])]
        for i in range(h):
            row = c[i * w : (i + 1) * w]
            if i & 1:
                row = row[::-1]
            print(*row)
class ABC070:
    """ABC070 solutions (only d implemented); methods read sys.stdin and print."""
    @staticmethod
    def d():
        # tree queries through fixed vertex k: dist(x, y via k) = d[x] + d[y]
        n = int(sys.stdin.readline().rstrip())
        g = GeometryTopology.Graph(n)
        for _ in range(n - 1):
            a, b, c = map(int, sys.stdin.readline().split())
            a -= 1
            b -= 1
            g.add_edge(a, b, weight=c)
            g.add_edge(b, a, weight=c)
        q, k = map(int, sys.stdin.readline().split())
        d = g.bfs(k - 1)
        for _ in range(q):
            x, y = map(int, sys.stdin.readline().split())
            x -= 1
            y -= 1
            print(d[x] + d[y])
class ABC071:
    """ABC071 solutions (only d implemented); methods read sys.stdin and print."""
    @staticmethod
    def d():
        # count colorings of a 2xN domino tiling; multiplier depends on whether
        # consecutive columns hold vertical or horizontal dominoes
        n, *s = sys.stdin.read().split()
        n = int(n)
        s = list(zip(*s))
        dp = [0] * n
        dp[0] = 3 if s[0][0] == s[0][1] else 6
        for i in range(1, n):
            dp[i] = dp[i - 1]
            if s[i][0] == s[i - 1][0]:
                continue
            dp[i] *= (
                2
                if s[i - 1][0] == s[i - 1][1]
                else 3
                if s[i][0] != s[i][1]
                else 1
            )
            dp[i] %= MOD
        print(dp[-1])
class ABC072:
    """ABC072 solutions (only d implemented); methods read sys.stdin and print."""
    @staticmethod
    def d():
        # min adjacent swaps so no fixed point remains; one swap can clear two
        # consecutive fixed points (skip the next index in that case)
        n, *p = map(int, sys.stdin.read().split())
        p += [-1]
        cnt, i = 0, 0
        while i < n:
            if p[i] == i + 1:
                # the condition is already known true here, so this adds 1
                cnt += p[i] == i + 1
                if p[i + 1] == i + 2:
                    i += 1
            i += 1
        print(cnt)
class ABC073:
    """ABC073 solutions (only d implemented); methods read sys.stdin and print."""
    @staticmethod
    def a():
        # not implemented
        pass
    @staticmethod
    def b():
        # not implemented
        pass
    @staticmethod
    def c():
        # not implemented
        pass
    @staticmethod
    def d():
        # shortest tour visiting r required towns: all-pairs distances, then
        # brute-force every visiting order
        n, m, r, *I = map(int, sys.stdin.read().split())
        I = np.array(I)
        a, b, c = I[r:].reshape(m, 3).T
        d = shortest_path(
            csr_matrix((c, (a - 1, b - 1)), (n, n)),
            method="FW",
            directed=False,
        ).astype(np.int32)
        r = np.array([*itertools.permutations(I[:r] - 1)])
        print((d[r[:, :-1], r[:, 1:]].sum(axis=1)).min())
class ABC074:
    """ABC074 solutions (only d implemented); methods read sys.stdin and print."""
    @staticmethod
    def a():
        # not implemented
        pass
    @staticmethod
    def b():
        # not implemented
        pass
    @staticmethod
    def c():
        # not implemented
        pass
    @staticmethod
    def d():
        # minimum total edge weight reproducing the distance matrix; invalid if
        # any pair has a shorter path than stated; drop redundant edges
        n, *a = map(int, sys.stdin.read().split())
        a = np.array(a, dtype=np.int32).reshape(n, n)
        b = shortest_path(a, method="FW").astype(np.int32)
        if (b < a).any():
            print(-1)
            return
        np.fill_diagonal(b, 10**9)
        a[np.any(b[:, None] + b <= a[:, :, None], axis=2)] = 0
        print(a.sum() // 2)
class ABC075:
    """ABC075 solutions (only d implemented); methods read sys.stdin and print."""
    @staticmethod
    def a():
        # not implemented
        pass
    @staticmethod
    def b():
        # not implemented
        pass
    @staticmethod
    def c():
        # not implemented
        pass
    @staticmethod
    def d():
        # smallest axis-aligned rectangle (on point coordinates) containing >= k
        # points: 2D prefix counts over all candidate boundary pairs
        n, k, *xy = map(int, sys.stdin.read().split())
        xy = np.array(xy).reshape(n, 2)
        x_y = xy.copy()[np.argsort(xy[:, 0])]
        y_x = xy.copy()[np.argsort(xy[:, 1])]
        comb = np.array([*itertools.combinations(range(n), 2)])
        i1, i2 = comb.T
        j1, j2 = comb[None, :].T
        s = (y_x[:, 1][i2] - y_x[:, 1][i1]) * (
            x_y[:, 0][j2] - x_y[:, 0][j1]
        )
        c = np.zeros((n + 1, n + 1), dtype=np.int64)
        for i in range(n):
            c[i + 1, 1:] += c[i, 1:] + (y_x[i, 0] <= x_y[:, 0])
        a = c[i2 + 1, j2 + 1] - c[i2 + 1, j1] - c[i1, j2 + 1] + c[i1, j1]
        print(s[a >= k].min())
class ABC076:
    """ABC076 solutions (only d implemented); methods read sys.stdin and print."""
    @staticmethod
    def d():
        # max distance under speed/acceleration limits: sample speed every 0.5s,
        # cap by each segment's ramp-up/ramp-down envelope, integrate trapezoids
        n, *tv = map(int, sys.stdin.read().split())
        t, v = np.array(tv).reshape(2, n)
        t = np.pad(t, pad_width=[2, 1], constant_values=0)
        np.cumsum(t, out=t)
        l, r = t[:-1], t[1:]
        v = np.pad(v, pad_width=[1, 1], constant_values=0)
        x = np.arange(0, r[-1] + 0.1, 0.5, dtype=np.float32)[:, None]
        mx = v - (x - l)
        np.maximum(mx, v, out=mx)
        np.maximum(mx, v + (x - r), out=mx)
        y = mx.min(axis=1)
        print(((y[:-1] + y[1:]) / 4).sum())
class ABC077:
    """ABC077 solutions (only d implemented); methods read sys.stdin and print."""
    @staticmethod
    def d():
        # min digit sum of a positive multiple of k: 0-1 BFS over residues where
        # appending a digit 1 costs 1 and multiplying by 10 costs 0
        k = int(sys.stdin.readline().rstrip())
        g = GeometryTopology.Graph(k)
        for i in range(k):
            g.add_edge(i, i * 10 % k, weight=0)
            g.add_edge(i, (i + 1) % k, update=False, weight=1)
        print(1 + g.bfs01(1)[0])
class ABC078:
    """ABC078 solutions (only d implemented); methods read sys.stdin and print."""
    @staticmethod
    def d():
        # card game endgame value depends only on the last one or two cards
        n, z, w, *a = map(int, sys.stdin.read().split())
        print(
            abs(a[0] - w)
            if n == 1
            else max(abs(a[-1] - w), abs(a[-1] - a[-2]))
        )
class ABC079:
    """ABC079 solutions (only d implemented); methods read sys.stdin and print."""
    @staticmethod
    def d():
        # min cost to turn every digit into 1: Dijkstra on the 10x10 rewrite
        # cost matrix (to target 1), summed over all non-empty cells
        h, w, *I = map(int, sys.stdin.read().split())
        I = np.array(I)
        c = I[:100].reshape(10, 10)
        a = I[100:].reshape(h, w)
        c = shortest_path(c.T, method="D", indices=1).astype(np.int32)
        print(c[a[a != -1]].sum())
class ABC080:
    """ABC080 solutions (only d implemented); methods read sys.stdin and print."""
    @staticmethod
    def d():
        # max simultaneous recorders: per-channel interval overlay via difference
        # arrays, then count channels active at each half-open second
        # NOTE: the name `c` is the channel count first, then reused for the
        # per-program channel column
        n, c, *stc = map(int, sys.stdin.read().split())
        using = np.zeros((c, 10**5 + 2), dtype=np.int8)
        s, t, c = np.array(stc).reshape(n, 3).T
        np.add.at(using, (c - 1, s), 1)
        np.subtract.at(using, (c - 1, t + 1), 1)
        np.cumsum(using, axis=1, out=using)
        print(np.count_nonzero(using, axis=0).max())
class ABC081:
    """ABC081 solutions (only d implemented); methods read sys.stdin and print."""
    @staticmethod
    def d():
        # make the array non-decreasing in exactly 2n operations: spread the
        # dominant-sign element everywhere, then sweep forward (or backward)
        n, *a = map(int, sys.stdin.read().split())
        a = np.array(a)
        i = np.argmax(np.absolute(a))
        print(2 * n)
        for j in range(n):
            print(i + 1, j + 1)
        if a[i] >= 0:
            for j in range(n - 1):
                print(j + 1, j + 2)
        else:
            for j in range(n - 1, 0, -1):
                print(j + 1, j)
# Placeholder classes for contests ABC082-ABC169: not yet implemented.
class ABC082:
    pass
class ABC083:
    pass
class ABC084:
    pass
class ABC085:
    pass
class ABC086:
    pass
class ABC087:
    pass
class ABC088:
    pass
class ABC089:
    pass
class ABC090:
    pass
class ABC091:
    pass
class ABC092:
    pass
class ABC093:
    pass
class ABC094:
    pass
class ABC095:
    pass
class ABC096:
    pass
class ABC097:
    pass
class ABC098:
    pass
class ABC099:
    pass
class ABC100:
    pass
class ABC101:
    pass
class ABC102:
    pass
class ABC103:
    pass
class ABC104:
    pass
class ABC105:
    pass
class ABC106:
    pass
class ABC107:
    pass
class ABC108:
    pass
class ABC109:
    pass
class ABC110:
    pass
class ABC111:
    pass
class ABC112:
    pass
class ABC113:
    pass
class ABC114:
    pass
class ABC115:
    pass
class ABC116:
    pass
class ABC117:
    pass
class ABC118:
    pass
class ABC119:
    pass
class ABC120:
    pass
class ABC121:
    pass
class ABC122:
    pass
class ABC123:
    pass
class ABC124:
    pass
class ABC125:
    pass
class ABC126:
    pass
class ABC127:
    pass
class ABC128:
    pass
class ABC129:
    pass
class ABC130:
    pass
class ABC131:
    pass
class ABC132:
    pass
class ABC133:
    pass
class ABC134:
    pass
class ABC135:
    pass
class ABC136:
    pass
class ABC137:
    pass
class ABC138:
    pass
class ABC139:
    pass
class ABC140:
    pass
class ABC141:
    pass
class ABC142:
    pass
class ABC143:
    pass
class ABC144:
    pass
class ABC145:
    pass
class ABC146:
    pass
class ABC147:
    pass
class ABC148:
    pass
class ABC149:
    pass
class ABC150:
    pass
class ABC151:
    pass
class ABC152:
    pass
class ABC153:
    pass
class ABC154:
    pass
class ABC155:
    pass
class ABC156:
    pass
class ABC157:
    pass
class ABC158:
    pass
class ABC159:
    pass
class ABC160:
    pass
class ABC161:
    pass
class ABC162:
    pass
class ABC163:
    pass
class ABC164:
    pass
class ABC165:
    pass
class ABC166:
    pass
class ABC167:
    pass
class ABC168:
    pass
class ABC169:
    pass
class ABC170:
@staticmethod
def a():
x = [int(x) for x in sys.stdin.readline().split()]
for i in range(5):
if x[i] != i + 1:
print(i + 1)
break
@staticmethod
def b():
x, y = map(int, sys.stdin.readline().split())
print("Yes" if 2 * x <= y <= 4 * x and y % 2 == 0 else "No")
@staticmethod
def c():
x, n, *p = map(int, sys.stdin.read().split())
a = list(set(range(102)) - set(p))
a = [(abs(y - x), y) for y in a]
print(sorted(a)[0][1])
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
cand = set(a)
cnt = 0
for x, c in sorted(Counter(a).items()):
cnt += c == 1 and x in cand
cand -= set(range(x * 2, 10**6 + 1, x))
print(cnt)
@staticmethod
def e():
n, q = map(int, sys.stdin.readline().split())
queue = []
m = 2 * 10**5
infants = [[] for _ in range(m)]
highest_rate = [None] * m
where = [None] * n
rate = [None] * n
def entry(i, k):
where[i] = k
while infants[k]:
r, j = heappop(infants[k])
if where[j] != k or j == i:
continue
if rate[i] >= -r:
highest_rate[k] = rate[i]
heappush(queue, (rate[i], k, i))
heappush(infants[k], (r, j))
break
else:
highest_rate[k] = rate[i]
heappush(queue, (rate[i], k, i))
heappush(infants[k], (-rate[i], i))
def transfer(i, k):
now = where[i]
while infants[now]:
r, j = heappop(infants[now])
if where[j] != now or j == i:
continue
if highest_rate[now] != -r:
highest_rate[now] = -r
heappush(queue, (-r, now, j))
heappush(infants[now], (r, j))
break
else:
highest_rate[now] = None
entry(i, k)
def inquire():
while True:
r, k, i = heappop(queue)
if where[i] != k or r != highest_rate[k]:
continue
heappush(queue, (r, k, i))
return r
for i in range(n):
a, b = map(int, sys.stdin.readline().split())
rate[i] = a
entry(i, b - 1)
for _ in range(q):
c, d = map(int, sys.stdin.readline().split())
transfer(c - 1, d - 1)
print(inquire())
class ABC171:
@staticmethod
def a():
c = sys.stdin.readline().rstrip()
print("A" if c < "a" else "a")
@staticmethod
def b():
n, k, *p = map(int, sys.stdin.read().split())
print(sum(sorted(p)[:k]))
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
n -= 1
l = 1
while True:
if n < pow(26, l):
break
n -= pow(26, l)
l += 1
res = "".join(
[chr(ord("a") + d) for d in NumberTheory.base_convert(n, 26)][
::-1
]
)
res = "a" * (l - len(res)) + res
print(res)
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
a = [int(x) for x in sys.stdin.readline().split()]
s = sum(a)
cnt = Counter(a)
q = int(sys.stdin.readline().rstrip())
for _ in range(q):
b, c = map(int, sys.stdin.readline().split())
s += (c - b) * cnt[b]
print(s)
cnt[c] += cnt[b]
cnt[b] = 0
@staticmethod
def e():
n, *a = map(int, sys.stdin.read().split())
s = 0
for x in a:
s ^= x
b = map(lambda x: x ^ s, a)
print(*b, sep=" ")
class ABC172:
@staticmethod
def a():
a = int(sys.stdin.readline().rstrip())
print(a * (1 + a + a**2))
@staticmethod
def b():
s, t = sys.stdin.read().split()
print(sum(s[i] != t[i] for i in range(len(s))))
@staticmethod
def c():
n, m, k = map(int, sys.stdin.readline().split())
a = [0] + [int(x) for x in sys.stdin.readline().split()]
b = [int(x) for x in sys.stdin.readline().split()]
(*sa,) = itertools.accumulate(a)
(*sb,) = itertools.accumulate(b)
res = 0
for i in range(n + 1):
r = k - sa[i]
if r < 0:
break
res = max(res, i + bi_r(sb, r))
print(res)
@staticmethod
def d():
n = int(sys.stdin.readline().rstrip())
f = np.zeros(n + 1, dtype=np.int64)
for i in range(1, n + 1):
f[i::i] += 1
print((np.arange(1, n + 1) * f[1:]).sum())
class ABC173:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
charge = (n + 999) // 1000 * 1000 - n
print(charge)
@staticmethod
def b():
n, *s = sys.stdin.read().split()
c = Counter(s)
for v in "AC, WA, TLE, RE".split(", "):
print(f"{v} x {c[v]}")
@staticmethod
def c():
h, w, k = map(int, sys.stdin.readline().split())
c = [sys.stdin.readline().rstrip() for _ in range(h)]
tot = 0
for i in range(1 << h):
for j in range(1 << w):
cnt = 0
for y in range(h):
for x in range(w):
if i >> y & 1 or j >> x & 1:
continue
cnt += c[y][x] == "#"
tot += cnt == k
print(tot)
@staticmethod
def d():
n, *a = map(int, sys.stdin.read().split())
a.sort(reverse=True)
res = (
a[0]
+ sum(a[1 : 1 + (n - 2) // 2]) * 2
+ a[1 + (n - 2) // 2] * (n & 1)
)
print(res)
@staticmethod
def e():
MOD = 10**9 + 7
n, k, *a = map(int, sys.stdin.read().split())
minus = [x for x in a if x < 0]
plus = [x for x in a if x > 0]
if len(plus) + len(minus) // 2 * 2 >= k:
(*minus,) = map(abs, minus)
minus.sort(reverse=True)
plus.sort(reverse=True)
cand = []
if len(minus) & 1:
minus = minus[:-1]
for i in range(0, len(minus) - 1, 2):
cand.append(minus[i] * minus[i + 1] % MOD)
if k & 1:
res = plus[0]
plus = plus[1:]
else:
res = 1
if len(plus) & 1:
plus = plus[:-1]
for i in range(0, len(plus) - 1, 2):
cand.append(plus[i] * plus[i + 1] % MOD)
cand.sort(reverse=True)
for x in cand[: k // 2]:
res *= x
res %= MOD
print(res)
elif 0 in a:
print(0)
else:
cand = sorted(map(abs, a))
res = 1
for i in range(k):
res *= cand[i]
res %= MOD
res = MOD - res
print(res)
pass
class ABC174:
    """Solutions for AtCoder Beginner Contest 174."""

    @staticmethod
    def a():
        """A: print "Yes" when the input temperature is at least 30, else "No"."""
        temperature = int(sys.stdin.readline())
        if temperature >= 30:
            print("Yes")
        else:
            print("No")
class ABC178:
    """Solutions for AtCoder Beginner Contest 178 (a-c are unsolved stubs)."""
    @staticmethod
    def a():
        # Not implemented.
        pass
    @staticmethod
    def b():
        # Not implemented.
        pass
    @staticmethod
    def c():
        # Not implemented.
        pass
    @staticmethod
    def d():
        """D - Redistribution: count sequences of integers >= 3 that sum to s.
        The count f satisfies f(s) = f(s-1) + f(s-3) with f(0)=1, f(1)=f(2)=0;
        it is evaluated by exponentiating the 3x3 companion matrix.
        """
        s = int(sys.stdin.readline().rstrip())
        if s == 0:
            print(1)
            return
        elif s == 1:
            print(0)
            return
        # Companion matrix of the recurrence: shift row plus taps at f(k-1)
        # and f(k-3).  NOTE(review): assumes Algebra.matrix_pow/dot reduce
        # modulo the contest modulus -- confirm against Algebra's definition.
        c = np.eye(3, k=-1, dtype=np.int64)
        c[0, 0] = c[0, 2] = 1
        # State vector [f(2), f(1), f(0)]; after (s-2) multiplications the
        # first entry is f(s).
        a = np.array([0, 0, 1])
        print(Algebra.dot(Algebra.matrix_pow(c, s - 2), a)[0])
class ABC179:
@staticmethod
def a():
s = sys.stdin.readline().rstrip()
print(s + "s" if s[-1] != "s" else s + "es")
@staticmethod
def b():
n, *d = map(int, sys.stdin.read().split())
d = np.array(d).reshape(n, 2).T
d = np.equal(d[0], d[1]).astype(int)
dd = d.copy()
dd[1:] += d[:-1]
dd[:-1] += d[1:]
print("Yes" if (dd >= 3).any() else "No")
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
res = (n // np.arange(1, n + 1)).sum() - len(
NumberTheory.find_divisors(n)
)
print(res)
@staticmethod
def d():
mod = 998244353
n, k, *lr = map(int, sys.stdin.read().split())
l, r = np.array(lr).reshape(k, -1).T
@njit((i8, i8[:], i8[:]), cache=True)
def solve(n, l, r):
res = np.zeros(n * 2, dtype=np.int64)
res[0], res[1] = 1, -1
for i in range(n - 1):
res[i + 1] = (res[i + 1] + res[i]) % mod
res[i + l] = (res[i + l] + res[i]) % mod
res[i + r + 1] = (res[i + r + 1] - res[i]) % mod
print(res[n - 1])
solve(n, l, r)
@staticmethod
def e():
n, x, m = map(int, sys.stdin.readline().split())
res = [-1 for _ in range(m)]
s = 0
loop = np.zeros(m, dtype=np.int64)
for i in range(m + 1):
if i == n:
print(s)
return
if res[x] != -1:
l, loop = i - res[x], loop[res[x] : i]
q, r = divmod(n - i, l)
print(s + q * loop.sum() + loop[:r].sum())
return
res[x], loop[i] = i, x
s += x
x = x**2 % m
class ABC180:
@staticmethod
def a():
n, a, b = map(int, sys.stdin.readline().split())
print(n - a + b)
@staticmethod
def b():
n, *x = map(int, sys.stdin.read().split())
x = np.absolute(np.array(x))
print(x.sum())
print(np.sqrt((x**2).sum()))
print(x.max())
@staticmethod
def c():
n = int(sys.stdin.readline().rstrip())
div = NumberTheory.find_divisors(n)
print(*div, sep="\n")
@staticmethod
def d():
x, y, a, b = map(int, sys.stdin.readline().split())
cnt = 0
while x * a <= x + b:
x *= a
if x >= y:
print(cnt)
return
cnt += 1
cnt += (y - x - 1) // b
print(cnt)
@staticmethod
def e():
n, *xyz = map(int, sys.stdin.read().split())
xyz = list(zip(*[iter(xyz)] * 3))
dist = [[0] * n for _ in range(n)]
for i in range(n):
a, b, c = xyz[i]
for j in range(n):
p, q, r = xyz[j]
dist[i][j] = abs(p - a) + abs(q - b) + max(0, r - c)
dp = [[inf] * n for _ in range(1 << n)]
dp[0][0] = 0
for s in range(1 << n):
for i in range(n):
t = s | (1 << i)
for j in range(n):
dp[t][i] = min(dp[t][i], dp[s][j] + dist[j][i])
print(dp[-1][0])
@staticmethod
def f():
n, m, l = map(int, sys.stdin.readline().split())
c = Combinatorics.CombinationsMod(n, MOD)
path = np.zeros(n + 1, dtype=np.int64)
path[1] = path[2] = 1
for i in range(3, n + 1):
path[i] = path[i - 1] * i % MOD
cycle = np.zeros(n + 1, dtype=np.int64)
cycle[1:] = path[:-1]
dp = np.zeros((n + 1, m + 1), dtype=np.int64)
def f(l):
dp[:, :] = 0
dp[0, 0] = 1
for i in range(n):
for j in range(m + 1):
k = np.arange(1, min(l, n - i, m - j + 1) + 1)
dp[i + k, j + k - 1] += (
dp[i, j]
* c(n - i - 1, k - 1)
% MOD
* path[k]
% MOD
)
dp[i + k, j + k - 1] %= MOD
k = np.arange(2, min(l, n - i, m - j) + 1)
dp[i + k, j + k] += (
dp[i, j]
* c(n - i - 1, k - 1)
% MOD
* cycle[k]
% MOD
)
dp[i + k, j + k] %= MOD
return dp[n, m]
print((f(l) - f(l - 1)) % MOD)
@staticmethod
def f_2():
n, m, l = map(int, sys.stdin.readline().split())
c = Combinatorics.CombinationsMod(n, MOD)
path = [0] * (n + 1)
path[1] = path[2] = 1
for i in range(3, n + 1):
path[i] = path[i - 1] * i % MOD
cycle = [0] + path[:-1]
def f(l):
dp = [[0] * (m + 1) for _ in range(n + 1)]
dp[0][0] = 1
for i in range(n):
for j in range(m + 1):
for k in range(1, min(l, n - i, m - j + 1) + 1):
dp[i + k][j + k - 1] += (
dp[i][j]
* c(n - i - 1, k - 1)
% MOD
* path[k]
% MOD
)
dp[i + k][j + k - 1] %= MOD
for k in range(1, min(l, n - i, m - j) + 1):
dp[i + k][j + k] += (
dp[i][j]
* c(n - i - 1, k - 1)
% MOD
* cycle[k]
% MOD
)
dp[i + k][j + k] %= MOD
return dp[n][m]
print((f(l) - f(l - 1)) % MOD)
class ARC106:
@staticmethod
def a():
n = int(sys.stdin.readline().rstrip())
a = 1
while pow(3, a) <= n:
m = n - pow(3, a)
b = 1
while pow(5, b) <= m:
if pow(5, b) == m:
print(a, b)
return
b += 1
a += 1
print(-1)
@staticmethod
def b():
n, m = map(int, sys.stdin.readline().split())
a = [int(x) for x in sys.stdin.readline().split()]
b = [int(x) for x in sys.stdin.readline().split()]
uf = GeometryTopology.Graph(n)
uf.init_dsu()
for _ in range(m):
c, d = map(int, sys.stdin.readline().split())
c -= 1
d -= 1
uf.unite(c, d)
visited = [False] * n
ga = [[] for _ in range(n)]
gb = [[] for _ in range(n)]
for i in range(n):
r = uf.find(i)
ga[r].append(a[i])
gb[r].append(b[i])
print(
"Yes"
if all(sum(ga[i]) == sum(gb[i]) for i in range(n))
else "No"
)
@staticmethod
def c():
n, m = map(int, sys.stdin.readline().split())
if m < 0:
print(-1)
return
if n == 1:
if m != 0:
print(-1)
return
print(1, 2)
return
if m >= n - 1:
print(-1)
return
l, r = 1, 10**9
print(l, r)
for _ in range(n - 2 - m):
l += 1
r -= 1
print(l, r)
r = l
for _ in range(m + 1):
l, r = r + 1, r + 2
print(l, r)
@staticmethod
def d():
mod = 998244353
n, k, *a = map(int, sys.stdin.read().split())
a = np.array(a)
b = np.zeros((k + 1, n), dtype=np.int64)
b[0] = 1
for i in range(k):
b[i + 1] = b[i] * a % mod
s = b.sum(axis=1) % mod
inv_2 = pow(2, mod - 2, mod)
c = Combinatorics.CombinationsMod(mod=mod)
for x in range(1, k + 1):
l = np.arange(x + 1)
print(
(
(c(x, l) * s[l] % mod * s[l][::-1] % mod).sum() % mod
- pow(2, x, mod) * s[x]
)
% mod
* inv_2
% mod
)
@staticmethod
def e():
pass
@staticmethod
def f():
pass
class ACL001:
    """Solutions for the ACL Beginner Contest (incomplete)."""
    @staticmethod
    def a():
        """A (work in progress): parse n points and echo them for debugging.
        Reads n followed by n (x, y) pairs; currently only prints the parsed
        tuple of pairs -- the actual solution is not implemented yet.
        """
        n, *xy = map(int, sys.stdin.read().split())
        # Group the flat integer list into (x, y) pairs.
        (*xy,) = zip(*[iter(xy)] * 2)
        print(xy)
        pass
class TDPC:
@staticmethod
def t():
pass
class MSolutions2020:
    """Solutions for the M-SOLUTIONS Programming Contest 2020."""
    @staticmethod
    def a():
        """A: map a rating x (presumably 400..1999) to the grade 8 - (x-400)//200."""
        x = int(sys.stdin.readline().rstrip())
        x -= 400
        print(8 - x // 200)
    @staticmethod
    def b():
        """B: with at most k doublings, report whether r < g < b is achievable.
        Greedily double g until it exceeds r, then double b until it
        exceeds g, spending one of the k doublings each time.
        """
        r, g, b, k = map(int, sys.stdin.read().split())
        while k and g <= r:
            g *= 2
            k -= 1
        while k and b <= g:
            b *= 2
            k -= 1
        print("Yes" if r < g < b else "No")
    @staticmethod
    def c():
        """C: for each i >= k, print "Yes" iff a[i] > a[i-k] (window comparison)."""
        n, k, *a = map(int, sys.stdin.read().split())
        for i in range(k, n):
            print("Yes" if a[i] > a[i - k] else "No")
    @staticmethod
    def d():
        """D: maximize final money, starting with 1000 yen, trading one stock.
        Greedy: buy as many shares as possible (s) right before every price
        rise, sell everything right before every fall.  The -1 sentinel
        forces a final liquidation after the last day.
        """
        n, *a = map(int, sys.stdin.read().split())
        a += [-1]
        m = 1000
        s = 0
        for i in range(n):
            if a[i + 1] == a[i]:
                continue
            elif a[i + 1] > a[i]:
                # Price will rise: convert as much cash as possible to stock.
                cnt = m // a[i]
                m -= a[i] * cnt
                s += cnt
            else:
                # Price will fall: sell all held shares at today's price.
                m += a[i] * s
                s = 0
        print(m)
class Codeforces:
class CR676div2:
@staticmethod
def a():
t = int(sys.stdin.readline().rstrip())
for _ in range(t):
a, b = map(int, sys.stdin.readline().split())
print(a ^ b)
@staticmethod
def b():
t = int(sys.stdin.readline().rstrip())
for _ in range(t):
n = int(sys.stdin.readline().rstrip())
s = [list(sys.stdin.readline().rstrip()) for _ in range(n)]
s[0][0] = s[-1][-1] = "0"
for i in range(n):
for j in range(n):
s[i][j] = int(s[i][j])
def can_goal(g, c=0):
visited = [0] * n
stack = [(0, 0)]
visited[0] |= 1 << 0
while stack:
y, x = stack.pop()
for dy, dx in [(-1, 0), (0, -1), (1, 0), (0, 1)]:
i, j = y + dy, x + dx
if i < 0 or i >= n or j < 0 or j >= n:
continue
if i == j == n - 1:
return True
if visited[i] >> j & 1:
continue
visited[i] |= 1 << j
if g[i][j] != c:
continue
stack.append((i, j))
return False
if not (can_goal(s, 0) or can_goal(s, 1)):
print(0)
continue
flg = 0
for i in range(n):
for j in range(n):
if i == j == 0 or i == j == n - 1:
continue
s[i][j] ^= 1
if not (can_goal(s, 0) or can_goal(s, 1)):
print(1)
print(i + 1, j + 1)
flg = 1
break
s[i][j] ^= 1
if flg:
break
if flg:
continue
print(2)
if s[0][1] == s[1][0]:
print(n, n - 1)
print(n - 1, n)
continue
if s[0][1] == s[-1][-2]:
print(1, 2)
print(n - 1, n)
else:
print(1, 2)
print(n, n - 1)
@staticmethod
def c():
pass
class ProjectEuler:
@staticmethod
def p1():
def f(n, x):
return (x + n // x * x) * (n // x) // 2
n = 1000
ans = f(n - 1, 3) + f(n - 1, 5) - f(n - 1, 15)
print(ans)
@staticmethod
def p2():
fib = [1, 2]
while fib[-1] < 4 * 10**6:
fib.append(fib[-1] + fib[-2])
print(sum(fib[1:-1:3]))
@staticmethod
def p3():
pn = NumberTheory.PrimeNumbers()
res = pn.factorize(600851475143)
print(max(res.keys()))
@staticmethod
def p4():
def is_palindrome(n):
n = str(n)
return n == n[::-1]
cand = []
for a in range(100, 1000):
for b in range(a, 1000):
n = a * b
if is_palindrome(n):
cand.append(n)
print(max(cand))
@staticmethod
def p5():
pn = NumberTheory.PrimeNumbers()
res = defaultdict(int)
for i in range(1, 21):
for p, c in pn.factorize(i).items():
res[p] = max(res[p], c)
ans = 1
for p, c in res.items():
ans *= pow(p, c)
print(ans)
@staticmethod
def p6():
a = np.arange(101)
b = np.cumsum(a**2)
a = a.cumsum()
print(a[100] ** 2 - b[100])
@staticmethod
def p7():
nt = NumberTheory.PrimeNumbers()
print(sorted(nt)[10000])
@staticmethod
def p8():
n = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"
n = [int(d) for d in list(n)]
res = 0
for i in range(988):
x = 1
for j in range(13):
x *= n[i + j]
res = max(res, x)
print(res)
@staticmethod
def p9():
for a in range(1, 997):
for b in range(a, 998 - a):
c = 1000 - a - b
if a**2 + b**2 == c**2:
print(a * b * c)
return
@staticmethod
def p10():
pn = NumberTheory.PrimeNumbers(2 * 10**6 + 1)
print(sum(pn))
@staticmethod
def p11():
grid = "08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08 49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00 81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65 52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91 22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80 24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50 32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70 67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21 24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72 21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95 78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92 16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57 86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58 19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40 04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66 88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69 04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36 20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16 20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54 01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48"
print(grid)
pass
class Yukicoder:
    """Namespace reserved for Yukicoder problem solutions."""

    def __init__(self):
        """No state is needed yet."""

    def __call__(self):
        """Stub solver: emit the constant answer ``1``."""
        answer = 1
        print(answer)
class AOJ:
    """Solutions for Aizu Online Judge problems."""
    @staticmethod
    def ALDS1_12_A():
        """ALDS1_12_A: total weight of a minimum spanning tree.
        Input: n, then an n*n adjacency matrix where -1 marks "no edge".
        Edges are added in both directions; NOTE(review): assumes
        Graph.kruskal() returns (tree, total_weight) -- confirm against
        GeometryTopology.Graph.
        """
        n, *a = map(int, sys.stdin.read().split())
        g = GeometryTopology.Graph(n)
        for i in range(n - 1):
            for j in range(i + 1, n):
                if a[i * n + j] == -1:
                    continue
                g.add_edge(i, j, weight=a[i * n + j])
                g.add_edge(j, i, weight=a[i * n + j])
        _, d = g.kruskal()
        print(d)
    @staticmethod
    def GRL_3_C():
        """GRL_3_C: strongly connected components.
        Builds the directed graph, computes SCC labels, and for each query
        pair (u, v) prints 1 iff they belong to the same component.
        """
        n, m = map(int, sys.stdin.readline().split())
        g = GeometryTopology.Graph(n)
        for _ in range(m):
            g.add_edge(*map(int, sys.stdin.readline().split()))
        # r presumably maps vertex -> component id; verify against scc().
        r = g.scc()
        q, *uv = map(int, sys.stdin.read().split())
        for u, v in zip(*[iter(uv)] * 2):
            print(int(r[u] == r[v]))
class YosupoJudge:
@staticmethod
def Directed_MST():
n, m, s, *abc = map(int, sys.stdin.read().split())
g = GeometryTopology.Graph(n)
for a, b, c in zip(*[iter(abc)] * 3):
g.add_edge(a, b, weight=c)
_, d, p = g.prim(src=s, return_parent=True)
print(d)
print(*p)
@staticmethod
def Manhattan_MST():
n, *xy = map(int, sys.stdin.read().split())
g = GeometryTopology.Graph(n)
if __name__ == "__main__":
AtCoder.ABC081.d()
pass
| true | true |
f71a85be328989ab5fd1d62bb8e59c2c2b19ba47 | 3,607 | py | Python | src/ekpmeasure/experiments/ferroelectric/_switching/core.py | cjfinnell/ekpmeasure | e6611c053cad28e06f4f8a94764ebe3805cddb15 | [
"MIT"
] | null | null | null | src/ekpmeasure/experiments/ferroelectric/_switching/core.py | cjfinnell/ekpmeasure | e6611c053cad28e06f4f8a94764ebe3805cddb15 | [
"MIT"
] | null | null | null | src/ekpmeasure/experiments/ferroelectric/_switching/core.py | cjfinnell/ekpmeasure | e6611c053cad28e06f4f8a94764ebe3805cddb15 | [
"MIT"
] | null | null | null | from ....control import core
from ....control.instruments.berkeleynucleonics765 import stop
from ..switching import preset_run_function
import pandas as pd
import numpy as np
import os
import warnings
import time
__all__ = ("FE",)
class FE(core.experiment):
"""Experiment class for running pulsed Ferroelectric switching experiments like those shown `here <https://journals.aps.org/prl/abstract/10.1103/PhysRevLett.125.067601>`_
args:
pg (pyvisa.resources.gpib.GPIBInstrument): Berkeley Nucleonics 765
scope (pyvisa.resources.gpib.GPIBInstrument): Tektronix TDS620B or Tektronix TDS6604
scopetype (str): Specify scope. Only Tektronix TDS620B (``'620B'``) or Tektronix TDS6604 (``'6604'``) are supported
run_function (function): Run function.
returns:
(FE): Experiment
"""
def __init__(self, pg, scope, scopetype="6604", run_function=preset_run_function):
super().__init__()
if scopetype != "6604" and scopetype != "620B":
raise ValueError(
"must specify scope type as either 6604 or 620B (corresponding to the correct scope you are using)"
)
self.run_function = preset_run_function
self.pg = pg
self.scope = scope
self.scopetype = scopetype
return
def checks(self, params):
"""Checks during initialization."""
if self.pg != params["pg"]:
try:
raise ValueError(
"pg provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.pg, params["pg"]
)
)
except KeyError:
raise ValueError(
"pg provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.pg, None
)
)
if self.scope != params["scope"]:
try:
raise ValueError(
"scope provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.scope, params["scope"]
)
)
except KeyError:
raise ValueError(
"scope provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.scope, None
)
)
try:
if self.scopetype != params["scopetype"]:
try:
raise ValueError(
"scopetype provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.scopetype, params["scopetype"]
)
)
except KeyError:
raise ValueError(
"scopetype provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.scopetype, None
)
)
except KeyError:
if self.scopetype != "6604":
raise ValueError(
"check scopetype. If you think this is done correctly, please specify explicitly scopetype in params."
)
def terminate(self):
"""Termination."""
stop(self.pg)
return
| 36.806122 | 174 | 0.540061 | from ....control import core
from ....control.instruments.berkeleynucleonics765 import stop
from ..switching import preset_run_function
import pandas as pd
import numpy as np
import os
import warnings
import time
__all__ = ("FE",)
class FE(core.experiment):
def __init__(self, pg, scope, scopetype="6604", run_function=preset_run_function):
super().__init__()
if scopetype != "6604" and scopetype != "620B":
raise ValueError(
"must specify scope type as either 6604 or 620B (corresponding to the correct scope you are using)"
)
self.run_function = preset_run_function
self.pg = pg
self.scope = scope
self.scopetype = scopetype
return
def checks(self, params):
if self.pg != params["pg"]:
try:
raise ValueError(
"pg provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.pg, params["pg"]
)
)
except KeyError:
raise ValueError(
"pg provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.pg, None
)
)
if self.scope != params["scope"]:
try:
raise ValueError(
"scope provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.scope, params["scope"]
)
)
except KeyError:
raise ValueError(
"scope provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.scope, None
)
)
try:
if self.scopetype != params["scopetype"]:
try:
raise ValueError(
"scopetype provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.scopetype, params["scopetype"]
)
)
except KeyError:
raise ValueError(
"scopetype provided in initialization ({}) does not match that provided as an argument for run_function ({})".format(
self.scopetype, None
)
)
except KeyError:
if self.scopetype != "6604":
raise ValueError(
"check scopetype. If you think this is done correctly, please specify explicitly scopetype in params."
)
def terminate(self):
stop(self.pg)
return
| true | true |
f71a85f2c607d66f8e6260e04b8ed45d7f51a744 | 116 | py | Python | plots/w.py | Tethik/whistleblower | 56747cbf3c4eda95cee7eded36b4a853d33d6ee3 | [
"MIT"
] | 1 | 2016-06-20T12:35:42.000Z | 2016-06-20T12:35:42.000Z | plots/w.py | Tethik/whistleblower | 56747cbf3c4eda95cee7eded36b4a853d33d6ee3 | [
"MIT"
] | null | null | null | plots/w.py | Tethik/whistleblower | 56747cbf3c4eda95cee7eded36b4a853d33d6ee3 | [
"MIT"
] | null | null | null | def w(j, p):
return 4 * j * (1 - p)
# For each probability p, print w scaled by 24*7 (hours per week)
# evaluated at j = 5, 20 and 50.
for p in [0.5, 0.75, 0.99]:
    print([w(j, p)*24*7 for j in [5, 20, 50]])
| 19.333333 | 46 | 0.448276 | def w(j, p):
return 4 * j * (1 - p)
for p in [0.5, 0.75, 0.99]:
print([w(j, p)*24*7 for j in [5, 20, 50]])
| true | true |
f71a8614737b2fe5ad5b8e12f3668178f8d6c600 | 8,334 | py | Python | mindspore/ops/operations/__init__.py | ZephyrChenzf/mindspore | 8f191847cf71e12715ced96bc3575914f980127a | [
"Apache-2.0"
] | null | null | null | mindspore/ops/operations/__init__.py | ZephyrChenzf/mindspore | 8f191847cf71e12715ced96bc3575914f980127a | [
"Apache-2.0"
] | null | null | null | mindspore/ops/operations/__init__.py | ZephyrChenzf/mindspore | 8f191847cf71e12715ced96bc3575914f980127a | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
Primitive operator classes.
A collection of operators to build nerual networks or computing functions.
"""
from .array_ops import (Argmax, Argmin, Cast, Concat, Pack, Unpack,
Diag, DiagPart, DType, ExpandDims, Eye,
Fill, GatherNd, GatherV2, InvertPermutation,
IsInstance, IsSubClass, ArgMaxWithValue, OnesLike, ZerosLike,
Rank, Reshape, ResizeNearestNeighbor, ArgMinWithValue, Range,
SameTypeShape, ScatterAdd, ScatterMax, ScatterUpdate,
ScalarToArray, ScalarToTensor, ScatterNd, ScatterNdUpdate, Select,
Shape, Size, Slice, Split, EmbeddingLookup,
Squeeze, StridedSlice, Tile,
Transpose, TruncatedNormal, TupleToArray, UnsortedSegmentMin,
UnsortedSegmentSum, SpaceToDepth, DepthToSpace, SpaceToBatch, BatchToSpace,
SpaceToBatchND, BatchToSpaceND, BroadcastTo)
from .comm_ops import (AllGather, AllReduce, _AlltoAll, ReduceScatter, Broadcast,
_MirrorOperator, ReduceOp, _VirtualDataset,
_VirtualDiv, _GetTensorSlice,
HostAllGather, HostReduceScatter)
from .debug_ops import (ImageSummary, InsertGradientOf, HookBackward, ScalarSummary,
TensorSummary, HistogramSummary, Print)
from .control_ops import ControlDepend, GeSwitch, Merge
from .inner_ops import ScalarCast
from .math_ops import (Abs, ACos, Asin, Asinh, AddN, AssignAdd, AssignSub, Atan2, BatchMatMul, BitwiseAnd, BitwiseOr,
BitwiseXor, Inv, Invert,
ReduceMax, ReduceMin, ReduceMean, ReduceSum, ReduceAll, ReduceProd, CumProd,
Cos, Div, DivNoNan, Equal, EqualCount, Exp, Expm1, Erf, Erfc, Floor, FloorDiv, FloorMod, Ceil,
Acosh, Greater, GreaterEqual, Less, LessEqual, Log, Log1p, LogicalAnd,
LogicalNot, LogicalOr, MatMul, Maximum,
Minimum, Mul, Neg, NMSWithMask, NotEqual,
NPUAllocFloatStatus, NPUClearFloatStatus,
NPUGetFloatStatus, Pow, RealDiv, IsNan, IsInf, IsFinite, FloatStatus,
Reciprocal, CumSum,
Sin, Sqrt, Rsqrt, BesselI0e, BesselI1e,
Square, Sub, TensorAdd, Sign, Round, SquareSumAll, Atan, Atanh, Cosh, Sinh)
from .random_ops import (RandomChoiceWithMask)
from .nn_ops import (LSTM, SGD, Adam, ApplyMomentum, BatchNorm,
BiasAdd, Conv2D,
DepthwiseConv2dNative,
DropoutDoMask, DropoutGrad, Dropout,
DropoutGenMask, Flatten, FusedBatchNorm,
Gelu, Elu,
GetNext, L2Normalize, LayerNorm, L2Loss, CTCLoss,
LogSoftmax,
MaxPool,
AvgPool, Conv2DBackpropInput, ConfusionMulGrad,
MaxPoolWithArgmax, OneHot, Pad, MirrorPad, PReLU, ReLU, ReLU6, ReLUV2, HSwish, HSigmoid,
ResizeBilinear, Sigmoid,
SigmoidCrossEntropyWithLogits,
SmoothL1Loss, Softmax, Softplus,
SoftmaxCrossEntropyWithLogits, ROIAlign,
SparseSoftmaxCrossEntropyWithLogits, Tanh,
TopK, BinaryCrossEntropy, SparseApplyAdagrad, LARSUpdate, ApplyFtrl, SparseApplyFtrl,
ApplyProximalAdagrad, SparseApplyProximalAdagrad,
ApplyRMSProp, ApplyCenteredRMSProp, BasicLSTMCell)
from .other_ops import (Assign, IOU, BoundingBoxDecode, BoundingBoxEncode,
CheckValid, MakeRefKey, CheckBprop, ConfusionMatrix)
from . import _quant_ops
from ._quant_ops import *
from .thor_ops import *
__all__ = [
'TensorAdd',
'Argmax',
'Argmin',
'ArgMaxWithValue',
'ArgMinWithValue',
'AddN',
'Sub',
'CumSum',
'MatMul',
'BatchMatMul',
'Mul',
'Pow',
'Exp',
'Expm1',
'Rsqrt',
'Sqrt',
'Square',
'Conv2D',
'Flatten',
'MaxPoolWithArgmax',
'BatchNorm',
'MaxPool',
'TopK',
'Adam',
'Softplus',
'Softmax',
'LogSoftmax',
'SoftmaxCrossEntropyWithLogits',
'ROIAlign',
'ConfusionMulGrad',
'SparseSoftmaxCrossEntropyWithLogits',
'SGD',
'ApplyMomentum',
'ExpandDims',
'Cast',
'IsSubClass',
'IsInstance',
'Reshape',
'Squeeze',
'Transpose',
'OneHot',
'GatherV2',
'Concat',
'Pack',
'Unpack',
'Tile',
'BiasAdd',
'Gelu',
'Minimum',
'Maximum',
'StridedSlice',
'ReduceSum',
'ReduceMean',
'LayerNorm',
'EmbeddingLookup',
'Rank',
'Less',
'LessEqual',
'RealDiv',
'Div',
'DivNoNan',
'Inv',
'Invert',
'TruncatedNormal',
'Fill',
'OnesLike',
'ZerosLike',
'Select',
'Split',
'ReLU',
'ReLU6',
'Elu',
'Erf',
'Erfc',
'Sigmoid',
'HSwish',
'HSigmoid',
'Tanh',
'RandomChoiceWithMask',
'ResizeBilinear',
'ScalarSummary',
'ImageSummary',
'TensorSummary',
'HistogramSummary',
"Print",
'InsertGradientOf',
'HookBackward',
'InvertPermutation',
'Shape',
'DropoutDoMask',
'DropoutGenMask',
'DropoutGrad',
'Dropout',
'Neg',
'Slice',
'DType',
'NPUAllocFloatStatus',
'NPUGetFloatStatus',
'NPUClearFloatStatus',
'IsNan',
'IsFinite',
'IsInf',
'FloatStatus',
'Reciprocal',
'SmoothL1Loss',
'L2Loss',
'CTCLoss',
'ReduceAll',
'ScalarToArray',
'ScalarToTensor',
'TupleToArray',
'ControlDepend',
'GeSwitch',
'Merge',
'SameTypeShape',
'CheckBprop',
'CheckValid',
'BoundingBoxEncode',
'BoundingBoxDecode',
'L2Normalize',
'ScatterAdd',
'ScatterNd',
'ScatterMax',
'ResizeNearestNeighbor',
'Pad',
'MirrorPad',
'GatherNd',
'ScatterUpdate',
'ScatterNdUpdate',
'Floor',
'NMSWithMask',
'IOU',
'MakeRefKey',
'AvgPool',
# Back Primitive
'Equal',
'EqualCount',
'NotEqual',
'Greater',
'GreaterEqual',
'LogicalNot',
'LogicalAnd',
'LogicalOr',
'Size',
'DepthwiseConv2dNative',
'UnsortedSegmentSum',
'UnsortedSegmentMin',
"AllGather",
"HostAllGather",
"AllReduce",
"ReduceScatter",
"HostReduceScatter",
"Broadcast",
"ReduceOp",
'ScalarCast',
'GetNext',
'ReduceMax',
'ReduceMin',
'ReduceProd',
'CumProd',
'Log',
'Log1p',
'SigmoidCrossEntropyWithLogits',
'FloorDiv',
'FloorMod',
'Ceil',
'Acosh',
'Asinh',
"PReLU",
"Cos",
"Cosh",
"ACos",
"Diag",
"DiagPart",
'Eye',
'Assign',
'AssignAdd',
'AssignSub',
"Sin",
"Sinh",
"Asin",
"LSTM",
"Abs",
"BinaryCrossEntropy",
"SparseApplyAdagrad",
"SpaceToDepth",
"DepthToSpace",
"Conv2DBackpropInput",
"Sign",
"LARSUpdate",
"Round",
"ApplyFtrl",
"SpaceToBatch",
"SparseApplyFtrl",
"ApplyProximalAdagrad",
"SparseApplyProximalAdagrad",
"BatchToSpace",
"Atan2",
"ApplyRMSProp",
"ApplyCenteredRMSProp",
"SpaceToBatchND",
"BatchToSpaceND",
"SquareSumAll",
"BitwiseAnd",
"BitwiseOr",
"BitwiseXor",
"BesselI0e",
"BesselI1e",
"Atan",
"Atanh",
"BasicLSTMCell",
"ConfusionMatrix",
"BroadcastTo"
]
__all__.extend(_quant_ops.__all__)
__all__.sort()
| 27.78 | 117 | 0.583033 |
from .array_ops import (Argmax, Argmin, Cast, Concat, Pack, Unpack,
Diag, DiagPart, DType, ExpandDims, Eye,
Fill, GatherNd, GatherV2, InvertPermutation,
IsInstance, IsSubClass, ArgMaxWithValue, OnesLike, ZerosLike,
Rank, Reshape, ResizeNearestNeighbor, ArgMinWithValue, Range,
SameTypeShape, ScatterAdd, ScatterMax, ScatterUpdate,
ScalarToArray, ScalarToTensor, ScatterNd, ScatterNdUpdate, Select,
Shape, Size, Slice, Split, EmbeddingLookup,
Squeeze, StridedSlice, Tile,
Transpose, TruncatedNormal, TupleToArray, UnsortedSegmentMin,
UnsortedSegmentSum, SpaceToDepth, DepthToSpace, SpaceToBatch, BatchToSpace,
SpaceToBatchND, BatchToSpaceND, BroadcastTo)
from .comm_ops import (AllGather, AllReduce, _AlltoAll, ReduceScatter, Broadcast,
_MirrorOperator, ReduceOp, _VirtualDataset,
_VirtualDiv, _GetTensorSlice,
HostAllGather, HostReduceScatter)
from .debug_ops import (ImageSummary, InsertGradientOf, HookBackward, ScalarSummary,
TensorSummary, HistogramSummary, Print)
from .control_ops import ControlDepend, GeSwitch, Merge
from .inner_ops import ScalarCast
from .math_ops import (Abs, ACos, Asin, Asinh, AddN, AssignAdd, AssignSub, Atan2, BatchMatMul, BitwiseAnd, BitwiseOr,
BitwiseXor, Inv, Invert,
ReduceMax, ReduceMin, ReduceMean, ReduceSum, ReduceAll, ReduceProd, CumProd,
Cos, Div, DivNoNan, Equal, EqualCount, Exp, Expm1, Erf, Erfc, Floor, FloorDiv, FloorMod, Ceil,
Acosh, Greater, GreaterEqual, Less, LessEqual, Log, Log1p, LogicalAnd,
LogicalNot, LogicalOr, MatMul, Maximum,
Minimum, Mul, Neg, NMSWithMask, NotEqual,
NPUAllocFloatStatus, NPUClearFloatStatus,
NPUGetFloatStatus, Pow, RealDiv, IsNan, IsInf, IsFinite, FloatStatus,
Reciprocal, CumSum,
Sin, Sqrt, Rsqrt, BesselI0e, BesselI1e,
Square, Sub, TensorAdd, Sign, Round, SquareSumAll, Atan, Atanh, Cosh, Sinh)
from .random_ops import (RandomChoiceWithMask)
from .nn_ops import (LSTM, SGD, Adam, ApplyMomentum, BatchNorm,
BiasAdd, Conv2D,
DepthwiseConv2dNative,
DropoutDoMask, DropoutGrad, Dropout,
DropoutGenMask, Flatten, FusedBatchNorm,
Gelu, Elu,
GetNext, L2Normalize, LayerNorm, L2Loss, CTCLoss,
LogSoftmax,
MaxPool,
AvgPool, Conv2DBackpropInput, ConfusionMulGrad,
MaxPoolWithArgmax, OneHot, Pad, MirrorPad, PReLU, ReLU, ReLU6, ReLUV2, HSwish, HSigmoid,
ResizeBilinear, Sigmoid,
SigmoidCrossEntropyWithLogits,
SmoothL1Loss, Softmax, Softplus,
SoftmaxCrossEntropyWithLogits, ROIAlign,
SparseSoftmaxCrossEntropyWithLogits, Tanh,
TopK, BinaryCrossEntropy, SparseApplyAdagrad, LARSUpdate, ApplyFtrl, SparseApplyFtrl,
ApplyProximalAdagrad, SparseApplyProximalAdagrad,
ApplyRMSProp, ApplyCenteredRMSProp, BasicLSTMCell)
from .other_ops import (Assign, IOU, BoundingBoxDecode, BoundingBoxEncode,
CheckValid, MakeRefKey, CheckBprop, ConfusionMatrix)
from . import _quant_ops
from ._quant_ops import *
from .thor_ops import *
__all__ = [
'TensorAdd',
'Argmax',
'Argmin',
'ArgMaxWithValue',
'ArgMinWithValue',
'AddN',
'Sub',
'CumSum',
'MatMul',
'BatchMatMul',
'Mul',
'Pow',
'Exp',
'Expm1',
'Rsqrt',
'Sqrt',
'Square',
'Conv2D',
'Flatten',
'MaxPoolWithArgmax',
'BatchNorm',
'MaxPool',
'TopK',
'Adam',
'Softplus',
'Softmax',
'LogSoftmax',
'SoftmaxCrossEntropyWithLogits',
'ROIAlign',
'ConfusionMulGrad',
'SparseSoftmaxCrossEntropyWithLogits',
'SGD',
'ApplyMomentum',
'ExpandDims',
'Cast',
'IsSubClass',
'IsInstance',
'Reshape',
'Squeeze',
'Transpose',
'OneHot',
'GatherV2',
'Concat',
'Pack',
'Unpack',
'Tile',
'BiasAdd',
'Gelu',
'Minimum',
'Maximum',
'StridedSlice',
'ReduceSum',
'ReduceMean',
'LayerNorm',
'EmbeddingLookup',
'Rank',
'Less',
'LessEqual',
'RealDiv',
'Div',
'DivNoNan',
'Inv',
'Invert',
'TruncatedNormal',
'Fill',
'OnesLike',
'ZerosLike',
'Select',
'Split',
'ReLU',
'ReLU6',
'Elu',
'Erf',
'Erfc',
'Sigmoid',
'HSwish',
'HSigmoid',
'Tanh',
'RandomChoiceWithMask',
'ResizeBilinear',
'ScalarSummary',
'ImageSummary',
'TensorSummary',
'HistogramSummary',
"Print",
'InsertGradientOf',
'HookBackward',
'InvertPermutation',
'Shape',
'DropoutDoMask',
'DropoutGenMask',
'DropoutGrad',
'Dropout',
'Neg',
'Slice',
'DType',
'NPUAllocFloatStatus',
'NPUGetFloatStatus',
'NPUClearFloatStatus',
'IsNan',
'IsFinite',
'IsInf',
'FloatStatus',
'Reciprocal',
'SmoothL1Loss',
'L2Loss',
'CTCLoss',
'ReduceAll',
'ScalarToArray',
'ScalarToTensor',
'TupleToArray',
'ControlDepend',
'GeSwitch',
'Merge',
'SameTypeShape',
'CheckBprop',
'CheckValid',
'BoundingBoxEncode',
'BoundingBoxDecode',
'L2Normalize',
'ScatterAdd',
'ScatterNd',
'ScatterMax',
'ResizeNearestNeighbor',
'Pad',
'MirrorPad',
'GatherNd',
'ScatterUpdate',
'ScatterNdUpdate',
'Floor',
'NMSWithMask',
'IOU',
'MakeRefKey',
'AvgPool',
'Equal',
'EqualCount',
'NotEqual',
'Greater',
'GreaterEqual',
'LogicalNot',
'LogicalAnd',
'LogicalOr',
'Size',
'DepthwiseConv2dNative',
'UnsortedSegmentSum',
'UnsortedSegmentMin',
"AllGather",
"HostAllGather",
"AllReduce",
"ReduceScatter",
"HostReduceScatter",
"Broadcast",
"ReduceOp",
'ScalarCast',
'GetNext',
'ReduceMax',
'ReduceMin',
'ReduceProd',
'CumProd',
'Log',
'Log1p',
'SigmoidCrossEntropyWithLogits',
'FloorDiv',
'FloorMod',
'Ceil',
'Acosh',
'Asinh',
"PReLU",
"Cos",
"Cosh",
"ACos",
"Diag",
"DiagPart",
'Eye',
'Assign',
'AssignAdd',
'AssignSub',
"Sin",
"Sinh",
"Asin",
"LSTM",
"Abs",
"BinaryCrossEntropy",
"SparseApplyAdagrad",
"SpaceToDepth",
"DepthToSpace",
"Conv2DBackpropInput",
"Sign",
"LARSUpdate",
"Round",
"ApplyFtrl",
"SpaceToBatch",
"SparseApplyFtrl",
"ApplyProximalAdagrad",
"SparseApplyProximalAdagrad",
"BatchToSpace",
"Atan2",
"ApplyRMSProp",
"ApplyCenteredRMSProp",
"SpaceToBatchND",
"BatchToSpaceND",
"SquareSumAll",
"BitwiseAnd",
"BitwiseOr",
"BitwiseXor",
"BesselI0e",
"BesselI1e",
"Atan",
"Atanh",
"BasicLSTMCell",
"ConfusionMatrix",
"BroadcastTo"
]
__all__.extend(_quant_ops.__all__)
__all__.sort()
| true | true |
f71a86b512e65c17c14cf1e55832bd7b556b892d | 284,631 | py | Python | tt.py | someone120/some-py | a14732b9fde52d5476e4a433e3eecea8ea3eeaec | [
"Apache-2.0"
] | null | null | null | tt.py | someone120/some-py | a14732b9fde52d5476e4a433e3eecea8ea3eeaec | [
"Apache-2.0"
] | null | null | null | tt.py | someone120/some-py | a14732b9fde52d5476e4a433e3eecea8ea3eeaec | [
"Apache-2.0"
] | null | null | null | import json
a="""
[
{
"_id": 1,
"id": 1,
"pid": 0,
"city_code": "101010100",
"city_name": "北京"
},
{
"_id": 2,
"id": 2,
"pid": 0,
"city_code": "",
"city_name": "安徽"
},
{
"_id": 3,
"id": 3,
"pid": 0,
"city_code": "",
"city_name": "福建"
},
{
"_id": 4,
"id": 4,
"pid": 0,
"city_code": "",
"city_name": "甘肃"
},
{
"_id": 5,
"id": 5,
"pid": 0,
"city_code": "",
"city_name": "广东"
},
{
"_id": 6,
"id": 6,
"pid": 0,
"city_code": "",
"city_name": "广西"
},
{
"_id": 7,
"id": 7,
"pid": 0,
"city_code": "",
"city_name": "贵州"
},
{
"_id": 8,
"id": 8,
"pid": 0,
"city_code": "",
"city_name": "海南"
},
{
"_id": 9,
"id": 9,
"pid": 0,
"city_code": "",
"city_name": "河北"
},
{
"_id": 10,
"id": 10,
"pid": 0,
"city_code": "",
"city_name": "河南"
},
{
"_id": 11,
"id": 11,
"pid": 0,
"city_code": "",
"city_name": "黑龙江"
},
{
"_id": 12,
"id": 12,
"pid": 0,
"city_code": "",
"city_name": "湖北"
},
{
"_id": 13,
"id": 13,
"pid": 0,
"city_code": "",
"city_name": "湖南"
},
{
"_id": 14,
"id": 14,
"pid": 0,
"city_code": "",
"city_name": "吉林"
},
{
"_id": 15,
"id": 15,
"pid": 0,
"city_code": "",
"city_name": "江苏"
},
{
"_id": 16,
"id": 16,
"pid": 0,
"city_code": "",
"city_name": "江西"
},
{
"_id": 17,
"id": 17,
"pid": 0,
"city_code": "",
"city_name": "辽宁"
},
{
"_id": 18,
"id": 18,
"pid": 0,
"city_code": "",
"city_name": "内蒙古"
},
{
"_id": 19,
"id": 19,
"pid": 0,
"city_code": "",
"city_name": "宁夏"
},
{
"_id": 20,
"id": 20,
"pid": 0,
"city_code": "",
"city_name": "青海"
},
{
"_id": 21,
"id": 21,
"pid": 0,
"city_code": "",
"city_name": "山东"
},
{
"_id": 22,
"id": 22,
"pid": 0,
"city_code": "",
"city_name": "山西"
},
{
"_id": 23,
"id": 23,
"pid": 0,
"city_code": "",
"city_name": "陕西"
},
{
"_id": 24,
"id": 24,
"pid": 0,
"city_code": "101020100",
"city_name": "上海"
},
{
"_id": 25,
"id": 25,
"pid": 0,
"city_code": "",
"city_name": "四川"
},
{
"_id": 26,
"id": 26,
"pid": 0,
"city_code": "101030100",
"city_name": "天津"
},
{
"_id": 27,
"id": 27,
"pid": 0,
"city_code": "",
"city_name": "西藏"
},
{
"_id": 28,
"id": 28,
"pid": 0,
"city_code": "",
"city_name": "新疆"
},
{
"_id": 29,
"id": 29,
"pid": 0,
"city_code": "",
"city_name": "云南"
},
{
"_id": 30,
"id": 30,
"pid": 0,
"city_code": "",
"city_name": "浙江"
},
{
"_id": 31,
"id": 31,
"pid": 0,
"city_code": "101040100",
"city_name": "重庆"
},
{
"_id": 32,
"id": 32,
"pid": 0,
"city_code": "101320101",
"city_name": "香港"
},
{
"_id": 33,
"id": 33,
"pid": 0,
"city_code": "101330101",
"city_name": "澳门"
},
{
"_id": 34,
"id": 34,
"pid": 0,
"city_code": "",
"city_name": "台湾"
},
{
"_id": 35,
"id": 35,
"pid": 2,
"city_code": "101220601",
"city_name": "安庆"
},
{
"_id": 36,
"id": 36,
"pid": 2,
"city_code": "101220201",
"city_name": "蚌埠"
},
{
"_id": 37,
"id": 37,
"pid": 3400,
"city_code": "101220105",
"city_name": "巢湖市"
},
{
"_id": 38,
"id": 38,
"pid": 2,
"city_code": "101221701",
"city_name": "池州"
},
{
"_id": 39,
"id": 39,
"pid": 2,
"city_code": "101221101",
"city_name": "滁州"
},
{
"_id": 40,
"id": 40,
"pid": 2,
"city_code": "101220801",
"city_name": "阜阳"
},
{
"_id": 41,
"id": 41,
"pid": 2,
"city_code": "101221201",
"city_name": "淮北"
},
{
"_id": 42,
"id": 42,
"pid": 2,
"city_code": "101220401",
"city_name": "淮南"
},
{
"_id": 43,
"id": 43,
"pid": 2,
"city_code": "101221001",
"city_name": "黄山"
},
{
"_id": 44,
"id": 44,
"pid": 2,
"city_code": "101221501",
"city_name": "六安"
},
{
"_id": 45,
"id": 45,
"pid": 2,
"city_code": "101220501",
"city_name": "马鞍山"
},
{
"_id": 46,
"id": 46,
"pid": 2,
"city_code": "101220701",
"city_name": "宿州"
},
{
"_id": 47,
"id": 47,
"pid": 2,
"city_code": "101221301",
"city_name": "铜陵"
},
{
"_id": 48,
"id": 48,
"pid": 2,
"city_code": "101220301",
"city_name": "芜湖"
},
{
"_id": 49,
"id": 49,
"pid": 2,
"city_code": "101221401",
"city_name": "宣城"
},
{
"_id": 50,
"id": 50,
"pid": 2,
"city_code": "101220901",
"city_name": "亳州"
},
{
"_id": 51,
"id": 52,
"pid": 3,
"city_code": "101230101",
"city_name": "福州"
},
{
"_id": 52,
"id": 53,
"pid": 3,
"city_code": "101230701",
"city_name": "龙岩"
},
{
"_id": 53,
"id": 54,
"pid": 3,
"city_code": "101230901",
"city_name": "南平"
},
{
"_id": 54,
"id": 55,
"pid": 3,
"city_code": "101230301",
"city_name": "宁德"
},
{
"_id": 55,
"id": 56,
"pid": 3,
"city_code": "101230401",
"city_name": "莆田"
},
{
"_id": 56,
"id": 57,
"pid": 3,
"city_code": "101230501",
"city_name": "泉州"
},
{
"_id": 57,
"id": 58,
"pid": 3,
"city_code": "101230801",
"city_name": "三明"
},
{
"_id": 58,
"id": 59,
"pid": 3,
"city_code": "101230201",
"city_name": "厦门"
},
{
"_id": 59,
"id": 60,
"pid": 3,
"city_code": "101230601",
"city_name": "漳州"
},
{
"_id": 60,
"id": 61,
"pid": 4,
"city_code": "101160101",
"city_name": "兰州"
},
{
"_id": 61,
"id": 62,
"pid": 4,
"city_code": "101161301",
"city_name": "白银"
},
{
"_id": 62,
"id": 63,
"pid": 4,
"city_code": "101160201",
"city_name": "定西"
},
{
"_id": 63,
"id": 64,
"pid": 4,
"city_code": "",
"city_name": "甘南州"
},
{
"_id": 64,
"id": 65,
"pid": 4,
"city_code": "101161401",
"city_name": "嘉峪关"
},
{
"_id": 65,
"id": 66,
"pid": 4,
"city_code": "101160601",
"city_name": "金昌"
},
{
"_id": 66,
"id": 67,
"pid": 4,
"city_code": "101160801",
"city_name": "酒泉"
},
{
"_id": 67,
"id": 68,
"pid": 4,
"city_code": "101161101",
"city_name": "临夏"
},
{
"_id": 68,
"id": 69,
"pid": 4,
"city_code": "101161010",
"city_name": "陇南市"
},
{
"_id": 69,
"id": 70,
"pid": 4,
"city_code": "101160301",
"city_name": "平凉"
},
{
"_id": 70,
"id": 71,
"pid": 4,
"city_code": "101160401",
"city_name": "庆阳"
},
{
"_id": 71,
"id": 72,
"pid": 4,
"city_code": "101160901",
"city_name": "天水"
},
{
"_id": 72,
"id": 73,
"pid": 4,
"city_code": "101160501",
"city_name": "武威"
},
{
"_id": 73,
"id": 74,
"pid": 4,
"city_code": "101160701",
"city_name": "张掖"
},
{
"_id": 74,
"id": 75,
"pid": 5,
"city_code": "101280101",
"city_name": "广州"
},
{
"_id": 75,
"id": 76,
"pid": 5,
"city_code": "101280601",
"city_name": "深圳"
},
{
"_id": 76,
"id": 77,
"pid": 5,
"city_code": "101281501",
"city_name": "潮州"
},
{
"_id": 77,
"id": 78,
"pid": 5,
"city_code": "101281601",
"city_name": "东莞"
},
{
"_id": 78,
"id": 79,
"pid": 5,
"city_code": "101280800",
"city_name": "佛山"
},
{
"_id": 79,
"id": 80,
"pid": 5,
"city_code": "101281201",
"city_name": "河源"
},
{
"_id": 80,
"id": 81,
"pid": 5,
"city_code": "101280301",
"city_name": "惠州"
},
{
"_id": 81,
"id": 82,
"pid": 5,
"city_code": "101281101",
"city_name": "江门"
},
{
"_id": 82,
"id": 83,
"pid": 5,
"city_code": "101281901",
"city_name": "揭阳"
},
{
"_id": 83,
"id": 84,
"pid": 5,
"city_code": "101282001",
"city_name": "茂名"
},
{
"_id": 84,
"id": 85,
"pid": 5,
"city_code": "101280401",
"city_name": "梅州"
},
{
"_id": 85,
"id": 86,
"pid": 5,
"city_code": "101281301",
"city_name": "清远"
},
{
"_id": 86,
"id": 87,
"pid": 5,
"city_code": "101280501",
"city_name": "汕头"
},
{
"_id": 87,
"id": 88,
"pid": 5,
"city_code": "101282101",
"city_name": "汕尾"
},
{
"_id": 88,
"id": 89,
"pid": 5,
"city_code": "101280201",
"city_name": "韶关"
},
{
"_id": 89,
"id": 90,
"pid": 5,
"city_code": "101281801",
"city_name": "阳江"
},
{
"_id": 90,
"id": 91,
"pid": 5,
"city_code": "101281401",
"city_name": "云浮"
},
{
"_id": 91,
"id": 92,
"pid": 5,
"city_code": "101281001",
"city_name": "湛江"
},
{
"_id": 92,
"id": 93,
"pid": 5,
"city_code": "101280901",
"city_name": "肇庆"
},
{
"_id": 93,
"id": 94,
"pid": 5,
"city_code": "101281701",
"city_name": "中山"
},
{
"_id": 94,
"id": 95,
"pid": 5,
"city_code": "101280701",
"city_name": "珠海"
},
{
"_id": 95,
"id": 96,
"pid": 6,
"city_code": "101300101",
"city_name": "南宁"
},
{
"_id": 96,
"id": 97,
"pid": 6,
"city_code": "101300501",
"city_name": "桂林"
},
{
"_id": 97,
"id": 98,
"pid": 6,
"city_code": "101301001",
"city_name": "百色"
},
{
"_id": 98,
"id": 99,
"pid": 6,
"city_code": "101301301",
"city_name": "北海"
},
{
"_id": 99,
"id": 100,
"pid": 6,
"city_code": "101300201",
"city_name": "崇左"
},
{
"_id": 100,
"id": 101,
"pid": 6,
"city_code": "101301401",
"city_name": "防城港"
},
{
"_id": 101,
"id": 102,
"pid": 6,
"city_code": "101300801",
"city_name": "贵港"
},
{
"_id": 102,
"id": 103,
"pid": 6,
"city_code": "101301201",
"city_name": "河池"
},
{
"_id": 103,
"id": 104,
"pid": 6,
"city_code": "101300701",
"city_name": "贺州"
},
{
"_id": 104,
"id": 105,
"pid": 6,
"city_code": "101300401",
"city_name": "来宾"
},
{
"_id": 105,
"id": 106,
"pid": 6,
"city_code": "101300301",
"city_name": "柳州"
},
{
"_id": 106,
"id": 107,
"pid": 6,
"city_code": "101301101",
"city_name": "钦州"
},
{
"_id": 107,
"id": 108,
"pid": 6,
"city_code": "101300601",
"city_name": "梧州"
},
{
"_id": 108,
"id": 109,
"pid": 6,
"city_code": "101300901",
"city_name": "玉林"
},
{
"_id": 109,
"id": 110,
"pid": 7,
"city_code": "101260101",
"city_name": "贵阳"
},
{
"_id": 110,
"id": 111,
"pid": 7,
"city_code": "101260301",
"city_name": "安顺"
},
{
"_id": 111,
"id": 112,
"pid": 7,
"city_code": "101260701",
"city_name": "毕节"
},
{
"_id": 112,
"id": 113,
"pid": 7,
"city_code": "101260801",
"city_name": "六盘水"
},
{
"_id": 113,
"id": 114,
"pid": 7,
"city_code": "101260506",
"city_name": "黔东南"
},
{
"_id": 114,
"id": 115,
"pid": 7,
"city_code": "101260413",
"city_name": "黔南"
},
{
"_id": 115,
"id": 116,
"pid": 7,
"city_code": "101260906",
"city_name": "黔西南"
},
{
"_id": 116,
"id": 117,
"pid": 7,
"city_code": "101260601",
"city_name": "铜仁"
},
{
"_id": 117,
"id": 118,
"pid": 7,
"city_code": "101260201",
"city_name": "遵义"
},
{
"_id": 118,
"id": 119,
"pid": 8,
"city_code": "101310101",
"city_name": "海口"
},
{
"_id": 119,
"id": 120,
"pid": 8,
"city_code": "101310201",
"city_name": "三亚"
},
{
"_id": 120,
"id": 121,
"pid": 8,
"city_code": "101310207",
"city_name": "白沙县"
},
{
"_id": 121,
"id": 122,
"pid": 8,
"city_code": "101310214",
"city_name": "保亭县"
},
{
"_id": 122,
"id": 123,
"pid": 8,
"city_code": "101310206",
"city_name": "昌江县"
},
{
"_id": 123,
"id": 124,
"pid": 8,
"city_code": "101310204",
"city_name": "澄迈县"
},
{
"_id": 124,
"id": 125,
"pid": 8,
"city_code": "101310209",
"city_name": "定安县"
},
{
"_id": 125,
"id": 126,
"pid": 8,
"city_code": "101310202",
"city_name": "东方"
},
{
"_id": 126,
"id": 127,
"pid": 8,
"city_code": "101310221",
"city_name": "乐东县"
},
{
"_id": 127,
"id": 128,
"pid": 8,
"city_code": "101310203",
"city_name": "临高县"
},
{
"_id": 128,
"id": 129,
"pid": 8,
"city_code": "101310216",
"city_name": "陵水县"
},
{
"_id": 129,
"id": 130,
"pid": 8,
"city_code": "101310211",
"city_name": "琼海"
},
{
"_id": 130,
"id": 131,
"pid": 8,
"city_code": "101310208",
"city_name": "琼中"
},
{
"_id": 131,
"id": 132,
"pid": 8,
"city_code": "101310210",
"city_name": "屯昌县"
},
{
"_id": 132,
"id": 133,
"pid": 8,
"city_code": "101310215",
"city_name": "万宁"
},
{
"_id": 133,
"id": 134,
"pid": 8,
"city_code": "101310212",
"city_name": "文昌"
},
{
"_id": 134,
"id": 135,
"pid": 8,
"city_code": "101310222",
"city_name": "五指山"
},
{
"_id": 135,
"id": 136,
"pid": 8,
"city_code": "101310205",
"city_name": "儋州"
},
{
"_id": 136,
"id": 137,
"pid": 9,
"city_code": "101090101",
"city_name": "石家庄"
},
{
"_id": 137,
"id": 138,
"pid": 9,
"city_code": "101090201",
"city_name": "保定"
},
{
"_id": 138,
"id": 139,
"pid": 9,
"city_code": "101090701",
"city_name": "沧州"
},
{
"_id": 139,
"id": 140,
"pid": 9,
"city_code": "101090402",
"city_name": "承德"
},
{
"_id": 140,
"id": 141,
"pid": 9,
"city_code": "101091001",
"city_name": "邯郸"
},
{
"_id": 141,
"id": 142,
"pid": 9,
"city_code": "101090801",
"city_name": "衡水"
},
{
"_id": 142,
"id": 143,
"pid": 9,
"city_code": "101090601",
"city_name": "廊坊"
},
{
"_id": 143,
"id": 144,
"pid": 9,
"city_code": "101091101",
"city_name": "秦皇岛"
},
{
"_id": 144,
"id": 145,
"pid": 9,
"city_code": "101090501",
"city_name": "唐山"
},
{
"_id": 145,
"id": 146,
"pid": 9,
"city_code": "101090901",
"city_name": "邢台"
},
{
"_id": 146,
"id": 147,
"pid": 9,
"city_code": "101090301",
"city_name": "张家口"
},
{
"_id": 147,
"id": 148,
"pid": 10,
"city_code": "101180101",
"city_name": "郑州"
},
{
"_id": 148,
"id": 149,
"pid": 10,
"city_code": "101180901",
"city_name": "洛阳"
},
{
"_id": 149,
"id": 150,
"pid": 10,
"city_code": "101180801",
"city_name": "开封"
},
{
"_id": 150,
"id": 151,
"pid": 10,
"city_code": "101180201",
"city_name": "安阳"
},
{
"_id": 151,
"id": 152,
"pid": 10,
"city_code": "101181201",
"city_name": "鹤壁"
},
{
"_id": 152,
"id": 153,
"pid": 10,
"city_code": "101181801",
"city_name": "济源"
},
{
"_id": 153,
"id": 154,
"pid": 10,
"city_code": "101181101",
"city_name": "焦作"
},
{
"_id": 154,
"id": 155,
"pid": 10,
"city_code": "101180701",
"city_name": "南阳"
},
{
"_id": 155,
"id": 156,
"pid": 10,
"city_code": "101180501",
"city_name": "平顶山"
},
{
"_id": 156,
"id": 157,
"pid": 10,
"city_code": "101181701",
"city_name": "三门峡"
},
{
"_id": 157,
"id": 158,
"pid": 10,
"city_code": "101181001",
"city_name": "商丘"
},
{
"_id": 158,
"id": 159,
"pid": 10,
"city_code": "101180301",
"city_name": "新乡"
},
{
"_id": 159,
"id": 160,
"pid": 10,
"city_code": "101180601",
"city_name": "信阳"
},
{
"_id": 160,
"id": 161,
"pid": 10,
"city_code": "101180401",
"city_name": "许昌"
},
{
"_id": 161,
"id": 162,
"pid": 10,
"city_code": "101181401",
"city_name": "周口"
},
{
"_id": 162,
"id": 163,
"pid": 10,
"city_code": "101181601",
"city_name": "驻马店"
},
{
"_id": 163,
"id": 164,
"pid": 10,
"city_code": "101181501",
"city_name": "漯河"
},
{
"_id": 164,
"id": 165,
"pid": 10,
"city_code": "101181301",
"city_name": "濮阳"
},
{
"_id": 165,
"id": 166,
"pid": 11,
"city_code": "101050101",
"city_name": "哈尔滨"
},
{
"_id": 166,
"id": 167,
"pid": 11,
"city_code": "101050901",
"city_name": "大庆"
},
{
"_id": 167,
"id": 168,
"pid": 11,
"city_code": "101050701",
"city_name": "大兴安岭"
},
{
"_id": 168,
"id": 169,
"pid": 11,
"city_code": "101051201",
"city_name": "鹤岗"
},
{
"_id": 169,
"id": 170,
"pid": 11,
"city_code": "101050601",
"city_name": "黑河"
},
{
"_id": 170,
"id": 171,
"pid": 11,
"city_code": "101051101",
"city_name": "鸡西"
},
{
"_id": 171,
"id": 172,
"pid": 11,
"city_code": "101050401",
"city_name": "佳木斯"
},
{
"_id": 172,
"id": 173,
"pid": 11,
"city_code": "101050301",
"city_name": "牡丹江"
},
{
"_id": 173,
"id": 174,
"pid": 11,
"city_code": "101051002",
"city_name": "七台河"
},
{
"_id": 174,
"id": 175,
"pid": 11,
"city_code": "101050201",
"city_name": "齐齐哈尔"
},
{
"_id": 175,
"id": 176,
"pid": 11,
"city_code": "101051301",
"city_name": "双鸭山"
},
{
"_id": 176,
"id": 177,
"pid": 11,
"city_code": "101050501",
"city_name": "绥化"
},
{
"_id": 177,
"id": 178,
"pid": 11,
"city_code": "101050801",
"city_name": "伊春"
},
{
"_id": 178,
"id": 179,
"pid": 12,
"city_code": "101200101",
"city_name": "武汉"
},
{
"_id": 179,
"id": 180,
"pid": 12,
"city_code": "101201601",
"city_name": "仙桃"
},
{
"_id": 180,
"id": 181,
"pid": 12,
"city_code": "101200301",
"city_name": "鄂州"
},
{
"_id": 181,
"id": 182,
"pid": 12,
"city_code": "101200501",
"city_name": "黄冈"
},
{
"_id": 182,
"id": 183,
"pid": 12,
"city_code": "101200601",
"city_name": "黄石"
},
{
"_id": 183,
"id": 184,
"pid": 12,
"city_code": "101201401",
"city_name": "荆门"
},
{
"_id": 184,
"id": 185,
"pid": 12,
"city_code": "101200801",
"city_name": "荆州"
},
{
"_id": 185,
"id": 186,
"pid": 12,
"city_code": "101201701",
"city_name": "潜江"
},
{
"_id": 186,
"id": 187,
"pid": 12,
"city_code": "101201201",
"city_name": "神农架林区"
},
{
"_id": 187,
"id": 188,
"pid": 12,
"city_code": "101201101",
"city_name": "十堰"
},
{
"_id": 188,
"id": 189,
"pid": 12,
"city_code": "101201301",
"city_name": "随州"
},
{
"_id": 189,
"id": 190,
"pid": 12,
"city_code": "101201501",
"city_name": "天门"
},
{
"_id": 190,
"id": 191,
"pid": 12,
"city_code": "101200701",
"city_name": "咸宁"
},
{
"_id": 191,
"id": 192,
"pid": 12,
"city_code": "101200202",
"city_name": "襄阳"
},
{
"_id": 192,
"id": 193,
"pid": 12,
"city_code": "101200401",
"city_name": "孝感"
},
{
"_id": 193,
"id": 194,
"pid": 12,
"city_code": "101200901",
"city_name": "宜昌"
},
{
"_id": 194,
"id": 195,
"pid": 12,
"city_code": "101201001",
"city_name": "恩施"
},
{
"_id": 195,
"id": 196,
"pid": 13,
"city_code": "101250101",
"city_name": "长沙"
},
{
"_id": 196,
"id": 197,
"pid": 13,
"city_code": "101251101",
"city_name": "张家界"
},
{
"_id": 197,
"id": 198,
"pid": 13,
"city_code": "101250601",
"city_name": "常德"
},
{
"_id": 198,
"id": 199,
"pid": 13,
"city_code": "101250501",
"city_name": "郴州"
},
{
"_id": 199,
"id": 200,
"pid": 13,
"city_code": "101250401",
"city_name": "衡阳"
},
{
"_id": 200,
"id": 201,
"pid": 13,
"city_code": "101251201",
"city_name": "怀化"
},
{
"_id": 201,
"id": 202,
"pid": 13,
"city_code": "101250801",
"city_name": "娄底"
},
{
"_id": 202,
"id": 203,
"pid": 13,
"city_code": "101250901",
"city_name": "邵阳"
},
{
"_id": 203,
"id": 204,
"pid": 13,
"city_code": "101250201",
"city_name": "湘潭"
},
{
"_id": 204,
"id": 205,
"pid": 13,
"city_code": "101251509",
"city_name": "湘西"
},
{
"_id": 205,
"id": 206,
"pid": 13,
"city_code": "101250700",
"city_name": "益阳"
},
{
"_id": 206,
"id": 207,
"pid": 13,
"city_code": "101251401",
"city_name": "永州"
},
{
"_id": 207,
"id": 208,
"pid": 13,
"city_code": "101251001",
"city_name": "岳阳"
},
{
"_id": 208,
"id": 209,
"pid": 13,
"city_code": "101250301",
"city_name": "株洲"
},
{
"_id": 209,
"id": 210,
"pid": 14,
"city_code": "101060101",
"city_name": "长春"
},
{
"_id": 210,
"id": 211,
"pid": 14,
"city_code": "101060201",
"city_name": "吉林市"
},
{
"_id": 211,
"id": 212,
"pid": 14,
"city_code": "101060601",
"city_name": "白城"
},
{
"_id": 212,
"id": 213,
"pid": 14,
"city_code": "101060901",
"city_name": "白山"
},
{
"_id": 213,
"id": 214,
"pid": 14,
"city_code": "101060701",
"city_name": "辽源"
},
{
"_id": 214,
"id": 215,
"pid": 14,
"city_code": "101060401",
"city_name": "四平"
},
{
"_id": 215,
"id": 216,
"pid": 14,
"city_code": "101060801",
"city_name": "松原"
},
{
"_id": 216,
"id": 217,
"pid": 14,
"city_code": "101060501",
"city_name": "通化"
},
{
"_id": 217,
"id": 218,
"pid": 14,
"city_code": "101060312",
"city_name": "延边"
},
{
"_id": 218,
"id": 219,
"pid": 15,
"city_code": "101190101",
"city_name": "南京"
},
{
"_id": 219,
"id": 220,
"pid": 15,
"city_code": "101190401",
"city_name": "苏州"
},
{
"_id": 220,
"id": 221,
"pid": 15,
"city_code": "101190201",
"city_name": "无锡"
},
{
"_id": 221,
"id": 222,
"pid": 15,
"city_code": "101191101",
"city_name": "常州"
},
{
"_id": 222,
"id": 223,
"pid": 15,
"city_code": "101190901",
"city_name": "淮安"
},
{
"_id": 223,
"id": 224,
"pid": 15,
"city_code": "101191001",
"city_name": "连云港"
},
{
"_id": 224,
"id": 225,
"pid": 15,
"city_code": "101190501",
"city_name": "南通"
},
{
"_id": 225,
"id": 226,
"pid": 15,
"city_code": "101191301",
"city_name": "宿迁"
},
{
"_id": 226,
"id": 227,
"pid": 15,
"city_code": "101191201",
"city_name": "泰州"
},
{
"_id": 227,
"id": 228,
"pid": 15,
"city_code": "101190801",
"city_name": "徐州"
},
{
"_id": 228,
"id": 229,
"pid": 15,
"city_code": "101190701",
"city_name": "盐城"
},
{
"_id": 229,
"id": 230,
"pid": 15,
"city_code": "101190601",
"city_name": "扬州"
},
{
"_id": 230,
"id": 231,
"pid": 15,
"city_code": "101190301",
"city_name": "镇江"
},
{
"_id": 231,
"id": 232,
"pid": 16,
"city_code": "101240101",
"city_name": "南昌"
},
{
"_id": 232,
"id": 233,
"pid": 16,
"city_code": "101240401",
"city_name": "抚州"
},
{
"_id": 233,
"id": 234,
"pid": 16,
"city_code": "101240701",
"city_name": "赣州"
},
{
"_id": 234,
"id": 235,
"pid": 16,
"city_code": "101240601",
"city_name": "吉安"
},
{
"_id": 235,
"id": 236,
"pid": 16,
"city_code": "101240801",
"city_name": "景德镇"
},
{
"_id": 236,
"id": 237,
"pid": 16,
"city_code": "101240201",
"city_name": "九江"
},
{
"_id": 237,
"id": 238,
"pid": 16,
"city_code": "101240901",
"city_name": "萍乡"
},
{
"_id": 238,
"id": 239,
"pid": 16,
"city_code": "101240301",
"city_name": "上饶"
},
{
"_id": 239,
"id": 240,
"pid": 16,
"city_code": "101241001",
"city_name": "新余"
},
{
"_id": 240,
"id": 241,
"pid": 16,
"city_code": "101240501",
"city_name": "宜春"
},
{
"_id": 241,
"id": 242,
"pid": 16,
"city_code": "101241101",
"city_name": "鹰潭"
},
{
"_id": 242,
"id": 243,
"pid": 17,
"city_code": "101070101",
"city_name": "沈阳"
},
{
"_id": 243,
"id": 244,
"pid": 17,
"city_code": "101070201",
"city_name": "大连"
},
{
"_id": 244,
"id": 245,
"pid": 17,
"city_code": "101070301",
"city_name": "鞍山"
},
{
"_id": 245,
"id": 246,
"pid": 17,
"city_code": "101070501",
"city_name": "本溪"
},
{
"_id": 246,
"id": 247,
"pid": 17,
"city_code": "101071201",
"city_name": "朝阳"
},
{
"_id": 247,
"id": 248,
"pid": 17,
"city_code": "101070601",
"city_name": "丹东"
},
{
"_id": 248,
"id": 249,
"pid": 17,
"city_code": "101070401",
"city_name": "抚顺"
},
{
"_id": 249,
"id": 250,
"pid": 17,
"city_code": "101070901",
"city_name": "阜新"
},
{
"_id": 250,
"id": 251,
"pid": 17,
"city_code": "101071401",
"city_name": "葫芦岛"
},
{
"_id": 251,
"id": 252,
"pid": 17,
"city_code": "101070701",
"city_name": "锦州"
},
{
"_id": 252,
"id": 253,
"pid": 17,
"city_code": "101071001",
"city_name": "辽阳"
},
{
"_id": 253,
"id": 254,
"pid": 17,
"city_code": "101071301",
"city_name": "盘锦"
},
{
"_id": 254,
"id": 255,
"pid": 17,
"city_code": "101071101",
"city_name": "铁岭"
},
{
"_id": 255,
"id": 256,
"pid": 17,
"city_code": "101070801",
"city_name": "营口"
},
{
"_id": 256,
"id": 257,
"pid": 18,
"city_code": "101080101",
"city_name": "呼和浩特"
},
{
"_id": 257,
"id": 258,
"pid": 18,
"city_code": "101081213",
"city_name": "阿拉善盟"
},
{
"_id": 258,
"id": 259,
"pid": 18,
"city_code": "101080801",
"city_name": "巴彦淖尔"
},
{
"_id": 259,
"id": 260,
"pid": 18,
"city_code": "101080201",
"city_name": "包头"
},
{
"_id": 260,
"id": 261,
"pid": 18,
"city_code": "101080601",
"city_name": "赤峰"
},
{
"_id": 261,
"id": 262,
"pid": 18,
"city_code": "101080701",
"city_name": "鄂尔多斯"
},
{
"_id": 262,
"id": 263,
"pid": 18,
"city_code": "101081001",
"city_name": "呼伦贝尔"
},
{
"_id": 263,
"id": 264,
"pid": 18,
"city_code": "101080501",
"city_name": "通辽"
},
{
"_id": 264,
"id": 265,
"pid": 18,
"city_code": "101080301",
"city_name": "乌海"
},
{
"_id": 265,
"id": 266,
"pid": 18,
"city_code": "101080405",
"city_name": "乌兰察布"
},
{
"_id": 266,
"id": 267,
"pid": 18,
"city_code": "101080902",
"city_name": "锡林郭勒"
},
{
"_id": 267,
"id": 268,
"pid": 18,
"city_code": "101081108",
"city_name": "兴安盟"
},
{
"_id": 268,
"id": 269,
"pid": 19,
"city_code": "101170101",
"city_name": "银川"
},
{
"_id": 269,
"id": 270,
"pid": 19,
"city_code": "101170401",
"city_name": "固原"
},
{
"_id": 270,
"id": 271,
"pid": 19,
"city_code": "101170201",
"city_name": "石嘴山"
},
{
"_id": 271,
"id": 272,
"pid": 19,
"city_code": "101170301",
"city_name": "吴忠"
},
{
"_id": 272,
"id": 273,
"pid": 19,
"city_code": "101170501",
"city_name": "中卫"
},
{
"_id": 273,
"id": 274,
"pid": 20,
"city_code": "101150101",
"city_name": "西宁"
},
{
"_id": 274,
"id": 275,
"pid": 20,
"city_code": "101150501",
"city_name": "果洛"
},
{
"_id": 275,
"id": 276,
"pid": 20,
"city_code": "101150801",
"city_name": "海北"
},
{
"_id": 276,
"id": 277,
"pid": 20,
"city_code": "101150201",
"city_name": "海东"
},
{
"_id": 277,
"id": 278,
"pid": 20,
"city_code": "101150401",
"city_name": "海南州"
},
{
"_id": 278,
"id": 279,
"pid": 20,
"city_code": "101150701",
"city_name": "海西"
},
{
"_id": 279,
"id": 280,
"pid": 20,
"city_code": "101150301",
"city_name": "黄南"
},
{
"_id": 280,
"id": 281,
"pid": 20,
"city_code": "101150601",
"city_name": "玉树"
},
{
"_id": 281,
"id": 282,
"pid": 21,
"city_code": "101120101",
"city_name": "济南"
},
{
"_id": 282,
"id": 283,
"pid": 21,
"city_code": "101120201",
"city_name": "青岛"
},
{
"_id": 283,
"id": 284,
"pid": 21,
"city_code": "101121101",
"city_name": "滨州"
},
{
"_id": 284,
"id": 285,
"pid": 21,
"city_code": "101120401",
"city_name": "德州"
},
{
"_id": 285,
"id": 286,
"pid": 21,
"city_code": "101121201",
"city_name": "东营"
},
{
"_id": 286,
"id": 287,
"pid": 21,
"city_code": "101121001",
"city_name": "菏泽"
},
{
"_id": 287,
"id": 288,
"pid": 21,
"city_code": "101120701",
"city_name": "济宁"
},
{
"_id": 288,
"id": 289,
"pid": 21,
"city_code": "101121601",
"city_name": "莱芜"
},
{
"_id": 289,
"id": 290,
"pid": 21,
"city_code": "101121701",
"city_name": "聊城"
},
{
"_id": 290,
"id": 291,
"pid": 21,
"city_code": "101120901",
"city_name": "临沂"
},
{
"_id": 291,
"id": 292,
"pid": 21,
"city_code": "101121501",
"city_name": "日照"
},
{
"_id": 292,
"id": 293,
"pid": 21,
"city_code": "101120801",
"city_name": "泰安"
},
{
"_id": 293,
"id": 294,
"pid": 21,
"city_code": "101121301",
"city_name": "威海"
},
{
"_id": 294,
"id": 295,
"pid": 21,
"city_code": "101120601",
"city_name": "潍坊"
},
{
"_id": 295,
"id": 296,
"pid": 21,
"city_code": "101120501",
"city_name": "烟台"
},
{
"_id": 296,
"id": 297,
"pid": 21,
"city_code": "101121401",
"city_name": "枣庄"
},
{
"_id": 297,
"id": 298,
"pid": 21,
"city_code": "101120301",
"city_name": "淄博"
},
{
"_id": 298,
"id": 299,
"pid": 22,
"city_code": "101100101",
"city_name": "太原"
},
{
"_id": 299,
"id": 300,
"pid": 22,
"city_code": "101100501",
"city_name": "长治"
},
{
"_id": 300,
"id": 301,
"pid": 22,
"city_code": "101100201",
"city_name": "大同"
},
{
"_id": 301,
"id": 302,
"pid": 22,
"city_code": "101100601",
"city_name": "晋城"
},
{
"_id": 302,
"id": 303,
"pid": 22,
"city_code": "101100401",
"city_name": "晋中"
},
{
"_id": 303,
"id": 304,
"pid": 22,
"city_code": "101100701",
"city_name": "临汾"
},
{
"_id": 304,
"id": 305,
"pid": 22,
"city_code": "101101100",
"city_name": "吕梁"
},
{
"_id": 305,
"id": 306,
"pid": 22,
"city_code": "101100901",
"city_name": "朔州"
},
{
"_id": 306,
"id": 307,
"pid": 22,
"city_code": "101101001",
"city_name": "忻州"
},
{
"_id": 307,
"id": 308,
"pid": 22,
"city_code": "101100301",
"city_name": "阳泉"
},
{
"_id": 308,
"id": 309,
"pid": 22,
"city_code": "101100801",
"city_name": "运城"
},
{
"_id": 309,
"id": 310,
"pid": 23,
"city_code": "101110101",
"city_name": "西安"
},
{
"_id": 310,
"id": 311,
"pid": 23,
"city_code": "101110701",
"city_name": "安康"
},
{
"_id": 311,
"id": 312,
"pid": 23,
"city_code": "101110901",
"city_name": "宝鸡"
},
{
"_id": 312,
"id": 313,
"pid": 23,
"city_code": "101110801",
"city_name": "汉中"
},
{
"_id": 313,
"id": 314,
"pid": 23,
"city_code": "101110601",
"city_name": "商洛"
},
{
"_id": 314,
"id": 315,
"pid": 23,
"city_code": "101111001",
"city_name": "铜川"
},
{
"_id": 315,
"id": 316,
"pid": 23,
"city_code": "101110501",
"city_name": "渭南"
},
{
"_id": 316,
"id": 317,
"pid": 23,
"city_code": "101110200",
"city_name": "咸阳"
},
{
"_id": 317,
"id": 318,
"pid": 23,
"city_code": "101110300",
"city_name": "延安"
},
{
"_id": 318,
"id": 319,
"pid": 23,
"city_code": "101110401",
"city_name": "榆林"
},
{
"_id": 319,
"id": 321,
"pid": 25,
"city_code": "101270101",
"city_name": "成都"
},
{
"_id": 320,
"id": 322,
"pid": 25,
"city_code": "101270401",
"city_name": "绵阳"
},
{
"_id": 321,
"id": 323,
"pid": 25,
"city_code": "101271901",
"city_name": "阿坝"
},
{
"_id": 322,
"id": 324,
"pid": 25,
"city_code": "101270901",
"city_name": "巴中"
},
{
"_id": 323,
"id": 325,
"pid": 25,
"city_code": "101270601",
"city_name": "达州"
},
{
"_id": 324,
"id": 326,
"pid": 25,
"city_code": "101272001",
"city_name": "德阳"
},
{
"_id": 325,
"id": 327,
"pid": 25,
"city_code": "101271801",
"city_name": "甘孜"
},
{
"_id": 326,
"id": 328,
"pid": 25,
"city_code": "101270801",
"city_name": "广安"
},
{
"_id": 327,
"id": 329,
"pid": 25,
"city_code": "101272101",
"city_name": "广元"
},
{
"_id": 328,
"id": 330,
"pid": 25,
"city_code": "101271401",
"city_name": "乐山"
},
{
"_id": 329,
"id": 331,
"pid": 25,
"city_code": "101271601",
"city_name": "凉山"
},
{
"_id": 330,
"id": 332,
"pid": 25,
"city_code": "101271501",
"city_name": "眉山"
},
{
"_id": 331,
"id": 333,
"pid": 25,
"city_code": "101270501",
"city_name": "南充"
},
{
"_id": 332,
"id": 334,
"pid": 25,
"city_code": "101271201",
"city_name": "内江"
},
{
"_id": 333,
"id": 335,
"pid": 25,
"city_code": "101270201",
"city_name": "攀枝花"
},
{
"_id": 334,
"id": 336,
"pid": 25,
"city_code": "101270701",
"city_name": "遂宁"
},
{
"_id": 335,
"id": 337,
"pid": 25,
"city_code": "101271701",
"city_name": "雅安"
},
{
"_id": 336,
"id": 338,
"pid": 25,
"city_code": "101271101",
"city_name": "宜宾"
},
{
"_id": 337,
"id": 339,
"pid": 25,
"city_code": "101271301",
"city_name": "资阳"
},
{
"_id": 338,
"id": 340,
"pid": 25,
"city_code": "101270301",
"city_name": "自贡"
},
{
"_id": 339,
"id": 341,
"pid": 25,
"city_code": "101271001",
"city_name": "泸州"
},
{
"_id": 340,
"id": 343,
"pid": 27,
"city_code": "101140101",
"city_name": "拉萨"
},
{
"_id": 341,
"id": 344,
"pid": 27,
"city_code": "101140701",
"city_name": "阿里"
},
{
"_id": 342,
"id": 345,
"pid": 27,
"city_code": "101140501",
"city_name": "昌都"
},
{
"_id": 343,
"id": 346,
"pid": 27,
"city_code": "101140401",
"city_name": "林芝"
},
{
"_id": 344,
"id": 347,
"pid": 27,
"city_code": "101140601",
"city_name": "那曲"
},
{
"_id": 345,
"id": 348,
"pid": 27,
"city_code": "101140201",
"city_name": "日喀则"
},
{
"_id": 346,
"id": 349,
"pid": 27,
"city_code": "101140301",
"city_name": "山南"
},
{
"_id": 347,
"id": 350,
"pid": 28,
"city_code": "101130101",
"city_name": "乌鲁木齐"
},
{
"_id": 348,
"id": 351,
"pid": 28,
"city_code": "101130801",
"city_name": "阿克苏"
},
{
"_id": 349,
"id": 352,
"pid": 28,
"city_code": "101130701",
"city_name": "阿拉尔"
},
{
"_id": 350,
"id": 353,
"pid": 28,
"city_code": "101130609",
"city_name": "巴音郭楞"
},
{
"_id": 351,
"id": 354,
"pid": 28,
"city_code": "101131604",
"city_name": "博尔塔拉"
},
{
"_id": 352,
"id": 355,
"pid": 28,
"city_code": "101130401",
"city_name": "昌吉"
},
{
"_id": 353,
"id": 356,
"pid": 28,
"city_code": "101131201",
"city_name": "哈密"
},
{
"_id": 354,
"id": 357,
"pid": 28,
"city_code": "101131301",
"city_name": "和田"
},
{
"_id": 355,
"id": 358,
"pid": 28,
"city_code": "101130901",
"city_name": "喀什"
},
{
"_id": 356,
"id": 359,
"pid": 28,
"city_code": "101130201",
"city_name": "克拉玛依"
},
{
"_id": 357,
"id": 360,
"pid": 28,
"city_code": "",
"city_name": "克孜勒苏"
},
{
"_id": 358,
"id": 361,
"pid": 28,
"city_code": "101130301",
"city_name": "石河子"
},
{
"_id": 359,
"id": 362,
"pid": 28,
"city_code": "",
"city_name": "图木舒克"
},
{
"_id": 360,
"id": 363,
"pid": 28,
"city_code": "101130501",
"city_name": "吐鲁番"
},
{
"_id": 361,
"id": 364,
"pid": 28,
"city_code": "",
"city_name": "五家渠"
},
{
"_id": 362,
"id": 365,
"pid": 28,
"city_code": "101131012",
"city_name": "伊犁"
},
{
"_id": 363,
"id": 366,
"pid": 29,
"city_code": "101290101",
"city_name": "昆明"
},
{
"_id": 364,
"id": 367,
"pid": 29,
"city_code": "101291201",
"city_name": "怒江"
},
{
"_id": 365,
"id": 368,
"pid": 29,
"city_code": "101290901",
"city_name": "普洱"
},
{
"_id": 366,
"id": 369,
"pid": 29,
"city_code": "101291401",
"city_name": "丽江"
},
{
"_id": 367,
"id": 370,
"pid": 29,
"city_code": "101290501",
"city_name": "保山"
},
{
"_id": 368,
"id": 371,
"pid": 29,
"city_code": "101290801",
"city_name": "楚雄"
},
{
"_id": 369,
"id": 372,
"pid": 29,
"city_code": "101290201",
"city_name": "大理"
},
{
"_id": 370,
"id": 373,
"pid": 29,
"city_code": "101291501",
"city_name": "德宏"
},
{
"_id": 371,
"id": 374,
"pid": 29,
"city_code": "101291305",
"city_name": "迪庆"
},
{
"_id": 372,
"id": 375,
"pid": 29,
"city_code": "101290301",
"city_name": "红河"
},
{
"_id": 373,
"id": 376,
"pid": 29,
"city_code": "101291101",
"city_name": "临沧"
},
{
"_id": 374,
"id": 377,
"pid": 29,
"city_code": "101290401",
"city_name": "曲靖"
},
{
"_id": 375,
"id": 378,
"pid": 29,
"city_code": "101290601",
"city_name": "文山"
},
{
"_id": 376,
"id": 379,
"pid": 29,
"city_code": "101291602",
"city_name": "西双版纳"
},
{
"_id": 377,
"id": 380,
"pid": 29,
"city_code": "101290701",
"city_name": "玉溪"
},
{
"_id": 378,
"id": 381,
"pid": 29,
"city_code": "101291001",
"city_name": "昭通"
},
{
"_id": 379,
"id": 382,
"pid": 30,
"city_code": "101210101",
"city_name": "杭州"
},
{
"_id": 380,
"id": 383,
"pid": 30,
"city_code": "101210201",
"city_name": "湖州"
},
{
"_id": 381,
"id": 384,
"pid": 30,
"city_code": "101210301",
"city_name": "嘉兴"
},
{
"_id": 382,
"id": 385,
"pid": 30,
"city_code": "101210901",
"city_name": "金华"
},
{
"_id": 383,
"id": 386,
"pid": 30,
"city_code": "101210801",
"city_name": "丽水"
},
{
"_id": 384,
"id": 387,
"pid": 30,
"city_code": "101210401",
"city_name": "宁波"
},
{
"_id": 385,
"id": 388,
"pid": 30,
"city_code": "101210501",
"city_name": "绍兴"
},
{
"_id": 386,
"id": 389,
"pid": 30,
"city_code": "101210601",
"city_name": "台州"
},
{
"_id": 387,
"id": 390,
"pid": 30,
"city_code": "101210701",
"city_name": "温州"
},
{
"_id": 388,
"id": 391,
"pid": 30,
"city_code": "101211101",
"city_name": "舟山"
},
{
"_id": 389,
"id": 392,
"pid": 30,
"city_code": "101211001",
"city_name": "衢州"
},
{
"_id": 390,
"id": 400,
"pid": 35,
"city_code": "101220609",
"city_name": "桐城市"
},
{
"_id": 391,
"id": 401,
"pid": 35,
"city_code": "101220605",
"city_name": "怀宁县"
},
{
"_id": 392,
"id": 402,
"pid": 47,
"city_code": "101220602",
"city_name": "枞阳县"
},
{
"_id": 393,
"id": 403,
"pid": 35,
"city_code": "101220604",
"city_name": "潜山县"
},
{
"_id": 394,
"id": 404,
"pid": 35,
"city_code": "101220603",
"city_name": "太湖县"
},
{
"_id": 395,
"id": 405,
"pid": 35,
"city_code": "101220606",
"city_name": "宿松县"
},
{
"_id": 396,
"id": 406,
"pid": 35,
"city_code": "101220607",
"city_name": "望江县"
},
{
"_id": 397,
"id": 407,
"pid": 35,
"city_code": "101220608",
"city_name": "岳西县"
},
{
"_id": 398,
"id": 412,
"pid": 36,
"city_code": "101220202",
"city_name": "怀远县"
},
{
"_id": 399,
"id": 413,
"pid": 36,
"city_code": "101220204",
"city_name": "五河县"
},
{
"_id": 400,
"id": 414,
"pid": 36,
"city_code": "101220203",
"city_name": "固镇县"
},
{
"_id": 401,
"id": 416,
"pid": 3400,
"city_code": "101220106",
"city_name": "庐江县"
},
{
"_id": 402,
"id": 417,
"pid": 48,
"city_code": "101220305",
"city_name": "无为县"
},
{
"_id": 403,
"id": 418,
"pid": 45,
"city_code": "101220503",
"city_name": "含山县"
},
{
"_id": 404,
"id": 419,
"pid": 45,
"city_code": "101220504",
"city_name": "和县"
},
{
"_id": 405,
"id": 421,
"pid": 38,
"city_code": "101221702",
"city_name": "东至县"
},
{
"_id": 406,
"id": 422,
"pid": 38,
"city_code": "101221705",
"city_name": "石台县"
},
{
"_id": 407,
"id": 423,
"pid": 38,
"city_code": "101221703",
"city_name": "青阳县"
},
{
"_id": 408,
"id": 426,
"pid": 39,
"city_code": "101221107",
"city_name": "天长市"
},
{
"_id": 409,
"id": 427,
"pid": 39,
"city_code": "101221103",
"city_name": "明光市"
},
{
"_id": 410,
"id": 428,
"pid": 39,
"city_code": "101221106",
"city_name": "来安县"
},
{
"_id": 411,
"id": 429,
"pid": 39,
"city_code": "101221105",
"city_name": "全椒县"
},
{
"_id": 412,
"id": 430,
"pid": 39,
"city_code": "101221104",
"city_name": "定远县"
},
{
"_id": 413,
"id": 431,
"pid": 39,
"city_code": "101221102",
"city_name": "凤阳县"
},
{
"_id": 414,
"id": 439,
"pid": 40,
"city_code": "101220805",
"city_name": "界首市"
},
{
"_id": 415,
"id": 440,
"pid": 40,
"city_code": "101220804",
"city_name": "临泉县"
},
{
"_id": 416,
"id": 441,
"pid": 40,
"city_code": "101220806",
"city_name": "太和县"
},
{
"_id": 417,
"id": 442,
"pid": 40,
"city_code": "101220802",
"city_name": "阜南县"
},
{
"_id": 418,
"id": 443,
"pid": 40,
"city_code": "101220803",
"city_name": "颍上县"
},
{
"_id": 419,
"id": 447,
"pid": 41,
"city_code": "101221202",
"city_name": "濉溪县"
},
{
"_id": 420,
"id": 452,
"pid": 42,
"city_code": "101220403",
"city_name": "潘集区"
},
{
"_id": 421,
"id": 453,
"pid": 42,
"city_code": "101220402",
"city_name": "凤台县"
},
{
"_id": 422,
"id": 454,
"pid": 43,
"city_code": "101221003",
"city_name": "屯溪区"
},
{
"_id": 423,
"id": 455,
"pid": 43,
"city_code": "101221002",
"city_name": "黄山区"
},
{
"_id": 424,
"id": 457,
"pid": 43,
"city_code": "101221006",
"city_name": "歙县"
},
{
"_id": 425,
"id": 458,
"pid": 43,
"city_code": "101221007",
"city_name": "休宁县"
},
{
"_id": 426,
"id": 459,
"pid": 43,
"city_code": "101221005",
"city_name": "黟县"
},
{
"_id": 427,
"id": 460,
"pid": 43,
"city_code": "101221004",
"city_name": "祁门县"
},
{
"_id": 428,
"id": 463,
"pid": 44,
"city_code": "101221503",
"city_name": "寿县"
},
{
"_id": 429,
"id": 464,
"pid": 44,
"city_code": "101221502",
"city_name": "霍邱县"
},
{
"_id": 430,
"id": 465,
"pid": 44,
"city_code": "101221507",
"city_name": "舒城县"
},
{
"_id": 431,
"id": 466,
"pid": 44,
"city_code": "101221505",
"city_name": "金寨县"
},
{
"_id": 432,
"id": 467,
"pid": 44,
"city_code": "101221506",
"city_name": "霍山县"
},
{
"_id": 433,
"id": 471,
"pid": 45,
"city_code": "101220502",
"city_name": "当涂县"
},
{
"_id": 434,
"id": 473,
"pid": 46,
"city_code": "101220702",
"city_name": "砀山县"
},
{
"_id": 435,
"id": 474,
"pid": 46,
"city_code": "101220705",
"city_name": "萧县"
},
{
"_id": 436,
"id": 475,
"pid": 46,
"city_code": "101220703",
"city_name": "灵璧县"
},
{
"_id": 437,
"id": 476,
"pid": 46,
"city_code": "101220704",
"city_name": "泗县"
},
{
"_id": 438,
"id": 480,
"pid": 47,
"city_code": "101221301",
"city_name": "义安区"
},
{
"_id": 439,
"id": 485,
"pid": 48,
"city_code": "101220303",
"city_name": "芜湖县"
},
{
"_id": 440,
"id": 486,
"pid": 48,
"city_code": "101220302",
"city_name": "繁昌县"
},
{
"_id": 441,
"id": 487,
"pid": 48,
"city_code": "101220304",
"city_name": "南陵县"
},
{
"_id": 442,
"id": 489,
"pid": 49,
"city_code": "101221404",
"city_name": "宁国市"
},
{
"_id": 443,
"id": 490,
"pid": 49,
"city_code": "101221407",
"city_name": "郎溪县"
},
{
"_id": 444,
"id": 491,
"pid": 49,
"city_code": "101221406",
"city_name": "广德县"
},
{
"_id": 445,
"id": 492,
"pid": 49,
"city_code": "101221402",
"city_name": "泾县"
},
{
"_id": 446,
"id": 493,
"pid": 49,
"city_code": "101221405",
"city_name": "绩溪县"
},
{
"_id": 447,
"id": 494,
"pid": 49,
"city_code": "101221403",
"city_name": "旌德县"
},
{
"_id": 448,
"id": 495,
"pid": 50,
"city_code": "101220902",
"city_name": "涡阳县"
},
{
"_id": 449,
"id": 496,
"pid": 50,
"city_code": "101220904",
"city_name": "蒙城县"
},
{
"_id": 450,
"id": 497,
"pid": 50,
"city_code": "101220903",
"city_name": "利辛县"
},
{
"_id": 451,
"id": 501,
"pid": 1,
"city_code": "101010200",
"city_name": "海淀区"
},
{
"_id": 452,
"id": 502,
"pid": 1,
"city_code": "101010300",
"city_name": "朝阳区"
},
{
"_id": 453,
"id": 505,
"pid": 1,
"city_code": "101010900",
"city_name": "丰台区"
},
{
"_id": 454,
"id": 506,
"pid": 1,
"city_code": "101011000",
"city_name": "石景山区"
},
{
"_id": 455,
"id": 507,
"pid": 1,
"city_code": "101011200",
"city_name": "房山区"
},
{
"_id": 456,
"id": 508,
"pid": 1,
"city_code": "101011400",
"city_name": "门头沟区"
},
{
"_id": 457,
"id": 509,
"pid": 1,
"city_code": "101010600",
"city_name": "通州区"
},
{
"_id": 458,
"id": 510,
"pid": 1,
"city_code": "101010400",
"city_name": "顺义区"
},
{
"_id": 459,
"id": 511,
"pid": 1,
"city_code": "101010700",
"city_name": "昌平区"
},
{
"_id": 460,
"id": 512,
"pid": 1,
"city_code": "101010500",
"city_name": "怀柔区"
},
{
"_id": 461,
"id": 513,
"pid": 1,
"city_code": "101011500",
"city_name": "平谷区"
},
{
"_id": 462,
"id": 514,
"pid": 1,
"city_code": "101011100",
"city_name": "大兴区"
},
{
"_id": 463,
"id": 515,
"pid": 1,
"city_code": "101011300",
"city_name": "密云县"
},
{
"_id": 464,
"id": 516,
"pid": 1,
"city_code": "101010800",
"city_name": "延庆县"
},
{
"_id": 465,
"id": 522,
"pid": 52,
"city_code": "101230111",
"city_name": "福清市"
},
{
"_id": 466,
"id": 523,
"pid": 52,
"city_code": "101230110",
"city_name": "长乐市"
},
{
"_id": 467,
"id": 524,
"pid": 52,
"city_code": "101230103",
"city_name": "闽侯县"
},
{
"_id": 468,
"id": 525,
"pid": 52,
"city_code": "101230105",
"city_name": "连江县"
},
{
"_id": 469,
"id": 526,
"pid": 52,
"city_code": "101230104",
"city_name": "罗源县"
},
{
"_id": 470,
"id": 527,
"pid": 52,
"city_code": "101230102",
"city_name": "闽清县"
},
{
"_id": 471,
"id": 528,
"pid": 52,
"city_code": "101230107",
"city_name": "永泰县"
},
{
"_id": 472,
"id": 529,
"pid": 52,
"city_code": "101230108",
"city_name": "平潭县"
},
{
"_id": 473,
"id": 531,
"pid": 53,
"city_code": "101230707",
"city_name": "漳平市"
},
{
"_id": 474,
"id": 532,
"pid": 53,
"city_code": "101230702",
"city_name": "长汀县"
},
{
"_id": 475,
"id": 533,
"pid": 53,
"city_code": "101230706",
"city_name": "永定县"
},
{
"_id": 476,
"id": 534,
"pid": 53,
"city_code": "101230705",
"city_name": "上杭县"
},
{
"_id": 477,
"id": 535,
"pid": 53,
"city_code": "101230704",
"city_name": "武平县"
},
{
"_id": 478,
"id": 536,
"pid": 53,
"city_code": "101230703",
"city_name": "连城县"
},
{
"_id": 479,
"id": 538,
"pid": 54,
"city_code": "101230904",
"city_name": "邵武市"
},
{
"_id": 480,
"id": 539,
"pid": 54,
"city_code": "101230905",
"city_name": "武夷山市"
},
{
"_id": 481,
"id": 540,
"pid": 54,
"city_code": "101230910",
"city_name": "建瓯市"
},
{
"_id": 482,
"id": 541,
"pid": 54,
"city_code": "101230907",
"city_name": "建阳市"
},
{
"_id": 483,
"id": 542,
"pid": 54,
"city_code": "101230902",
"city_name": "顺昌县"
},
{
"_id": 484,
"id": 543,
"pid": 54,
"city_code": "101230906",
"city_name": "浦城县"
},
{
"_id": 485,
"id": 544,
"pid": 54,
"city_code": "101230903",
"city_name": "光泽县"
},
{
"_id": 486,
"id": 545,
"pid": 54,
"city_code": "101230908",
"city_name": "松溪县"
},
{
"_id": 487,
"id": 546,
"pid": 54,
"city_code": "101230909",
"city_name": "政和县"
},
{
"_id": 488,
"id": 548,
"pid": 55,
"city_code": "101230306",
"city_name": "福安市"
},
{
"_id": 489,
"id": 549,
"pid": 55,
"city_code": "101230308",
"city_name": "福鼎市"
},
{
"_id": 490,
"id": 550,
"pid": 55,
"city_code": "101230303",
"city_name": "霞浦县"
},
{
"_id": 491,
"id": 551,
"pid": 55,
"city_code": "101230302",
"city_name": "古田县"
},
{
"_id": 492,
"id": 552,
"pid": 55,
"city_code": "101230309",
"city_name": "屏南县"
},
{
"_id": 493,
"id": 553,
"pid": 55,
"city_code": "101230304",
"city_name": "寿宁县"
},
{
"_id": 494,
"id": 554,
"pid": 55,
"city_code": "101230305",
"city_name": "周宁县"
},
{
"_id": 495,
"id": 555,
"pid": 55,
"city_code": "101230307",
"city_name": "柘荣县"
},
{
"_id": 496,
"id": 556,
"pid": 56,
"city_code": "101230407",
"city_name": "城厢区"
},
{
"_id": 497,
"id": 557,
"pid": 56,
"city_code": "101230404",
"city_name": "涵江区"
},
{
"_id": 498,
"id": 558,
"pid": 56,
"city_code": "101230406",
"city_name": "荔城区"
},
{
"_id": 499,
"id": 559,
"pid": 56,
"city_code": "101230405",
"city_name": "秀屿区"
},
{
"_id": 500,
"id": 560,
"pid": 56,
"city_code": "101230402",
"city_name": "仙游县"
},
{
"_id": 501,
"id": 566,
"pid": 57,
"city_code": "101230510",
"city_name": "石狮市"
},
{
"_id": 502,
"id": 567,
"pid": 57,
"city_code": "101230509",
"city_name": "晋江市"
},
{
"_id": 503,
"id": 568,
"pid": 57,
"city_code": "101230506",
"city_name": "南安市"
},
{
"_id": 504,
"id": 569,
"pid": 57,
"city_code": "101230508",
"city_name": "惠安县"
},
{
"_id": 505,
"id": 570,
"pid": 57,
"city_code": "101230502",
"city_name": "安溪县"
},
{
"_id": 506,
"id": 571,
"pid": 57,
"city_code": "101230504",
"city_name": "永春县"
},
{
"_id": 507,
"id": 572,
"pid": 57,
"city_code": "101230505",
"city_name": "德化县"
},
{
"_id": 508,
"id": 576,
"pid": 58,
"city_code": "101230810",
"city_name": "永安市"
},
{
"_id": 509,
"id": 577,
"pid": 58,
"city_code": "101230807",
"city_name": "明溪县"
},
{
"_id": 510,
"id": 578,
"pid": 58,
"city_code": "101230803",
"city_name": "清流县"
},
{
"_id": 511,
"id": 579,
"pid": 58,
"city_code": "101230802",
"city_name": "宁化县"
},
{
"_id": 512,
"id": 580,
"pid": 58,
"city_code": "101230811",
"city_name": "大田县"
},
{
"_id": 513,
"id": 581,
"pid": 58,
"city_code": "101230809",
"city_name": "尤溪县"
},
{
"_id": 514,
"id": 582,
"pid": 58,
"city_code": "101230808",
"city_name": "沙县"
},
{
"_id": 515,
"id": 583,
"pid": 58,
"city_code": "101230805",
"city_name": "将乐县"
},
{
"_id": 516,
"id": 584,
"pid": 58,
"city_code": "101230804",
"city_name": "泰宁县"
},
{
"_id": 517,
"id": 585,
"pid": 58,
"city_code": "101230806",
"city_name": "建宁县"
},
{
"_id": 518,
"id": 590,
"pid": 59,
"city_code": "101230202",
"city_name": "同安区"
},
{
"_id": 519,
"id": 594,
"pid": 60,
"city_code": "101230605",
"city_name": "龙海市"
},
{
"_id": 520,
"id": 595,
"pid": 60,
"city_code": "101230609",
"city_name": "云霄县"
},
{
"_id": 521,
"id": 596,
"pid": 60,
"city_code": "101230606",
"city_name": "漳浦县"
},
{
"_id": 522,
"id": 597,
"pid": 60,
"city_code": "101230607",
"city_name": "诏安县"
},
{
"_id": 523,
"id": 598,
"pid": 60,
"city_code": "101230602",
"city_name": "长泰县"
},
{
"_id": 524,
"id": 599,
"pid": 60,
"city_code": "101230608",
"city_name": "东山县"
},
{
"_id": 525,
"id": 600,
"pid": 60,
"city_code": "101230603",
"city_name": "南靖县"
},
{
"_id": 526,
"id": 601,
"pid": 60,
"city_code": "101230604",
"city_name": "平和县"
},
{
"_id": 527,
"id": 602,
"pid": 60,
"city_code": "101230610",
"city_name": "华安县"
},
{
"_id": 528,
"id": 603,
"pid": 61,
"city_code": "101160102",
"city_name": "皋兰县"
},
{
"_id": 529,
"id": 609,
"pid": 61,
"city_code": "101160103",
"city_name": "永登县"
},
{
"_id": 530,
"id": 610,
"pid": 61,
"city_code": "101160104",
"city_name": "榆中县"
},
{
"_id": 531,
"id": 611,
"pid": 62,
"city_code": "101161301",
"city_name": "白银区"
},
{
"_id": 532,
"id": 612,
"pid": 62,
"city_code": "101161304",
"city_name": "平川区"
},
{
"_id": 533,
"id": 613,
"pid": 62,
"city_code": "101161303",
"city_name": "会宁县"
},
{
"_id": 534,
"id": 614,
"pid": 62,
"city_code": "101161305",
"city_name": "景泰县"
},
{
"_id": 535,
"id": 615,
"pid": 62,
"city_code": "101161302",
"city_name": "靖远县"
},
{
"_id": 536,
"id": 616,
"pid": 63,
"city_code": "101160205",
"city_name": "临洮县"
},
{
"_id": 537,
"id": 617,
"pid": 63,
"city_code": "101160203",
"city_name": "陇西县"
},
{
"_id": 538,
"id": 618,
"pid": 63,
"city_code": "101160202",
"city_name": "通渭县"
},
{
"_id": 539,
"id": 619,
"pid": 63,
"city_code": "101160204",
"city_name": "渭源县"
},
{
"_id": 540,
"id": 620,
"pid": 63,
"city_code": "101160206",
"city_name": "漳县"
},
{
"_id": 541,
"id": 621,
"pid": 63,
"city_code": "101160207",
"city_name": "岷县"
},
{
"_id": 542,
"id": 624,
"pid": 64,
"city_code": "101161201",
"city_name": "合作市"
},
{
"_id": 543,
"id": 625,
"pid": 64,
"city_code": "101161202",
"city_name": "临潭县"
},
{
"_id": 544,
"id": 626,
"pid": 64,
"city_code": "101161203",
"city_name": "卓尼县"
},
{
"_id": 545,
"id": 627,
"pid": 64,
"city_code": "101161204",
"city_name": "舟曲县"
},
{
"_id": 546,
"id": 628,
"pid": 64,
"city_code": "101161205",
"city_name": "迭部县"
},
{
"_id": 547,
"id": 629,
"pid": 64,
"city_code": "101161206",
"city_name": "玛曲县"
},
{
"_id": 548,
"id": 630,
"pid": 64,
"city_code": "101161207",
"city_name": "碌曲县"
},
{
"_id": 549,
"id": 631,
"pid": 64,
"city_code": "101161208",
"city_name": "夏河县"
},
{
"_id": 550,
"id": 634,
"pid": 66,
"city_code": "101160602",
"city_name": "永昌县"
},
{
"_id": 551,
"id": 636,
"pid": 67,
"city_code": "101160807",
"city_name": "玉门市"
},
{
"_id": 552,
"id": 637,
"pid": 67,
"city_code": "101160808",
"city_name": "敦煌市"
},
{
"_id": 553,
"id": 638,
"pid": 67,
"city_code": "101160803",
"city_name": "金塔县"
},
{
"_id": 554,
"id": 639,
"pid": 67,
"city_code": "101160805",
"city_name": "瓜州县"
},
{
"_id": 555,
"id": 640,
"pid": 67,
"city_code": "101160806",
"city_name": "肃北县"
},
{
"_id": 556,
"id": 641,
"pid": 67,
"city_code": "101160804",
"city_name": "阿克塞"
},
{
"_id": 557,
"id": 642,
"pid": 68,
"city_code": "101161101",
"city_name": "临夏市"
},
{
"_id": 558,
"id": 643,
"pid": 68,
"city_code": "101161101",
"city_name": "临夏县"
},
{
"_id": 559,
"id": 644,
"pid": 68,
"city_code": "101161102",
"city_name": "康乐县"
},
{
"_id": 560,
"id": 645,
"pid": 68,
"city_code": "101161103",
"city_name": "永靖县"
},
{
"_id": 561,
"id": 646,
"pid": 68,
"city_code": "101161104",
"city_name": "广河县"
},
{
"_id": 562,
"id": 647,
"pid": 68,
"city_code": "101161105",
"city_name": "和政县"
},
{
"_id": 563,
"id": 648,
"pid": 68,
"city_code": "101161106",
"city_name": "东乡族自治县"
},
{
"_id": 564,
"id": 649,
"pid": 68,
"city_code": "101161107",
"city_name": "积石山"
},
{
"_id": 565,
"id": 650,
"pid": 69,
"city_code": "101161002",
"city_name": "成县"
},
{
"_id": 566,
"id": 651,
"pid": 69,
"city_code": "101161008",
"city_name": "徽县"
},
{
"_id": 567,
"id": 652,
"pid": 69,
"city_code": "101161005",
"city_name": "康县"
},
{
"_id": 568,
"id": 653,
"pid": 69,
"city_code": "101161007",
"city_name": "礼县"
},
{
"_id": 569,
"id": 654,
"pid": 69,
"city_code": "101161009",
"city_name": "两当县"
},
{
"_id": 570,
"id": 655,
"pid": 69,
"city_code": "101161003",
"city_name": "文县"
},
{
"_id": 571,
"id": 656,
"pid": 69,
"city_code": "101161006",
"city_name": "西和县"
},
{
"_id": 572,
"id": 657,
"pid": 69,
"city_code": "101161004",
"city_name": "宕昌县"
},
{
"_id": 573,
"id": 658,
"pid": 69,
"city_code": "101161001",
"city_name": "武都区"
},
{
"_id": 574,
"id": 659,
"pid": 70,
"city_code": "101160304",
"city_name": "崇信县"
},
{
"_id": 575,
"id": 660,
"pid": 70,
"city_code": "101160305",
"city_name": "华亭县"
},
{
"_id": 576,
"id": 661,
"pid": 70,
"city_code": "101160307",
"city_name": "静宁县"
},
{
"_id": 577,
"id": 662,
"pid": 70,
"city_code": "101160303",
"city_name": "灵台县"
},
{
"_id": 578,
"id": 663,
"pid": 70,
"city_code": "101160308",
"city_name": "崆峒区"
},
{
"_id": 579,
"id": 664,
"pid": 70,
"city_code": "101160306",
"city_name": "庄浪县"
},
{
"_id": 580,
"id": 665,
"pid": 70,
"city_code": "101160302",
"city_name": "泾川县"
},
{
"_id": 581,
"id": 666,
"pid": 71,
"city_code": "101160405",
"city_name": "合水县"
},
{
"_id": 582,
"id": 667,
"pid": 71,
"city_code": "101160404",
"city_name": "华池县"
},
{
"_id": 583,
"id": 668,
"pid": 71,
"city_code": "101160403",
"city_name": "环县"
},
{
"_id": 584,
"id": 669,
"pid": 71,
"city_code": "101160407",
"city_name": "宁县"
},
{
"_id": 585,
"id": 670,
"pid": 71,
"city_code": "101160409",
"city_name": "庆城县"
},
{
"_id": 586,
"id": 671,
"pid": 71,
"city_code": "101160402",
"city_name": "西峰区"
},
{
"_id": 587,
"id": 672,
"pid": 71,
"city_code": "101160408",
"city_name": "镇原县"
},
{
"_id": 588,
"id": 673,
"pid": 71,
"city_code": "101160406",
"city_name": "正宁县"
},
{
"_id": 589,
"id": 674,
"pid": 72,
"city_code": "101160905",
"city_name": "甘谷县"
},
{
"_id": 590,
"id": 675,
"pid": 72,
"city_code": "101160904",
"city_name": "秦安县"
},
{
"_id": 591,
"id": 676,
"pid": 72,
"city_code": "101160903",
"city_name": "清水县"
},
{
"_id": 592,
"id": 678,
"pid": 72,
"city_code": "101160908",
"city_name": "麦积区"
},
{
"_id": 593,
"id": 679,
"pid": 72,
"city_code": "101160906",
"city_name": "武山县"
},
{
"_id": 594,
"id": 680,
"pid": 72,
"city_code": "101160907",
"city_name": "张家川"
},
{
"_id": 595,
"id": 681,
"pid": 73,
"city_code": "101160503",
"city_name": "古浪县"
},
{
"_id": 596,
"id": 682,
"pid": 73,
"city_code": "101160502",
"city_name": "民勤县"
},
{
"_id": 597,
"id": 683,
"pid": 73,
"city_code": "101160505",
"city_name": "天祝县"
},
{
"_id": 598,
"id": 685,
"pid": 74,
"city_code": "101160705",
"city_name": "高台县"
},
{
"_id": 599,
"id": 686,
"pid": 74,
"city_code": "101160704",
"city_name": "临泽县"
},
{
"_id": 600,
"id": 687,
"pid": 74,
"city_code": "101160703",
"city_name": "民乐县"
},
{
"_id": 601,
"id": 688,
"pid": 74,
"city_code": "101160706",
"city_name": "山丹县"
},
{
"_id": 602,
"id": 689,
"pid": 74,
"city_code": "101160702",
"city_name": "肃南县"
},
{
"_id": 603,
"id": 691,
"pid": 75,
"city_code": "101280103",
"city_name": "从化区"
},
{
"_id": 604,
"id": 692,
"pid": 75,
"city_code": "101280106",
"city_name": "天河区"
},
{
"_id": 605,
"id": 699,
"pid": 75,
"city_code": "101280102",
"city_name": "番禺区"
},
{
"_id": 606,
"id": 700,
"pid": 75,
"city_code": "101280105",
"city_name": "花都区"
},
{
"_id": 607,
"id": 701,
"pid": 75,
"city_code": "101280104",
"city_name": "增城区"
},
{
"_id": 608,
"id": 706,
"pid": 76,
"city_code": "101280604",
"city_name": "南山区"
},
{
"_id": 609,
"id": 711,
"pid": 77,
"city_code": "101281503",
"city_name": "潮安县"
},
{
"_id": 610,
"id": 712,
"pid": 77,
"city_code": "101281502",
"city_name": "饶平县"
},
{
"_id": 611,
"id": 746,
"pid": 79,
"city_code": "101280803",
"city_name": "南海区"
},
{
"_id": 612,
"id": 747,
"pid": 79,
"city_code": "101280801",
"city_name": "顺德区"
},
{
"_id": 613,
"id": 748,
"pid": 79,
"city_code": "101280802",
"city_name": "三水区"
},
{
"_id": 614,
"id": 749,
"pid": 79,
"city_code": "101280804",
"city_name": "高明区"
},
{
"_id": 615,
"id": 750,
"pid": 80,
"city_code": "101281206",
"city_name": "东源县"
},
{
"_id": 616,
"id": 751,
"pid": 80,
"city_code": "101281204",
"city_name": "和平县"
},
{
"_id": 617,
"id": 753,
"pid": 80,
"city_code": "101281203",
"city_name": "连平县"
},
{
"_id": 618,
"id": 754,
"pid": 80,
"city_code": "101281205",
"city_name": "龙川县"
},
{
"_id": 619,
"id": 755,
"pid": 80,
"city_code": "101281202",
"city_name": "紫金县"
},
{
"_id": 620,
"id": 756,
"pid": 81,
"city_code": "101280303",
"city_name": "惠阳区"
},
{
"_id": 621,
"id": 759,
"pid": 81,
"city_code": "101280302",
"city_name": "博罗县"
},
{
"_id": 622,
"id": 760,
"pid": 81,
"city_code": "101280304",
"city_name": "惠东县"
},
{
"_id": 623,
"id": 761,
"pid": 81,
"city_code": "101280305",
"city_name": "龙门县"
},
{
"_id": 624,
"id": 762,
"pid": 82,
"city_code": "101281109",
"city_name": "江海区"
},
{
"_id": 625,
"id": 763,
"pid": 82,
"city_code": "101281107",
"city_name": "蓬江区"
},
{
"_id": 626,
"id": 764,
"pid": 82,
"city_code": "101281104",
"city_name": "新会区"
},
{
"_id": 627,
"id": 765,
"pid": 82,
"city_code": "101281106",
"city_name": "台山市"
},
{
"_id": 628,
"id": 766,
"pid": 82,
"city_code": "101281103",
"city_name": "开平市"
},
{
"_id": 629,
"id": 767,
"pid": 82,
"city_code": "101281108",
"city_name": "鹤山市"
},
{
"_id": 630,
"id": 768,
"pid": 82,
"city_code": "101281105",
"city_name": "恩平市"
},
{
"_id": 631,
"id": 770,
"pid": 83,
"city_code": "101281903",
"city_name": "普宁市"
},
{
"_id": 632,
"id": 771,
"pid": 83,
"city_code": "101281905",
"city_name": "揭东县"
},
{
"_id": 633,
"id": 772,
"pid": 83,
"city_code": "101281902",
"city_name": "揭西县"
},
{
"_id": 634,
"id": 773,
"pid": 83,
"city_code": "101281904",
"city_name": "惠来县"
},
{
"_id": 635,
"id": 775,
"pid": 84,
"city_code": "101282006",
"city_name": "茂港区"
},
{
"_id": 636,
"id": 776,
"pid": 84,
"city_code": "101282002",
"city_name": "高州市"
},
{
"_id": 637,
"id": 777,
"pid": 84,
"city_code": "101282003",
"city_name": "化州市"
},
{
"_id": 638,
"id": 778,
"pid": 84,
"city_code": "101282005",
"city_name": "信宜市"
},
{
"_id": 639,
"id": 779,
"pid": 84,
"city_code": "101282004",
"city_name": "电白县"
},
{
"_id": 640,
"id": 780,
"pid": 85,
"city_code": "101280409",
"city_name": "梅县"
},
{
"_id": 641,
"id": 782,
"pid": 85,
"city_code": "101280402",
"city_name": "兴宁市"
},
{
"_id": 642,
"id": 783,
"pid": 85,
"city_code": "101280404",
"city_name": "大埔县"
},
{
"_id": 643,
"id": 784,
"pid": 85,
"city_code": "101280406",
"city_name": "丰顺县"
},
{
"_id": 644,
"id": 785,
"pid": 85,
"city_code": "101280408",
"city_name": "五华县"
},
{
"_id": 645,
"id": 786,
"pid": 85,
"city_code": "101280407",
"city_name": "平远县"
},
{
"_id": 646,
"id": 787,
"pid": 85,
"city_code": "101280403",
"city_name": "蕉岭县"
},
{
"_id": 647,
"id": 789,
"pid": 86,
"city_code": "101281307",
"city_name": "英德市"
},
{
"_id": 648,
"id": 790,
"pid": 86,
"city_code": "101281303",
"city_name": "连州市"
},
{
"_id": 649,
"id": 791,
"pid": 86,
"city_code": "101281306",
"city_name": "佛冈县"
},
{
"_id": 650,
"id": 792,
"pid": 86,
"city_code": "101281305",
"city_name": "阳山县"
},
{
"_id": 651,
"id": 793,
"pid": 86,
"city_code": "101281308",
"city_name": "清新县"
},
{
"_id": 652,
"id": 794,
"pid": 86,
"city_code": "101281304",
"city_name": "连山县"
},
{
"_id": 653,
"id": 795,
"pid": 86,
"city_code": "101281302",
"city_name": "连南县"
},
{
"_id": 654,
"id": 796,
"pid": 87,
"city_code": "101280504",
"city_name": "南澳县"
},
{
"_id": 655,
"id": 797,
"pid": 87,
"city_code": "101280502",
"city_name": "潮阳区"
},
{
"_id": 656,
"id": 798,
"pid": 87,
"city_code": "101280503",
"city_name": "澄海区"
},
{
"_id": 657,
"id": 804,
"pid": 88,
"city_code": "101282103",
"city_name": "陆丰市"
},
{
"_id": 658,
"id": 805,
"pid": 88,
"city_code": "101282102",
"city_name": "海丰县"
},
{
"_id": 659,
"id": 806,
"pid": 88,
"city_code": "101282104",
"city_name": "陆河县"
},
{
"_id": 660,
"id": 807,
"pid": 89,
"city_code": "101280209",
"city_name": "曲江区"
},
{
"_id": 661,
"id": 808,
"pid": 89,
"city_code": "101280210",
"city_name": "浈江区"
},
{
"_id": 662,
"id": 809,
"pid": 89,
"city_code": "101280211",
"city_name": "武江区"
},
{
"_id": 663,
"id": 810,
"pid": 89,
"city_code": "101280209",
"city_name": "曲江区"
},
{
"_id": 664,
"id": 811,
"pid": 89,
"city_code": "101280205",
"city_name": "乐昌市"
},
{
"_id": 665,
"id": 812,
"pid": 89,
"city_code": "101280207",
"city_name": "南雄市"
},
{
"_id": 666,
"id": 813,
"pid": 89,
"city_code": "101280203",
"city_name": "始兴县"
},
{
"_id": 667,
"id": 814,
"pid": 89,
"city_code": "101280206",
"city_name": "仁化县"
},
{
"_id": 668,
"id": 815,
"pid": 89,
"city_code": "101280204",
"city_name": "翁源县"
},
{
"_id": 669,
"id": 816,
"pid": 89,
"city_code": "101280208",
"city_name": "新丰县"
},
{
"_id": 670,
"id": 817,
"pid": 89,
"city_code": "101280202",
"city_name": "乳源县"
},
{
"_id": 671,
"id": 819,
"pid": 90,
"city_code": "101281802",
"city_name": "阳春市"
},
{
"_id": 672,
"id": 820,
"pid": 90,
"city_code": "101281804",
"city_name": "阳西县"
},
{
"_id": 673,
"id": 821,
"pid": 90,
"city_code": "101281803",
"city_name": "阳东县"
},
{
"_id": 674,
"id": 823,
"pid": 91,
"city_code": "101281402",
"city_name": "罗定市"
},
{
"_id": 675,
"id": 824,
"pid": 91,
"city_code": "101281403",
"city_name": "新兴县"
},
{
"_id": 676,
"id": 825,
"pid": 91,
"city_code": "101281404",
"city_name": "郁南县"
},
{
"_id": 677,
"id": 826,
"pid": 91,
"city_code": "101281406",
"city_name": "云安县"
},
{
"_id": 678,
"id": 827,
"pid": 92,
"city_code": "101281006",
"city_name": "赤坎区"
},
{
"_id": 679,
"id": 828,
"pid": 92,
"city_code": "101281009",
"city_name": "霞山区"
},
{
"_id": 680,
"id": 829,
"pid": 92,
"city_code": "101281008",
"city_name": "坡头区"
},
{
"_id": 681,
"id": 830,
"pid": 92,
"city_code": "101281010",
"city_name": "麻章区"
},
{
"_id": 682,
"id": 831,
"pid": 92,
"city_code": "101281005",
"city_name": "廉江市"
},
{
"_id": 683,
"id": 832,
"pid": 92,
"city_code": "101281003",
"city_name": "雷州市"
},
{
"_id": 684,
"id": 833,
"pid": 92,
"city_code": "101281002",
"city_name": "吴川市"
},
{
"_id": 685,
"id": 834,
"pid": 92,
"city_code": "101281007",
"city_name": "遂溪县"
},
{
"_id": 686,
"id": 835,
"pid": 92,
"city_code": "101281004",
"city_name": "徐闻县"
},
{
"_id": 687,
"id": 837,
"pid": 93,
"city_code": "101280908",
"city_name": "高要区"
},
{
"_id": 688,
"id": 838,
"pid": 93,
"city_code": "101280903",
"city_name": "四会市"
},
{
"_id": 689,
"id": 839,
"pid": 93,
"city_code": "101280902",
"city_name": "广宁县"
},
{
"_id": 690,
"id": 840,
"pid": 93,
"city_code": "101280906",
"city_name": "怀集县"
},
{
"_id": 691,
"id": 841,
"pid": 93,
"city_code": "101280907",
"city_name": "封开县"
},
{
"_id": 692,
"id": 842,
"pid": 93,
"city_code": "101280905",
"city_name": "德庆县"
},
{
"_id": 693,
"id": 850,
"pid": 95,
"city_code": "101280702",
"city_name": "斗门区"
},
{
"_id": 694,
"id": 851,
"pid": 95,
"city_code": "101280703",
"city_name": "金湾区"
},
{
"_id": 695,
"id": 852,
"pid": 96,
"city_code": "101300103",
"city_name": "邕宁区"
},
{
"_id": 696,
"id": 858,
"pid": 96,
"city_code": "101300108",
"city_name": "武鸣县"
},
{
"_id": 697,
"id": 859,
"pid": 96,
"city_code": "101300105",
"city_name": "隆安县"
},
{
"_id": 698,
"id": 860,
"pid": 96,
"city_code": "101300106",
"city_name": "马山县"
},
{
"_id": 699,
"id": 861,
"pid": 96,
"city_code": "101300107",
"city_name": "上林县"
},
{
"_id": 700,
"id": 862,
"pid": 96,
"city_code": "101300109",
"city_name": "宾阳县"
},
{
"_id": 701,
"id": 863,
"pid": 96,
"city_code": "101300104",
"city_name": "横县"
},
{
"_id": 702,
"id": 869,
"pid": 97,
"city_code": "101300510",
"city_name": "阳朔县"
},
{
"_id": 703,
"id": 870,
"pid": 97,
"city_code": "101300505",
"city_name": "临桂县"
},
{
"_id": 704,
"id": 871,
"pid": 97,
"city_code": "101300507",
"city_name": "灵川县"
},
{
"_id": 705,
"id": 872,
"pid": 97,
"city_code": "101300508",
"city_name": "全州县"
},
{
"_id": 706,
"id": 873,
"pid": 97,
"city_code": "101300512",
"city_name": "平乐县"
},
{
"_id": 707,
"id": 874,
"pid": 97,
"city_code": "101300506",
"city_name": "兴安县"
},
{
"_id": 708,
"id": 875,
"pid": 97,
"city_code": "101300509",
"city_name": "灌阳县"
},
{
"_id": 709,
"id": 876,
"pid": 97,
"city_code": "101300513",
"city_name": "荔浦县"
},
{
"_id": 710,
"id": 877,
"pid": 97,
"city_code": "101300514",
"city_name": "资源县"
},
{
"_id": 711,
"id": 878,
"pid": 97,
"city_code": "101300504",
"city_name": "永福县"
},
{
"_id": 712,
"id": 879,
"pid": 97,
"city_code": "101300503",
"city_name": "龙胜县"
},
{
"_id": 713,
"id": 880,
"pid": 97,
"city_code": "101300511",
"city_name": "恭城县"
},
{
"_id": 714,
"id": 882,
"pid": 98,
"city_code": "101301011",
"city_name": "凌云县"
},
{
"_id": 715,
"id": 883,
"pid": 98,
"city_code": "101301007",
"city_name": "平果县"
},
{
"_id": 716,
"id": 884,
"pid": 98,
"city_code": "101301009",
"city_name": "西林县"
},
{
"_id": 717,
"id": 885,
"pid": 98,
"city_code": "101301010",
"city_name": "乐业县"
},
{
"_id": 718,
"id": 886,
"pid": 98,
"city_code": "101301004",
"city_name": "德保县"
},
{
"_id": 719,
"id": 887,
"pid": 98,
"city_code": "101301012",
"city_name": "田林县"
},
{
"_id": 720,
"id": 888,
"pid": 98,
"city_code": "101301003",
"city_name": "田阳县"
},
{
"_id": 721,
"id": 889,
"pid": 98,
"city_code": "101301005",
"city_name": "靖西县"
},
{
"_id": 722,
"id": 890,
"pid": 98,
"city_code": "101301006",
"city_name": "田东县"
},
{
"_id": 723,
"id": 891,
"pid": 98,
"city_code": "101301002",
"city_name": "那坡县"
},
{
"_id": 724,
"id": 892,
"pid": 98,
"city_code": "101301008",
"city_name": "隆林县"
},
{
"_id": 725,
"id": 896,
"pid": 99,
"city_code": "101301302",
"city_name": "合浦县"
},
{
"_id": 726,
"id": 898,
"pid": 100,
"city_code": "101300204",
"city_name": "凭祥市"
},
{
"_id": 727,
"id": 899,
"pid": 100,
"city_code": "101300207",
"city_name": "宁明县"
},
{
"_id": 728,
"id": 900,
"pid": 100,
"city_code": "101300206",
"city_name": "扶绥县"
},
{
"_id": 729,
"id": 901,
"pid": 100,
"city_code": "101300203",
"city_name": "龙州县"
},
{
"_id": 730,
"id": 902,
"pid": 100,
"city_code": "101300205",
"city_name": "大新县"
},
{
"_id": 731,
"id": 903,
"pid": 100,
"city_code": "101300202",
"city_name": "天等县"
},
{
"_id": 732,
"id": 905,
"pid": 101,
"city_code": "101301405",
"city_name": "防城区"
},
{
"_id": 733,
"id": 906,
"pid": 101,
"city_code": "101301403",
"city_name": "东兴市"
},
{
"_id": 734,
"id": 907,
"pid": 101,
"city_code": "101301402",
"city_name": "上思县"
},
{
"_id": 735,
"id": 911,
"pid": 102,
"city_code": "101300802",
"city_name": "桂平市"
},
{
"_id": 736,
"id": 912,
"pid": 102,
"city_code": "101300803",
"city_name": "平南县"
},
{
"_id": 737,
"id": 914,
"pid": 103,
"city_code": "101301207",
"city_name": "宜州市"
},
{
"_id": 738,
"id": 915,
"pid": 103,
"city_code": "101301202",
"city_name": "天峨县"
},
{
"_id": 739,
"id": 916,
"pid": 103,
"city_code": "101301208",
"city_name": "凤山县"
},
{
"_id": 740,
"id": 917,
"pid": 103,
"city_code": "101301209",
"city_name": "南丹县"
},
{
"_id": 741,
"id": 918,
"pid": 103,
"city_code": "101301203",
"city_name": "东兰县"
},
{
"_id": 742,
"id": 919,
"pid": 103,
"city_code": "101301210",
"city_name": "都安县"
},
{
"_id": 743,
"id": 920,
"pid": 103,
"city_code": "101301206",
"city_name": "罗城县"
},
{
"_id": 744,
"id": 921,
"pid": 103,
"city_code": "101301204",
"city_name": "巴马县"
},
{
"_id": 745,
"id": 922,
"pid": 103,
"city_code": "101301205",
"city_name": "环江县"
},
{
"_id": 746,
"id": 923,
"pid": 103,
"city_code": "101301211",
"city_name": "大化县"
},
{
"_id": 747,
"id": 925,
"pid": 104,
"city_code": "101300704",
"city_name": "钟山县"
},
{
"_id": 748,
"id": 926,
"pid": 104,
"city_code": "101300702",
"city_name": "昭平县"
},
{
"_id": 749,
"id": 927,
"pid": 104,
"city_code": "101300703",
"city_name": "富川县"
},
{
"_id": 750,
"id": 929,
"pid": 105,
"city_code": "101300406",
"city_name": "合山市"
},
{
"_id": 751,
"id": 930,
"pid": 105,
"city_code": "101300404",
"city_name": "象州县"
},
{
"_id": 752,
"id": 931,
"pid": 105,
"city_code": "101300405",
"city_name": "武宣县"
},
{
"_id": 753,
"id": 932,
"pid": 105,
"city_code": "101300402",
"city_name": "忻城县"
},
{
"_id": 754,
"id": 933,
"pid": 105,
"city_code": "101300403",
"city_name": "金秀县"
},
{
"_id": 755,
"id": 938,
"pid": 106,
"city_code": "101300305",
"city_name": "柳江县"
},
{
"_id": 756,
"id": 939,
"pid": 106,
"city_code": "101300302",
"city_name": "柳城县"
},
{
"_id": 757,
"id": 940,
"pid": 106,
"city_code": "101300304",
"city_name": "鹿寨县"
},
{
"_id": 758,
"id": 941,
"pid": 106,
"city_code": "101300306",
"city_name": "融安县"
},
{
"_id": 759,
"id": 942,
"pid": 106,
"city_code": "101300307",
"city_name": "融水县"
},
{
"_id": 760,
"id": 943,
"pid": 106,
"city_code": "101300308",
"city_name": "三江县"
},
{
"_id": 761,
"id": 946,
"pid": 107,
"city_code": "101301103",
"city_name": "灵山县"
},
{
"_id": 762,
"id": 947,
"pid": 107,
"city_code": "101301102",
"city_name": "浦北县"
},
{
"_id": 763,
"id": 950,
"pid": 108,
"city_code": "101300607",
"city_name": "长洲区"
},
{
"_id": 764,
"id": 951,
"pid": 108,
"city_code": "101300606",
"city_name": "岑溪市"
},
{
"_id": 765,
"id": 952,
"pid": 108,
"city_code": "101300604",
"city_name": "苍梧县"
},
{
"_id": 766,
"id": 953,
"pid": 108,
"city_code": "101300602",
"city_name": "藤县"
},
{
"_id": 767,
"id": 954,
"pid": 108,
"city_code": "101300605",
"city_name": "蒙山县"
},
{
"_id": 768,
"id": 956,
"pid": 109,
"city_code": "101300903",
"city_name": "北流市"
},
{
"_id": 769,
"id": 957,
"pid": 109,
"city_code": "101300904",
"city_name": "容县"
},
{
"_id": 770,
"id": 958,
"pid": 109,
"city_code": "101300905",
"city_name": "陆川县"
},
{
"_id": 771,
"id": 959,
"pid": 109,
"city_code": "101300902",
"city_name": "博白县"
},
{
"_id": 772,
"id": 960,
"pid": 109,
"city_code": "101300906",
"city_name": "兴业县"
},
{
"_id": 773,
"id": 961,
"pid": 110,
"city_code": "101260111",
"city_name": "南明区"
},
{
"_id": 774,
"id": 962,
"pid": 110,
"city_code": "101260110",
"city_name": "云岩区"
},
{
"_id": 775,
"id": 963,
"pid": 110,
"city_code": "101260103",
"city_name": "花溪区"
},
{
"_id": 776,
"id": 964,
"pid": 110,
"city_code": "101260104",
"city_name": "乌当区"
},
{
"_id": 777,
"id": 965,
"pid": 110,
"city_code": "101260102",
"city_name": "白云区"
},
{
"_id": 778,
"id": 966,
"pid": 110,
"city_code": "101260109",
"city_name": "小河区"
},
{
"_id": 779,
"id": 969,
"pid": 110,
"city_code": "101260108",
"city_name": "清镇市"
},
{
"_id": 780,
"id": 970,
"pid": 110,
"city_code": "101260106",
"city_name": "开阳县"
},
{
"_id": 781,
"id": 971,
"pid": 110,
"city_code": "101260107",
"city_name": "修文县"
},
{
"_id": 782,
"id": 972,
"pid": 110,
"city_code": "101260105",
"city_name": "息烽县"
},
{
"_id": 783,
"id": 974,
"pid": 111,
"city_code": "101260306",
"city_name": "关岭县"
},
{
"_id": 784,
"id": 976,
"pid": 111,
"city_code": "101260305",
"city_name": "紫云县"
},
{
"_id": 785,
"id": 977,
"pid": 111,
"city_code": "101260304",
"city_name": "平坝县"
},
{
"_id": 786,
"id": 978,
"pid": 111,
"city_code": "101260302",
"city_name": "普定县"
},
{
"_id": 787,
"id": 980,
"pid": 112,
"city_code": "101260705",
"city_name": "大方县"
},
{
"_id": 788,
"id": 981,
"pid": 112,
"city_code": "101260708",
"city_name": "黔西县"
},
{
"_id": 789,
"id": 982,
"pid": 112,
"city_code": "101260703",
"city_name": "金沙县"
},
{
"_id": 790,
"id": 983,
"pid": 112,
"city_code": "101260707",
"city_name": "织金县"
},
{
"_id": 791,
"id": 984,
"pid": 112,
"city_code": "101260706",
"city_name": "纳雍县"
},
{
"_id": 792,
"id": 985,
"pid": 112,
"city_code": "101260702",
"city_name": "赫章县"
},
{
"_id": 793,
"id": 986,
"pid": 112,
"city_code": "101260704",
"city_name": "威宁县"
},
{
"_id": 794,
"id": 989,
"pid": 113,
"city_code": "101260801",
"city_name": "水城县"
},
{
"_id": 795,
"id": 990,
"pid": 113,
"city_code": "101260804",
"city_name": "盘县"
},
{
"_id": 796,
"id": 991,
"pid": 114,
"city_code": "101260501",
"city_name": "凯里市"
},
{
"_id": 797,
"id": 992,
"pid": 114,
"city_code": "101260505",
"city_name": "黄平县"
},
{
"_id": 798,
"id": 993,
"pid": 114,
"city_code": "101260503",
"city_name": "施秉县"
},
{
"_id": 799,
"id": 994,
"pid": 114,
"city_code": "101260509",
"city_name": "三穗县"
},
{
"_id": 800,
"id": 995,
"pid": 114,
"city_code": "101260504",
"city_name": "镇远县"
},
{
"_id": 801,
"id": 996,
"pid": 114,
"city_code": "101260502",
"city_name": "岑巩县"
},
{
"_id": 802,
"id": 997,
"pid": 114,
"city_code": "101260514",
"city_name": "天柱县"
},
{
"_id": 803,
"id": 998,
"pid": 114,
"city_code": "101260515",
"city_name": "锦屏县"
},
{
"_id": 804,
"id": 999,
"pid": 114,
"city_code": "101260511",
"city_name": "剑河县"
},
{
"_id": 805,
"id": 1000,
"pid": 114,
"city_code": "101260510",
"city_name": "台江县"
},
{
"_id": 806,
"id": 1001,
"pid": 114,
"city_code": "101260513",
"city_name": "黎平县"
},
{
"_id": 807,
"id": 1002,
"pid": 114,
"city_code": "101260516",
"city_name": "榕江县"
},
{
"_id": 808,
"id": 1003,
"pid": 114,
"city_code": "101260517",
"city_name": "从江县"
},
{
"_id": 809,
"id": 1004,
"pid": 114,
"city_code": "101260512",
"city_name": "雷山县"
},
{
"_id": 810,
"id": 1005,
"pid": 114,
"city_code": "101260507",
"city_name": "麻江县"
},
{
"_id": 811,
"id": 1006,
"pid": 114,
"city_code": "101260508",
"city_name": "丹寨县"
},
{
"_id": 812,
"id": 1007,
"pid": 115,
"city_code": "101260401",
"city_name": "都匀市"
},
{
"_id": 813,
"id": 1008,
"pid": 115,
"city_code": "101260405",
"city_name": "福泉市"
},
{
"_id": 814,
"id": 1009,
"pid": 115,
"city_code": "101260412",
"city_name": "荔波县"
},
{
"_id": 815,
"id": 1010,
"pid": 115,
"city_code": "101260402",
"city_name": "贵定县"
},
{
"_id": 816,
"id": 1011,
"pid": 115,
"city_code": "101260403",
"city_name": "瓮安县"
},
{
"_id": 817,
"id": 1012,
"pid": 115,
"city_code": "101260410",
"city_name": "独山县"
},
{
"_id": 818,
"id": 1013,
"pid": 115,
"city_code": "101260409",
"city_name": "平塘县"
},
{
"_id": 819,
"id": 1014,
"pid": 115,
"city_code": "101260408",
"city_name": "罗甸县"
},
{
"_id": 820,
"id": 1015,
"pid": 115,
"city_code": "101260404",
"city_name": "长顺县"
},
{
"_id": 821,
"id": 1016,
"pid": 115,
"city_code": "101260407",
"city_name": "龙里县"
},
{
"_id": 822,
"id": 1017,
"pid": 115,
"city_code": "101260406",
"city_name": "惠水县"
},
{
"_id": 823,
"id": 1018,
"pid": 115,
"city_code": "101260411",
"city_name": "三都县"
},
{
"_id": 824,
"id": 1019,
"pid": 116,
"city_code": "101260906",
"city_name": "兴义市"
},
{
"_id": 825,
"id": 1020,
"pid": 116,
"city_code": "101260903",
"city_name": "兴仁县"
},
{
"_id": 826,
"id": 1021,
"pid": 116,
"city_code": "101260909",
"city_name": "普安县"
},
{
"_id": 827,
"id": 1022,
"pid": 116,
"city_code": "101260902",
"city_name": "晴隆县"
},
{
"_id": 828,
"id": 1023,
"pid": 116,
"city_code": "101260904",
"city_name": "贞丰县"
},
{
"_id": 829,
"id": 1024,
"pid": 116,
"city_code": "101260905",
"city_name": "望谟县"
},
{
"_id": 830,
"id": 1025,
"pid": 116,
"city_code": "101260908",
"city_name": "册亨县"
},
{
"_id": 831,
"id": 1026,
"pid": 116,
"city_code": "101260907",
"city_name": "安龙县"
},
{
"_id": 832,
"id": 1027,
"pid": 117,
"city_code": "101260601",
"city_name": "铜仁市"
},
{
"_id": 833,
"id": 1028,
"pid": 117,
"city_code": "101260602",
"city_name": "江口县"
},
{
"_id": 834,
"id": 1029,
"pid": 117,
"city_code": "101260608",
"city_name": "石阡县"
},
{
"_id": 835,
"id": 1030,
"pid": 117,
"city_code": "101260605",
"city_name": "思南县"
},
{
"_id": 836,
"id": 1031,
"pid": 117,
"city_code": "101260610",
"city_name": "德江县"
},
{
"_id": 837,
"id": 1032,
"pid": 117,
"city_code": "101260603",
"city_name": "玉屏县"
},
{
"_id": 838,
"id": 1033,
"pid": 117,
"city_code": "101260607",
"city_name": "印江县"
},
{
"_id": 839,
"id": 1034,
"pid": 117,
"city_code": "101260609",
"city_name": "沿河县"
},
{
"_id": 840,
"id": 1035,
"pid": 117,
"city_code": "101260611",
"city_name": "松桃县"
},
{
"_id": 841,
"id": 1037,
"pid": 118,
"city_code": "101260215",
"city_name": "红花岗区"
},
{
"_id": 842,
"id": 1038,
"pid": 118,
"city_code": "101260212",
"city_name": "务川县"
},
{
"_id": 843,
"id": 1039,
"pid": 118,
"city_code": "101260210",
"city_name": "道真县"
},
{
"_id": 844,
"id": 1040,
"pid": 118,
"city_code": "101260214",
"city_name": "汇川区"
},
{
"_id": 845,
"id": 1041,
"pid": 118,
"city_code": "101260208",
"city_name": "赤水市"
},
{
"_id": 846,
"id": 1042,
"pid": 118,
"city_code": "101260203",
"city_name": "仁怀市"
},
{
"_id": 847,
"id": 1043,
"pid": 118,
"city_code": "101260202",
"city_name": "遵义县"
},
{
"_id": 848,
"id": 1044,
"pid": 118,
"city_code": "101260207",
"city_name": "桐梓县"
},
{
"_id": 849,
"id": 1045,
"pid": 118,
"city_code": "101260204",
"city_name": "绥阳县"
},
{
"_id": 850,
"id": 1046,
"pid": 118,
"city_code": "101260211",
"city_name": "正安县"
},
{
"_id": 851,
"id": 1047,
"pid": 118,
"city_code": "101260206",
"city_name": "凤冈县"
},
{
"_id": 852,
"id": 1048,
"pid": 118,
"city_code": "101260205",
"city_name": "湄潭县"
},
{
"_id": 853,
"id": 1049,
"pid": 118,
"city_code": "101260213",
"city_name": "余庆县"
},
{
"_id": 854,
"id": 1050,
"pid": 118,
"city_code": "101260209",
"city_name": "习水县"
},
{
"_id": 855,
"id": 1055,
"pid": 119,
"city_code": "101310102",
"city_name": "琼山区"
},
{
"_id": 856,
"id": 1082,
"pid": 137,
"city_code": "101090102",
"city_name": "井陉矿区"
},
{
"_id": 857,
"id": 1084,
"pid": 137,
"city_code": "101090114",
"city_name": "辛集市"
},
{
"_id": 858,
"id": 1085,
"pid": 137,
"city_code": "101090115",
"city_name": "藁城市"
},
{
"_id": 859,
"id": 1086,
"pid": 137,
"city_code": "101090116",
"city_name": "晋州市"
},
{
"_id": 860,
"id": 1087,
"pid": 137,
"city_code": "101090117",
"city_name": "新乐市"
},
{
"_id": 861,
"id": 1088,
"pid": 137,
"city_code": "101090118",
"city_name": "鹿泉区"
},
{
"_id": 862,
"id": 1089,
"pid": 137,
"city_code": "101090102",
"city_name": "井陉县"
},
{
"_id": 863,
"id": 1090,
"pid": 137,
"city_code": "101090103",
"city_name": "正定县"
},
{
"_id": 864,
"id": 1091,
"pid": 137,
"city_code": "101090104",
"city_name": "栾城区"
},
{
"_id": 865,
"id": 1092,
"pid": 137,
"city_code": "101090105",
"city_name": "行唐县"
},
{
"_id": 866,
"id": 1093,
"pid": 137,
"city_code": "101090106",
"city_name": "灵寿县"
},
{
"_id": 867,
"id": 1094,
"pid": 137,
"city_code": "101090107",
"city_name": "高邑县"
},
{
"_id": 868,
"id": 1095,
"pid": 137,
"city_code": "101090108",
"city_name": "深泽县"
},
{
"_id": 869,
"id": 1096,
"pid": 137,
"city_code": "101090109",
"city_name": "赞皇县"
},
{
"_id": 870,
"id": 1097,
"pid": 137,
"city_code": "101090110",
"city_name": "无极县"
},
{
"_id": 871,
"id": 1098,
"pid": 137,
"city_code": "101090111",
"city_name": "平山县"
},
{
"_id": 872,
"id": 1099,
"pid": 137,
"city_code": "101090112",
"city_name": "元氏县"
},
{
"_id": 873,
"id": 1100,
"pid": 137,
"city_code": "101090113",
"city_name": "赵县"
},
{
"_id": 874,
"id": 1104,
"pid": 138,
"city_code": "101090218",
"city_name": "涿州市"
},
{
"_id": 875,
"id": 1105,
"pid": 138,
"city_code": "101090219",
"city_name": "定州市"
},
{
"_id": 876,
"id": 1106,
"pid": 138,
"city_code": "101090220",
"city_name": "安国市"
},
{
"_id": 877,
"id": 1107,
"pid": 138,
"city_code": "101090221",
"city_name": "高碑店市"
},
{
"_id": 878,
"id": 1108,
"pid": 138,
"city_code": "101090202",
"city_name": "满城县"
},
{
"_id": 879,
"id": 1109,
"pid": 138,
"city_code": "101090224",
"city_name": "清苑县"
},
{
"_id": 880,
"id": 1110,
"pid": 138,
"city_code": "101090213",
"city_name": "涞水县"
},
{
"_id": 881,
"id": 1111,
"pid": 138,
"city_code": "101090203",
"city_name": "阜平县"
},
{
"_id": 882,
"id": 1112,
"pid": 138,
"city_code": "101090204",
"city_name": "徐水县"
},
{
"_id": 883,
"id": 1113,
"pid": 138,
"city_code": "101090223",
"city_name": "定兴县"
},
{
"_id": 884,
"id": 1114,
"pid": 138,
"city_code": "101090205",
"city_name": "唐县"
},
{
"_id": 885,
"id": 1115,
"pid": 138,
"city_code": "101090206",
"city_name": "高阳县"
},
{
"_id": 886,
"id": 1116,
"pid": 138,
"city_code": "101090207",
"city_name": "容城县"
},
{
"_id": 887,
"id": 1117,
"pid": 138,
"city_code": "101090209",
"city_name": "涞源县"
},
{
"_id": 888,
"id": 1118,
"pid": 138,
"city_code": "101090210",
"city_name": "望都县"
},
{
"_id": 889,
"id": 1119,
"pid": 138,
"city_code": "101090211",
"city_name": "安新县"
},
{
"_id": 890,
"id": 1120,
"pid": 138,
"city_code": "101090212",
"city_name": "易县"
},
{
"_id": 891,
"id": 1121,
"pid": 138,
"city_code": "101090214",
"city_name": "曲阳县"
},
{
"_id": 892,
"id": 1122,
"pid": 138,
"city_code": "101090215",
"city_name": "蠡县"
},
{
"_id": 893,
"id": 1123,
"pid": 138,
"city_code": "101090216",
"city_name": "顺平县"
},
{
"_id": 894,
"id": 1124,
"pid": 138,
"city_code": "101090225",
"city_name": "博野县"
},
{
"_id": 895,
"id": 1125,
"pid": 138,
"city_code": "101090217",
"city_name": "雄县"
},
{
"_id": 896,
"id": 1128,
"pid": 139,
"city_code": "101090711",
"city_name": "泊头市"
},
{
"_id": 897,
"id": 1129,
"pid": 139,
"city_code": "101090712",
"city_name": "任丘市"
},
{
"_id": 898,
"id": 1130,
"pid": 139,
"city_code": "101090713",
"city_name": "黄骅市"
},
{
"_id": 899,
"id": 1131,
"pid": 139,
"city_code": "101090714",
"city_name": "河间市"
},
{
"_id": 900,
"id": 1132,
"pid": 139,
"city_code": "101090716",
"city_name": "沧县"
},
{
"_id": 901,
"id": 1133,
"pid": 139,
"city_code": "101090702",
"city_name": "青县"
},
{
"_id": 902,
"id": 1134,
"pid": 139,
"city_code": "101090703",
"city_name": "东光县"
},
{
"_id": 903,
"id": 1135,
"pid": 139,
"city_code": "101090704",
"city_name": "海兴县"
},
{
"_id": 904,
"id": 1136,
"pid": 139,
"city_code": "101090705",
"city_name": "盐山县"
},
{
"_id": 905,
"id": 1137,
"pid": 139,
"city_code": "101090706",
"city_name": "肃宁县"
},
{
"_id": 906,
"id": 1138,
"pid": 139,
"city_code": "101090707",
"city_name": "南皮县"
},
{
"_id": 907,
"id": 1139,
"pid": 139,
"city_code": "101090708",
"city_name": "吴桥县"
},
{
"_id": 908,
"id": 1140,
"pid": 139,
"city_code": "101090709",
"city_name": "献县"
},
{
"_id": 909,
"id": 1141,
"pid": 139,
"city_code": "101090710",
"city_name": "孟村县"
},
{
"_id": 910,
"id": 1145,
"pid": 140,
"city_code": "101090403",
"city_name": "承德县"
},
{
"_id": 911,
"id": 1146,
"pid": 140,
"city_code": "101090404",
"city_name": "兴隆县"
},
{
"_id": 912,
"id": 1147,
"pid": 140,
"city_code": "101090405",
"city_name": "平泉县"
},
{
"_id": 913,
"id": 1148,
"pid": 140,
"city_code": "101090406",
"city_name": "滦平县"
},
{
"_id": 914,
"id": 1149,
"pid": 140,
"city_code": "101090407",
"city_name": "隆化县"
},
{
"_id": 915,
"id": 1150,
"pid": 140,
"city_code": "101090408",
"city_name": "丰宁县"
},
{
"_id": 916,
"id": 1151,
"pid": 140,
"city_code": "101090409",
"city_name": "宽城县"
},
{
"_id": 917,
"id": 1152,
"pid": 140,
"city_code": "101090410",
"city_name": "围场县"
},
{
"_id": 918,
"id": 1156,
"pid": 141,
"city_code": "101091002",
"city_name": "峰峰矿区"
},
{
"_id": 919,
"id": 1157,
"pid": 141,
"city_code": "101091016",
"city_name": "武安市"
},
{
"_id": 920,
"id": 1158,
"pid": 141,
"city_code": "101091001",
"city_name": "邯郸县"
},
{
"_id": 921,
"id": 1159,
"pid": 141,
"city_code": "101091003",
"city_name": "临漳县"
},
{
"_id": 922,
"id": 1160,
"pid": 141,
"city_code": "101091004",
"city_name": "成安县"
},
{
"_id": 923,
"id": 1161,
"pid": 141,
"city_code": "101091005",
"city_name": "大名县"
},
{
"_id": 924,
"id": 1162,
"pid": 141,
"city_code": "101091006",
"city_name": "涉县"
},
{
"_id": 925,
"id": 1163,
"pid": 141,
"city_code": "101091007",
"city_name": "磁县"
},
{
"_id": 926,
"id": 1164,
"pid": 141,
"city_code": "101091008",
"city_name": "肥乡县"
},
{
"_id": 927,
"id": 1165,
"pid": 141,
"city_code": "101091009",
"city_name": "永年县"
},
{
"_id": 928,
"id": 1166,
"pid": 141,
"city_code": "101091010",
"city_name": "邱县"
},
{
"_id": 929,
"id": 1167,
"pid": 141,
"city_code": "101091011",
"city_name": "鸡泽县"
},
{
"_id": 930,
"id": 1168,
"pid": 141,
"city_code": "101091012",
"city_name": "广平县"
},
{
"_id": 931,
"id": 1169,
"pid": 141,
"city_code": "101091013",
"city_name": "馆陶县"
},
{
"_id": 932,
"id": 1170,
"pid": 141,
"city_code": "101091014",
"city_name": "魏县"
},
{
"_id": 933,
"id": 1171,
"pid": 141,
"city_code": "101091015",
"city_name": "曲周县"
},
{
"_id": 934,
"id": 1173,
"pid": 142,
"city_code": "101090810",
"city_name": "冀州市"
},
{
"_id": 935,
"id": 1174,
"pid": 142,
"city_code": "101090811",
"city_name": "深州市"
},
{
"_id": 936,
"id": 1175,
"pid": 142,
"city_code": "101090802",
"city_name": "枣强县"
},
{
"_id": 937,
"id": 1176,
"pid": 142,
"city_code": "101090803",
"city_name": "武邑县"
},
{
"_id": 938,
"id": 1177,
"pid": 142,
"city_code": "101090804",
"city_name": "武强县"
},
{
"_id": 939,
"id": 1178,
"pid": 142,
"city_code": "101090805",
"city_name": "饶阳县"
},
{
"_id": 940,
"id": 1179,
"pid": 142,
"city_code": "101090806",
"city_name": "安平县"
},
{
"_id": 941,
"id": 1180,
"pid": 142,
"city_code": "101090807",
"city_name": "故城县"
},
{
"_id": 942,
"id": 1181,
"pid": 142,
"city_code": "101090808",
"city_name": "景县"
},
{
"_id": 943,
"id": 1182,
"pid": 142,
"city_code": "101090809",
"city_name": "阜城县"
},
{
"_id": 944,
"id": 1185,
"pid": 143,
"city_code": "101090608",
"city_name": "霸州市"
},
{
"_id": 945,
"id": 1186,
"pid": 143,
"city_code": "101090609",
"city_name": "三河市"
},
{
"_id": 946,
"id": 1187,
"pid": 143,
"city_code": "101090602",
"city_name": "固安县"
},
{
"_id": 947,
"id": 1188,
"pid": 143,
"city_code": "101090603",
"city_name": "永清县"
},
{
"_id": 948,
"id": 1189,
"pid": 143,
"city_code": "101090604",
"city_name": "香河县"
},
{
"_id": 949,
"id": 1190,
"pid": 143,
"city_code": "101090605",
"city_name": "大城县"
},
{
"_id": 950,
"id": 1191,
"pid": 143,
"city_code": "101090606",
"city_name": "文安县"
},
{
"_id": 951,
"id": 1192,
"pid": 143,
"city_code": "101090607",
"city_name": "大厂县"
},
{
"_id": 952,
"id": 1195,
"pid": 144,
"city_code": "101091106",
"city_name": "北戴河区"
},
{
"_id": 953,
"id": 1196,
"pid": 144,
"city_code": "101091103",
"city_name": "昌黎县"
},
{
"_id": 954,
"id": 1197,
"pid": 144,
"city_code": "101091104",
"city_name": "抚宁县"
},
{
"_id": 955,
"id": 1198,
"pid": 144,
"city_code": "101091105",
"city_name": "卢龙县"
},
{
"_id": 956,
"id": 1199,
"pid": 144,
"city_code": "101091102",
"city_name": "青龙县"
},
{
"_id": 957,
"id": 1204,
"pid": 145,
"city_code": "101090502",
"city_name": "丰南区"
},
{
"_id": 958,
"id": 1205,
"pid": 145,
"city_code": "101090503",
"city_name": "丰润区"
},
{
"_id": 959,
"id": 1206,
"pid": 145,
"city_code": "101090510",
"city_name": "遵化市"
},
{
"_id": 960,
"id": 1207,
"pid": 145,
"city_code": "101090511",
"city_name": "迁安市"
},
{
"_id": 961,
"id": 1208,
"pid": 145,
"city_code": "101090504",
"city_name": "滦县"
},
{
"_id": 962,
"id": 1209,
"pid": 145,
"city_code": "101090505",
"city_name": "滦南县"
},
{
"_id": 963,
"id": 1210,
"pid": 145,
"city_code": "101090506",
"city_name": "乐亭县"
},
{
"_id": 964,
"id": 1211,
"pid": 145,
"city_code": "101090507",
"city_name": "迁西县"
},
{
"_id": 965,
"id": 1212,
"pid": 145,
"city_code": "101090508",
"city_name": "玉田县"
},
{
"_id": 966,
"id": 1213,
"pid": 145,
"city_code": "101090509",
"city_name": "唐海县"
},
{
"_id": 967,
"id": 1216,
"pid": 146,
"city_code": "101090916",
"city_name": "南宫市"
},
{
"_id": 968,
"id": 1217,
"pid": 146,
"city_code": "101090917",
"city_name": "沙河市"
},
{
"_id": 969,
"id": 1218,
"pid": 146,
"city_code": "101090901",
"city_name": "邢台县"
},
{
"_id": 970,
"id": 1219,
"pid": 146,
"city_code": "101090902",
"city_name": "临城县"
},
{
"_id": 971,
"id": 1220,
"pid": 146,
"city_code": "101090904",
"city_name": "内丘县"
},
{
"_id": 972,
"id": 1221,
"pid": 146,
"city_code": "101090905",
"city_name": "柏乡县"
},
{
"_id": 973,
"id": 1222,
"pid": 146,
"city_code": "101090906",
"city_name": "隆尧县"
},
{
"_id": 974,
"id": 1223,
"pid": 146,
"city_code": "101090918",
"city_name": "任县"
},
{
"_id": 975,
"id": 1224,
"pid": 146,
"city_code": "101090907",
"city_name": "南和县"
},
{
"_id": 976,
"id": 1225,
"pid": 146,
"city_code": "101090908",
"city_name": "宁晋县"
},
{
"_id": 977,
"id": 1226,
"pid": 146,
"city_code": "101090909",
"city_name": "巨鹿县"
},
{
"_id": 978,
"id": 1227,
"pid": 146,
"city_code": "101090910",
"city_name": "新河县"
},
{
"_id": 979,
"id": 1228,
"pid": 146,
"city_code": "101090911",
"city_name": "广宗县"
},
{
"_id": 980,
"id": 1229,
"pid": 146,
"city_code": "101090912",
"city_name": "平乡县"
},
{
"_id": 981,
"id": 1230,
"pid": 146,
"city_code": "101090913",
"city_name": "威县"
},
{
"_id": 982,
"id": 1231,
"pid": 146,
"city_code": "101090914",
"city_name": "清河县"
},
{
"_id": 983,
"id": 1232,
"pid": 146,
"city_code": "101090915",
"city_name": "临西县"
},
{
"_id": 984,
"id": 1235,
"pid": 147,
"city_code": "101090302",
"city_name": "宣化区"
},
{
"_id": 985,
"id": 1237,
"pid": 147,
"city_code": "101090302",
"city_name": "宣化县"
},
{
"_id": 986,
"id": 1238,
"pid": 147,
"city_code": "101090303",
"city_name": "张北县"
},
{
"_id": 987,
"id": 1239,
"pid": 147,
"city_code": "101090304",
"city_name": "康保县"
},
{
"_id": 988,
"id": 1240,
"pid": 147,
"city_code": "101090305",
"city_name": "沽源县"
},
{
"_id": 989,
"id": 1241,
"pid": 147,
"city_code": "101090306",
"city_name": "尚义县"
},
{
"_id": 990,
"id": 1242,
"pid": 147,
"city_code": "101090307",
"city_name": "蔚县"
},
{
"_id": 991,
"id": 1243,
"pid": 147,
"city_code": "101090308",
"city_name": "阳原县"
},
{
"_id": 992,
"id": 1244,
"pid": 147,
"city_code": "101090309",
"city_name": "怀安县"
},
{
"_id": 993,
"id": 1245,
"pid": 147,
"city_code": "101090310",
"city_name": "万全县"
},
{
"_id": 994,
"id": 1246,
"pid": 147,
"city_code": "101090311",
"city_name": "怀来县"
},
{
"_id": 995,
"id": 1247,
"pid": 147,
"city_code": "101090312",
"city_name": "涿鹿县"
},
{
"_id": 996,
"id": 1248,
"pid": 147,
"city_code": "101090313",
"city_name": "赤城县"
},
{
"_id": 997,
"id": 1249,
"pid": 147,
"city_code": "101090314",
"city_name": "崇礼县"
},
{
"_id": 998,
"id": 1255,
"pid": 148,
"city_code": "101180108",
"city_name": "上街区"
},
{
"_id": 999,
"id": 1261,
"pid": 148,
"city_code": "101180102",
"city_name": "巩义市"
},
{
"_id": 1000,
"id": 1262,
"pid": 148,
"city_code": "101180103",
"city_name": "荥阳市"
},
{
"_id": 1001,
"id": 1263,
"pid": 148,
"city_code": "101180105",
"city_name": "新密市"
},
{
"_id": 1002,
"id": 1264,
"pid": 148,
"city_code": "101180106",
"city_name": "新郑市"
},
{
"_id": 1003,
"id": 1265,
"pid": 148,
"city_code": "101180104",
"city_name": "登封市"
},
{
"_id": 1004,
"id": 1266,
"pid": 148,
"city_code": "101180107",
"city_name": "中牟县"
},
{
"_id": 1005,
"id": 1272,
"pid": 149,
"city_code": "101180911",
"city_name": "吉利区"
},
{
"_id": 1006,
"id": 1273,
"pid": 149,
"city_code": "101180908",
"city_name": "偃师市"
},
{
"_id": 1007,
"id": 1274,
"pid": 149,
"city_code": "101180903",
"city_name": "孟津县"
},
{
"_id": 1008,
"id": 1275,
"pid": 149,
"city_code": "101180902",
"city_name": "新安县"
},
{
"_id": 1009,
"id": 1276,
"pid": 149,
"city_code": "101180909",
"city_name": "栾川县"
},
{
"_id": 1010,
"id": 1277,
"pid": 149,
"city_code": "101180907",
"city_name": "嵩县"
},
{
"_id": 1011,
"id": 1278,
"pid": 149,
"city_code": "101180910",
"city_name": "汝阳县"
},
{
"_id": 1012,
"id": 1279,
"pid": 149,
"city_code": "101180904",
"city_name": "宜阳县"
},
{
"_id": 1013,
"id": 1280,
"pid": 149,
"city_code": "101180905",
"city_name": "洛宁县"
},
{
"_id": 1014,
"id": 1281,
"pid": 149,
"city_code": "101180906",
"city_name": "伊川县"
},
{
"_id": 1015,
"id": 1287,
"pid": 150,
"city_code": "101180802",
"city_name": "杞县"
},
{
"_id": 1016,
"id": 1288,
"pid": 150,
"city_code": "101180804",
"city_name": "通许县"
},
{
"_id": 1017,
"id": 1289,
"pid": 150,
"city_code": "101180803",
"city_name": "尉氏县"
},
{
"_id": 1018,
"id": 1290,
"pid": 150,
"city_code": "101180801",
"city_name": "开封县"
},
{
"_id": 1019,
"id": 1291,
"pid": 150,
"city_code": "101180805",
"city_name": "兰考县"
},
{
"_id": 1020,
"id": 1296,
"pid": 151,
"city_code": "101180205",
"city_name": "林州市"
},
{
"_id": 1021,
"id": 1297,
"pid": 151,
"city_code": "101180201",
"city_name": "安阳县"
},
{
"_id": 1022,
"id": 1298,
"pid": 151,
"city_code": "101180202",
"city_name": "汤阴县"
},
{
"_id": 1023,
"id": 1299,
"pid": 151,
"city_code": "101180203",
"city_name": "滑县"
},
{
"_id": 1024,
"id": 1300,
"pid": 151,
"city_code": "101180204",
"city_name": "内黄县"
},
{
"_id": 1025,
"id": 1304,
"pid": 152,
"city_code": "101181202",
"city_name": "浚县"
},
{
"_id": 1026,
"id": 1305,
"pid": 152,
"city_code": "101181203",
"city_name": "淇县"
},
{
"_id": 1027,
"id": 1306,
"pid": 153,
"city_code": "101181801",
"city_name": "济源市"
},
{
"_id": 1028,
"id": 1311,
"pid": 154,
"city_code": "101181104",
"city_name": "沁阳市"
},
{
"_id": 1029,
"id": 1312,
"pid": 154,
"city_code": "101181108",
"city_name": "孟州市"
},
{
"_id": 1030,
"id": 1313,
"pid": 154,
"city_code": "101181102",
"city_name": "修武县"
},
{
"_id": 1031,
"id": 1314,
"pid": 154,
"city_code": "101181106",
"city_name": "博爱县"
},
{
"_id": 1032,
"id": 1315,
"pid": 154,
"city_code": "101181103",
"city_name": "武陟县"
},
{
"_id": 1033,
"id": 1316,
"pid": 154,
"city_code": "101181107",
"city_name": "温县"
},
{
"_id": 1034,
"id": 1319,
"pid": 155,
"city_code": "101180711",
"city_name": "邓州市"
},
{
"_id": 1035,
"id": 1320,
"pid": 155,
"city_code": "101180702",
"city_name": "南召县"
},
{
"_id": 1036,
"id": 1321,
"pid": 155,
"city_code": "101180703",
"city_name": "方城县"
},
{
"_id": 1037,
"id": 1322,
"pid": 155,
"city_code": "101180705",
"city_name": "西峡县"
},
{
"_id": 1038,
"id": 1323,
"pid": 155,
"city_code": "101180707",
"city_name": "镇平县"
},
{
"_id": 1039,
"id": 1324,
"pid": 155,
"city_code": "101180706",
"city_name": "内乡县"
},
{
"_id": 1040,
"id": 1325,
"pid": 155,
"city_code": "101180708",
"city_name": "淅川县"
},
{
"_id": 1041,
"id": 1326,
"pid": 155,
"city_code": "101180704",
"city_name": "社旗县"
},
{
"_id": 1042,
"id": 1327,
"pid": 155,
"city_code": "101180710",
"city_name": "唐河县"
},
{
"_id": 1043,
"id": 1328,
"pid": 155,
"city_code": "101180709",
"city_name": "新野县"
},
{
"_id": 1044,
"id": 1329,
"pid": 155,
"city_code": "101180712",
"city_name": "桐柏县"
},
{
"_id": 1045,
"id": 1333,
"pid": 156,
"city_code": "101180508",
"city_name": "石龙区"
},
{
"_id": 1046,
"id": 1334,
"pid": 156,
"city_code": "101180506",
"city_name": "舞钢市"
},
{
"_id": 1047,
"id": 1335,
"pid": 156,
"city_code": "101180504",
"city_name": "汝州市"
},
{
"_id": 1048,
"id": 1336,
"pid": 156,
"city_code": "101180503",
"city_name": "宝丰县"
},
{
"_id": 1049,
"id": 1337,
"pid": 156,
"city_code": "101180505",
"city_name": "叶县"
},
{
"_id": 1050,
"id": 1338,
"pid": 156,
"city_code": "101180507",
"city_name": "鲁山县"
},
{
"_id": 1051,
"id": 1339,
"pid": 156,
"city_code": "101180502",
"city_name": "郏县"
},
{
"_id": 1052,
"id": 1341,
"pid": 157,
"city_code": "101181705",
"city_name": "义马市"
},
{
"_id": 1053,
"id": 1342,
"pid": 157,
"city_code": "101181702",
"city_name": "灵宝市"
},
{
"_id": 1054,
"id": 1343,
"pid": 157,
"city_code": "101181703",
"city_name": "渑池县"
},
{
"_id": 1055,
"id": 1344,
"pid": 157,
"city_code": "101181706",
"city_name": "陕县"
},
{
"_id": 1056,
"id": 1345,
"pid": 157,
"city_code": "101181704",
"city_name": "卢氏县"
},
{
"_id": 1057,
"id": 1347,
"pid": 158,
"city_code": "101181002",
"city_name": "睢阳区"
},
{
"_id": 1058,
"id": 1348,
"pid": 158,
"city_code": "101181009",
"city_name": "永城市"
},
{
"_id": 1059,
"id": 1349,
"pid": 158,
"city_code": "101181004",
"city_name": "民权县"
},
{
"_id": 1060,
"id": 1350,
"pid": 158,
"city_code": "101181003",
"city_name": "睢县"
},
{
"_id": 1061,
"id": 1351,
"pid": 158,
"city_code": "101181007",
"city_name": "宁陵县"
},
{
"_id": 1062,
"id": 1352,
"pid": 158,
"city_code": "101181005",
"city_name": "虞城县"
},
{
"_id": 1063,
"id": 1353,
"pid": 158,
"city_code": "101181006",
"city_name": "柘城县"
},
{
"_id": 1064,
"id": 1354,
"pid": 158,
"city_code": "101181008",
"city_name": "夏邑县"
},
{
"_id": 1065,
"id": 1359,
"pid": 159,
"city_code": "101180305",
"city_name": "卫辉市"
},
{
"_id": 1066,
"id": 1360,
"pid": 159,
"city_code": "101180304",
"city_name": "辉县市"
},
{
"_id": 1067,
"id": 1361,
"pid": 159,
"city_code": "101180301",
"city_name": "新乡县"
},
{
"_id": 1068,
"id": 1362,
"pid": 159,
"city_code": "101180302",
"city_name": "获嘉县"
},
{
"_id": 1069,
"id": 1363,
"pid": 159,
"city_code": "101180303",
"city_name": "原阳县"
},
{
"_id": 1070,
"id": 1364,
"pid": 159,
"city_code": "101180306",
"city_name": "延津县"
},
{
"_id": 1071,
"id": 1365,
"pid": 159,
"city_code": "101180307",
"city_name": "封丘县"
},
{
"_id": 1072,
"id": 1366,
"pid": 159,
"city_code": "101180308",
"city_name": "长垣县"
},
{
"_id": 1073,
"id": 1369,
"pid": 160,
"city_code": "101180603",
"city_name": "罗山县"
},
{
"_id": 1074,
"id": 1370,
"pid": 160,
"city_code": "101180604",
"city_name": "光山县"
},
{
"_id": 1075,
"id": 1371,
"pid": 160,
"city_code": "101180605",
"city_name": "新县"
},
{
"_id": 1076,
"id": 1372,
"pid": 160,
"city_code": "101180609",
"city_name": "商城县"
},
{
"_id": 1077,
"id": 1373,
"pid": 160,
"city_code": "101180608",
"city_name": "固始县"
},
{
"_id": 1078,
"id": 1374,
"pid": 160,
"city_code": "101180607",
"city_name": "潢川县"
},
{
"_id": 1079,
"id": 1375,
"pid": 160,
"city_code": "101180606",
"city_name": "淮滨县"
},
{
"_id": 1080,
"id": 1376,
"pid": 160,
"city_code": "101180602",
"city_name": "息县"
},
{
"_id": 1081,
"id": 1378,
"pid": 161,
"city_code": "101180405",
"city_name": "禹州市"
},
{
"_id": 1082,
"id": 1379,
"pid": 161,
"city_code": "101180404",
"city_name": "长葛市"
},
{
"_id": 1083,
"id": 1380,
"pid": 161,
"city_code": "101180401",
"city_name": "许昌县"
},
{
"_id": 1084,
"id": 1381,
"pid": 161,
"city_code": "101180402",
"city_name": "鄢陵县"
},
{
"_id": 1085,
"id": 1382,
"pid": 161,
"city_code": "101180403",
"city_name": "襄城县"
},
{
"_id": 1086,
"id": 1384,
"pid": 162,
"city_code": "101181407",
"city_name": "项城市"
},
{
"_id": 1087,
"id": 1385,
"pid": 162,
"city_code": "101181402",
"city_name": "扶沟县"
},
{
"_id": 1088,
"id": 1386,
"pid": 162,
"city_code": "101181405",
"city_name": "西华县"
},
{
"_id": 1089,
"id": 1387,
"pid": 162,
"city_code": "101181406",
"city_name": "商水县"
},
{
"_id": 1090,
"id": 1388,
"pid": 162,
"city_code": "101181410",
"city_name": "沈丘县"
},
{
"_id": 1091,
"id": 1389,
"pid": 162,
"city_code": "101181408",
"city_name": "郸城县"
},
{
"_id": 1092,
"id": 1390,
"pid": 162,
"city_code": "101181404",
"city_name": "淮阳县"
},
{
"_id": 1093,
"id": 1391,
"pid": 162,
"city_code": "101181403",
"city_name": "太康县"
},
{
"_id": 1094,
"id": 1392,
"pid": 162,
"city_code": "101181409",
"city_name": "鹿邑县"
},
{
"_id": 1095,
"id": 1394,
"pid": 163,
"city_code": "101181602",
"city_name": "西平县"
},
{
"_id": 1096,
"id": 1395,
"pid": 163,
"city_code": "101181604",
"city_name": "上蔡县"
},
{
"_id": 1097,
"id": 1396,
"pid": 163,
"city_code": "101181607",
"city_name": "平舆县"
},
{
"_id": 1098,
"id": 1397,
"pid": 163,
"city_code": "101181610",
"city_name": "正阳县"
},
{
"_id": 1099,
"id": 1398,
"pid": 163,
"city_code": "101181609",
"city_name": "确山县"
},
{
"_id": 1100,
"id": 1399,
"pid": 163,
"city_code": "101181606",
"city_name": "泌阳县"
},
{
"_id": 1101,
"id": 1400,
"pid": 163,
"city_code": "101181605",
"city_name": "汝南县"
},
{
"_id": 1102,
"id": 1401,
"pid": 163,
"city_code": "101181603",
"city_name": "遂平县"
},
{
"_id": 1103,
"id": 1402,
"pid": 163,
"city_code": "101181608",
"city_name": "新蔡县"
},
{
"_id": 1104,
"id": 1406,
"pid": 164,
"city_code": "101181503",
"city_name": "舞阳县"
},
{
"_id": 1105,
"id": 1407,
"pid": 164,
"city_code": "101181502",
"city_name": "临颍县"
},
{
"_id": 1106,
"id": 1409,
"pid": 165,
"city_code": "101181304",
"city_name": "清丰县"
},
{
"_id": 1107,
"id": 1410,
"pid": 165,
"city_code": "101181303",
"city_name": "南乐县"
},
{
"_id": 1108,
"id": 1411,
"pid": 165,
"city_code": "101181305",
"city_name": "范县"
},
{
"_id": 1109,
"id": 1412,
"pid": 165,
"city_code": "101181302",
"city_name": "台前县"
},
{
"_id": 1110,
"id": 1413,
"pid": 165,
"city_code": "101181301",
"city_name": "濮阳县"
},
{
"_id": 1111,
"id": 1421,
"pid": 166,
"city_code": "101050104",
"city_name": "阿城区"
},
{
"_id": 1112,
"id": 1422,
"pid": 166,
"city_code": "101050103",
"city_name": "呼兰区"
},
{
"_id": 1113,
"id": 1424,
"pid": 166,
"city_code": "101050111",
"city_name": "尚志市"
},
{
"_id": 1114,
"id": 1425,
"pid": 166,
"city_code": "101050102",
"city_name": "双城市"
},
{
"_id": 1115,
"id": 1426,
"pid": 166,
"city_code": "101050112",
"city_name": "五常市"
},
{
"_id": 1116,
"id": 1427,
"pid": 166,
"city_code": "101050109",
"city_name": "方正县"
},
{
"_id": 1117,
"id": 1428,
"pid": 166,
"city_code": "101050105",
"city_name": "宾县"
},
{
"_id": 1118,
"id": 1429,
"pid": 166,
"city_code": "101050106",
"city_name": "依兰县"
},
{
"_id": 1119,
"id": 1430,
"pid": 166,
"city_code": "101050107",
"city_name": "巴彦县"
},
{
"_id": 1120,
"id": 1431,
"pid": 166,
"city_code": "101050108",
"city_name": "通河县"
},
{
"_id": 1121,
"id": 1432,
"pid": 166,
"city_code": "101050113",
"city_name": "木兰县"
},
{
"_id": 1122,
"id": 1433,
"pid": 166,
"city_code": "101050110",
"city_name": "延寿县"
},
{
"_id": 1123,
"id": 1439,
"pid": 167,
"city_code": "101050903",
"city_name": "肇州县"
},
{
"_id": 1124,
"id": 1440,
"pid": 167,
"city_code": "101050904",
"city_name": "肇源县"
},
{
"_id": 1125,
"id": 1441,
"pid": 167,
"city_code": "101050902",
"city_name": "林甸县"
},
{
"_id": 1126,
"id": 1442,
"pid": 167,
"city_code": "101050905",
"city_name": "杜尔伯特"
},
{
"_id": 1127,
"id": 1443,
"pid": 168,
"city_code": "101050704",
"city_name": "呼玛县"
},
{
"_id": 1128,
"id": 1444,
"pid": 168,
"city_code": "101050703",
"city_name": "漠河县"
},
{
"_id": 1129,
"id": 1445,
"pid": 168,
"city_code": "101050702",
"city_name": "塔河县"
},
{
"_id": 1130,
"id": 1448,
"pid": 169,
"city_code": "101051206",
"city_name": "南山区"
},
{
"_id": 1131,
"id": 1452,
"pid": 169,
"city_code": "101051203",
"city_name": "萝北县"
},
{
"_id": 1132,
"id": 1453,
"pid": 169,
"city_code": "101051202",
"city_name": "绥滨县"
},
{
"_id": 1133,
"id": 1455,
"pid": 170,
"city_code": "101050605",
"city_name": "五大连池市"
},
{
"_id": 1134,
"id": 1456,
"pid": 170,
"city_code": "101050606",
"city_name": "北安市"
},
{
"_id": 1135,
"id": 1457,
"pid": 170,
"city_code": "101050602",
"city_name": "嫩江县"
},
{
"_id": 1136,
"id": 1458,
"pid": 170,
"city_code": "101050604",
"city_name": "逊克县"
},
{
"_id": 1137,
"id": 1459,
"pid": 170,
"city_code": "101050603",
"city_name": "孙吴县"
},
{
"_id": 1138,
"id": 1465,
"pid": 171,
"city_code": "101051102",
"city_name": "虎林市"
},
{
"_id": 1139,
"id": 1466,
"pid": 171,
"city_code": "101051103",
"city_name": "密山市"
},
{
"_id": 1140,
"id": 1467,
"pid": 171,
"city_code": "101051104",
"city_name": "鸡东县"
},
{
"_id": 1141,
"id": 1472,
"pid": 172,
"city_code": "101050406",
"city_name": "同江市"
},
{
"_id": 1142,
"id": 1473,
"pid": 172,
"city_code": "101050407",
"city_name": "富锦市"
},
{
"_id": 1143,
"id": 1474,
"pid": 172,
"city_code": "101050405",
"city_name": "桦南县"
},
{
"_id": 1144,
"id": 1475,
"pid": 172,
"city_code": "101050404",
"city_name": "桦川县"
},
{
"_id": 1145,
"id": 1476,
"pid": 172,
"city_code": "101050402",
"city_name": "汤原县"
},
{
"_id": 1146,
"id": 1477,
"pid": 172,
"city_code": "101050403",
"city_name": "抚远县"
},
{
"_id": 1147,
"id": 1482,
"pid": 173,
"city_code": "101050305",
"city_name": "绥芬河市"
},
{
"_id": 1148,
"id": 1483,
"pid": 173,
"city_code": "101050302",
"city_name": "海林市"
},
{
"_id": 1149,
"id": 1484,
"pid": 173,
"city_code": "101050306",
"city_name": "宁安市"
},
{
"_id": 1150,
"id": 1485,
"pid": 173,
"city_code": "101050303",
"city_name": "穆棱市"
},
{
"_id": 1151,
"id": 1486,
"pid": 173,
"city_code": "101050307",
"city_name": "东宁县"
},
{
"_id": 1152,
"id": 1487,
"pid": 173,
"city_code": "101050304",
"city_name": "林口县"
},
{
"_id": 1153,
"id": 1491,
"pid": 174,
"city_code": "101051002",
"city_name": "勃利县"
},
{
"_id": 1154,
"id": 1499,
"pid": 175,
"city_code": "101050202",
"city_name": "讷河市"
},
{
"_id": 1155,
"id": 1500,
"pid": 175,
"city_code": "101050203",
"city_name": "龙江县"
},
{
"_id": 1156,
"id": 1501,
"pid": 175,
"city_code": "101050206",
"city_name": "依安县"
},
{
"_id": 1157,
"id": 1502,
"pid": 175,
"city_code": "101050210",
"city_name": "泰来县"
},
{
"_id": 1158,
"id": 1503,
"pid": 175,
"city_code": "101050204",
"city_name": "甘南县"
},
{
"_id": 1159,
"id": 1504,
"pid": 175,
"city_code": "101050205",
"city_name": "富裕县"
},
{
"_id": 1160,
"id": 1505,
"pid": 175,
"city_code": "101050208",
"city_name": "克山县"
},
{
"_id": 1161,
"id": 1506,
"pid": 175,
"city_code": "101050209",
"city_name": "克东县"
},
{
"_id": 1162,
"id": 1507,
"pid": 175,
"city_code": "101050207",
"city_name": "拜泉县"
},
{
"_id": 1163,
"id": 1512,
"pid": 176,
"city_code": "101051302",
"city_name": "集贤县"
},
{
"_id": 1164,
"id": 1513,
"pid": 176,
"city_code": "101051305",
"city_name": "友谊县"
},
{
"_id": 1165,
"id": 1514,
"pid": 176,
"city_code": "101051303",
"city_name": "宝清县"
},
{
"_id": 1166,
"id": 1515,
"pid": 176,
"city_code": "101051304",
"city_name": "饶河县"
},
{
"_id": 1167,
"id": 1517,
"pid": 177,
"city_code": "101050503",
"city_name": "安达市"
},
{
"_id": 1168,
"id": 1518,
"pid": 177,
"city_code": "101050502",
"city_name": "肇东市"
},
{
"_id": 1169,
"id": 1519,
"pid": 177,
"city_code": "101050504",
"city_name": "海伦市"
},
{
"_id": 1170,
"id": 1520,
"pid": 177,
"city_code": "101050506",
"city_name": "望奎县"
},
{
"_id": 1171,
"id": 1521,
"pid": 177,
"city_code": "101050507",
"city_name": "兰西县"
},
{
"_id": 1172,
"id": 1522,
"pid": 177,
"city_code": "101050508",
"city_name": "青冈县"
},
{
"_id": 1173,
"id": 1523,
"pid": 177,
"city_code": "101050509",
"city_name": "庆安县"
},
{
"_id": 1174,
"id": 1524,
"pid": 177,
"city_code": "101050505",
"city_name": "明水县"
},
{
"_id": 1175,
"id": 1525,
"pid": 177,
"city_code": "101050510",
"city_name": "绥棱县"
},
{
"_id": 1176,
"id": 1526,
"pid": 178,
"city_code": "101050801",
"city_name": "伊春区"
},
{
"_id": 1177,
"id": 1536,
"pid": 178,
"city_code": "101050803",
"city_name": "五营区"
},
{
"_id": 1178,
"id": 1540,
"pid": 178,
"city_code": "101050802",
"city_name": "乌伊岭区"
},
{
"_id": 1179,
"id": 1541,
"pid": 178,
"city_code": "101050804",
"city_name": "铁力市"
},
{
"_id": 1180,
"id": 1542,
"pid": 178,
"city_code": "101050805",
"city_name": "嘉荫县"
},
{
"_id": 1181,
"id": 1550,
"pid": 179,
"city_code": "101200106",
"city_name": "东西湖区"
},
{
"_id": 1182,
"id": 1552,
"pid": 179,
"city_code": "101200102",
"city_name": "蔡甸区"
},
{
"_id": 1183,
"id": 1553,
"pid": 179,
"city_code": "101200105",
"city_name": "江夏区"
},
{
"_id": 1184,
"id": 1554,
"pid": 179,
"city_code": "101200103",
"city_name": "黄陂区"
},
{
"_id": 1185,
"id": 1555,
"pid": 179,
"city_code": "101200104",
"city_name": "新洲区"
},
{
"_id": 1186,
"id": 1560,
"pid": 181,
"city_code": "101200302",
"city_name": "梁子湖区"
},
{
"_id": 1187,
"id": 1562,
"pid": 182,
"city_code": "101200503",
"city_name": "麻城市"
},
{
"_id": 1188,
"id": 1563,
"pid": 182,
"city_code": "101200509",
"city_name": "武穴市"
},
{
"_id": 1189,
"id": 1564,
"pid": 182,
"city_code": "101200510",
"city_name": "团风县"
},
{
"_id": 1190,
"id": 1565,
"pid": 182,
"city_code": "101200502",
"city_name": "红安县"
},
{
"_id": 1191,
"id": 1566,
"pid": 182,
"city_code": "101200504",
"city_name": "罗田县"
},
{
"_id": 1192,
"id": 1567,
"pid": 182,
"city_code": "101200505",
"city_name": "英山县"
},
{
"_id": 1193,
"id": 1568,
"pid": 182,
"city_code": "101200506",
"city_name": "浠水县"
},
{
"_id": 1194,
"id": 1569,
"pid": 182,
"city_code": "101200507",
"city_name": "蕲春县"
},
{
"_id": 1195,
"id": 1570,
"pid": 182,
"city_code": "101200508",
"city_name": "黄梅县"
},
{
"_id": 1196,
"id": 1572,
"pid": 183,
"city_code": "101200606",
"city_name": "西塞山区"
},
{
"_id": 1197,
"id": 1573,
"pid": 183,
"city_code": "101200605",
"city_name": "下陆区"
},
{
"_id": 1198,
"id": 1574,
"pid": 183,
"city_code": "101200604",
"city_name": "铁山区"
},
{
"_id": 1199,
"id": 1575,
"pid": 183,
"city_code": "101200602",
"city_name": "大冶市"
},
{
"_id": 1200,
"id": 1576,
"pid": 183,
"city_code": "101200603",
"city_name": "阳新县"
},
{
"_id": 1201,
"id": 1578,
"pid": 184,
"city_code": "101201404",
"city_name": "掇刀区"
},
{
"_id": 1202,
"id": 1579,
"pid": 184,
"city_code": "101201402",
"city_name": "钟祥市"
},
{
"_id": 1203,
"id": 1580,
"pid": 184,
"city_code": "101201403",
"city_name": "京山县"
},
{
"_id": 1204,
"id": 1581,
"pid": 184,
"city_code": "101201405",
"city_name": "沙洋县"
},
{
"_id": 1205,
"id": 1583,
"pid": 185,
"city_code": "101200801",
"city_name": "荆州区"
},
{
"_id": 1206,
"id": 1584,
"pid": 185,
"city_code": "101200804",
"city_name": "石首市"
},
{
"_id": 1207,
"id": 1585,
"pid": 185,
"city_code": "101200806",
"city_name": "洪湖市"
},
{
"_id": 1208,
"id": 1586,
"pid": 185,
"city_code": "101200807",
"city_name": "松滋市"
},
{
"_id": 1209,
"id": 1587,
"pid": 185,
"city_code": "101200803",
"city_name": "公安县"
},
{
"_id": 1210,
"id": 1588,
"pid": 185,
"city_code": "101200805",
"city_name": "监利县"
},
{
"_id": 1211,
"id": 1589,
"pid": 185,
"city_code": "101200802",
"city_name": "江陵县"
},
{
"_id": 1212,
"id": 1590,
"pid": 186,
"city_code": "101201701",
"city_name": "潜江市"
},
{
"_id": 1213,
"id": 1592,
"pid": 188,
"city_code": "101201109",
"city_name": "张湾区"
},
{
"_id": 1214,
"id": 1593,
"pid": 188,
"city_code": "101201108",
"city_name": "茅箭区"
},
{
"_id": 1215,
"id": 1594,
"pid": 188,
"city_code": "101201107",
"city_name": "丹江口市"
},
{
"_id": 1216,
"id": 1595,
"pid": 188,
"city_code": "101201104",
"city_name": "郧县"
},
{
"_id": 1217,
"id": 1596,
"pid": 188,
"city_code": "101201103",
"city_name": "郧西县"
},
{
"_id": 1218,
"id": 1597,
"pid": 188,
"city_code": "101201105",
"city_name": "竹山县"
},
{
"_id": 1219,
"id": 1598,
"pid": 188,
"city_code": "101201102",
"city_name": "竹溪县"
},
{
"_id": 1220,
"id": 1599,
"pid": 188,
"city_code": "101201106",
"city_name": "房县"
},
{
"_id": 1221,
"id": 1601,
"pid": 189,
"city_code": "101201302",
"city_name": "广水市"
},
{
"_id": 1222,
"id": 1602,
"pid": 190,
"city_code": "101201501",
"city_name": "天门市"
},
{
"_id": 1223,
"id": 1604,
"pid": 191,
"city_code": "101200702",
"city_name": "赤壁市"
},
{
"_id": 1224,
"id": 1605,
"pid": 191,
"city_code": "101200703",
"city_name": "嘉鱼县"
},
{
"_id": 1225,
"id": 1606,
"pid": 191,
"city_code": "101200705",
"city_name": "通城县"
},
{
"_id": 1226,
"id": 1607,
"pid": 191,
"city_code": "101200704",
"city_name": "崇阳县"
},
{
"_id": 1227,
"id": 1608,
"pid": 191,
"city_code": "101200706",
"city_name": "通山县"
},
{
"_id": 1228,
"id": 1611,
"pid": 192,
"city_code": "101200202",
"city_name": "襄州区"
},
{
"_id": 1229,
"id": 1612,
"pid": 192,
"city_code": "101200206",
"city_name": "老河口市"
},
{
"_id": 1230,
"id": 1613,
"pid": 192,
"city_code": "101200208",
"city_name": "枣阳市"
},
{
"_id": 1231,
"id": 1614,
"pid": 192,
"city_code": "101200205",
"city_name": "宜城市"
},
{
"_id": 1232,
"id": 1615,
"pid": 192,
"city_code": "101200204",
"city_name": "南漳县"
},
{
"_id": 1233,
"id": 1616,
"pid": 192,
"city_code": "101200207",
"city_name": "谷城县"
},
{
"_id": 1234,
"id": 1617,
"pid": 192,
"city_code": "101200203",
"city_name": "保康县"
},
{
"_id": 1235,
"id": 1619,
"pid": 193,
"city_code": "101200405",
"city_name": "应城市"
},
{
"_id": 1236,
"id": 1620,
"pid": 193,
"city_code": "101200402",
"city_name": "安陆市"
},
{
"_id": 1237,
"id": 1621,
"pid": 193,
"city_code": "101200406",
"city_name": "汉川市"
},
{
"_id": 1238,
"id": 1622,
"pid": 193,
"city_code": "101200407",
"city_name": "孝昌县"
},
{
"_id": 1239,
"id": 1623,
"pid": 193,
"city_code": "101200404",
"city_name": "大悟县"
},
{
"_id": 1240,
"id": 1624,
"pid": 193,
"city_code": "101200403",
"city_name": "云梦县"
},
{
"_id": 1241,
"id": 1625,
"pid": 194,
"city_code": "101200908",
"city_name": "长阳县"
},
{
"_id": 1242,
"id": 1626,
"pid": 194,
"city_code": "101200906",
"city_name": "五峰县"
},
{
"_id": 1243,
"id": 1631,
"pid": 194,
"city_code": "101200912",
"city_name": "夷陵区"
},
{
"_id": 1244,
"id": 1632,
"pid": 194,
"city_code": "101200909",
"city_name": "宜都市"
},
{
"_id": 1245,
"id": 1633,
"pid": 194,
"city_code": "101200907",
"city_name": "当阳市"
},
{
"_id": 1246,
"id": 1634,
"pid": 194,
"city_code": "101200910",
"city_name": "枝江市"
},
{
"_id": 1247,
"id": 1635,
"pid": 194,
"city_code": "101200902",
"city_name": "远安县"
},
{
"_id": 1248,
"id": 1636,
"pid": 194,
"city_code": "101200904",
"city_name": "兴山县"
},
{
"_id": 1249,
"id": 1637,
"pid": 194,
"city_code": "101200903",
"city_name": "秭归县"
},
{
"_id": 1250,
"id": 1638,
"pid": 195,
"city_code": "101201001",
"city_name": "恩施市"
},
{
"_id": 1251,
"id": 1639,
"pid": 195,
"city_code": "101201002",
"city_name": "利川市"
},
{
"_id": 1252,
"id": 1640,
"pid": 195,
"city_code": "101201003",
"city_name": "建始县"
},
{
"_id": 1253,
"id": 1641,
"pid": 195,
"city_code": "101201008",
"city_name": "巴东县"
},
{
"_id": 1254,
"id": 1642,
"pid": 195,
"city_code": "101201005",
"city_name": "宣恩县"
},
{
"_id": 1255,
"id": 1643,
"pid": 195,
"city_code": "101201004",
"city_name": "咸丰县"
},
{
"_id": 1256,
"id": 1644,
"pid": 195,
"city_code": "101201007",
"city_name": "来凤县"
},
{
"_id": 1257,
"id": 1645,
"pid": 195,
"city_code": "101201006",
"city_name": "鹤峰县"
},
{
"_id": 1258,
"id": 1652,
"pid": 196,
"city_code": "101250103",
"city_name": "浏阳市"
},
{
"_id": 1259,
"id": 1653,
"pid": 196,
"city_code": "101250101",
"city_name": "长沙县"
},
{
"_id": 1260,
"id": 1654,
"pid": 196,
"city_code": "101250105",
"city_name": "望城县"
},
{
"_id": 1261,
"id": 1655,
"pid": 196,
"city_code": "101250102",
"city_name": "宁乡县"
},
{
"_id": 1262,
"id": 1657,
"pid": 197,
"city_code": "101251104",
"city_name": "武陵源区"
},
{
"_id": 1263,
"id": 1658,
"pid": 197,
"city_code": "101251103",
"city_name": "慈利县"
},
{
"_id": 1264,
"id": 1659,
"pid": 197,
"city_code": "101251102",
"city_name": "桑植县"
},
{
"_id": 1265,
"id": 1662,
"pid": 198,
"city_code": "101250608",
"city_name": "津市市"
},
{
"_id": 1266,
"id": 1663,
"pid": 198,
"city_code": "101250602",
"city_name": "安乡县"
},
{
"_id": 1267,
"id": 1664,
"pid": 198,
"city_code": "101250604",
"city_name": "汉寿县"
},
{
"_id": 1268,
"id": 1665,
"pid": 198,
"city_code": "101250605",
"city_name": "澧县"
},
{
"_id": 1269,
"id": 1666,
"pid": 198,
"city_code": "101250606",
"city_name": "临澧县"
},
{
"_id": 1270,
"id": 1667,
"pid": 198,
"city_code": "101250603",
"city_name": "桃源县"
},
{
"_id": 1271,
"id": 1668,
"pid": 198,
"city_code": "101250607",
"city_name": "石门县"
},
{
"_id": 1272,
"id": 1670,
"pid": 199,
"city_code": "101250512",
"city_name": "苏仙区"
},
{
"_id": 1273,
"id": 1671,
"pid": 199,
"city_code": "101250507",
"city_name": "资兴市"
},
{
"_id": 1274,
"id": 1672,
"pid": 199,
"city_code": "101250502",
"city_name": "桂阳县"
},
{
"_id": 1275,
"id": 1673,
"pid": 199,
"city_code": "101250504",
"city_name": "宜章县"
},
{
"_id": 1276,
"id": 1674,
"pid": 199,
"city_code": "101250510",
"city_name": "永兴县"
},
{
"_id": 1277,
"id": 1675,
"pid": 199,
"city_code": "101250503",
"city_name": "嘉禾县"
},
{
"_id": 1278,
"id": 1676,
"pid": 199,
"city_code": "101250505",
"city_name": "临武县"
},
{
"_id": 1279,
"id": 1677,
"pid": 199,
"city_code": "101250508",
"city_name": "汝城县"
},
{
"_id": 1280,
"id": 1678,
"pid": 199,
"city_code": "101250511",
"city_name": "桂东县"
},
{
"_id": 1281,
"id": 1679,
"pid": 199,
"city_code": "101250509",
"city_name": "安仁县"
},
{
"_id": 1282,
"id": 1684,
"pid": 200,
"city_code": "101250409",
"city_name": "南岳区"
},
{
"_id": 1283,
"id": 1685,
"pid": 200,
"city_code": "101250408",
"city_name": "耒阳市"
},
{
"_id": 1284,
"id": 1686,
"pid": 200,
"city_code": "101250406",
"city_name": "常宁市"
},
{
"_id": 1285,
"id": 1687,
"pid": 200,
"city_code": "101250405",
"city_name": "衡阳县"
},
{
"_id": 1286,
"id": 1688,
"pid": 200,
"city_code": "101250407",
"city_name": "衡南县"
},
{
"_id": 1287,
"id": 1689,
"pid": 200,
"city_code": "101250402",
"city_name": "衡山县"
},
{
"_id": 1288,
"id": 1690,
"pid": 200,
"city_code": "101250403",
"city_name": "衡东县"
},
{
"_id": 1289,
"id": 1691,
"pid": 200,
"city_code": "101250404",
"city_name": "祁东县"
},
{
"_id": 1290,
"id": 1692,
"pid": 201,
"city_code": "101251202",
"city_name": "鹤城区"
},
{
"_id": 1291,
"id": 1693,
"pid": 201,
"city_code": "101251205",
"city_name": "靖州县"
},
{
"_id": 1292,
"id": 1694,
"pid": 201,
"city_code": "101251208",
"city_name": "麻阳县"
},
{
"_id": 1293,
"id": 1695,
"pid": 201,
"city_code": "101251207",
"city_name": "通道县"
},
{
"_id": 1294,
"id": 1696,
"pid": 201,
"city_code": "101251209",
"city_name": "新晃县"
},
{
"_id": 1295,
"id": 1697,
"pid": 201,
"city_code": "101251210",
"city_name": "芷江县"
},
{
"_id": 1296,
"id": 1698,
"pid": 201,
"city_code": "101251203",
"city_name": "沅陵县"
},
{
"_id": 1297,
"id": 1699,
"pid": 201,
"city_code": "101251204",
"city_name": "辰溪县"
},
{
"_id": 1298,
"id": 1700,
"pid": 201,
"city_code": "101251211",
"city_name": "溆浦县"
},
{
"_id": 1299,
"id": 1701,
"pid": 201,
"city_code": "101251212",
"city_name": "中方县"
},
{
"_id": 1300,
"id": 1702,
"pid": 201,
"city_code": "101251206",
"city_name": "会同县"
},
{
"_id": 1301,
"id": 1703,
"pid": 201,
"city_code": "101251213",
"city_name": "洪江市"
},
{
"_id": 1302,
"id": 1705,
"pid": 202,
"city_code": "101250803",
"city_name": "冷水江市"
},
{
"_id": 1303,
"id": 1706,
"pid": 202,
"city_code": "101250806",
"city_name": "涟源市"
},
{
"_id": 1304,
"id": 1707,
"pid": 202,
"city_code": "101250802",
"city_name": "双峰县"
},
{
"_id": 1305,
"id": 1708,
"pid": 202,
"city_code": "101250805",
"city_name": "新化县"
},
{
"_id": 1306,
"id": 1709,
"pid": 203,
"city_code": "101250909",
"city_name": "城步县"
},
{
"_id": 1307,
"id": 1713,
"pid": 203,
"city_code": "101250908",
"city_name": "武冈市"
},
{
"_id": 1308,
"id": 1714,
"pid": 203,
"city_code": "101250905",
"city_name": "邵东县"
},
{
"_id": 1309,
"id": 1715,
"pid": 203,
"city_code": "101250904",
"city_name": "新邵县"
},
{
"_id": 1310,
"id": 1716,
"pid": 203,
"city_code": "101250910",
"city_name": "邵阳县"
},
{
"_id": 1311,
"id": 1717,
"pid": 203,
"city_code": "101250902",
"city_name": "隆回县"
},
{
"_id": 1312,
"id": 1718,
"pid": 203,
"city_code": "101250903",
"city_name": "洞口县"
},
{
"_id": 1313,
"id": 1719,
"pid": 203,
"city_code": "101250906",
"city_name": "绥宁县"
},
{
"_id": 1314,
"id": 1720,
"pid": 203,
"city_code": "101250907",
"city_name": "新宁县"
},
{
"_id": 1315,
"id": 1723,
"pid": 204,
"city_code": "101250203",
"city_name": "湘乡市"
},
{
"_id": 1316,
"id": 1724,
"pid": 204,
"city_code": "101250202",
"city_name": "韶山市"
},
{
"_id": 1317,
"id": 1725,
"pid": 204,
"city_code": "101250201",
"city_name": "湘潭县"
},
{
"_id": 1318,
"id": 1726,
"pid": 205,
"city_code": "101251501",
"city_name": "吉首市"
},
{
"_id": 1319,
"id": 1727,
"pid": 205,
"city_code": "101251506",
"city_name": "泸溪县"
},
{
"_id": 1320,
"id": 1728,
"pid": 205,
"city_code": "101251505",
"city_name": "凤凰县"
},
{
"_id": 1321,
"id": 1729,
"pid": 205,
"city_code": "101251508",
"city_name": "花垣县"
},
{
"_id": 1322,
"id": 1730,
"pid": 205,
"city_code": "101251502",
"city_name": "保靖县"
},
{
"_id": 1323,
"id": 1731,
"pid": 205,
"city_code": "101251504",
"city_name": "古丈县"
},
{
"_id": 1324,
"id": 1732,
"pid": 205,
"city_code": "101251503",
"city_name": "永顺县"
},
{
"_id": 1325,
"id": 1733,
"pid": 205,
"city_code": "101251507",
"city_name": "龙山县"
},
{
"_id": 1326,
"id": 1734,
"pid": 206,
"city_code": "101250701",
"city_name": "赫山区"
},
{
"_id": 1327,
"id": 1736,
"pid": 206,
"city_code": "101250705",
"city_name": "沅江市"
},
{
"_id": 1328,
"id": 1737,
"pid": 206,
"city_code": "101250702",
"city_name": "南县"
},
{
"_id": 1329,
"id": 1738,
"pid": 206,
"city_code": "101250703",
"city_name": "桃江县"
},
{
"_id": 1330,
"id": 1739,
"pid": 206,
"city_code": "101250704",
"city_name": "安化县"
},
{
"_id": 1331,
"id": 1740,
"pid": 207,
"city_code": "101251410",
"city_name": "江华县"
},
{
"_id": 1332,
"id": 1743,
"pid": 207,
"city_code": "101251402",
"city_name": "祁阳县"
},
{
"_id": 1333,
"id": 1744,
"pid": 207,
"city_code": "101251403",
"city_name": "东安县"
},
{
"_id": 1334,
"id": 1745,
"pid": 207,
"city_code": "101251404",
"city_name": "双牌县"
},
{
"_id": 1335,
"id": 1746,
"pid": 207,
"city_code": "101251405",
"city_name": "道县"
},
{
"_id": 1336,
"id": 1747,
"pid": 207,
"city_code": "101251407",
"city_name": "江永县"
},
{
"_id": 1337,
"id": 1748,
"pid": 207,
"city_code": "101251406",
"city_name": "宁远县"
},
{
"_id": 1338,
"id": 1749,
"pid": 207,
"city_code": "101251408",
"city_name": "蓝山县"
},
{
"_id": 1339,
"id": 1750,
"pid": 207,
"city_code": "101251409",
"city_name": "新田县"
},
{
"_id": 1340,
"id": 1754,
"pid": 208,
"city_code": "101251004",
"city_name": "汨罗市"
},
{
"_id": 1341,
"id": 1755,
"pid": 208,
"city_code": "101251006",
"city_name": "临湘市"
},
{
"_id": 1342,
"id": 1756,
"pid": 208,
"city_code": "101251001",
"city_name": "岳阳县"
},
{
"_id": 1343,
"id": 1757,
"pid": 208,
"city_code": "101251002",
"city_name": "华容县"
},
{
"_id": 1344,
"id": 1758,
"pid": 208,
"city_code": "101251003",
"city_name": "湘阴县"
},
{
"_id": 1345,
"id": 1759,
"pid": 208,
"city_code": "101251005",
"city_name": "平江县"
},
{
"_id": 1346,
"id": 1764,
"pid": 209,
"city_code": "101250303",
"city_name": "醴陵市"
},
{
"_id": 1347,
"id": 1765,
"pid": 209,
"city_code": "101250304",
"city_name": "株洲县"
},
{
"_id": 1348,
"id": 1766,
"pid": 209,
"city_code": "101250302",
"city_name": "攸县"
},
{
"_id": 1349,
"id": 1767,
"pid": 209,
"city_code": "101250305",
"city_name": "茶陵县"
},
{
"_id": 1350,
"id": 1768,
"pid": 209,
"city_code": "101250306",
"city_name": "炎陵县"
},
{
"_id": 1351,
"id": 1774,
"pid": 210,
"city_code": "101060106",
"city_name": "双阳区"
},
{
"_id": 1352,
"id": 1779,
"pid": 210,
"city_code": "101060103",
"city_name": "德惠市"
},
{
"_id": 1353,
"id": 1780,
"pid": 210,
"city_code": "101060104",
"city_name": "九台市"
},
{
"_id": 1354,
"id": 1781,
"pid": 210,
"city_code": "101060105",
"city_name": "榆树市"
},
{
"_id": 1355,
"id": 1782,
"pid": 210,
"city_code": "101060102",
"city_name": "农安县"
},
{
"_id": 1356,
"id": 1787,
"pid": 211,
"city_code": "101060204",
"city_name": "蛟河市"
},
{
"_id": 1357,
"id": 1788,
"pid": 211,
"city_code": "101060206",
"city_name": "桦甸市"
},
{
"_id": 1358,
"id": 1789,
"pid": 211,
"city_code": "101060202",
"city_name": "舒兰市"
},
{
"_id": 1359,
"id": 1790,
"pid": 211,
"city_code": "101060205",
"city_name": "磐石市"
},
{
"_id": 1360,
"id": 1791,
"pid": 211,
"city_code": "101060203",
"city_name": "永吉县"
},
{
"_id": 1361,
"id": 1793,
"pid": 212,
"city_code": "101060602",
"city_name": "洮南市"
},
{
"_id": 1362,
"id": 1794,
"pid": 212,
"city_code": "101060603",
"city_name": "大安市"
},
{
"_id": 1363,
"id": 1795,
"pid": 212,
"city_code": "101060604",
"city_name": "镇赉县"
},
{
"_id": 1364,
"id": 1796,
"pid": 212,
"city_code": "101060605",
"city_name": "通榆县"
},
{
"_id": 1365,
"id": 1797,
"pid": 213,
"city_code": "101060907",
"city_name": "江源区"
},
{
"_id": 1366,
"id": 1799,
"pid": 213,
"city_code": "101060905",
"city_name": "长白县"
},
{
"_id": 1367,
"id": 1800,
"pid": 213,
"city_code": "101060903",
"city_name": "临江市"
},
{
"_id": 1368,
"id": 1801,
"pid": 213,
"city_code": "101060906",
"city_name": "抚松县"
},
{
"_id": 1369,
"id": 1802,
"pid": 213,
"city_code": "101060902",
"city_name": "靖宇县"
},
{
"_id": 1370,
"id": 1805,
"pid": 214,
"city_code": "101060702",
"city_name": "东丰县"
},
{
"_id": 1371,
"id": 1806,
"pid": 214,
"city_code": "101060703",
"city_name": "东辽县"
},
{
"_id": 1372,
"id": 1809,
"pid": 215,
"city_code": "101060405",
"city_name": "伊通县"
},
{
"_id": 1373,
"id": 1810,
"pid": 215,
"city_code": "101060404",
"city_name": "公主岭市"
},
{
"_id": 1374,
"id": 1811,
"pid": 215,
"city_code": "101060402",
"city_name": "双辽市"
},
{
"_id": 1375,
"id": 1812,
"pid": 215,
"city_code": "101060403",
"city_name": "梨树县"
},
{
"_id": 1376,
"id": 1813,
"pid": 216,
"city_code": "101060803",
"city_name": "前郭尔罗斯"
},
{
"_id": 1377,
"id": 1815,
"pid": 216,
"city_code": "101060804",
"city_name": "长岭县"
},
{
"_id": 1378,
"id": 1816,
"pid": 216,
"city_code": "101060802",
"city_name": "乾安县"
},
{
"_id": 1379,
"id": 1817,
"pid": 216,
"city_code": "101060805",
"city_name": "扶余市"
},
{
"_id": 1380,
"id": 1820,
"pid": 217,
"city_code": "101060502",
"city_name": "梅河口市"
},
{
"_id": 1381,
"id": 1821,
"pid": 217,
"city_code": "101060505",
"city_name": "集安市"
},
{
"_id": 1382,
"id": 1822,
"pid": 217,
"city_code": "101060506",
"city_name": "通化县"
},
{
"_id": 1383,
"id": 1823,
"pid": 217,
"city_code": "101060504",
"city_name": "辉南县"
},
{
"_id": 1384,
"id": 1824,
"pid": 217,
"city_code": "101060503",
"city_name": "柳河县"
},
{
"_id": 1385,
"id": 1825,
"pid": 218,
"city_code": "101060301",
"city_name": "延吉市"
},
{
"_id": 1386,
"id": 1826,
"pid": 218,
"city_code": "101060309",
"city_name": "图们市"
},
{
"_id": 1387,
"id": 1827,
"pid": 218,
"city_code": "101060302",
"city_name": "敦化市"
},
{
"_id": 1388,
"id": 1828,
"pid": 218,
"city_code": "101060308",
"city_name": "珲春市"
},
{
"_id": 1389,
"id": 1829,
"pid": 218,
"city_code": "101060307",
"city_name": "龙井市"
},
{
"_id": 1390,
"id": 1830,
"pid": 218,
"city_code": "101060305",
"city_name": "和龙市"
},
{
"_id": 1391,
"id": 1831,
"pid": 218,
"city_code": "101060303",
"city_name": "安图县"
},
{
"_id": 1392,
"id": 1832,
"pid": 218,
"city_code": "101060304",
"city_name": "汪清县"
},
{
"_id": 1393,
"id": 1841,
"pid": 219,
"city_code": "101190107",
"city_name": "浦口区"
},
{
"_id": 1394,
"id": 1842,
"pid": 219,
"city_code": "101190104",
"city_name": "江宁区"
},
{
"_id": 1395,
"id": 1843,
"pid": 219,
"city_code": "101190105",
"city_name": "六合区"
},
{
"_id": 1396,
"id": 1844,
"pid": 219,
"city_code": "101190102",
"city_name": "溧水区"
},
{
"_id": 1397,
"id": 1845,
"pid": 219,
"city_code": "101190103",
"city_name": "高淳县"
},
{
"_id": 1398,
"id": 1850,
"pid": 220,
"city_code": "101190405",
"city_name": "吴中区"
},
{
"_id": 1399,
"id": 1853,
"pid": 220,
"city_code": "101190404",
"city_name": "昆山市"
},
{
"_id": 1400,
"id": 1854,
"pid": 220,
"city_code": "101190402",
"city_name": "常熟市"
},
{
"_id": 1401,
"id": 1855,
"pid": 220,
"city_code": "101190403",
"city_name": "张家港市"
},
{
"_id": 1402,
"id": 1867,
"pid": 220,
"city_code": "101190407",
"city_name": "吴江区"
},
{
"_id": 1403,
"id": 1868,
"pid": 220,
"city_code": "101190408",
"city_name": "太仓市"
},
{
"_id": 1404,
"id": 1872,
"pid": 221,
"city_code": "101190204",
"city_name": "锡山区"
},
{
"_id": 1405,
"id": 1876,
"pid": 221,
"city_code": "101190202",
"city_name": "江阴市"
},
{
"_id": 1406,
"id": 1877,
"pid": 221,
"city_code": "101190203",
"city_name": "宜兴市"
},
{
"_id": 1407,
"id": 1883,
"pid": 222,
"city_code": "101191104",
"city_name": "武进区"
},
{
"_id": 1408,
"id": 1884,
"pid": 222,
"city_code": "101191102",
"city_name": "溧阳市"
},
{
"_id": 1409,
"id": 1885,
"pid": 222,
"city_code": "101191103",
"city_name": "金坛区"
},
{
"_id": 1410,
"id": 1888,
"pid": 223,
"city_code": "101190908",
"city_name": "楚州区"
},
{
"_id": 1411,
"id": 1889,
"pid": 223,
"city_code": "101190907",
"city_name": "淮阴区"
},
{
"_id": 1412,
"id": 1890,
"pid": 223,
"city_code": "101190905",
"city_name": "涟水县"
},
{
"_id": 1413,
"id": 1891,
"pid": 223,
"city_code": "101190904",
"city_name": "洪泽县"
},
{
"_id": 1414,
"id": 1892,
"pid": 223,
"city_code": "101190903",
"city_name": "盱眙县"
},
{
"_id": 1415,
"id": 1893,
"pid": 223,
"city_code": "101190902",
"city_name": "金湖县"
},
{
"_id": 1416,
"id": 1897,
"pid": 224,
"city_code": "101191003",
"city_name": "赣榆县"
},
{
"_id": 1417,
"id": 1898,
"pid": 224,
"city_code": "101191002",
"city_name": "东海县"
},
{
"_id": 1418,
"id": 1899,
"pid": 224,
"city_code": "101191004",
"city_name": "灌云县"
},
{
"_id": 1419,
"id": 1900,
"pid": 224,
"city_code": "101191005",
"city_name": "灌南县"
},
{
"_id": 1420,
"id": 1904,
"pid": 225,
"city_code": "101190507",
"city_name": "启东市"
},
{
"_id": 1421,
"id": 1905,
"pid": 225,
"city_code": "101190503",
"city_name": "如皋市"
},
{
"_id": 1422,
"id": 1906,
"pid": 225,
"city_code": "101190509",
"city_name": "通州区"
},
{
"_id": 1423,
"id": 1907,
"pid": 225,
"city_code": "101190508",
"city_name": "海门市"
},
{
"_id": 1424,
"id": 1908,
"pid": 225,
"city_code": "101190502",
"city_name": "海安县"
},
{
"_id": 1425,
"id": 1909,
"pid": 225,
"city_code": "101190504",
"city_name": "如东县"
},
{
"_id": 1426,
"id": 1911,
"pid": 226,
"city_code": "101191305",
"city_name": "宿豫区"
},
{
"_id": 1427,
"id": 1912,
"pid": 226,
"city_code": "101191305",
"city_name": "宿豫县"
},
{
"_id": 1428,
"id": 1913,
"pid": 226,
"city_code": "101191302",
"city_name": "沭阳县"
},
{
"_id": 1429,
"id": 1914,
"pid": 226,
"city_code": "101191303",
"city_name": "泗阳县"
},
{
"_id": 1430,
"id": 1915,
"pid": 226,
"city_code": "101191304",
"city_name": "泗洪县"
},
{
"_id": 1431,
"id": 1918,
"pid": 227,
"city_code": "101191202",
"city_name": "兴化市"
},
{
"_id": 1432,
"id": 1919,
"pid": 227,
"city_code": "101191205",
"city_name": "靖江市"
},
{
"_id": 1433,
"id": 1920,
"pid": 227,
"city_code": "101191203",
"city_name": "泰兴市"
},
{
"_id": 1434,
"id": 1921,
"pid": 227,
"city_code": "101191204",
"city_name": "姜堰区"
},
{
"_id": 1435,
"id": 1927,
"pid": 228,
"city_code": "101190807",
"city_name": "新沂市"
},
{
"_id": 1436,
"id": 1928,
"pid": 228,
"city_code": "101190805",
"city_name": "邳州市"
},
{
"_id": 1437,
"id": 1929,
"pid": 228,
"city_code": "101190803",
"city_name": "丰县"
},
{
"_id": 1438,
"id": 1930,
"pid": 228,
"city_code": "101190804",
"city_name": "沛县"
},
{
"_id": 1439,
"id": 1931,
"pid": 228,
"city_code": "101190802",
"city_name": "铜山区"
},
{
"_id": 1440,
"id": 1932,
"pid": 228,
"city_code": "101190806",
"city_name": "睢宁县"
},
{
"_id": 1441,
"id": 1935,
"pid": 229,
"city_code": "101190709",
"city_name": "盐都区"
},
{
"_id": 1442,
"id": 1937,
"pid": 229,
"city_code": "101190707",
"city_name": "东台市"
},
{
"_id": 1443,
"id": 1938,
"pid": 229,
"city_code": "101190708",
"city_name": "大丰区"
},
{
"_id": 1444,
"id": 1939,
"pid": 229,
"city_code": "101190702",
"city_name": "响水县"
},
{
"_id": 1445,
"id": 1940,
"pid": 229,
"city_code": "101190703",
"city_name": "滨海县"
},
{
"_id": 1446,
"id": 1941,
"pid": 229,
"city_code": "101190704",
"city_name": "阜宁县"
},
{
"_id": 1447,
"id": 1942,
"pid": 229,
"city_code": "101190705",
"city_name": "射阳县"
},
{
"_id": 1448,
"id": 1943,
"pid": 229,
"city_code": "101190706",
"city_name": "建湖县"
},
{
"_id": 1449,
"id": 1946,
"pid": 230,
"city_code": "101190606",
"city_name": "邗江区"
},
{
"_id": 1450,
"id": 1947,
"pid": 230,
"city_code": "101190603",
"city_name": "仪征市"
},
{
"_id": 1451,
"id": 1948,
"pid": 230,
"city_code": "101190604",
"city_name": "高邮市"
},
{
"_id": 1452,
"id": 1949,
"pid": 230,
"city_code": "101190605",
"city_name": "江都市"
},
{
"_id": 1453,
"id": 1950,
"pid": 230,
"city_code": "101190602",
"city_name": "宝应县"
},
{
"_id": 1454,
"id": 1953,
"pid": 231,
"city_code": "101190305",
"city_name": "丹徒区"
},
{
"_id": 1455,
"id": 1954,
"pid": 231,
"city_code": "101190302",
"city_name": "丹阳市"
},
{
"_id": 1456,
"id": 1955,
"pid": 231,
"city_code": "101190303",
"city_name": "扬中市"
},
{
"_id": 1457,
"id": 1956,
"pid": 231,
"city_code": "101190304",
"city_name": "句容市"
},
{
"_id": 1458,
"id": 1965,
"pid": 232,
"city_code": "101240103",
"city_name": "南昌县"
},
{
"_id": 1459,
"id": 1966,
"pid": 232,
"city_code": "101240102",
"city_name": "新建县"
},
{
"_id": 1460,
"id": 1967,
"pid": 232,
"city_code": "101240104",
"city_name": "安义县"
},
{
"_id": 1461,
"id": 1968,
"pid": 232,
"city_code": "101240105",
"city_name": "进贤县"
},
{
"_id": 1462,
"id": 1970,
"pid": 233,
"city_code": "101240408",
"city_name": "南城县"
},
{
"_id": 1463,
"id": 1971,
"pid": 233,
"city_code": "101240410",
"city_name": "黎川县"
},
{
"_id": 1464,
"id": 1972,
"pid": 233,
"city_code": "101240409",
"city_name": "南丰县"
},
{
"_id": 1465,
"id": 1973,
"pid": 233,
"city_code": "101240404",
"city_name": "崇仁县"
},
{
"_id": 1466,
"id": 1974,
"pid": 233,
"city_code": "101240403",
"city_name": "乐安县"
},
{
"_id": 1467,
"id": 1975,
"pid": 233,
"city_code": "101240407",
"city_name": "宜黄县"
},
{
"_id": 1468,
"id": 1976,
"pid": 233,
"city_code": "101240405",
"city_name": "金溪县"
},
{
"_id": 1469,
"id": 1977,
"pid": 233,
"city_code": "101240406",
"city_name": "资溪县"
},
{
"_id": 1470,
"id": 1978,
"pid": 233,
"city_code": "101240411",
"city_name": "东乡县"
},
{
"_id": 1471,
"id": 1979,
"pid": 233,
"city_code": "101240402",
"city_name": "广昌县"
},
{
"_id": 1472,
"id": 1981,
"pid": 234,
"city_code": "101240710",
"city_name": "于都县"
},
{
"_id": 1473,
"id": 1982,
"pid": 234,
"city_code": "101240709",
"city_name": "瑞金市"
},
{
"_id": 1474,
"id": 1983,
"pid": 234,
"city_code": "101240704",
"city_name": "南康市"
},
{
"_id": 1475,
"id": 1984,
"pid": 234,
"city_code": "101240718",
"city_name": "赣县"
},
{
"_id": 1476,
"id": 1985,
"pid": 234,
"city_code": "101240706",
"city_name": "信丰县"
},
{
"_id": 1477,
"id": 1986,
"pid": 234,
"city_code": "101240705",
"city_name": "大余县"
},
{
"_id": 1478,
"id": 1987,
"pid": 234,
"city_code": "101240703",
"city_name": "上犹县"
},
{
"_id": 1479,
"id": 1988,
"pid": 234,
"city_code": "101240702",
"city_name": "崇义县"
},
{
"_id": 1480,
"id": 1989,
"pid": 234,
"city_code": "101240712",
"city_name": "安远县"
},
{
"_id": 1481,
"id": 1990,
"pid": 234,
"city_code": "101240714",
"city_name": "龙南县"
},
{
"_id": 1482,
"id": 1991,
"pid": 234,
"city_code": "101240715",
"city_name": "定南县"
},
{
"_id": 1483,
"id": 1992,
"pid": 234,
"city_code": "101240713",
"city_name": "全南县"
},
{
"_id": 1484,
"id": 1993,
"pid": 234,
"city_code": "101240707",
"city_name": "宁都县"
},
{
"_id": 1485,
"id": 1994,
"pid": 234,
"city_code": "101240717",
"city_name": "兴国县"
},
{
"_id": 1486,
"id": 1995,
"pid": 234,
"city_code": "101240711",
"city_name": "会昌县"
},
{
"_id": 1487,
"id": 1996,
"pid": 234,
"city_code": "101240716",
"city_name": "寻乌县"
},
{
"_id": 1488,
"id": 1997,
"pid": 234,
"city_code": "101240708",
"city_name": "石城县"
},
{
"_id": 1489,
"id": 1998,
"pid": 235,
"city_code": "101240612",
"city_name": "安福县"
},
{
"_id": 1490,
"id": 2001,
"pid": 235,
"city_code": "101240608",
"city_name": "井冈山市"
},
{
"_id": 1491,
"id": 2002,
"pid": 235,
"city_code": "101240602",
"city_name": "吉安县"
},
{
"_id": 1492,
"id": 2003,
"pid": 235,
"city_code": "101240603",
"city_name": "吉水县"
},
{
"_id": 1493,
"id": 2004,
"pid": 235,
"city_code": "101240605",
"city_name": "峡江县"
},
{
"_id": 1494,
"id": 2005,
"pid": 235,
"city_code": "101240604",
"city_name": "新干县"
},
{
"_id": 1495,
"id": 2006,
"pid": 235,
"city_code": "101240606",
"city_name": "永丰县"
},
{
"_id": 1496,
"id": 2007,
"pid": 235,
"city_code": "101240611",
"city_name": "泰和县"
},
{
"_id": 1497,
"id": 2008,
"pid": 235,
"city_code": "101240610",
"city_name": "遂川县"
},
{
"_id": 1498,
"id": 2009,
"pid": 235,
"city_code": "101240609",
"city_name": "万安县"
},
{
"_id": 1499,
"id": 2010,
"pid": 235,
"city_code": "101240607",
"city_name": "永新县"
},
{
"_id": 1500,
"id": 2013,
"pid": 236,
"city_code": "101240802",
"city_name": "乐平市"
},
{
"_id": 1501,
"id": 2014,
"pid": 236,
"city_code": "101240803",
"city_name": "浮梁县"
},
{
"_id": 1502,
"id": 2016,
"pid": 237,
"city_code": "101240203",
"city_name": "庐山区"
},
{
"_id": 1503,
"id": 2017,
"pid": 237,
"city_code": "101240202",
"city_name": "瑞昌市"
},
{
"_id": 1504,
"id": 2018,
"pid": 237,
"city_code": "101240201",
"city_name": "九江县"
},
{
"_id": 1505,
"id": 2019,
"pid": 237,
"city_code": "101240204",
"city_name": "武宁县"
},
{
"_id": 1506,
"id": 2020,
"pid": 237,
"city_code": "101240212",
"city_name": "修水县"
},
{
"_id": 1507,
"id": 2021,
"pid": 237,
"city_code": "101240206",
"city_name": "永修县"
},
{
"_id": 1508,
"id": 2022,
"pid": 237,
"city_code": "101240205",
"city_name": "德安县"
},
{
"_id": 1509,
"id": 2023,
"pid": 237,
"city_code": "101240209",
"city_name": "星子县"
},
{
"_id": 1510,
"id": 2024,
"pid": 237,
"city_code": "101240210",
"city_name": "都昌县"
},
{
"_id": 1511,
"id": 2025,
"pid": 237,
"city_code": "101240207",
"city_name": "湖口县"
},
{
"_id": 1512,
"id": 2026,
"pid": 237,
"city_code": "101240208",
"city_name": "彭泽县"
},
{
"_id": 1513,
"id": 2027,
"pid": 238,
"city_code": "101240904",
"city_name": "安源区"
},
{
"_id": 1514,
"id": 2028,
"pid": 238,
"city_code": "101240906",
"city_name": "湘东区"
},
{
"_id": 1515,
"id": 2029,
"pid": 238,
"city_code": "101240902",
"city_name": "莲花县"
},
{
"_id": 1516,
"id": 2030,
"pid": 238,
"city_code": "101240905",
"city_name": "芦溪县"
},
{
"_id": 1517,
"id": 2031,
"pid": 238,
"city_code": "101240903",
"city_name": "上栗县"
},
{
"_id": 1518,
"id": 2033,
"pid": 239,
"city_code": "101240307",
"city_name": "德兴市"
},
{
"_id": 1519,
"id": 2034,
"pid": 239,
"city_code": "101240308",
"city_name": "上饶县"
},
{
"_id": 1520,
"id": 2035,
"pid": 239,
"city_code": "101240313",
"city_name": "广丰县"
},
{
"_id": 1521,
"id": 2036,
"pid": 239,
"city_code": "101240312",
"city_name": "玉山县"
},
{
"_id": 1522,
"id": 2037,
"pid": 239,
"city_code": "101240311",
"city_name": "铅山县"
},
{
"_id": 1523,
"id": 2038,
"pid": 239,
"city_code": "101240310",
"city_name": "横峰县"
},
{
"_id": 1524,
"id": 2039,
"pid": 239,
"city_code": "101240309",
"city_name": "弋阳县"
},
{
"_id": 1525,
"id": 2040,
"pid": 239,
"city_code": "101240305",
"city_name": "余干县"
},
{
"_id": 1526,
"id": 2041,
"pid": 239,
"city_code": "101240302",
"city_name": "鄱阳县"
},
{
"_id": 1527,
"id": 2042,
"pid": 239,
"city_code": "101240306",
"city_name": "万年县"
},
{
"_id": 1528,
"id": 2043,
"pid": 239,
"city_code": "101240303",
"city_name": "婺源县"
},
{
"_id": 1529,
"id": 2045,
"pid": 240,
"city_code": "101241002",
"city_name": "分宜县"
},
{
"_id": 1530,
"id": 2047,
"pid": 241,
"city_code": "101240510",
"city_name": "丰城市"
},
{
"_id": 1531,
"id": 2048,
"pid": 241,
"city_code": "101240509",
"city_name": "樟树市"
},
{
"_id": 1532,
"id": 2049,
"pid": 241,
"city_code": "101240508",
"city_name": "高安市"
},
{
"_id": 1533,
"id": 2050,
"pid": 241,
"city_code": "101240507",
"city_name": "奉新县"
},
{
"_id": 1534,
"id": 2051,
"pid": 241,
"city_code": "101240504",
"city_name": "万载县"
},
{
"_id": 1535,
"id": 2052,
"pid": 241,
"city_code": "101240505",
"city_name": "上高县"
},
{
"_id": 1536,
"id": 2053,
"pid": 241,
"city_code": "101240503",
"city_name": "宜丰县"
},
{
"_id": 1537,
"id": 2054,
"pid": 241,
"city_code": "101240506",
"city_name": "靖安县"
},
{
"_id": 1538,
"id": 2055,
"pid": 241,
"city_code": "101240502",
"city_name": "铜鼓县"
},
{
"_id": 1539,
"id": 2057,
"pid": 242,
"city_code": "101241103",
"city_name": "贵溪市"
},
{
"_id": 1540,
"id": 2058,
"pid": 242,
"city_code": "101241102",
"city_name": "余江县"
},
{
"_id": 1541,
"id": 2064,
"pid": 243,
"city_code": "101070102",
"city_name": "苏家屯区"
},
{
"_id": 1542,
"id": 2067,
"pid": 243,
"city_code": "101070107",
"city_name": "于洪区"
},
{
"_id": 1543,
"id": 2069,
"pid": 243,
"city_code": "101070106",
"city_name": "新民市"
},
{
"_id": 1544,
"id": 2070,
"pid": 243,
"city_code": "101070103",
"city_name": "辽中县"
},
{
"_id": 1545,
"id": 2071,
"pid": 243,
"city_code": "101070104",
"city_name": "康平县"
},
{
"_id": 1546,
"id": 2072,
"pid": 243,
"city_code": "101070105",
"city_name": "法库县"
},
{
"_id": 1547,
"id": 2077,
"pid": 244,
"city_code": "101070205",
"city_name": "旅顺口区"
},
{
"_id": 1548,
"id": 2078,
"pid": 244,
"city_code": "101070203",
"city_name": "金州区"
},
{
"_id": 1549,
"id": 2080,
"pid": 244,
"city_code": "101070202",
"city_name": "瓦房店市"
},
{
"_id": 1550,
"id": 2081,
"pid": 244,
"city_code": "101070204",
"city_name": "普兰店市"
},
{
"_id": 1551,
"id": 2082,
"pid": 244,
"city_code": "101070207",
"city_name": "庄河市"
},
{
"_id": 1552,
"id": 2083,
"pid": 244,
"city_code": "101070206",
"city_name": "长海县"
},
{
"_id": 1553,
"id": 2088,
"pid": 245,
"city_code": "101070303",
"city_name": "岫岩县"
},
{
"_id": 1554,
"id": 2089,
"pid": 245,
"city_code": "101070304",
"city_name": "海城市"
},
{
"_id": 1555,
"id": 2090,
"pid": 245,
"city_code": "101070302",
"city_name": "台安县"
},
{
"_id": 1556,
"id": 2091,
"pid": 246,
"city_code": "101070502",
"city_name": "本溪县"
},
{
"_id": 1557,
"id": 2096,
"pid": 246,
"city_code": "101070504",
"city_name": "桓仁县"
},
{
"_id": 1558,
"id": 2099,
"pid": 247,
"city_code": "101071204",
"city_name": "喀喇沁左翼蒙古族自治县"
},
{
"_id": 1559,
"id": 2100,
"pid": 247,
"city_code": "101071205",
"city_name": "北票市"
},
{
"_id": 1560,
"id": 2101,
"pid": 247,
"city_code": "101071203",
"city_name": "凌源市"
},
{
"_id": 1561,
"id": 2103,
"pid": 247,
"city_code": "101071207",
"city_name": "建平县"
},
{
"_id": 1562,
"id": 2107,
"pid": 248,
"city_code": "101070603",
"city_name": "宽甸县"
},
{
"_id": 1563,
"id": 2108,
"pid": 248,
"city_code": "101070604",
"city_name": "东港市"
},
{
"_id": 1564,
"id": 2109,
"pid": 248,
"city_code": "101070602",
"city_name": "凤城市"
},
{
"_id": 1565,
"id": 2114,
"pid": 249,
"city_code": "101070403",
"city_name": "清原县"
},
{
"_id": 1566,
"id": 2115,
"pid": 249,
"city_code": "101070402",
"city_name": "新宾县"
},
{
"_id": 1567,
"id": 2116,
"pid": 249,
"city_code": "101070401",
"city_name": "抚顺县"
},
{
"_id": 1568,
"id": 2123,
"pid": 250,
"city_code": "101070902",
"city_name": "彰武县"
},
{
"_id": 1569,
"id": 2127,
"pid": 251,
"city_code": "101071404",
"city_name": "兴城市"
},
{
"_id": 1570,
"id": 2128,
"pid": 251,
"city_code": "101071403",
"city_name": "绥中县"
},
{
"_id": 1571,
"id": 2129,
"pid": 251,
"city_code": "101071402",
"city_name": "建昌县"
},
{
"_id": 1572,
"id": 2133,
"pid": 252,
"city_code": "101070702",
"city_name": "凌海市"
},
{
"_id": 1573,
"id": 2134,
"pid": 252,
"city_code": "101070706",
"city_name": "北镇市"
},
{
"_id": 1574,
"id": 2135,
"pid": 252,
"city_code": "101070705",
"city_name": "黑山县"
},
{
"_id": 1575,
"id": 2136,
"pid": 252,
"city_code": "101070704",
"city_name": "义县"
},
{
"_id": 1576,
"id": 2141,
"pid": 253,
"city_code": "101071004",
"city_name": "弓长岭区"
},
{
"_id": 1577,
"id": 2142,
"pid": 253,
"city_code": "101071003",
"city_name": "灯塔市"
},
{
"_id": 1578,
"id": 2143,
"pid": 253,
"city_code": "101071002",
"city_name": "辽阳县"
},
{
"_id": 1579,
"id": 2146,
"pid": 254,
"city_code": "101071302",
"city_name": "大洼县"
},
{
"_id": 1580,
"id": 2147,
"pid": 254,
"city_code": "101071303",
"city_name": "盘山县"
},
{
"_id": 1581,
"id": 2150,
"pid": 255,
"city_code": "101071105",
"city_name": "调兵山市"
},
{
"_id": 1582,
"id": 2151,
"pid": 255,
"city_code": "101071102",
"city_name": "开原市"
},
{
"_id": 1583,
"id": 2152,
"pid": 255,
"city_code": "101071101",
"city_name": "铁岭县"
},
{
"_id": 1584,
"id": 2153,
"pid": 255,
"city_code": "101071104",
"city_name": "西丰县"
},
{
"_id": 1585,
"id": 2154,
"pid": 255,
"city_code": "101071103",
"city_name": "昌图县"
},
{
"_id": 1586,
"id": 2159,
"pid": 256,
"city_code": "101070803",
"city_name": "盖州市"
},
{
"_id": 1587,
"id": 2160,
"pid": 256,
"city_code": "101070802",
"city_name": "大石桥市"
},
{
"_id": 1588,
"id": 2165,
"pid": 257,
"city_code": "101080105",
"city_name": "清水河县"
},
{
"_id": 1589,
"id": 2166,
"pid": 257,
"city_code": "101080102",
"city_name": "土默特左旗"
},
{
"_id": 1590,
"id": 2167,
"pid": 257,
"city_code": "101080103",
"city_name": "托克托县"
},
{
"_id": 1591,
"id": 2168,
"pid": 257,
"city_code": "101080104",
"city_name": "和林格尔县"
},
{
"_id": 1592,
"id": 2169,
"pid": 257,
"city_code": "101080107",
"city_name": "武川县"
},
{
"_id": 1593,
"id": 2170,
"pid": 258,
"city_code": "101081201",
"city_name": "阿拉善左旗"
},
{
"_id": 1594,
"id": 2171,
"pid": 258,
"city_code": "101081202",
"city_name": "阿拉善右旗"
},
{
"_id": 1595,
"id": 2172,
"pid": 258,
"city_code": "101081203",
"city_name": "额济纳旗"
},
{
"_id": 1596,
"id": 2173,
"pid": 259,
"city_code": "101080801",
"city_name": "临河区"
},
{
"_id": 1597,
"id": 2174,
"pid": 259,
"city_code": "101080802",
"city_name": "五原县"
},
{
"_id": 1598,
"id": 2175,
"pid": 259,
"city_code": "101080803",
"city_name": "磴口县"
},
{
"_id": 1599,
"id": 2176,
"pid": 259,
"city_code": "101080804",
"city_name": "乌拉特前旗"
},
{
"_id": 1600,
"id": 2177,
"pid": 259,
"city_code": "101080806",
"city_name": "乌拉特中旗"
},
{
"_id": 1601,
"id": 2178,
"pid": 259,
"city_code": "101080807",
"city_name": "乌拉特后旗"
},
{
"_id": 1602,
"id": 2179,
"pid": 259,
"city_code": "101080810",
"city_name": "杭锦后旗"
},
{
"_id": 1603,
"id": 2184,
"pid": 260,
"city_code": "101080207",
"city_name": "石拐区"
},
{
"_id": 1604,
"id": 2185,
"pid": 260,
"city_code": "101080202",
"city_name": "白云鄂博"
},
{
"_id": 1605,
"id": 2186,
"pid": 260,
"city_code": "101080204",
"city_name": "土默特右旗"
},
{
"_id": 1606,
"id": 2187,
"pid": 260,
"city_code": "101080205",
"city_name": "固阳县"
},
{
"_id": 1607,
"id": 2188,
"pid": 260,
"city_code": "101080206",
"city_name": "达尔罕茂明安联合旗"
},
{
"_id": 1608,
"id": 2192,
"pid": 261,
"city_code": "101080603",
"city_name": "阿鲁科尔沁旗"
},
{
"_id": 1609,
"id": 2193,
"pid": 261,
"city_code": "101080605",
"city_name": "巴林左旗"
},
{
"_id": 1610,
"id": 2194,
"pid": 261,
"city_code": "101080606",
"city_name": "巴林右旗"
},
{
"_id": 1611,
"id": 2195,
"pid": 261,
"city_code": "101080607",
"city_name": "林西县"
},
{
"_id": 1612,
"id": 2196,
"pid": 261,
"city_code": "101080608",
"city_name": "克什克腾旗"
},
{
"_id": 1613,
"id": 2197,
"pid": 261,
"city_code": "101080609",
"city_name": "翁牛特旗"
},
{
"_id": 1614,
"id": 2198,
"pid": 261,
"city_code": "101080611",
"city_name": "喀喇沁旗"
},
{
"_id": 1615,
"id": 2199,
"pid": 261,
"city_code": "101080613",
"city_name": "宁城县"
},
{
"_id": 1616,
"id": 2200,
"pid": 261,
"city_code": "101080614",
"city_name": "敖汉旗"
},
{
"_id": 1617,
"id": 2201,
"pid": 262,
"city_code": "101080713",
"city_name": "东胜区"
},
{
"_id": 1618,
"id": 2202,
"pid": 262,
"city_code": "101080703",
"city_name": "达拉特旗"
},
{
"_id": 1619,
"id": 2203,
"pid": 262,
"city_code": "101080704",
"city_name": "准格尔旗"
},
{
"_id": 1620,
"id": 2204,
"pid": 262,
"city_code": "101080705",
"city_name": "鄂托克前旗"
},
{
"_id": 1621,
"id": 2205,
"pid": 262,
"city_code": "101080708",
"city_name": "鄂托克旗"
},
{
"_id": 1622,
"id": 2206,
"pid": 262,
"city_code": "101080709",
"city_name": "杭锦旗"
},
{
"_id": 1623,
"id": 2207,
"pid": 262,
"city_code": "101080710",
"city_name": "乌审旗"
},
{
"_id": 1624,
"id": 2208,
"pid": 262,
"city_code": "101080711",
"city_name": "伊金霍洛旗"
},
{
"_id": 1625,
"id": 2209,
"pid": 263,
"city_code": "101081001",
"city_name": "海拉尔区"
},
{
"_id": 1626,
"id": 2210,
"pid": 263,
"city_code": "101081004",
"city_name": "莫力达瓦"
},
{
"_id": 1627,
"id": 2211,
"pid": 263,
"city_code": "101081010",
"city_name": "满洲里市"
},
{
"_id": 1628,
"id": 2212,
"pid": 263,
"city_code": "101081011",
"city_name": "牙克石市"
},
{
"_id": 1629,
"id": 2213,
"pid": 263,
"city_code": "101081012",
"city_name": "扎兰屯市"
},
{
"_id": 1630,
"id": 2214,
"pid": 263,
"city_code": "101081014",
"city_name": "额尔古纳市"
},
{
"_id": 1631,
"id": 2215,
"pid": 263,
"city_code": "101081015",
"city_name": "根河市"
},
{
"_id": 1632,
"id": 2216,
"pid": 263,
"city_code": "101081003",
"city_name": "阿荣旗"
},
{
"_id": 1633,
"id": 2217,
"pid": 263,
"city_code": "101081005",
"city_name": "鄂伦春自治旗"
},
{
"_id": 1634,
"id": 2218,
"pid": 263,
"city_code": "101081006",
"city_name": "鄂温克族自治旗"
},
{
"_id": 1635,
"id": 2219,
"pid": 263,
"city_code": "101081007",
"city_name": "陈巴尔虎旗"
},
{
"_id": 1636,
"id": 2220,
"pid": 263,
"city_code": "101081008",
"city_name": "新巴尔虎左旗"
},
{
"_id": 1637,
"id": 2221,
"pid": 263,
"city_code": "101081009",
"city_name": "新巴尔虎右旗"
},
{
"_id": 1638,
"id": 2223,
"pid": 264,
"city_code": "101080512",
"city_name": "霍林郭勒市"
},
{
"_id": 1639,
"id": 2224,
"pid": 264,
"city_code": "101080503",
"city_name": "科尔沁左翼中旗"
},
{
"_id": 1640,
"id": 2225,
"pid": 264,
"city_code": "101080504",
"city_name": "科尔沁左翼后旗"
},
{
"_id": 1641,
"id": 2226,
"pid": 264,
"city_code": "101080506",
"city_name": "开鲁县"
},
{
"_id": 1642,
"id": 2227,
"pid": 264,
"city_code": "101080507",
"city_name": "库伦旗"
},
{
"_id": 1643,
"id": 2228,
"pid": 264,
"city_code": "101080508",
"city_name": "奈曼旗"
},
{
"_id": 1644,
"id": 2229,
"pid": 264,
"city_code": "101080509",
"city_name": "扎鲁特旗"
},
{
"_id": 1645,
"id": 2233,
"pid": 266,
"city_code": "101080403",
"city_name": "化德县"
},
{
"_id": 1646,
"id": 2234,
"pid": 266,
"city_code": "101080401",
"city_name": "集宁区"
},
{
"_id": 1647,
"id": 2235,
"pid": 266,
"city_code": "101080412",
"city_name": "丰镇市"
},
{
"_id": 1648,
"id": 2236,
"pid": 266,
"city_code": "101080402",
"city_name": "卓资县"
},
{
"_id": 1649,
"id": 2237,
"pid": 266,
"city_code": "101080404",
"city_name": "商都县"
},
{
"_id": 1650,
"id": 2238,
"pid": 266,
"city_code": "101080406",
"city_name": "兴和县"
},
{
"_id": 1651,
"id": 2239,
"pid": 266,
"city_code": "101080407",
"city_name": "凉城县"
},
{
"_id": 1652,
"id": 2240,
"pid": 266,
"city_code": "101080408",
"city_name": "察哈尔右翼前旗"
},
{
"_id": 1653,
"id": 2241,
"pid": 266,
"city_code": "101080409",
"city_name": "察哈尔右翼中旗"
},
{
"_id": 1654,
"id": 2242,
"pid": 266,
"city_code": "101080410",
"city_name": "察哈尔右翼后旗"
},
{
"_id": 1655,
"id": 2243,
"pid": 266,
"city_code": "101080411",
"city_name": "四子王旗"
},
{
"_id": 1656,
"id": 2244,
"pid": 267,
"city_code": "101080903",
"city_name": "二连浩特市"
},
{
"_id": 1657,
"id": 2245,
"pid": 267,
"city_code": "101080901",
"city_name": "锡林浩特市"
},
{
"_id": 1658,
"id": 2246,
"pid": 267,
"city_code": "101080904",
"city_name": "阿巴嘎旗"
},
{
"_id": 1659,
"id": 2247,
"pid": 267,
"city_code": "101080906",
"city_name": "苏尼特左旗"
},
{
"_id": 1660,
"id": 2248,
"pid": 267,
"city_code": "101080907",
"city_name": "苏尼特右旗"
},
{
"_id": 1661,
"id": 2249,
"pid": 267,
"city_code": "101080909",
"city_name": "东乌珠穆沁旗"
},
{
"_id": 1662,
"id": 2250,
"pid": 267,
"city_code": "101080910",
"city_name": "西乌珠穆沁旗"
},
{
"_id": 1663,
"id": 2251,
"pid": 267,
"city_code": "101080911",
"city_name": "太仆寺旗"
},
{
"_id": 1664,
"id": 2252,
"pid": 267,
"city_code": "101080912",
"city_name": "镶黄旗"
},
{
"_id": 1665,
"id": 2253,
"pid": 267,
"city_code": "101080913",
"city_name": "正镶白旗"
},
{
"_id": 1666,
"id": 2255,
"pid": 267,
"city_code": "101080915",
"city_name": "多伦县"
},
{
"_id": 1667,
"id": 2256,
"pid": 268,
"city_code": "101081101",
"city_name": "乌兰浩特市"
},
{
"_id": 1668,
"id": 2257,
"pid": 268,
"city_code": "101081102",
"city_name": "阿尔山市"
},
{
"_id": 1669,
"id": 2258,
"pid": 268,
"city_code": "101081109",
"city_name": "科尔沁右翼前旗"
},
{
"_id": 1670,
"id": 2259,
"pid": 268,
"city_code": "101081103",
"city_name": "科尔沁右翼中旗"
},
{
"_id": 1671,
"id": 2260,
"pid": 268,
"city_code": "101081105",
"city_name": "扎赉特旗"
},
{
"_id": 1672,
"id": 2261,
"pid": 268,
"city_code": "101081107",
"city_name": "突泉县"
},
{
"_id": 1673,
"id": 2265,
"pid": 269,
"city_code": "101170103",
"city_name": "灵武市"
},
{
"_id": 1674,
"id": 2266,
"pid": 269,
"city_code": "101170102",
"city_name": "永宁县"
},
{
"_id": 1675,
"id": 2267,
"pid": 269,
"city_code": "101170104",
"city_name": "贺兰县"
},
{
"_id": 1676,
"id": 2270,
"pid": 270,
"city_code": "101170402",
"city_name": "西吉县"
},
{
"_id": 1677,
"id": 2271,
"pid": 270,
"city_code": "101170403",
"city_name": "隆德县"
},
{
"_id": 1678,
"id": 2272,
"pid": 270,
"city_code": "101170404",
"city_name": "泾源县"
},
{
"_id": 1679,
"id": 2273,
"pid": 270,
"city_code": "101170406",
"city_name": "彭阳县"
},
{
"_id": 1680,
"id": 2274,
"pid": 271,
"city_code": "101170202",
"city_name": "惠农区"
},
{
"_id": 1681,
"id": 2275,
"pid": 271,
"city_code": "101170206",
"city_name": "大武口区"
},
{
"_id": 1682,
"id": 2276,
"pid": 271,
"city_code": "101170202",
"city_name": "惠农区"
},
{
"_id": 1683,
"id": 2277,
"pid": 271,
"city_code": "101170204",
"city_name": "陶乐县"
},
{
"_id": 1684,
"id": 2278,
"pid": 271,
"city_code": "101170203",
"city_name": "平罗县"
},
{
"_id": 1685,
"id": 2281,
"pid": 272,
"city_code": "101170306",
"city_name": "青铜峡市"
},
{
"_id": 1686,
"id": 2283,
"pid": 272,
"city_code": "101170303",
"city_name": "盐池县"
},
{
"_id": 1687,
"id": 2284,
"pid": 272,
"city_code": "101170302",
"city_name": "同心县"
},
{
"_id": 1688,
"id": 2286,
"pid": 273,
"city_code": "101170504",
"city_name": "海原县"
},
{
"_id": 1689,
"id": 2287,
"pid": 273,
"city_code": "101170502",
"city_name": "中宁县"
},
{
"_id": 1690,
"id": 2292,
"pid": 274,
"city_code": "101150104",
"city_name": "湟中县"
},
{
"_id": 1691,
"id": 2293,
"pid": 274,
"city_code": "101150103",
"city_name": "湟源县"
},
{
"_id": 1692,
"id": 2294,
"pid": 274,
"city_code": "101150102",
"city_name": "大通县"
},
{
"_id": 1693,
"id": 2295,
"pid": 275,
"city_code": "101150508",
"city_name": "玛沁县"
},
{
"_id": 1694,
"id": 2296,
"pid": 275,
"city_code": "101150502",
"city_name": "班玛县"
},
{
"_id": 1695,
"id": 2297,
"pid": 275,
"city_code": "101150503",
"city_name": "甘德县"
},
{
"_id": 1696,
"id": 2298,
"pid": 275,
"city_code": "101150504",
"city_name": "达日县"
},
{
"_id": 1697,
"id": 2299,
"pid": 275,
"city_code": "101150505",
"city_name": "久治县"
},
{
"_id": 1698,
"id": 2300,
"pid": 275,
"city_code": "101150506",
"city_name": "玛多县"
},
{
"_id": 1699,
"id": 2301,
"pid": 276,
"city_code": "101150804",
"city_name": "海晏县"
},
{
"_id": 1700,
"id": 2302,
"pid": 276,
"city_code": "101150803",
"city_name": "祁连县"
},
{
"_id": 1701,
"id": 2303,
"pid": 276,
"city_code": "101150806",
"city_name": "刚察县"
},
{
"_id": 1702,
"id": 2304,
"pid": 276,
"city_code": "101150802",
"city_name": "门源县"
},
{
"_id": 1703,
"id": 2305,
"pid": 277,
"city_code": "101150208",
"city_name": "平安县"
},
{
"_id": 1704,
"id": 2306,
"pid": 277,
"city_code": "101150202",
"city_name": "乐都县"
},
{
"_id": 1705,
"id": 2307,
"pid": 277,
"city_code": "101150203",
"city_name": "民和县"
},
{
"_id": 1706,
"id": 2308,
"pid": 277,
"city_code": "101150204",
"city_name": "互助县"
},
{
"_id": 1707,
"id": 2309,
"pid": 277,
"city_code": "101150205",
"city_name": "化隆县"
},
{
"_id": 1708,
"id": 2310,
"pid": 277,
"city_code": "101150206",
"city_name": "循化县"
},
{
"_id": 1709,
"id": 2311,
"pid": 278,
"city_code": "101150409",
"city_name": "共和县"
},
{
"_id": 1710,
"id": 2312,
"pid": 278,
"city_code": "101150408",
"city_name": "同德县"
},
{
"_id": 1711,
"id": 2313,
"pid": 278,
"city_code": "101150404",
"city_name": "贵德县"
},
{
"_id": 1712,
"id": 2314,
"pid": 278,
"city_code": "101150406",
"city_name": "兴海县"
},
{
"_id": 1713,
"id": 2315,
"pid": 278,
"city_code": "101150407",
"city_name": "贵南县"
},
{
"_id": 1714,
"id": 2316,
"pid": 279,
"city_code": "101150716",
"city_name": "德令哈市"
},
{
"_id": 1715,
"id": 2317,
"pid": 279,
"city_code": "101150702",
"city_name": "格尔木市"
},
{
"_id": 1716,
"id": 2318,
"pid": 279,
"city_code": "101150709",
"city_name": "乌兰县"
},
{
"_id": 1717,
"id": 2319,
"pid": 279,
"city_code": "101150710",
"city_name": "都兰县"
},
{
"_id": 1718,
"id": 2320,
"pid": 279,
"city_code": "101150708",
"city_name": "天峻县"
},
{
"_id": 1719,
"id": 2321,
"pid": 280,
"city_code": "101150305",
"city_name": "同仁县"
},
{
"_id": 1720,
"id": 2322,
"pid": 280,
"city_code": "101150302",
"city_name": "尖扎县"
},
{
"_id": 1721,
"id": 2323,
"pid": 280,
"city_code": "101150303",
"city_name": "泽库县"
},
{
"_id": 1722,
"id": 2324,
"pid": 280,
"city_code": "101150304",
"city_name": "河南蒙古族自治县"
},
{
"_id": 1723,
"id": 2325,
"pid": 281,
"city_code": "101150601",
"city_name": "玉树县"
},
{
"_id": 1724,
"id": 2326,
"pid": 281,
"city_code": "101150604",
"city_name": "杂多县"
},
{
"_id": 1725,
"id": 2327,
"pid": 281,
"city_code": "101150602",
"city_name": "称多县"
},
{
"_id": 1726,
"id": 2328,
"pid": 281,
"city_code": "101150603",
"city_name": "治多县"
},
{
"_id": 1727,
"id": 2329,
"pid": 281,
"city_code": "101150605",
"city_name": "囊谦县"
},
{
"_id": 1728,
"id": 2330,
"pid": 281,
"city_code": "101150606",
"city_name": "曲麻莱县"
},
{
"_id": 1729,
"id": 2336,
"pid": 282,
"city_code": "101120102",
"city_name": "长清区"
},
{
"_id": 1730,
"id": 2337,
"pid": 282,
"city_code": "101120104",
"city_name": "章丘市"
},
{
"_id": 1731,
"id": 2338,
"pid": 282,
"city_code": "101120105",
"city_name": "平阴县"
},
{
"_id": 1732,
"id": 2339,
"pid": 282,
"city_code": "101120106",
"city_name": "济阳县"
},
{
"_id": 1733,
"id": 2340,
"pid": 282,
"city_code": "101120103",
"city_name": "商河县"
},
{
"_id": 1734,
"id": 2347,
"pid": 283,
"city_code": "101120202",
"city_name": "崂山区"
},
{
"_id": 1735,
"id": 2348,
"pid": 283,
"city_code": "101120205",
"city_name": "胶州市"
},
{
"_id": 1736,
"id": 2349,
"pid": 283,
"city_code": "101120204",
"city_name": "即墨市"
},
{
"_id": 1737,
"id": 2350,
"pid": 283,
"city_code": "101120208",
"city_name": "平度市"
},
{
"_id": 1738,
"id": 2351,
"pid": 283,
"city_code": "101120206",
"city_name": "胶南市"
},
{
"_id": 1739,
"id": 2352,
"pid": 283,
"city_code": "101120207",
"city_name": "莱西市"
},
{
"_id": 1740,
"id": 2354,
"pid": 284,
"city_code": "101121105",
"city_name": "惠民县"
},
{
"_id": 1741,
"id": 2355,
"pid": 284,
"city_code": "101121104",
"city_name": "阳信县"
},
{
"_id": 1742,
"id": 2356,
"pid": 284,
"city_code": "101121103",
"city_name": "无棣县"
},
{
"_id": 1743,
"id": 2357,
"pid": 284,
"city_code": "101121106",
"city_name": "沾化县"
},
{
"_id": 1744,
"id": 2358,
"pid": 284,
"city_code": "101121102",
"city_name": "博兴县"
},
{
"_id": 1745,
"id": 2359,
"pid": 284,
"city_code": "101121107",
"city_name": "邹平县"
},
{
"_id": 1746,
"id": 2361,
"pid": 285,
"city_code": "101120404",
"city_name": "陵县"
},
{
"_id": 1747,
"id": 2362,
"pid": 285,
"city_code": "101120406",
"city_name": "乐陵市"
},
{
"_id": 1748,
"id": 2363,
"pid": 285,
"city_code": "101120411",
"city_name": "禹城市"
},
{
"_id": 1749,
"id": 2364,
"pid": 285,
"city_code": "101120409",
"city_name": "宁津县"
},
{
"_id": 1750,
"id": 2365,
"pid": 285,
"city_code": "101120407",
"city_name": "庆云县"
},
{
"_id": 1751,
"id": 2366,
"pid": 285,
"city_code": "101120403",
"city_name": "临邑县"
},
{
"_id": 1752,
"id": 2367,
"pid": 285,
"city_code": "101120405",
"city_name": "齐河县"
},
{
"_id": 1753,
"id": 2368,
"pid": 285,
"city_code": "101120408",
"city_name": "平原县"
},
{
"_id": 1754,
"id": 2369,
"pid": 285,
"city_code": "101120410",
"city_name": "夏津县"
},
{
"_id": 1755,
"id": 2370,
"pid": 285,
"city_code": "101120402",
"city_name": "武城县"
},
{
"_id": 1756,
"id": 2371,
"pid": 286,
"city_code": "101121201",
"city_name": "东营区"
},
{
"_id": 1757,
"id": 2372,
"pid": 286,
"city_code": "101121202",
"city_name": "河口区"
},
{
"_id": 1758,
"id": 2373,
"pid": 286,
"city_code": "101121203",
"city_name": "垦利县"
},
{
"_id": 1759,
"id": 2374,
"pid": 286,
"city_code": "101121204",
"city_name": "利津县"
},
{
"_id": 1760,
"id": 2375,
"pid": 286,
"city_code": "101121205",
"city_name": "广饶县"
},
{
"_id": 1761,
"id": 2377,
"pid": 287,
"city_code": "101121007",
"city_name": "曹县"
},
{
"_id": 1762,
"id": 2378,
"pid": 287,
"city_code": "101121009",
"city_name": "单县"
},
{
"_id": 1763,
"id": 2379,
"pid": 287,
"city_code": "101121008",
"city_name": "成武县"
},
{
"_id": 1764,
"id": 2380,
"pid": 287,
"city_code": "101121006",
"city_name": "巨野县"
},
{
"_id": 1765,
"id": 2381,
"pid": 287,
"city_code": "101121003",
"city_name": "郓城县"
},
{
"_id": 1766,
"id": 2382,
"pid": 287,
"city_code": "101121002",
"city_name": "鄄城县"
},
{
"_id": 1767,
"id": 2383,
"pid": 287,
"city_code": "101121005",
"city_name": "定陶县"
},
{
"_id": 1768,
"id": 2384,
"pid": 287,
"city_code": "101121004",
"city_name": "东明县"
},
{
"_id": 1769,
"id": 2387,
"pid": 288,
"city_code": "101120710",
"city_name": "曲阜市"
},
{
"_id": 1770,
"id": 2388,
"pid": 288,
"city_code": "101120705",
"city_name": "兖州市"
},
{
"_id": 1771,
"id": 2389,
"pid": 288,
"city_code": "101120711",
"city_name": "邹城市"
},
{
"_id": 1772,
"id": 2390,
"pid": 288,
"city_code": "101120703",
"city_name": "微山县"
},
{
"_id": 1773,
"id": 2391,
"pid": 288,
"city_code": "101120704",
"city_name": "鱼台县"
},
{
"_id": 1774,
"id": 2392,
"pid": 288,
"city_code": "101120706",
"city_name": "金乡县"
},
{
"_id": 1775,
"id": 2393,
"pid": 288,
"city_code": "101120702",
"city_name": "嘉祥县"
},
{
"_id": 1776,
"id": 2394,
"pid": 288,
"city_code": "101120707",
"city_name": "汶上县"
},
{
"_id": 1777,
"id": 2395,
"pid": 288,
"city_code": "101120708",
"city_name": "泗水县"
},
{
"_id": 1778,
"id": 2396,
"pid": 288,
"city_code": "101120709",
"city_name": "梁山县"
},
{
"_id": 1779,
"id": 2400,
"pid": 290,
"city_code": "101121707",
"city_name": "临清市"
},
{
"_id": 1780,
"id": 2401,
"pid": 290,
"city_code": "101121703",
"city_name": "阳谷县"
},
{
"_id": 1781,
"id": 2402,
"pid": 290,
"city_code": "101121709",
"city_name": "莘县"
},
{
"_id": 1782,
"id": 2403,
"pid": 290,
"city_code": "101121705",
"city_name": "茌平县"
},
{
"_id": 1783,
"id": 2404,
"pid": 290,
"city_code": "101121706",
"city_name": "东阿县"
},
{
"_id": 1784,
"id": 2405,
"pid": 290,
"city_code": "101121702",
"city_name": "冠县"
},
{
"_id": 1785,
"id": 2406,
"pid": 290,
"city_code": "101121704",
"city_name": "高唐县"
},
{
"_id": 1786,
"id": 2410,
"pid": 291,
"city_code": "101120903",
"city_name": "沂南县"
},
{
"_id": 1787,
"id": 2411,
"pid": 291,
"city_code": "101120906",
"city_name": "郯城县"
},
{
"_id": 1788,
"id": 2412,
"pid": 291,
"city_code": "101120910",
"city_name": "沂水县"
},
{
"_id": 1789,
"id": 2413,
"pid": 291,
"city_code": "101120904",
"city_name": "兰陵县"
},
{
"_id": 1790,
"id": 2414,
"pid": 291,
"city_code": "101120909",
"city_name": "费县"
},
{
"_id": 1791,
"id": 2415,
"pid": 291,
"city_code": "101120908",
"city_name": "平邑县"
},
{
"_id": 1792,
"id": 2416,
"pid": 291,
"city_code": "101120902",
"city_name": "莒南县"
},
{
"_id": 1793,
"id": 2417,
"pid": 291,
"city_code": "101120907",
"city_name": "蒙阴县"
},
{
"_id": 1794,
"id": 2418,
"pid": 291,
"city_code": "101120905",
"city_name": "临沭县"
},
{
"_id": 1795,
"id": 2421,
"pid": 292,
"city_code": "101121502",
"city_name": "五莲县"
},
{
"_id": 1796,
"id": 2422,
"pid": 292,
"city_code": "101121503",
"city_name": "莒县"
},
{
"_id": 1797,
"id": 2423,
"pid": 293,
"city_code": "101120803",
"city_name": "泰山区"
},
{
"_id": 1798,
"id": 2425,
"pid": 293,
"city_code": "101120802",
"city_name": "新泰市"
},
{
"_id": 1799,
"id": 2426,
"pid": 293,
"city_code": "101120804",
"city_name": "肥城市"
},
{
"_id": 1800,
"id": 2427,
"pid": 293,
"city_code": "101120806",
"city_name": "宁阳县"
},
{
"_id": 1801,
"id": 2428,
"pid": 293,
"city_code": "101120805",
"city_name": "东平县"
},
{
"_id": 1802,
"id": 2429,
"pid": 294,
"city_code": "101121303",
"city_name": "荣成市"
},
{
"_id": 1803,
"id": 2430,
"pid": 294,
"city_code": "101121304",
"city_name": "乳山市"
},
{
"_id": 1804,
"id": 2432,
"pid": 294,
"city_code": "101121302",
"city_name": "文登市"
},
{
"_id": 1805,
"id": 2437,
"pid": 295,
"city_code": "101120602",
"city_name": "青州市"
},
{
"_id": 1806,
"id": 2438,
"pid": 295,
"city_code": "101120609",
"city_name": "诸城市"
},
{
"_id": 1807,
"id": 2439,
"pid": 295,
"city_code": "101120603",
"city_name": "寿光市"
},
{
"_id": 1808,
"id": 2440,
"pid": 295,
"city_code": "101120607",
"city_name": "安丘市"
},
{
"_id": 1809,
"id": 2441,
"pid": 295,
"city_code": "101120608",
"city_name": "高密市"
},
{
"_id": 1810,
"id": 2442,
"pid": 295,
"city_code": "101120606",
"city_name": "昌邑市"
},
{
"_id": 1811,
"id": 2443,
"pid": 295,
"city_code": "101120604",
"city_name": "临朐县"
},
{
"_id": 1812,
"id": 2444,
"pid": 295,
"city_code": "101120605",
"city_name": "昌乐县"
},
{
"_id": 1813,
"id": 2446,
"pid": 296,
"city_code": "101120508",
"city_name": "福山区"
},
{
"_id": 1814,
"id": 2447,
"pid": 296,
"city_code": "101120509",
"city_name": "牟平区"
},
{
"_id": 1815,
"id": 2450,
"pid": 296,
"city_code": "101120505",
"city_name": "龙口市"
},
{
"_id": 1816,
"id": 2451,
"pid": 296,
"city_code": "101120510",
"city_name": "莱阳市"
},
{
"_id": 1817,
"id": 2452,
"pid": 296,
"city_code": "101120502",
"city_name": "莱州市"
},
{
"_id": 1818,
"id": 2453,
"pid": 296,
"city_code": "101120504",
"city_name": "蓬莱市"
},
{
"_id": 1819,
"id": 2454,
"pid": 296,
"city_code": "101120506",
"city_name": "招远市"
},
{
"_id": 1820,
"id": 2455,
"pid": 296,
"city_code": "101120507",
"city_name": "栖霞市"
},
{
"_id": 1821,
"id": 2456,
"pid": 296,
"city_code": "101120511",
"city_name": "海阳市"
},
{
"_id": 1822,
"id": 2457,
"pid": 296,
"city_code": "101120503",
"city_name": "长岛县"
},
{
"_id": 1823,
"id": 2460,
"pid": 297,
"city_code": "101121403",
"city_name": "峄城区"
},
{
"_id": 1824,
"id": 2461,
"pid": 297,
"city_code": "101121404",
"city_name": "台儿庄区"
},
{
"_id": 1825,
"id": 2462,
"pid": 297,
"city_code": "101121402",
"city_name": "薛城区"
},
{
"_id": 1826,
"id": 2463,
"pid": 297,
"city_code": "101121405",
"city_name": "滕州市"
},
{
"_id": 1827,
"id": 2465,
"pid": 298,
"city_code": "101120308",
"city_name": "临淄区"
},
{
"_id": 1828,
"id": 2466,
"pid": 298,
"city_code": "101120302",
"city_name": "淄川区"
},
{
"_id": 1829,
"id": 2467,
"pid": 298,
"city_code": "101120303",
"city_name": "博山区"
},
{
"_id": 1830,
"id": 2468,
"pid": 298,
"city_code": "101120305",
"city_name": "周村区"
},
{
"_id": 1831,
"id": 2469,
"pid": 298,
"city_code": "101120307",
"city_name": "桓台县"
},
{
"_id": 1832,
"id": 2470,
"pid": 298,
"city_code": "101120304",
"city_name": "高青县"
},
{
"_id": 1833,
"id": 2471,
"pid": 298,
"city_code": "101120306",
"city_name": "沂源县"
},
{
"_id": 1834,
"id": 2481,
"pid": 299,
"city_code": "101100102",
"city_name": "清徐县"
},
{
"_id": 1835,
"id": 2482,
"pid": 299,
"city_code": "101100103",
"city_name": "阳曲县"
},
{
"_id": 1836,
"id": 2483,
"pid": 299,
"city_code": "101100104",
"city_name": "娄烦县"
},
{
"_id": 1837,
"id": 2484,
"pid": 299,
"city_code": "101100105",
"city_name": "古交市"
},
{
"_id": 1838,
"id": 2487,
"pid": 300,
"city_code": "101100508",
"city_name": "沁县"
},
{
"_id": 1839,
"id": 2488,
"pid": 300,
"city_code": "101100504",
"city_name": "潞城市"
},
{
"_id": 1840,
"id": 2489,
"pid": 300,
"city_code": "101100501",
"city_name": "长治县"
},
{
"_id": 1841,
"id": 2490,
"pid": 300,
"city_code": "101100505",
"city_name": "襄垣县"
},
{
"_id": 1842,
"id": 2491,
"pid": 300,
"city_code": "101100503",
"city_name": "屯留县"
},
{
"_id": 1843,
"id": 2492,
"pid": 300,
"city_code": "101100506",
"city_name": "平顺县"
},
{
"_id": 1844,
"id": 2493,
"pid": 300,
"city_code": "101100502",
"city_name": "黎城县"
},
{
"_id": 1845,
"id": 2494,
"pid": 300,
"city_code": "101100511",
"city_name": "壶关县"
},
{
"_id": 1846,
"id": 2495,
"pid": 300,
"city_code": "101100509",
"city_name": "长子县"
},
{
"_id": 1847,
"id": 2496,
"pid": 300,
"city_code": "101100507",
"city_name": "武乡县"
},
{
"_id": 1848,
"id": 2497,
"pid": 300,
"city_code": "101100510",
"city_name": "沁源县"
},
{
"_id": 1849,
"id": 2502,
"pid": 301,
"city_code": "101100202",
"city_name": "阳高县"
},
{
"_id": 1850,
"id": 2503,
"pid": 301,
"city_code": "101100204",
"city_name": "天镇县"
},
{
"_id": 1851,
"id": 2504,
"pid": 301,
"city_code": "101100205",
"city_name": "广灵县"
},
{
"_id": 1852,
"id": 2505,
"pid": 301,
"city_code": "101100206",
"city_name": "灵丘县"
},
{
"_id": 1853,
"id": 2506,
"pid": 301,
"city_code": "101100207",
"city_name": "浑源县"
},
{
"_id": 1854,
"id": 2507,
"pid": 301,
"city_code": "101100208",
"city_name": "左云县"
},
{
"_id": 1855,
"id": 2508,
"pid": 301,
"city_code": "101100203",
"city_name": "大同县"
},
{
"_id": 1856,
"id": 2510,
"pid": 302,
"city_code": "101100605",
"city_name": "高平市"
},
{
"_id": 1857,
"id": 2511,
"pid": 302,
"city_code": "101100602",
"city_name": "沁水县"
},
{
"_id": 1858,
"id": 2512,
"pid": 302,
"city_code": "101100603",
"city_name": "阳城县"
},
{
"_id": 1859,
"id": 2513,
"pid": 302,
"city_code": "101100604",
"city_name": "陵川县"
},
{
"_id": 1860,
"id": 2514,
"pid": 302,
"city_code": "101100606",
"city_name": "泽州县"
},
{
"_id": 1861,
"id": 2515,
"pid": 303,
"city_code": "101100402",
"city_name": "榆次区"
},
{
"_id": 1862,
"id": 2516,
"pid": 303,
"city_code": "101100412",
"city_name": "介休市"
},
{
"_id": 1863,
"id": 2517,
"pid": 303,
"city_code": "101100403",
"city_name": "榆社县"
},
{
"_id": 1864,
"id": 2518,
"pid": 303,
"city_code": "101100404",
"city_name": "左权县"
},
{
"_id": 1865,
"id": 2519,
"pid": 303,
"city_code": "101100405",
"city_name": "和顺县"
},
{
"_id": 1866,
"id": 2520,
"pid": 303,
"city_code": "101100406",
"city_name": "昔阳县"
},
{
"_id": 1867,
"id": 2521,
"pid": 303,
"city_code": "101100407",
"city_name": "寿阳县"
},
{
"_id": 1868,
"id": 2522,
"pid": 303,
"city_code": "101100408",
"city_name": "太谷县"
},
{
"_id": 1869,
"id": 2523,
"pid": 303,
"city_code": "101100409",
"city_name": "祁县"
},
{
"_id": 1870,
"id": 2524,
"pid": 303,
"city_code": "101100410",
"city_name": "平遥县"
},
{
"_id": 1871,
"id": 2525,
"pid": 303,
"city_code": "101100411",
"city_name": "灵石县"
},
{
"_id": 1872,
"id": 2527,
"pid": 304,
"city_code": "101100714",
"city_name": "侯马市"
},
{
"_id": 1873,
"id": 2528,
"pid": 304,
"city_code": "101100711",
"city_name": "霍州市"
},
{
"_id": 1874,
"id": 2529,
"pid": 304,
"city_code": "101100702",
"city_name": "曲沃县"
},
{
"_id": 1875,
"id": 2530,
"pid": 304,
"city_code": "101100713",
"city_name": "翼城县"
},
{
"_id": 1876,
"id": 2531,
"pid": 304,
"city_code": "101100707",
"city_name": "襄汾县"
},
{
"_id": 1877,
"id": 2532,
"pid": 304,
"city_code": "101100710",
"city_name": "洪洞县"
},
{
"_id": 1878,
"id": 2533,
"pid": 304,
"city_code": "101100706",
"city_name": "吉县"
},
{
"_id": 1879,
"id": 2534,
"pid": 304,
"city_code": "101100716",
"city_name": "安泽县"
},
{
"_id": 1880,
"id": 2535,
"pid": 304,
"city_code": "101100715",
"city_name": "浮山县"
},
{
"_id": 1881,
"id": 2536,
"pid": 304,
"city_code": "101100717",
"city_name": "古县"
},
{
"_id": 1882,
"id": 2537,
"pid": 304,
"city_code": "101100712",
"city_name": "乡宁县"
},
{
"_id": 1883,
"id": 2538,
"pid": 304,
"city_code": "101100705",
"city_name": "大宁县"
},
{
"_id": 1884,
"id": 2539,
"pid": 304,
"city_code": "101100704",
"city_name": "隰县"
},
{
"_id": 1885,
"id": 2540,
"pid": 304,
"city_code": "101100703",
"city_name": "永和县"
},
{
"_id": 1886,
"id": 2541,
"pid": 304,
"city_code": "101100708",
"city_name": "蒲县"
},
{
"_id": 1887,
"id": 2542,
"pid": 304,
"city_code": "101100709",
"city_name": "汾西县"
},
{
"_id": 1888,
"id": 2543,
"pid": 305,
"city_code": "101101101",
"city_name": "离石市"
},
{
"_id": 1889,
"id": 2544,
"pid": 305,
"city_code": "101101101",
"city_name": "离石区"
},
{
"_id": 1890,
"id": 2545,
"pid": 305,
"city_code": "101101110",
"city_name": "孝义市"
},
{
"_id": 1891,
"id": 2546,
"pid": 305,
"city_code": "101101111",
"city_name": "汾阳市"
},
{
"_id": 1892,
"id": 2547,
"pid": 305,
"city_code": "101101112",
"city_name": "文水县"
},
{
"_id": 1893,
"id": 2548,
"pid": 305,
"city_code": "101101113",
"city_name": "交城县"
},
{
"_id": 1894,
"id": 2549,
"pid": 305,
"city_code": "101101103",
"city_name": "兴县"
},
{
"_id": 1895,
"id": 2550,
"pid": 305,
"city_code": "101101102",
"city_name": "临县"
},
{
"_id": 1896,
"id": 2551,
"pid": 305,
"city_code": "101101105",
"city_name": "柳林县"
},
{
"_id": 1897,
"id": 2552,
"pid": 305,
"city_code": "101101106",
"city_name": "石楼县"
},
{
"_id": 1898,
"id": 2553,
"pid": 305,
"city_code": "101101104",
"city_name": "岚县"
},
{
"_id": 1899,
"id": 2554,
"pid": 305,
"city_code": "101101107",
"city_name": "方山县"
},
{
"_id": 1900,
"id": 2555,
"pid": 305,
"city_code": "101101109",
"city_name": "中阳县"
},
{
"_id": 1901,
"id": 2556,
"pid": 305,
"city_code": "101101108",
"city_name": "交口县"
},
{
"_id": 1902,
"id": 2558,
"pid": 306,
"city_code": "101100902",
"city_name": "平鲁区"
},
{
"_id": 1903,
"id": 2559,
"pid": 306,
"city_code": "101100903",
"city_name": "山阴县"
},
{
"_id": 1904,
"id": 2560,
"pid": 306,
"city_code": "101100905",
"city_name": "应县"
},
{
"_id": 1905,
"id": 2561,
"pid": 306,
"city_code": "101100904",
"city_name": "右玉县"
},
{
"_id": 1906,
"id": 2562,
"pid": 306,
"city_code": "101100906",
"city_name": "怀仁县"
},
{
"_id": 1907,
"id": 2564,
"pid": 307,
"city_code": "101101015",
"city_name": "原平市"
},
{
"_id": 1908,
"id": 2565,
"pid": 307,
"city_code": "101101002",
"city_name": "定襄县"
},
{
"_id": 1909,
"id": 2566,
"pid": 307,
"city_code": "101101003",
"city_name": "五台县"
},
{
"_id": 1910,
"id": 2567,
"pid": 307,
"city_code": "101101008",
"city_name": "代县"
},
{
"_id": 1911,
"id": 2568,
"pid": 307,
"city_code": "101101009",
"city_name": "繁峙县"
},
{
"_id": 1912,
"id": 2569,
"pid": 307,
"city_code": "101101007",
"city_name": "宁武县"
},
{
"_id": 1913,
"id": 2570,
"pid": 307,
"city_code": "101101012",
"city_name": "静乐县"
},
{
"_id": 1914,
"id": 2571,
"pid": 307,
"city_code": "101101006",
"city_name": "神池县"
},
{
"_id": 1915,
"id": 2572,
"pid": 307,
"city_code": "101101014",
"city_name": "五寨县"
},
{
"_id": 1916,
"id": 2573,
"pid": 307,
"city_code": "101101013",
"city_name": "岢岚县"
},
{
"_id": 1917,
"id": 2574,
"pid": 307,
"city_code": "101101004",
"city_name": "河曲县"
},
{
"_id": 1918,
"id": 2575,
"pid": 307,
"city_code": "101101011",
"city_name": "保德县"
},
{
"_id": 1919,
"id": 2576,
"pid": 307,
"city_code": "101101005",
"city_name": "偏关县"
},
{
"_id": 1920,
"id": 2580,
"pid": 308,
"city_code": "101100303",
"city_name": "平定县"
},
{
"_id": 1921,
"id": 2581,
"pid": 308,
"city_code": "101100302",
"city_name": "盂县"
},
{
"_id": 1922,
"id": 2583,
"pid": 309,
"city_code": "101100810",
"city_name": "永济市"
},
{
"_id": 1923,
"id": 2584,
"pid": 309,
"city_code": "101100805",
"city_name": "河津市"
},
{
"_id": 1924,
"id": 2585,
"pid": 309,
"city_code": "101100802",
"city_name": "临猗县"
},
{
"_id": 1925,
"id": 2586,
"pid": 309,
"city_code": "101100804",
"city_name": "万荣县"
},
{
"_id": 1926,
"id": 2587,
"pid": 309,
"city_code": "101100808",
"city_name": "闻喜县"
},
{
"_id": 1927,
"id": 2588,
"pid": 309,
"city_code": "101100803",
"city_name": "稷山县"
},
{
"_id": 1928,
"id": 2589,
"pid": 309,
"city_code": "101100806",
"city_name": "新绛县"
},
{
"_id": 1929,
"id": 2590,
"pid": 309,
"city_code": "101100807",
"city_name": "绛县"
},
{
"_id": 1930,
"id": 2591,
"pid": 309,
"city_code": "101100809",
"city_name": "垣曲县"
},
{
"_id": 1931,
"id": 2592,
"pid": 309,
"city_code": "101100812",
"city_name": "夏县"
},
{
"_id": 1932,
"id": 2593,
"pid": 309,
"city_code": "101100813",
"city_name": "平陆县"
},
{
"_id": 1933,
"id": 2594,
"pid": 309,
"city_code": "101100811",
"city_name": "芮城县"
},
{
"_id": 1934,
"id": 2602,
"pid": 310,
"city_code": "101110103",
"city_name": "临潼区"
},
{
"_id": 1935,
"id": 2603,
"pid": 310,
"city_code": "101110102",
"city_name": "长安区"
},
{
"_id": 1936,
"id": 2604,
"pid": 310,
"city_code": "101110104",
"city_name": "蓝田县"
},
{
"_id": 1937,
"id": 2605,
"pid": 310,
"city_code": "101110105",
"city_name": "周至县"
},
{
"_id": 1938,
"id": 2606,
"pid": 310,
"city_code": "101110106",
"city_name": "户县"
},
{
"_id": 1939,
"id": 2607,
"pid": 310,
"city_code": "101110107",
"city_name": "高陵县"
},
{
"_id": 1940,
"id": 2609,
"pid": 311,
"city_code": "101110704",
"city_name": "汉阴县"
},
{
"_id": 1941,
"id": 2610,
"pid": 311,
"city_code": "101110703",
"city_name": "石泉县"
},
{
"_id": 1942,
"id": 2611,
"pid": 311,
"city_code": "101110710",
"city_name": "宁陕县"
},
{
"_id": 1943,
"id": 2612,
"pid": 311,
"city_code": "101110702",
"city_name": "紫阳县"
},
{
"_id": 1944,
"id": 2613,
"pid": 311,
"city_code": "101110706",
"city_name": "岚皋县"
},
{
"_id": 1945,
"id": 2614,
"pid": 311,
"city_code": "101110707",
"city_name": "平利县"
},
{
"_id": 1946,
"id": 2615,
"pid": 311,
"city_code": "101110709",
"city_name": "镇坪县"
},
{
"_id": 1947,
"id": 2616,
"pid": 311,
"city_code": "101110705",
"city_name": "旬阳县"
},
{
"_id": 1948,
"id": 2617,
"pid": 311,
"city_code": "101110708",
"city_name": "白河县"
},
{
"_id": 1949,
"id": 2618,
"pid": 312,
"city_code": "101110912",
"city_name": "陈仓区"
},
{
"_id": 1950,
"id": 2621,
"pid": 312,
"city_code": "101110906",
"city_name": "凤翔县"
},
{
"_id": 1951,
"id": 2622,
"pid": 312,
"city_code": "101110905",
"city_name": "岐山县"
},
{
"_id": 1952,
"id": 2623,
"pid": 312,
"city_code": "101110907",
"city_name": "扶风县"
},
{
"_id": 1953,
"id": 2624,
"pid": 312,
"city_code": "101110908",
"city_name": "眉县"
},
{
"_id": 1954,
"id": 2625,
"pid": 312,
"city_code": "101110911",
"city_name": "陇县"
},
{
"_id": 1955,
"id": 2626,
"pid": 312,
"city_code": "101110903",
"city_name": "千阳县"
},
{
"_id": 1956,
"id": 2627,
"pid": 312,
"city_code": "101110904",
"city_name": "麟游县"
},
{
"_id": 1957,
"id": 2628,
"pid": 312,
"city_code": "101110910",
"city_name": "凤县"
},
{
"_id": 1958,
"id": 2629,
"pid": 312,
"city_code": "101110909",
"city_name": "太白县"
},
{
"_id": 1959,
"id": 2631,
"pid": 313,
"city_code": "101110810",
"city_name": "南郑县"
},
{
"_id": 1960,
"id": 2632,
"pid": 313,
"city_code": "101110806",
"city_name": "城固县"
},
{
"_id": 1961,
"id": 2633,
"pid": 313,
"city_code": "101110805",
"city_name": "洋县"
},
{
"_id": 1962,
"id": 2634,
"pid": 313,
"city_code": "101110807",
"city_name": "西乡县"
},
{
"_id": 1963,
"id": 2635,
"pid": 313,
"city_code": "101110803",
"city_name": "勉县"
},
{
"_id": 1964,
"id": 2636,
"pid": 313,
"city_code": "101110809",
"city_name": "宁强县"
},
{
"_id": 1965,
"id": 2637,
"pid": 313,
"city_code": "101110802",
"city_name": "略阳县"
},
{
"_id": 1966,
"id": 2638,
"pid": 313,
"city_code": "101110811",
"city_name": "镇巴县"
},
{
"_id": 1967,
"id": 2639,
"pid": 313,
"city_code": "101110804",
"city_name": "留坝县"
},
{
"_id": 1968,
"id": 2640,
"pid": 313,
"city_code": "101110808",
"city_name": "佛坪县"
},
{
"_id": 1969,
"id": 2641,
"pid": 314,
"city_code": "101110604",
"city_name": "商州区"
},
{
"_id": 1970,
"id": 2642,
"pid": 314,
"city_code": "101110602",
"city_name": "洛南县"
},
{
"_id": 1971,
"id": 2643,
"pid": 314,
"city_code": "101110606",
"city_name": "丹凤县"
},
{
"_id": 1972,
"id": 2644,
"pid": 314,
"city_code": "101110607",
"city_name": "商南县"
},
{
"_id": 1973,
"id": 2645,
"pid": 314,
"city_code": "101110608",
"city_name": "山阳县"
},
{
"_id": 1974,
"id": 2646,
"pid": 314,
"city_code": "101110605",
"city_name": "镇安县"
},
{
"_id": 1975,
"id": 2647,
"pid": 314,
"city_code": "101110603",
"city_name": "柞水县"
},
{
"_id": 1976,
"id": 2648,
"pid": 315,
"city_code": "101111004",
"city_name": "耀州区"
},
{
"_id": 1977,
"id": 2651,
"pid": 315,
"city_code": "101111003",
"city_name": "宜君县"
},
{
"_id": 1978,
"id": 2653,
"pid": 316,
"city_code": "101110510",
"city_name": "韩城市"
},
{
"_id": 1979,
"id": 2654,
"pid": 316,
"city_code": "101110511",
"city_name": "华阴市"
},
{
"_id": 1980,
"id": 2655,
"pid": 316,
"city_code": "101110502",
"city_name": "华县"
},
{
"_id": 1981,
"id": 2656,
"pid": 316,
"city_code": "101110503",
"city_name": "潼关县"
},
{
"_id": 1982,
"id": 2657,
"pid": 316,
"city_code": "101110504",
"city_name": "大荔县"
},
{
"_id": 1983,
"id": 2658,
"pid": 316,
"city_code": "101110509",
"city_name": "合阳县"
},
{
"_id": 1984,
"id": 2659,
"pid": 316,
"city_code": "101110508",
"city_name": "澄城县"
},
{
"_id": 1985,
"id": 2660,
"pid": 316,
"city_code": "101110507",
"city_name": "蒲城县"
},
{
"_id": 1986,
"id": 2661,
"pid": 316,
"city_code": "101110505",
"city_name": "白水县"
},
{
"_id": 1987,
"id": 2662,
"pid": 316,
"city_code": "101110506",
"city_name": "富平县"
},
{
"_id": 1988,
"id": 2666,
"pid": 317,
"city_code": "101110211",
"city_name": "兴平市"
},
{
"_id": 1989,
"id": 2667,
"pid": 317,
"city_code": "101110201",
"city_name": "三原县"
},
{
"_id": 1990,
"id": 2668,
"pid": 317,
"city_code": "101110205",
"city_name": "泾阳县"
},
{
"_id": 1991,
"id": 2669,
"pid": 317,
"city_code": "101110207",
"city_name": "乾县"
},
{
"_id": 1992,
"id": 2670,
"pid": 317,
"city_code": "101110202",
"city_name": "礼泉县"
},
{
"_id": 1993,
"id": 2671,
"pid": 317,
"city_code": "101110203",
"city_name": "永寿县"
},
{
"_id": 1994,
"id": 2672,
"pid": 317,
"city_code": "101110208",
"city_name": "彬县"
},
{
"_id": 1995,
"id": 2673,
"pid": 317,
"city_code": "101110209",
"city_name": "长武县"
},
{
"_id": 1996,
"id": 2674,
"pid": 317,
"city_code": "101110210",
"city_name": "旬邑县"
},
{
"_id": 1997,
"id": 2675,
"pid": 317,
"city_code": "101110204",
"city_name": "淳化县"
},
{
"_id": 1998,
"id": 2676,
"pid": 317,
"city_code": "101110206",
"city_name": "武功县"
},
{
"_id": 1999,
"id": 2677,
"pid": 318,
"city_code": "101110312",
"city_name": "吴起县"
},
{
"_id": 2000,
"id": 2679,
"pid": 318,
"city_code": "101110301",
"city_name": "延长县"
},
{
"_id": 2001,
"id": 2680,
"pid": 318,
"city_code": "101110302",
"city_name": "延川县"
},
{
"_id": 2002,
"id": 2681,
"pid": 318,
"city_code": "101110303",
"city_name": "子长县"
},
{
"_id": 2003,
"id": 2682,
"pid": 318,
"city_code": "101110307",
"city_name": "安塞县"
},
{
"_id": 2004,
"id": 2683,
"pid": 318,
"city_code": "101110306",
"city_name": "志丹县"
},
{
"_id": 2005,
"id": 2684,
"pid": 318,
"city_code": "101110308",
"city_name": "甘泉县"
},
{
"_id": 2006,
"id": 2685,
"pid": 318,
"city_code": "101110305",
"city_name": "富县"
},
{
"_id": 2007,
"id": 2686,
"pid": 318,
"city_code": "101110309",
"city_name": "洛川县"
},
{
"_id": 2008,
"id": 2687,
"pid": 318,
"city_code": "101110304",
"city_name": "宜川县"
},
{
"_id": 2009,
"id": 2688,
"pid": 318,
"city_code": "101110311",
"city_name": "黄龙县"
},
{
"_id": 2010,
"id": 2689,
"pid": 318,
"city_code": "101110310",
"city_name": "黄陵县"
},
{
"_id": 2011,
"id": 2690,
"pid": 319,
"city_code": "101110413",
"city_name": "榆阳区"
},
{
"_id": 2012,
"id": 2691,
"pid": 319,
"city_code": "101110403",
"city_name": "神木县"
},
{
"_id": 2013,
"id": 2692,
"pid": 319,
"city_code": "101110402",
"city_name": "府谷县"
},
{
"_id": 2014,
"id": 2693,
"pid": 319,
"city_code": "101110407",
"city_name": "横山县"
},
{
"_id": 2015,
"id": 2694,
"pid": 319,
"city_code": "101110406",
"city_name": "靖边县"
},
{
"_id": 2016,
"id": 2695,
"pid": 319,
"city_code": "101110405",
"city_name": "定边县"
},
{
"_id": 2017,
"id": 2696,
"pid": 319,
"city_code": "101110410",
"city_name": "绥德县"
},
{
"_id": 2018,
"id": 2697,
"pid": 319,
"city_code": "101110408",
"city_name": "米脂县"
},
{
"_id": 2019,
"id": 2698,
"pid": 319,
"city_code": "101110404",
"city_name": "佳县"
},
{
"_id": 2020,
"id": 2699,
"pid": 319,
"city_code": "101110411",
"city_name": "吴堡县"
},
{
"_id": 2021,
"id": 2700,
"pid": 319,
"city_code": "101110412",
"city_name": "清涧县"
},
{
"_id": 2022,
"id": 2701,
"pid": 319,
"city_code": "101110409",
"city_name": "子洲县"
},
{
"_id": 2023,
"id": 2704,
"pid": 24,
"city_code": "101020200",
"city_name": "闵行区"
},
{
"_id": 2024,
"id": 2706,
"pid": 24,
"city_code": "101021300",
"city_name": "浦东新区"
},
{
"_id": 2025,
"id": 2714,
"pid": 24,
"city_code": "101020900",
"city_name": "松江区"
},
{
"_id": 2026,
"id": 2715,
"pid": 24,
"city_code": "101020500",
"city_name": "嘉定区"
},
{
"_id": 2027,
"id": 2716,
"pid": 24,
"city_code": "101020300",
"city_name": "宝山区"
},
{
"_id": 2028,
"id": 2717,
"pid": 24,
"city_code": "101020800",
"city_name": "青浦区"
},
{
"_id": 2029,
"id": 2718,
"pid": 24,
"city_code": "101020700",
"city_name": "金山区"
},
{
"_id": 2030,
"id": 2719,
"pid": 24,
"city_code": "101021000",
"city_name": "奉贤区"
},
{
"_id": 2031,
"id": 2720,
"pid": 24,
"city_code": "101021100",
"city_name": "崇明区"
},
{
"_id": 2032,
"id": 2726,
"pid": 321,
"city_code": "101270102",
"city_name": "龙泉驿区"
},
{
"_id": 2033,
"id": 2727,
"pid": 321,
"city_code": "101270115",
"city_name": "青白江区"
},
{
"_id": 2034,
"id": 2728,
"pid": 321,
"city_code": "101270103",
"city_name": "新都区"
},
{
"_id": 2035,
"id": 2729,
"pid": 321,
"city_code": "101270104",
"city_name": "温江区"
},
{
"_id": 2036,
"id": 2732,
"pid": 321,
"city_code": "101270111",
"city_name": "都江堰市"
},
{
"_id": 2037,
"id": 2733,
"pid": 321,
"city_code": "101270112",
"city_name": "彭州市"
},
{
"_id": 2038,
"id": 2734,
"pid": 321,
"city_code": "101270113",
"city_name": "邛崃市"
},
{
"_id": 2039,
"id": 2735,
"pid": 321,
"city_code": "101270114",
"city_name": "崇州市"
},
{
"_id": 2040,
"id": 2736,
"pid": 321,
"city_code": "101270105",
"city_name": "金堂县"
},
{
"_id": 2041,
"id": 2737,
"pid": 321,
"city_code": "101270106",
"city_name": "双流县"
},
{
"_id": 2042,
"id": 2738,
"pid": 321,
"city_code": "101270107",
"city_name": "郫县"
},
{
"_id": 2043,
"id": 2739,
"pid": 321,
"city_code": "101270108",
"city_name": "大邑县"
},
{
"_id": 2044,
"id": 2740,
"pid": 321,
"city_code": "101270109",
"city_name": "蒲江县"
},
{
"_id": 2045,
"id": 2741,
"pid": 321,
"city_code": "101270110",
"city_name": "新津县"
},
{
"_id": 2046,
"id": 2754,
"pid": 322,
"city_code": "101270408",
"city_name": "江油市"
},
{
"_id": 2047,
"id": 2755,
"pid": 322,
"city_code": "101270403",
"city_name": "盐亭县"
},
{
"_id": 2048,
"id": 2756,
"pid": 322,
"city_code": "101270402",
"city_name": "三台县"
},
{
"_id": 2049,
"id": 2757,
"pid": 322,
"city_code": "101270407",
"city_name": "平武县"
},
{
"_id": 2050,
"id": 2758,
"pid": 322,
"city_code": "101270404",
"city_name": "安县"
},
{
"_id": 2051,
"id": 2759,
"pid": 322,
"city_code": "101270405",
"city_name": "梓潼县"
},
{
"_id": 2052,
"id": 2760,
"pid": 322,
"city_code": "101270406",
"city_name": "北川县"
},
{
"_id": 2053,
"id": 2761,
"pid": 323,
"city_code": "101271910",
"city_name": "马尔康县"
},
{
"_id": 2054,
"id": 2762,
"pid": 323,
"city_code": "101271902",
"city_name": "汶川县"
},
{
"_id": 2055,
"id": 2763,
"pid": 323,
"city_code": "101271903",
"city_name": "理县"
},
{
"_id": 2056,
"id": 2764,
"pid": 323,
"city_code": "101271904",
"city_name": "茂县"
},
{
"_id": 2057,
"id": 2765,
"pid": 323,
"city_code": "101271905",
"city_name": "松潘县"
},
{
"_id": 2058,
"id": 2766,
"pid": 323,
"city_code": "101271906",
"city_name": "九寨沟县"
},
{
"_id": 2059,
"id": 2767,
"pid": 323,
"city_code": "101271907",
"city_name": "金川县"
},
{
"_id": 2060,
"id": 2768,
"pid": 323,
"city_code": "101271908",
"city_name": "小金县"
},
{
"_id": 2061,
"id": 2769,
"pid": 323,
"city_code": "101271909",
"city_name": "黑水县"
},
{
"_id": 2062,
"id": 2770,
"pid": 323,
"city_code": "101271911",
"city_name": "壤塘县"
},
{
"_id": 2063,
"id": 2771,
"pid": 323,
"city_code": "101271901",
"city_name": "阿坝县"
},
{
"_id": 2064,
"id": 2772,
"pid": 323,
"city_code": "101271912",
"city_name": "若尔盖县"
},
{
"_id": 2065,
"id": 2773,
"pid": 323,
"city_code": "101271913",
"city_name": "红原县"
},
{
"_id": 2066,
"id": 2775,
"pid": 324,
"city_code": "101270902",
"city_name": "通江县"
},
{
"_id": 2067,
"id": 2776,
"pid": 324,
"city_code": "101270903",
"city_name": "南江县"
},
{
"_id": 2068,
"id": 2777,
"pid": 324,
"city_code": "101270904",
"city_name": "平昌县"
},
{
"_id": 2069,
"id": 2779,
"pid": 325,
"city_code": "101270606",
"city_name": "万源市"
},
{
"_id": 2070,
"id": 2780,
"pid": 325,
"city_code": "101270608",
"city_name": "达川区"
},
{
"_id": 2071,
"id": 2781,
"pid": 325,
"city_code": "101270602",
"city_name": "宣汉县"
},
{
"_id": 2072,
"id": 2782,
"pid": 325,
"city_code": "101270603",
"city_name": "开江县"
},
{
"_id": 2073,
"id": 2783,
"pid": 325,
"city_code": "101270604",
"city_name": "大竹县"
},
{
"_id": 2074,
"id": 2784,
"pid": 325,
"city_code": "101270605",
"city_name": "渠县"
},
{
"_id": 2075,
"id": 2786,
"pid": 326,
"city_code": "101272003",
"city_name": "广汉市"
},
{
"_id": 2076,
"id": 2787,
"pid": 326,
"city_code": "101272004",
"city_name": "什邡市"
},
{
"_id": 2077,
"id": 2788,
"pid": 326,
"city_code": "101272005",
"city_name": "绵竹市"
},
{
"_id": 2078,
"id": 2789,
"pid": 326,
"city_code": "101272006",
"city_name": "罗江县"
},
{
"_id": 2079,
"id": 2790,
"pid": 326,
"city_code": "101272002",
"city_name": "中江县"
},
{
"_id": 2080,
"id": 2791,
"pid": 327,
"city_code": "101271802",
"city_name": "康定县"
},
{
"_id": 2081,
"id": 2792,
"pid": 327,
"city_code": "101271804",
"city_name": "丹巴县"
},
{
"_id": 2082,
"id": 2793,
"pid": 327,
"city_code": "101271803",
"city_name": "泸定县"
},
{
"_id": 2083,
"id": 2794,
"pid": 327,
"city_code": "101271808",
"city_name": "炉霍县"
},
{
"_id": 2084,
"id": 2795,
"pid": 327,
"city_code": "101271805",
"city_name": "九龙县"
},
{
"_id": 2085,
"id": 2796,
"pid": 327,
"city_code": "101271801",
"city_name": "甘孜县"
},
{
"_id": 2086,
"id": 2797,
"pid": 327,
"city_code": "101271806",
"city_name": "雅江县"
},
{
"_id": 2087,
"id": 2798,
"pid": 327,
"city_code": "101271809",
"city_name": "新龙县"
},
{
"_id": 2088,
"id": 2799,
"pid": 327,
"city_code": "101271807",
"city_name": "道孚县"
},
{
"_id": 2089,
"id": 2800,
"pid": 327,
"city_code": "101271811",
"city_name": "白玉县"
},
{
"_id": 2090,
"id": 2801,
"pid": 327,
"city_code": "101271814",
"city_name": "理塘县"
},
{
"_id": 2091,
"id": 2802,
"pid": 327,
"city_code": "101271810",
"city_name": "德格县"
},
{
"_id": 2092,
"id": 2803,
"pid": 327,
"city_code": "101271816",
"city_name": "乡城县"
},
{
"_id": 2093,
"id": 2804,
"pid": 327,
"city_code": "101271812",
"city_name": "石渠县"
},
{
"_id": 2094,
"id": 2805,
"pid": 327,
"city_code": "101271817",
"city_name": "稻城县"
},
{
"_id": 2095,
"id": 2806,
"pid": 327,
"city_code": "101271813",
"city_name": "色达县"
},
{
"_id": 2096,
"id": 2807,
"pid": 327,
"city_code": "101271815",
"city_name": "巴塘县"
},
{
"_id": 2097,
"id": 2808,
"pid": 327,
"city_code": "101271818",
"city_name": "得荣县"
},
{
"_id": 2098,
"id": 2809,
"pid": 328,
"city_code": "101270801",
"city_name": "广安区"
},
{
"_id": 2099,
"id": 2810,
"pid": 328,
"city_code": "101270805",
"city_name": "华蓥市"
},
{
"_id": 2100,
"id": 2811,
"pid": 328,
"city_code": "101270802",
"city_name": "岳池县"
},
{
"_id": 2101,
"id": 2812,
"pid": 328,
"city_code": "101270803",
"city_name": "武胜县"
},
{
"_id": 2102,
"id": 2813,
"pid": 328,
"city_code": "101270804",
"city_name": "邻水县"
},
{
"_id": 2103,
"id": 2817,
"pid": 329,
"city_code": "101272102",
"city_name": "旺苍县"
},
{
"_id": 2104,
"id": 2818,
"pid": 329,
"city_code": "101272103",
"city_name": "青川县"
},
{
"_id": 2105,
"id": 2819,
"pid": 329,
"city_code": "101272104",
"city_name": "剑阁县"
},
{
"_id": 2106,
"id": 2820,
"pid": 329,
"city_code": "101272105",
"city_name": "苍溪县"
},
{
"_id": 2107,
"id": 2821,
"pid": 330,
"city_code": "101271409",
"city_name": "峨眉山市"
},
{
"_id": 2108,
"id": 2823,
"pid": 330,
"city_code": "101271402",
"city_name": "犍为县"
},
{
"_id": 2109,
"id": 2824,
"pid": 330,
"city_code": "101271403",
"city_name": "井研县"
},
{
"_id": 2110,
"id": 2825,
"pid": 330,
"city_code": "101271404",
"city_name": "夹江县"
},
{
"_id": 2111,
"id": 2826,
"pid": 330,
"city_code": "101271405",
"city_name": "沐川县"
},
{
"_id": 2112,
"id": 2827,
"pid": 330,
"city_code": "101271406",
"city_name": "峨边县"
},
{
"_id": 2113,
"id": 2828,
"pid": 330,
"city_code": "101271407",
"city_name": "马边县"
},
{
"_id": 2114,
"id": 2829,
"pid": 331,
"city_code": "101271610",
"city_name": "西昌市"
},
{
"_id": 2115,
"id": 2830,
"pid": 331,
"city_code": "101271604",
"city_name": "盐源县"
},
{
"_id": 2116,
"id": 2831,
"pid": 331,
"city_code": "101271605",
"city_name": "德昌县"
},
{
"_id": 2117,
"id": 2832,
"pid": 331,
"city_code": "101271606",
"city_name": "会理县"
},
{
"_id": 2118,
"id": 2833,
"pid": 331,
"city_code": "101271607",
"city_name": "会东县"
},
{
"_id": 2119,
"id": 2834,
"pid": 331,
"city_code": "101271608",
"city_name": "宁南县"
},
{
"_id": 2120,
"id": 2835,
"pid": 331,
"city_code": "101271609",
"city_name": "普格县"
},
{
"_id": 2121,
"id": 2836,
"pid": 331,
"city_code": "101271619",
"city_name": "布拖县"
},
{
"_id": 2122,
"id": 2837,
"pid": 331,
"city_code": "101271611",
"city_name": "金阳县"
},
{
"_id": 2123,
"id": 2838,
"pid": 331,
"city_code": "101271612",
"city_name": "昭觉县"
},
{
"_id": 2124,
"id": 2839,
"pid": 331,
"city_code": "101271613",
"city_name": "喜德县"
},
{
"_id": 2125,
"id": 2840,
"pid": 331,
"city_code": "101271614",
"city_name": "冕宁县"
},
{
"_id": 2126,
"id": 2841,
"pid": 331,
"city_code": "101271615",
"city_name": "越西县"
},
{
"_id": 2127,
"id": 2842,
"pid": 331,
"city_code": "101271616",
"city_name": "甘洛县"
},
{
"_id": 2128,
"id": 2843,
"pid": 331,
"city_code": "101271618",
"city_name": "美姑县"
},
{
"_id": 2129,
"id": 2844,
"pid": 331,
"city_code": "101271617",
"city_name": "雷波县"
},
{
"_id": 2130,
"id": 2845,
"pid": 331,
"city_code": "101271603",
"city_name": "木里县"
},
{
"_id": 2131,
"id": 2847,
"pid": 332,
"city_code": "101271502",
"city_name": "仁寿县"
},
{
"_id": 2132,
"id": 2848,
"pid": 332,
"city_code": "101271503",
"city_name": "彭山县"
},
{
"_id": 2133,
"id": 2849,
"pid": 332,
"city_code": "101271504",
"city_name": "洪雅县"
},
{
"_id": 2134,
"id": 2850,
"pid": 332,
"city_code": "101271505",
"city_name": "丹棱县"
},
{
"_id": 2135,
"id": 2851,
"pid": 332,
"city_code": "101271506",
"city_name": "青神县"
},
{
"_id": 2136,
"id": 2852,
"pid": 333,
"city_code": "101270507",
"city_name": "阆中市"
},
{
"_id": 2137,
"id": 2853,
"pid": 333,
"city_code": "101270502",
"city_name": "南部县"
},
{
"_id": 2138,
"id": 2854,
"pid": 333,
"city_code": "101270503",
"city_name": "营山县"
},
{
"_id": 2139,
"id": 2855,
"pid": 333,
"city_code": "101270504",
"city_name": "蓬安县"
},
{
"_id": 2140,
"id": 2856,
"pid": 333,
"city_code": "101270505",
"city_name": "仪陇县"
},
{
"_id": 2141,
"id": 2860,
"pid": 333,
"city_code": "101270506",
"city_name": "西充县"
},
{
"_id": 2142,
"id": 2862,
"pid": 334,
"city_code": "101271202",
"city_name": "东兴区"
},
{
"_id": 2143,
"id": 2863,
"pid": 334,
"city_code": "101271203",
"city_name": "威远县"
},
{
"_id": 2144,
"id": 2864,
"pid": 334,
"city_code": "101271204",
"city_name": "资中县"
},
{
"_id": 2145,
"id": 2865,
"pid": 334,
"city_code": "101271205",
"city_name": "隆昌县"
},
{
"_id": 2146,
"id": 2868,
"pid": 335,
"city_code": "101270202",
"city_name": "仁和区"
},
{
"_id": 2147,
"id": 2869,
"pid": 335,
"city_code": "101270203",
"city_name": "米易县"
},
{
"_id": 2148,
"id": 2870,
"pid": 335,
"city_code": "101270204",
"city_name": "盐边县"
},
{
"_id": 2149,
"id": 2873,
"pid": 336,
"city_code": "101270702",
"city_name": "蓬溪县"
},
{
"_id": 2150,
"id": 2874,
"pid": 336,
"city_code": "101270703",
"city_name": "射洪县"
},
{
"_id": 2151,
"id": 2877,
"pid": 337,
"city_code": "101271702",
"city_name": "名山县"
},
{
"_id": 2152,
"id": 2878,
"pid": 337,
"city_code": "101271703",
"city_name": "荥经县"
},
{
"_id": 2153,
"id": 2879,
"pid": 337,
"city_code": "101271704",
"city_name": "汉源县"
},
{
"_id": 2154,
"id": 2880,
"pid": 337,
"city_code": "101271705",
"city_name": "石棉县"
},
{
"_id": 2155,
"id": 2881,
"pid": 337,
"city_code": "101271706",
"city_name": "天全县"
},
{
"_id": 2156,
"id": 2882,
"pid": 337,
"city_code": "101271707",
"city_name": "芦山县"
},
{
"_id": 2157,
"id": 2883,
"pid": 337,
"city_code": "101271708",
"city_name": "宝兴县"
},
{
"_id": 2158,
"id": 2885,
"pid": 338,
"city_code": "101271103",
"city_name": "宜宾县"
},
{
"_id": 2159,
"id": 2886,
"pid": 338,
"city_code": "101271104",
"city_name": "南溪县"
},
{
"_id": 2160,
"id": 2887,
"pid": 338,
"city_code": "101271105",
"city_name": "江安县"
},
{
"_id": 2161,
"id": 2888,
"pid": 338,
"city_code": "101271106",
"city_name": "长宁县"
},
{
"_id": 2162,
"id": 2889,
"pid": 338,
"city_code": "101271107",
"city_name": "高县"
},
{
"_id": 2163,
"id": 2890,
"pid": 338,
"city_code": "101271108",
"city_name": "珙县"
},
{
"_id": 2164,
"id": 2891,
"pid": 338,
"city_code": "101271109",
"city_name": "筠连县"
},
{
"_id": 2165,
"id": 2892,
"pid": 338,
"city_code": "101271110",
"city_name": "兴文县"
},
{
"_id": 2166,
"id": 2893,
"pid": 338,
"city_code": "101271111",
"city_name": "屏山县"
},
{
"_id": 2167,
"id": 2895,
"pid": 321,
"city_code": "101271304",
"city_name": "简阳市"
},
{
"_id": 2168,
"id": 2896,
"pid": 339,
"city_code": "101271302",
"city_name": "安岳县"
},
{
"_id": 2169,
"id": 2897,
"pid": 339,
"city_code": "101271303",
"city_name": "乐至县"
},
{
"_id": 2170,
"id": 2902,
"pid": 340,
"city_code": "101270303",
"city_name": "荣县"
},
{
"_id": 2171,
"id": 2903,
"pid": 340,
"city_code": "101270302",
"city_name": "富顺县"
},
{
"_id": 2172,
"id": 2905,
"pid": 341,
"city_code": "101271007",
"city_name": "纳溪区"
},
{
"_id": 2173,
"id": 2907,
"pid": 341,
"city_code": "101271003",
"city_name": "泸县"
},
{
"_id": 2174,
"id": 2908,
"pid": 341,
"city_code": "101271004",
"city_name": "合江县"
},
{
"_id": 2175,
"id": 2909,
"pid": 341,
"city_code": "101271005",
"city_name": "叙永县"
},
{
"_id": 2176,
"id": 2910,
"pid": 341,
"city_code": "101271006",
"city_name": "古蔺县"
},
{
"_id": 2177,
"id": 2917,
"pid": 26,
"city_code": "101030400",
"city_name": "东丽区"
},
{
"_id": 2178,
"id": 2918,
"pid": 26,
"city_code": "101031000",
"city_name": "津南区"
},
{
"_id": 2179,
"id": 2919,
"pid": 26,
"city_code": "101030500",
"city_name": "西青区"
},
{
"_id": 2180,
"id": 2920,
"pid": 26,
"city_code": "101030600",
"city_name": "北辰区"
},
{
"_id": 2181,
"id": 2921,
"pid": 26,
"city_code": "101031100",
"city_name": "塘沽区"
},
{
"_id": 2182,
"id": 2922,
"pid": 26,
"city_code": "101030800",
"city_name": "汉沽区"
},
{
"_id": 2183,
"id": 2923,
"pid": 26,
"city_code": "101031200",
"city_name": "大港区"
},
{
"_id": 2184,
"id": 2924,
"pid": 26,
"city_code": "101030200",
"city_name": "武清区"
},
{
"_id": 2185,
"id": 2925,
"pid": 26,
"city_code": "101030300",
"city_name": "宝坻区"
},
{
"_id": 2186,
"id": 2927,
"pid": 26,
"city_code": "101030700",
"city_name": "宁河区"
},
{
"_id": 2187,
"id": 2928,
"pid": 26,
"city_code": "101030900",
"city_name": "静海区"
},
{
"_id": 2188,
"id": 2929,
"pid": 26,
"city_code": "101031400",
"city_name": "蓟州区"
},
{
"_id": 2189,
"id": 2931,
"pid": 343,
"city_code": "101140104",
"city_name": "林周县"
},
{
"_id": 2190,
"id": 2932,
"pid": 343,
"city_code": "101140102",
"city_name": "当雄县"
},
{
"_id": 2191,
"id": 2933,
"pid": 343,
"city_code": "101140103",
"city_name": "尼木县"
},
{
"_id": 2192,
"id": 2934,
"pid": 343,
"city_code": "101140106",
"city_name": "曲水县"
},
{
"_id": 2193,
"id": 2935,
"pid": 343,
"city_code": "101140105",
"city_name": "堆龙德庆县"
},
{
"_id": 2194,
"id": 2936,
"pid": 343,
"city_code": "101140107",
"city_name": "达孜县"
},
{
"_id": 2195,
"id": 2937,
"pid": 343,
"city_code": "101140108",
"city_name": "墨竹工卡县"
},
{
"_id": 2196,
"id": 2938,
"pid": 344,
"city_code": "101140707",
"city_name": "噶尔县"
},
{
"_id": 2197,
"id": 2939,
"pid": 344,
"city_code": "101140705",
"city_name": "普兰县"
},
{
"_id": 2198,
"id": 2940,
"pid": 344,
"city_code": "101140706",
"city_name": "札达县"
},
{
"_id": 2199,
"id": 2941,
"pid": 344,
"city_code": "101140708",
"city_name": "日土县"
},
{
"_id": 2200,
"id": 2942,
"pid": 344,
"city_code": "101140709",
"city_name": "革吉县"
},
{
"_id": 2201,
"id": 2943,
"pid": 344,
"city_code": "101140702",
"city_name": "改则县"
},
{
"_id": 2202,
"id": 2944,
"pid": 344,
"city_code": "101140710",
"city_name": "措勤县"
},
{
"_id": 2203,
"id": 2945,
"pid": 345,
"city_code": "101140501",
"city_name": "昌都县"
},
{
"_id": 2204,
"id": 2946,
"pid": 345,
"city_code": "101140509",
"city_name": "江达县"
},
{
"_id": 2205,
"id": 2947,
"pid": 345,
"city_code": "101140511",
"city_name": "贡觉县"
},
{
"_id": 2206,
"id": 2948,
"pid": 345,
"city_code": "101140503",
"city_name": "类乌齐县"
},
{
"_id": 2207,
"id": 2949,
"pid": 345,
"city_code": "101140502",
"city_name": "丁青县"
},
{
"_id": 2208,
"id": 2950,
"pid": 345,
"city_code": "101140510",
"city_name": "察雅县"
},
{
"_id": 2209,
"id": 2951,
"pid": 345,
"city_code": "101140507",
"city_name": "八宿县"
},
{
"_id": 2210,
"id": 2952,
"pid": 345,
"city_code": "101140505",
"city_name": "左贡县"
},
{
"_id": 2211,
"id": 2953,
"pid": 345,
"city_code": "101140506",
"city_name": "芒康县"
},
{
"_id": 2212,
"id": 2954,
"pid": 345,
"city_code": "101140504",
"city_name": "洛隆县"
},
{
"_id": 2213,
"id": 2955,
"pid": 345,
"city_code": "101140503",
"city_name": "边坝县"
},
{
"_id": 2214,
"id": 2956,
"pid": 346,
"city_code": "101140401",
"city_name": "林芝县"
},
{
"_id": 2215,
"id": 2957,
"pid": 346,
"city_code": "101140405",
"city_name": "工布江达县"
},
{
"_id": 2216,
"id": 2958,
"pid": 346,
"city_code": "101140403",
"city_name": "米林县"
},
{
"_id": 2217,
"id": 2959,
"pid": 346,
"city_code": "101140407",
"city_name": "墨脱县"
},
{
"_id": 2218,
"id": 2960,
"pid": 346,
"city_code": "101140402",
"city_name": "波密县"
},
{
"_id": 2219,
"id": 2961,
"pid": 346,
"city_code": "101140404",
"city_name": "察隅县"
},
{
"_id": 2220,
"id": 2962,
"pid": 346,
"city_code": "101140406",
"city_name": "朗县"
},
{
"_id": 2221,
"id": 2963,
"pid": 347,
"city_code": "101140601",
"city_name": "那曲县"
},
{
"_id": 2222,
"id": 2964,
"pid": 347,
"city_code": "101140603",
"city_name": "嘉黎县"
},
{
"_id": 2223,
"id": 2965,
"pid": 347,
"city_code": "101140607",
"city_name": "比如县"
},
{
"_id": 2224,
"id": 2966,
"pid": 347,
"city_code": "101140607",
"city_name": "聂荣县"
},
{
"_id": 2225,
"id": 2967,
"pid": 347,
"city_code": "101140605",
"city_name": "安多县"
},
{
"_id": 2226,
"id": 2968,
"pid": 347,
"city_code": "101140703",
"city_name": "申扎县"
},
{
"_id": 2227,
"id": 2969,
"pid": 347,
"city_code": "101140606",
"city_name": "索县"
},
{
"_id": 2228,
"id": 2970,
"pid": 347,
"city_code": "101140604",
"city_name": "班戈县"
},
{
"_id": 2229,
"id": 2971,
"pid": 347,
"city_code": "101140608",
"city_name": "巴青县"
},
{
"_id": 2230,
"id": 2972,
"pid": 347,
"city_code": "101140602",
"city_name": "尼玛县"
},
{
"_id": 2231,
"id": 2973,
"pid": 348,
"city_code": "101140201",
"city_name": "日喀则市"
},
{
"_id": 2232,
"id": 2974,
"pid": 348,
"city_code": "101140203",
"city_name": "南木林县"
},
{
"_id": 2233,
"id": 2975,
"pid": 348,
"city_code": "101140206",
"city_name": "江孜县"
},
{
"_id": 2234,
"id": 2976,
"pid": 348,
"city_code": "101140205",
"city_name": "定日县"
},
{
"_id": 2235,
"id": 2977,
"pid": 348,
"city_code": "101140213",
"city_name": "萨迦县"
},
{
"_id": 2236,
"id": 2978,
"pid": 348,
"city_code": "101140202",
"city_name": "拉孜县"
},
{
"_id": 2237,
"id": 2979,
"pid": 348,
"city_code": "101140211",
"city_name": "昂仁县"
},
{
"_id": 2238,
"id": 2980,
"pid": 348,
"city_code": "101140214",
"city_name": "谢通门县"
},
{
"_id": 2239,
"id": 2981,
"pid": 348,
"city_code": "101140217",
"city_name": "白朗县"
},
{
"_id": 2240,
"id": 2982,
"pid": 348,
"city_code": "101140220",
"city_name": "仁布县"
},
{
"_id": 2241,
"id": 2983,
"pid": 348,
"city_code": "101140219",
"city_name": "康马县"
},
{
"_id": 2242,
"id": 2984,
"pid": 348,
"city_code": "101140212",
"city_name": "定结县"
},
{
"_id": 2243,
"id": 2985,
"pid": 348,
"city_code": "101140208",
"city_name": "仲巴县"
},
{
"_id": 2244,
"id": 2986,
"pid": 348,
"city_code": "101140218",
"city_name": "亚东县"
},
{
"_id": 2245,
"id": 2987,
"pid": 348,
"city_code": "101140210",
"city_name": "吉隆县"
},
{
"_id": 2246,
"id": 2988,
"pid": 348,
"city_code": "101140204",
"city_name": "聂拉木县"
},
{
"_id": 2247,
"id": 2989,
"pid": 348,
"city_code": "101140209",
"city_name": "萨嘎县"
},
{
"_id": 2248,
"id": 2990,
"pid": 348,
"city_code": "101140216",
"city_name": "岗巴县"
},
{
"_id": 2249,
"id": 2991,
"pid": 349,
"city_code": "101140309",
"city_name": "乃东县"
},
{
"_id": 2250,
"id": 2992,
"pid": 349,
"city_code": "101140303",
"city_name": "扎囊县"
},
{
"_id": 2251,
"id": 2993,
"pid": 349,
"city_code": "101140302",
"city_name": "贡嘎县"
},
{
"_id": 2252,
"id": 2994,
"pid": 349,
"city_code": "101140310",
"city_name": "桑日县"
},
{
"_id": 2253,
"id": 2995,
"pid": 349,
"city_code": "101140303",
"city_name": "琼结县"
},
{
"_id": 2254,
"id": 2996,
"pid": 349,
"city_code": "101140314",
"city_name": "曲松县"
},
{
"_id": 2255,
"id": 2997,
"pid": 349,
"city_code": "101140312",
"city_name": "措美县"
},
{
"_id": 2256,
"id": 2998,
"pid": 349,
"city_code": "101140311",
"city_name": "洛扎县"
},
{
"_id": 2257,
"id": 2999,
"pid": 349,
"city_code": "101140304",
"city_name": "加查县"
},
{
"_id": 2258,
"id": 3000,
"pid": 349,
"city_code": "101140307",
"city_name": "隆子县"
},
{
"_id": 2259,
"id": 3001,
"pid": 349,
"city_code": "101140306",
"city_name": "错那县"
},
{
"_id": 2260,
"id": 3002,
"pid": 349,
"city_code": "101140305",
"city_name": "浪卡子县"
},
{
"_id": 2261,
"id": 3008,
"pid": 350,
"city_code": "101130105",
"city_name": "达坂城区"
},
{
"_id": 2262,
"id": 3010,
"pid": 350,
"city_code": "101130101",
"city_name": "乌鲁木齐县"
},
{
"_id": 2263,
"id": 3011,
"pid": 351,
"city_code": "101130801",
"city_name": "阿克苏市"
},
{
"_id": 2264,
"id": 3012,
"pid": 351,
"city_code": "101130803",
"city_name": "温宿县"
},
{
"_id": 2265,
"id": 3013,
"pid": 351,
"city_code": "101130807",
"city_name": "库车县"
},
{
"_id": 2266,
"id": 3014,
"pid": 351,
"city_code": "101130806",
"city_name": "沙雅县"
},
{
"_id": 2267,
"id": 3015,
"pid": 351,
"city_code": "101130805",
"city_name": "新和县"
},
{
"_id": 2268,
"id": 3016,
"pid": 351,
"city_code": "101130804",
"city_name": "拜城县"
},
{
"_id": 2269,
"id": 3017,
"pid": 351,
"city_code": "101130802",
"city_name": "乌什县"
},
{
"_id": 2270,
"id": 3018,
"pid": 351,
"city_code": "101130809",
"city_name": "阿瓦提县"
},
{
"_id": 2271,
"id": 3019,
"pid": 351,
"city_code": "101130808",
"city_name": "柯坪县"
},
{
"_id": 2272,
"id": 3020,
"pid": 352,
"city_code": "101130701",
"city_name": "阿拉尔市"
},
{
"_id": 2273,
"id": 3021,
"pid": 353,
"city_code": "101130601",
"city_name": "库尔勒"
},
{
"_id": 2274,
"id": 3022,
"pid": 353,
"city_code": "101130602",
"city_name": "轮台县"
},
{
"_id": 2275,
"id": 3023,
"pid": 353,
"city_code": "101130603",
"city_name": "尉犁县"
},
{
"_id": 2276,
"id": 3024,
"pid": 353,
"city_code": "101130604",
"city_name": "若羌县"
},
{
"_id": 2277,
"id": 3025,
"pid": 353,
"city_code": "101130605",
"city_name": "且末县"
},
{
"_id": 2278,
"id": 3026,
"pid": 353,
"city_code": "101130607",
"city_name": "焉耆县"
},
{
"_id": 2279,
"id": 3027,
"pid": 353,
"city_code": "101130606",
"city_name": "和静县"
},
{
"_id": 2280,
"id": 3028,
"pid": 353,
"city_code": "101130608",
"city_name": "和硕县"
},
{
"_id": 2281,
"id": 3029,
"pid": 353,
"city_code": "101130612",
"city_name": "博湖县"
},
{
"_id": 2282,
"id": 3030,
"pid": 354,
"city_code": "101131601",
"city_name": "博乐市"
},
{
"_id": 2283,
"id": 3031,
"pid": 354,
"city_code": "101131603",
"city_name": "精河县"
},
{
"_id": 2284,
"id": 3032,
"pid": 354,
"city_code": "101131602",
"city_name": "温泉县"
},
{
"_id": 2285,
"id": 3033,
"pid": 355,
"city_code": "101130402",
"city_name": "呼图壁县"
},
{
"_id": 2286,
"id": 3034,
"pid": 355,
"city_code": "101130403",
"city_name": "米泉市"
},
{
"_id": 2287,
"id": 3035,
"pid": 355,
"city_code": "101130401",
"city_name": "昌吉市"
},
{
"_id": 2288,
"id": 3036,
"pid": 355,
"city_code": "101130404",
"city_name": "阜康市"
},
{
"_id": 2289,
"id": 3037,
"pid": 355,
"city_code": "101130407",
"city_name": "玛纳斯县"
},
{
"_id": 2290,
"id": 3038,
"pid": 355,
"city_code": "101130406",
"city_name": "奇台县"
},
{
"_id": 2291,
"id": 3039,
"pid": 355,
"city_code": "101130405",
"city_name": "吉木萨尔县"
},
{
"_id": 2292,
"id": 3040,
"pid": 355,
"city_code": "101130408",
"city_name": "木垒县"
},
{
"_id": 2293,
"id": 3041,
"pid": 356,
"city_code": "101131201",
"city_name": "哈密市"
},
{
"_id": 2294,
"id": 3042,
"pid": 356,
"city_code": "101131204",
"city_name": "伊吾县"
},
{
"_id": 2295,
"id": 3043,
"pid": 356,
"city_code": "101131203",
"city_name": "巴里坤"
},
{
"_id": 2296,
"id": 3044,
"pid": 357,
"city_code": "101131301",
"city_name": "和田市"
},
{
"_id": 2297,
"id": 3045,
"pid": 357,
"city_code": "101131301",
"city_name": "和田县"
},
{
"_id": 2298,
"id": 3046,
"pid": 357,
"city_code": "101131304",
"city_name": "墨玉县"
},
{
"_id": 2299,
"id": 3047,
"pid": 357,
"city_code": "101131302",
"city_name": "皮山县"
},
{
"_id": 2300,
"id": 3048,
"pid": 357,
"city_code": "101131305",
"city_name": "洛浦县"
},
{
"_id": 2301,
"id": 3049,
"pid": 357,
"city_code": "101131303",
"city_name": "策勒县"
},
{
"_id": 2302,
"id": 3050,
"pid": 357,
"city_code": "101131307",
"city_name": "于田县"
},
{
"_id": 2303,
"id": 3051,
"pid": 357,
"city_code": "101131306",
"city_name": "民丰县"
},
{
"_id": 2304,
"id": 3052,
"pid": 358,
"city_code": "101130901",
"city_name": "喀什市"
},
{
"_id": 2305,
"id": 3053,
"pid": 358,
"city_code": "101130911",
"city_name": "疏附县"
},
{
"_id": 2306,
"id": 3054,
"pid": 358,
"city_code": "101130912",
"city_name": "疏勒县"
},
{
"_id": 2307,
"id": 3055,
"pid": 358,
"city_code": "101130902",
"city_name": "英吉沙县"
},
{
"_id": 2308,
"id": 3056,
"pid": 358,
"city_code": "101130907",
"city_name": "泽普县"
},
{
"_id": 2309,
"id": 3057,
"pid": 358,
"city_code": "101130905",
"city_name": "莎车县"
},
{
"_id": 2310,
"id": 3058,
"pid": 358,
"city_code": "101130906",
"city_name": "叶城县"
},
{
"_id": 2311,
"id": 3059,
"pid": 358,
"city_code": "101130904",
"city_name": "麦盖提县"
},
{
"_id": 2312,
"id": 3060,
"pid": 358,
"city_code": "101130909",
"city_name": "岳普湖县"
},
{
"_id": 2313,
"id": 3061,
"pid": 358,
"city_code": "101130910",
"city_name": "伽师县"
},
{
"_id": 2314,
"id": 3062,
"pid": 358,
"city_code": "101130908",
"city_name": "巴楚县"
},
{
"_id": 2315,
"id": 3063,
"pid": 358,
"city_code": "101130903",
"city_name": "塔什库尔干"
},
{
"_id": 2316,
"id": 3064,
"pid": 359,
"city_code": "101130201",
"city_name": "克拉玛依市"
},
{
"_id": 2317,
"id": 3065,
"pid": 360,
"city_code": "101131501",
"city_name": "阿图什市"
},
{
"_id": 2318,
"id": 3066,
"pid": 360,
"city_code": "101131503",
"city_name": "阿克陶县"
},
{
"_id": 2319,
"id": 3067,
"pid": 360,
"city_code": "101131504",
"city_name": "阿合奇县"
},
{
"_id": 2320,
"id": 3068,
"pid": 360,
"city_code": "101131502",
"city_name": "乌恰县"
},
{
"_id": 2321,
"id": 3069,
"pid": 361,
"city_code": "101130301",
"city_name": "石河子市"
},
{
"_id": 2322,
"id": 3071,
"pid": 363,
"city_code": "101130501",
"city_name": "吐鲁番市"
},
{
"_id": 2323,
"id": 3072,
"pid": 363,
"city_code": "101130504",
"city_name": "鄯善县"
},
{
"_id": 2324,
"id": 3073,
"pid": 363,
"city_code": "101130502",
"city_name": "托克逊县"
},
{
"_id": 2325,
"id": 3075,
"pid": 365,
"city_code": "101131401",
"city_name": "阿勒泰"
},
{
"_id": 2326,
"id": 3076,
"pid": 365,
"city_code": "101131104",
"city_name": "和布克赛尔"
},
{
"_id": 2327,
"id": 3077,
"pid": 365,
"city_code": "101131001",
"city_name": "伊宁市"
},
{
"_id": 2328,
"id": 3078,
"pid": 365,
"city_code": "101131406",
"city_name": "布尔津县"
},
{
"_id": 2329,
"id": 3079,
"pid": 365,
"city_code": "101131011",
"city_name": "奎屯市"
},
{
"_id": 2330,
"id": 3080,
"pid": 365,
"city_code": "101131106",
"city_name": "乌苏市"
},
{
"_id": 2331,
"id": 3081,
"pid": 365,
"city_code": "101131103",
"city_name": "额敏县"
},
{
"_id": 2332,
"id": 3082,
"pid": 365,
"city_code": "101131408",
"city_name": "富蕴县"
},
{
"_id": 2333,
"id": 3083,
"pid": 365,
"city_code": "101131004",
"city_name": "伊宁县"
},
{
"_id": 2334,
"id": 3084,
"pid": 365,
"city_code": "101131407",
"city_name": "福海县"
},
{
"_id": 2335,
"id": 3085,
"pid": 365,
"city_code": "101131009",
"city_name": "霍城县"
},
{
"_id": 2336,
"id": 3086,
"pid": 365,
"city_code": "101131107",
"city_name": "沙湾县"
},
{
"_id": 2337,
"id": 3087,
"pid": 365,
"city_code": "101131005",
"city_name": "巩留县"
},
{
"_id": 2338,
"id": 3088,
"pid": 365,
"city_code": "101131402",
"city_name": "哈巴河县"
},
{
"_id": 2339,
"id": 3089,
"pid": 365,
"city_code": "101131105",
"city_name": "托里县"
},
{
"_id": 2340,
"id": 3090,
"pid": 365,
"city_code": "101131409",
"city_name": "青河县"
},
{
"_id": 2341,
"id": 3091,
"pid": 365,
"city_code": "101131006",
"city_name": "新源县"
},
{
"_id": 2342,
"id": 3092,
"pid": 365,
"city_code": "101131102",
"city_name": "裕民县"
},
{
"_id": 2343,
"id": 3094,
"pid": 365,
"city_code": "101131405",
"city_name": "吉木乃县"
},
{
"_id": 2344,
"id": 3095,
"pid": 365,
"city_code": "101131007",
"city_name": "昭苏县"
},
{
"_id": 2345,
"id": 3096,
"pid": 365,
"city_code": "101131008",
"city_name": "特克斯县"
},
{
"_id": 2346,
"id": 3097,
"pid": 365,
"city_code": "101131003",
"city_name": "尼勒克县"
},
{
"_id": 2347,
"id": 3098,
"pid": 365,
"city_code": "101131002",
"city_name": "察布查尔"
},
{
"_id": 2348,
"id": 3103,
"pid": 366,
"city_code": "101290103",
"city_name": "东川区"
},
{
"_id": 2349,
"id": 3104,
"pid": 366,
"city_code": "101290112",
"city_name": "安宁市"
},
{
"_id": 2350,
"id": 3105,
"pid": 366,
"city_code": "101290108",
"city_name": "呈贡县"
},
{
"_id": 2351,
"id": 3106,
"pid": 366,
"city_code": "101290105",
"city_name": "晋宁县"
},
{
"_id": 2352,
"id": 3107,
"pid": 366,
"city_code": "101290109",
"city_name": "富民县"
},
{
"_id": 2353,
"id": 3108,
"pid": 366,
"city_code": "101290106",
"city_name": "宜良县"
},
{
"_id": 2354,
"id": 3109,
"pid": 366,
"city_code": "101290110",
"city_name": "嵩明县"
},
{
"_id": 2355,
"id": 3110,
"pid": 366,
"city_code": "101290107",
"city_name": "石林县"
},
{
"_id": 2356,
"id": 3111,
"pid": 366,
"city_code": "101290111",
"city_name": "禄劝县"
},
{
"_id": 2357,
"id": 3112,
"pid": 366,
"city_code": "101290104",
"city_name": "寻甸县"
},
{
"_id": 2358,
"id": 3113,
"pid": 367,
"city_code": "101291204",
"city_name": "兰坪县"
},
{
"_id": 2359,
"id": 3114,
"pid": 367,
"city_code": "101291205",
"city_name": "泸水县"
},
{
"_id": 2360,
"id": 3115,
"pid": 367,
"city_code": "101291203",
"city_name": "福贡县"
},
{
"_id": 2361,
"id": 3116,
"pid": 367,
"city_code": "101291207",
"city_name": "贡山县"
},
{
"_id": 2362,
"id": 3117,
"pid": 368,
"city_code": "101290912",
"city_name": "宁洱县"
},
{
"_id": 2363,
"id": 3118,
"pid": 368,
"city_code": "101290901",
"city_name": "思茅区"
},
{
"_id": 2364,
"id": 3119,
"pid": 368,
"city_code": "101290906",
"city_name": "墨江县"
},
{
"_id": 2365,
"id": 3120,
"pid": 368,
"city_code": "101290903",
"city_name": "景东县"
},
{
"_id": 2366,
"id": 3121,
"pid": 368,
"city_code": "101290902",
"city_name": "景谷县"
},
{
"_id": 2367,
"id": 3122,
"pid": 368,
"city_code": "101290911",
"city_name": "镇沅县"
},
{
"_id": 2368,
"id": 3123,
"pid": 368,
"city_code": "101290907",
"city_name": "江城县"
},
{
"_id": 2369,
"id": 3124,
"pid": 368,
"city_code": "101290908",
"city_name": "孟连县"
},
{
"_id": 2370,
"id": 3125,
"pid": 368,
"city_code": "101290904",
"city_name": "澜沧县"
},
{
"_id": 2371,
"id": 3126,
"pid": 368,
"city_code": "101290909",
"city_name": "西盟县"
},
{
"_id": 2372,
"id": 3128,
"pid": 369,
"city_code": "101291404",
"city_name": "宁蒗县"
},
{
"_id": 2373,
"id": 3130,
"pid": 369,
"city_code": "101291402",
"city_name": "永胜县"
},
{
"_id": 2374,
"id": 3131,
"pid": 369,
"city_code": "101291403",
"city_name": "华坪县"
},
{
"_id": 2375,
"id": 3133,
"pid": 370,
"city_code": "101290504",
"city_name": "施甸县"
},
{
"_id": 2376,
"id": 3134,
"pid": 370,
"city_code": "101290506",
"city_name": "腾冲县"
},
{
"_id": 2377,
"id": 3135,
"pid": 370,
"city_code": "101290503",
"city_name": "龙陵县"
},
{
"_id": 2378,
"id": 3136,
"pid": 370,
"city_code": "101290505",
"city_name": "昌宁县"
},
{
"_id": 2379,
"id": 3137,
"pid": 371,
"city_code": "101290801",
"city_name": "楚雄市"
},
{
"_id": 2380,
"id": 3138,
"pid": 371,
"city_code": "101290809",
"city_name": "双柏县"
},
{
"_id": 2381,
"id": 3139,
"pid": 371,
"city_code": "101290805",
"city_name": "牟定县"
},
{
"_id": 2382,
"id": 3140,
"pid": 371,
"city_code": "101290806",
"city_name": "南华县"
},
{
"_id": 2383,
"id": 3141,
"pid": 371,
"city_code": "101290804",
"city_name": "姚安县"
},
{
"_id": 2384,
"id": 3142,
"pid": 371,
"city_code": "101290802",
"city_name": "大姚县"
},
{
"_id": 2385,
"id": 3143,
"pid": 371,
"city_code": "101290810",
"city_name": "永仁县"
},
{
"_id": 2386,
"id": 3144,
"pid": 371,
"city_code": "101290803",
"city_name": "元谋县"
},
{
"_id": 2387,
"id": 3145,
"pid": 371,
"city_code": "101290807",
"city_name": "武定县"
},
{
"_id": 2388,
"id": 3146,
"pid": 371,
"city_code": "101290808",
"city_name": "禄丰县"
},
{
"_id": 2389,
"id": 3147,
"pid": 372,
"city_code": "101290201",
"city_name": "大理市"
},
{
"_id": 2390,
"id": 3148,
"pid": 372,
"city_code": "101290207",
"city_name": "祥云县"
},
{
"_id": 2391,
"id": 3149,
"pid": 372,
"city_code": "101290205",
"city_name": "宾川县"
},
{
"_id": 2392,
"id": 3150,
"pid": 372,
"city_code": "101290206",
"city_name": "弥渡县"
},
{
"_id": 2393,
"id": 3151,
"pid": 372,
"city_code": "101290204",
"city_name": "永平县"
},
{
"_id": 2394,
"id": 3152,
"pid": 372,
"city_code": "101290202",
"city_name": "云龙县"
},
{
"_id": 2395,
"id": 3153,
"pid": 372,
"city_code": "101290210",
"city_name": "洱源县"
},
{
"_id": 2396,
"id": 3154,
"pid": 372,
"city_code": "101290209",
"city_name": "剑川县"
},
{
"_id": 2397,
"id": 3155,
"pid": 372,
"city_code": "101290211",
"city_name": "鹤庆县"
},
{
"_id": 2398,
"id": 3156,
"pid": 372,
"city_code": "101290203",
"city_name": "漾濞县"
},
{
"_id": 2399,
"id": 3157,
"pid": 372,
"city_code": "101290212",
"city_name": "南涧县"
},
{
"_id": 2400,
"id": 3158,
"pid": 372,
"city_code": "101290208",
"city_name": "巍山县"
},
{
"_id": 2401,
"id": 3159,
"pid": 373,
"city_code": "101291508",
"city_name": "潞西市"
},
{
"_id": 2402,
"id": 3160,
"pid": 373,
"city_code": "101291506",
"city_name": "瑞丽市"
},
{
"_id": 2403,
"id": 3161,
"pid": 373,
"city_code": "101291507",
"city_name": "梁河县"
},
{
"_id": 2404,
"id": 3162,
"pid": 373,
"city_code": "101291504",
"city_name": "盈江县"
},
{
"_id": 2405,
"id": 3163,
"pid": 373,
"city_code": "101291503",
"city_name": "陇川县"
},
{
"_id": 2406,
"id": 3164,
"pid": 374,
"city_code": "101291301",
"city_name": "香格里拉县"
},
{
"_id": 2407,
"id": 3165,
"pid": 374,
"city_code": "101291302",
"city_name": "德钦县"
},
{
"_id": 2408,
"id": 3166,
"pid": 374,
"city_code": "101291303",
"city_name": "维西县"
},
{
"_id": 2409,
"id": 3167,
"pid": 375,
"city_code": "101290311",
"city_name": "泸西县"
},
{
"_id": 2410,
"id": 3168,
"pid": 375,
"city_code": "101290309",
"city_name": "蒙自市"
},
{
"_id": 2411,
"id": 3169,
"pid": 375,
"city_code": "101290308",
"city_name": "个旧市"
},
{
"_id": 2412,
"id": 3170,
"pid": 375,
"city_code": "101290307",
"city_name": "开远市"
},
{
"_id": 2413,
"id": 3171,
"pid": 375,
"city_code": "101290306",
"city_name": "绿春县"
},
{
"_id": 2414,
"id": 3172,
"pid": 375,
"city_code": "101290303",
"city_name": "建水县"
},
{
"_id": 2415,
"id": 3173,
"pid": 375,
"city_code": "101290302",
"city_name": "石屏县"
},
{
"_id": 2416,
"id": 3174,
"pid": 375,
"city_code": "101290304",
"city_name": "弥勒县"
},
{
"_id": 2417,
"id": 3175,
"pid": 375,
"city_code": "101290305",
"city_name": "元阳县"
},
{
"_id": 2418,
"id": 3176,
"pid": 375,
"city_code": "101290301",
"city_name": "红河县"
},
{
"_id": 2419,
"id": 3177,
"pid": 375,
"city_code": "101290312",
"city_name": "金平县"
},
{
"_id": 2420,
"id": 3178,
"pid": 375,
"city_code": "101290313",
"city_name": "河口县"
},
{
"_id": 2421,
"id": 3179,
"pid": 375,
"city_code": "101290310",
"city_name": "屏边县"
},
{
"_id": 2422,
"id": 3181,
"pid": 376,
"city_code": "101291105",
"city_name": "凤庆县"
},
{
"_id": 2423,
"id": 3182,
"pid": 376,
"city_code": "101291107",
"city_name": "云县"
},
{
"_id": 2424,
"id": 3183,
"pid": 376,
"city_code": "101291106",
"city_name": "永德县"
},
{
"_id": 2425,
"id": 3184,
"pid": 376,
"city_code": "101291108",
"city_name": "镇康县"
},
{
"_id": 2426,
"id": 3185,
"pid": 376,
"city_code": "101291104",
"city_name": "双江县"
},
{
"_id": 2427,
"id": 3186,
"pid": 376,
"city_code": "101291103",
"city_name": "耿马县"
},
{
"_id": 2428,
"id": 3187,
"pid": 376,
"city_code": "101291102",
"city_name": "沧源县"
},
{
"_id": 2429,
"id": 3189,
"pid": 377,
"city_code": "101290409",
"city_name": "宣威市"
},
{
"_id": 2430,
"id": 3190,
"pid": 377,
"city_code": "101290405",
"city_name": "马龙县"
},
{
"_id": 2431,
"id": 3191,
"pid": 377,
"city_code": "101290403",
"city_name": "陆良县"
},
{
"_id": 2432,
"id": 3192,
"pid": 377,
"city_code": "101290406",
"city_name": "师宗县"
},
{
"_id": 2433,
"id": 3193,
"pid": 377,
"city_code": "101290407",
"city_name": "罗平县"
},
{
"_id": 2434,
"id": 3194,
"pid": 377,
"city_code": "101290404",
"city_name": "富源县"
},
{
"_id": 2435,
"id": 3195,
"pid": 377,
"city_code": "101290408",
"city_name": "会泽县"
},
{
"_id": 2436,
"id": 3196,
"pid": 377,
"city_code": "101290402",
"city_name": "沾益县"
},
{
"_id": 2437,
"id": 3197,
"pid": 378,
"city_code": "101290601",
"city_name": "文山县"
},
{
"_id": 2438,
"id": 3198,
"pid": 378,
"city_code": "101290605",
"city_name": "砚山县"
},
{
"_id": 2439,
"id": 3199,
"pid": 378,
"city_code": "101290602",
"city_name": "西畴县"
},
{
"_id": 2440,
"id": 3200,
"pid": 378,
"city_code": "101290604",
"city_name": "麻栗坡县"
},
{
"_id": 2441,
"id": 3201,
"pid": 378,
"city_code": "101290603",
"city_name": "马关县"
},
{
"_id": 2442,
"id": 3202,
"pid": 378,
"city_code": "101290606",
"city_name": "丘北县"
},
{
"_id": 2443,
"id": 3203,
"pid": 378,
"city_code": "101290607",
"city_name": "广南县"
},
{
"_id": 2444,
"id": 3204,
"pid": 378,
"city_code": "101290608",
"city_name": "富宁县"
},
{
"_id": 2445,
"id": 3205,
"pid": 379,
"city_code": "101291601",
"city_name": "景洪市"
},
{
"_id": 2446,
"id": 3206,
"pid": 379,
"city_code": "101291603",
"city_name": "勐海县"
},
{
"_id": 2447,
"id": 3207,
"pid": 379,
"city_code": "101291605",
"city_name": "勐腊县"
},
{
"_id": 2448,
"id": 3209,
"pid": 380,
"city_code": "101290703",
"city_name": "江川县"
},
{
"_id": 2449,
"id": 3210,
"pid": 380,
"city_code": "101290702",
"city_name": "澄江县"
},
{
"_id": 2450,
"id": 3211,
"pid": 380,
"city_code": "101290704",
"city_name": "通海县"
},
{
"_id": 2451,
"id": 3212,
"pid": 380,
"city_code": "101290705",
"city_name": "华宁县"
},
{
"_id": 2452,
"id": 3213,
"pid": 380,
"city_code": "101290707",
"city_name": "易门县"
},
{
"_id": 2453,
"id": 3214,
"pid": 380,
"city_code": "101290708",
"city_name": "峨山县"
},
{
"_id": 2454,
"id": 3215,
"pid": 380,
"city_code": "101290706",
"city_name": "新平县"
},
{
"_id": 2455,
"id": 3216,
"pid": 380,
"city_code": "101290709",
"city_name": "元江县"
},
{
"_id": 2456,
"id": 3218,
"pid": 381,
"city_code": "101291002",
"city_name": "鲁甸县"
},
{
"_id": 2457,
"id": 3219,
"pid": 381,
"city_code": "101291006",
"city_name": "巧家县"
},
{
"_id": 2458,
"id": 3220,
"pid": 381,
"city_code": "101291009",
"city_name": "盐津县"
},
{
"_id": 2459,
"id": 3221,
"pid": 381,
"city_code": "101291010",
"city_name": "大关县"
},
{
"_id": 2460,
"id": 3222,
"pid": 381,
"city_code": "101291008",
"city_name": "永善县"
},
{
"_id": 2461,
"id": 3223,
"pid": 381,
"city_code": "101291007",
"city_name": "绥江县"
},
{
"_id": 2462,
"id": 3224,
"pid": 381,
"city_code": "101291004",
"city_name": "镇雄县"
},
{
"_id": 2463,
"id": 3225,
"pid": 381,
"city_code": "101291003",
"city_name": "彝良县"
},
{
"_id": 2464,
"id": 3226,
"pid": 381,
"city_code": "101291005",
"city_name": "威信县"
},
{
"_id": 2465,
"id": 3227,
"pid": 381,
"city_code": "101291011",
"city_name": "水富县"
},
{
"_id": 2466,
"id": 3234,
"pid": 382,
"city_code": "101210102",
"city_name": "萧山区"
},
{
"_id": 2467,
"id": 3235,
"pid": 382,
"city_code": "101210106",
"city_name": "余杭区"
},
{
"_id": 2468,
"id": 3237,
"pid": 382,
"city_code": "101210105",
"city_name": "建德市"
},
{
"_id": 2469,
"id": 3238,
"pid": 382,
"city_code": "101210108",
"city_name": "富阳区"
},
{
"_id": 2470,
"id": 3239,
"pid": 382,
"city_code": "101210107",
"city_name": "临安市"
},
{
"_id": 2471,
"id": 3240,
"pid": 382,
"city_code": "101210103",
"city_name": "桐庐县"
},
{
"_id": 2472,
"id": 3241,
"pid": 382,
"city_code": "101210104",
"city_name": "淳安县"
},
{
"_id": 2473,
"id": 3244,
"pid": 383,
"city_code": "101210204",
"city_name": "德清县"
},
{
"_id": 2474,
"id": 3245,
"pid": 383,
"city_code": "101210202",
"city_name": "长兴县"
},
{
"_id": 2475,
"id": 3246,
"pid": 383,
"city_code": "101210203",
"city_name": "安吉县"
},
{
"_id": 2476,
"id": 3249,
"pid": 384,
"city_code": "101210303",
"city_name": "海宁市"
},
{
"_id": 2477,
"id": 3250,
"pid": 384,
"city_code": "101210302",
"city_name": "嘉善县"
},
{
"_id": 2478,
"id": 3251,
"pid": 384,
"city_code": "101210305",
"city_name": "平湖市"
},
{
"_id": 2479,
"id": 3252,
"pid": 384,
"city_code": "101210304",
"city_name": "桐乡市"
},
{
"_id": 2480,
"id": 3253,
"pid": 384,
"city_code": "101210306",
"city_name": "海盐县"
},
{
"_id": 2481,
"id": 3256,
"pid": 385,
"city_code": "101210903",
"city_name": "兰溪市"
},
{
"_id": 2482,
"id": 3257,
"pid": 385,
"city_code": "101210904",
"city_name": "义乌市"
},
{
"_id": 2483,
"id": 3264,
"pid": 385,
"city_code": "101210905",
"city_name": "东阳市"
},
{
"_id": 2484,
"id": 3265,
"pid": 385,
"city_code": "101210907",
"city_name": "永康市"
},
{
"_id": 2485,
"id": 3266,
"pid": 385,
"city_code": "101210906",
"city_name": "武义县"
},
{
"_id": 2486,
"id": 3267,
"pid": 385,
"city_code": "101210902",
"city_name": "浦江县"
},
{
"_id": 2487,
"id": 3268,
"pid": 385,
"city_code": "101210908",
"city_name": "磐安县"
},
{
"_id": 2488,
"id": 3270,
"pid": 386,
"city_code": "101210803",
"city_name": "龙泉市"
},
{
"_id": 2489,
"id": 3271,
"pid": 386,
"city_code": "101210805",
"city_name": "青田县"
},
{
"_id": 2490,
"id": 3272,
"pid": 386,
"city_code": "101210804",
"city_name": "缙云县"
},
{
"_id": 2491,
"id": 3273,
"pid": 386,
"city_code": "101210802",
"city_name": "遂昌县"
},
{
"_id": 2492,
"id": 3274,
"pid": 386,
"city_code": "101210808",
"city_name": "松阳县"
},
{
"_id": 2493,
"id": 3275,
"pid": 386,
"city_code": "101210806",
"city_name": "云和县"
},
{
"_id": 2494,
"id": 3276,
"pid": 386,
"city_code": "101210807",
"city_name": "庆元县"
},
{
"_id": 2495,
"id": 3277,
"pid": 386,
"city_code": "101210809",
"city_name": "景宁县"
},
{
"_id": 2496,
"id": 3281,
"pid": 387,
"city_code": "101210412",
"city_name": "镇海区"
},
{
"_id": 2497,
"id": 3282,
"pid": 387,
"city_code": "101210410",
"city_name": "北仑区"
},
{
"_id": 2498,
"id": 3283,
"pid": 387,
"city_code": "101210411",
"city_name": "鄞州区"
},
{
"_id": 2499,
"id": 3284,
"pid": 387,
"city_code": "101210404",
"city_name": "余姚市"
},
{
"_id": 2500,
"id": 3285,
"pid": 387,
"city_code": "101210403",
"city_name": "慈溪市"
},
{
"_id": 2501,
"id": 3286,
"pid": 387,
"city_code": "101210405",
"city_name": "奉化区"
},
{
"_id": 2502,
"id": 3287,
"pid": 387,
"city_code": "101210406",
"city_name": "象山县"
},
{
"_id": 2503,
"id": 3288,
"pid": 387,
"city_code": "101210408",
"city_name": "宁海县"
},
{
"_id": 2504,
"id": 3290,
"pid": 388,
"city_code": "101210503",
"city_name": "上虞区"
},
{
"_id": 2505,
"id": 3291,
"pid": 388,
"city_code": "101210505",
"city_name": "嵊州市"
},
{
"_id": 2506,
"id": 3292,
"pid": 388,
"city_code": "101210501",
"city_name": "绍兴县"
},
{
"_id": 2507,
"id": 3293,
"pid": 388,
"city_code": "101210504",
"city_name": "新昌县"
},
{
"_id": 2508,
"id": 3294,
"pid": 388,
"city_code": "101210502",
"city_name": "诸暨市"
},
{
"_id": 2509,
"id": 3295,
"pid": 389,
"city_code": "101210611",
"city_name": "椒江区"
},
{
"_id": 2510,
"id": 3296,
"pid": 389,
"city_code": "101210612",
"city_name": "黄岩区"
},
{
"_id": 2511,
"id": 3297,
"pid": 389,
"city_code": "101210613",
"city_name": "路桥区"
},
{
"_id": 2512,
"id": 3298,
"pid": 389,
"city_code": "101210607",
"city_name": "温岭市"
},
{
"_id": 2513,
"id": 3299,
"pid": 389,
"city_code": "101210610",
"city_name": "临海市"
},
{
"_id": 2514,
"id": 3300,
"pid": 389,
"city_code": "101210603",
"city_name": "玉环县"
},
{
"_id": 2515,
"id": 3301,
"pid": 389,
"city_code": "101210604",
"city_name": "三门县"
},
{
"_id": 2516,
"id": 3302,
"pid": 389,
"city_code": "101210605",
"city_name": "天台县"
},
{
"_id": 2517,
"id": 3303,
"pid": 389,
"city_code": "101210606",
"city_name": "仙居县"
},
{
"_id": 2518,
"id": 3307,
"pid": 390,
"city_code": "101210705",
"city_name": "瑞安市"
},
{
"_id": 2519,
"id": 3308,
"pid": 390,
"city_code": "101210707",
"city_name": "乐清市"
},
{
"_id": 2520,
"id": 3309,
"pid": 390,
"city_code": "101210706",
"city_name": "洞头区"
},
{
"_id": 2521,
"id": 3310,
"pid": 390,
"city_code": "101210708",
"city_name": "永嘉县"
},
{
"_id": 2522,
"id": 3311,
"pid": 390,
"city_code": "101210704",
"city_name": "平阳县"
},
{
"_id": 2523,
"id": 3312,
"pid": 390,
"city_code": "101210709",
"city_name": "苍南县"
},
{
"_id": 2524,
"id": 3313,
"pid": 390,
"city_code": "101210703",
"city_name": "文成县"
},
{
"_id": 2525,
"id": 3314,
"pid": 390,
"city_code": "101210702",
"city_name": "泰顺县"
},
{
"_id": 2526,
"id": 3315,
"pid": 391,
"city_code": "101211106",
"city_name": "定海区"
},
{
"_id": 2527,
"id": 3316,
"pid": 391,
"city_code": "101211105",
"city_name": "普陀区"
},
{
"_id": 2528,
"id": 3317,
"pid": 391,
"city_code": "101211104",
"city_name": "岱山县"
},
{
"_id": 2529,
"id": 3318,
"pid": 391,
"city_code": "101211102",
"city_name": "嵊泗县"
},
{
"_id": 2530,
"id": 3319,
"pid": 392,
"city_code": "101211006",
"city_name": "衢江区"
},
{
"_id": 2531,
"id": 3320,
"pid": 392,
"city_code": "101211005",
"city_name": "江山市"
},
{
"_id": 2532,
"id": 3321,
"pid": 392,
"city_code": "101211002",
"city_name": "常山县"
},
{
"_id": 2533,
"id": 3322,
"pid": 392,
"city_code": "101211003",
"city_name": "开化县"
},
{
"_id": 2534,
"id": 3323,
"pid": 392,
"city_code": "101211004",
"city_name": "龙游县"
},
{
"_id": 2535,
"id": 3324,
"pid": 31,
"city_code": "101040300",
"city_name": "合川区"
},
{
"_id": 2536,
"id": 3325,
"pid": 31,
"city_code": "101040500",
"city_name": "江津区"
},
{
"_id": 2537,
"id": 3326,
"pid": 31,
"city_code": "101040400",
"city_name": "南川区"
},
{
"_id": 2538,
"id": 3327,
"pid": 31,
"city_code": "101040200",
"city_name": "永川区"
},
{
"_id": 2539,
"id": 3329,
"pid": 31,
"city_code": "101040700",
"city_name": "渝北区"
},
{
"_id": 2540,
"id": 3330,
"pid": 31,
"city_code": "101040600",
"city_name": "万盛区"
},
{
"_id": 2541,
"id": 3332,
"pid": 31,
"city_code": "101041300",
"city_name": "万州区"
},
{
"_id": 2542,
"id": 3333,
"pid": 31,
"city_code": "101040800",
"city_name": "北碚区"
},
{
"_id": 2543,
"id": 3334,
"pid": 31,
"city_code": "101043700",
"city_name": "沙坪坝区"
},
{
"_id": 2544,
"id": 3335,
"pid": 31,
"city_code": "101040900",
"city_name": "巴南区"
},
{
"_id": 2545,
"id": 3336,
"pid": 31,
"city_code": "101041400",
"city_name": "涪陵区"
},
{
"_id": 2546,
"id": 3340,
"pid": 31,
"city_code": "101041100",
"city_name": "黔江区"
},
{
"_id": 2547,
"id": 3341,
"pid": 31,
"city_code": "101041000",
"city_name": "长寿区"
},
{
"_id": 2548,
"id": 3343,
"pid": 31,
"city_code": "101043300",
"city_name": "綦江区"
},
{
"_id": 2549,
"id": 3344,
"pid": 31,
"city_code": "101042100",
"city_name": "潼南区"
},
{
"_id": 2550,
"id": 3345,
"pid": 31,
"city_code": "101042800",
"city_name": "铜梁区"
},
{
"_id": 2551,
"id": 3346,
"pid": 31,
"city_code": "101042600",
"city_name": "大足县"
},
{
"_id": 2552,
"id": 3347,
"pid": 31,
"city_code": "101042700",
"city_name": "荣昌区"
},
{
"_id": 2553,
"id": 3348,
"pid": 31,
"city_code": "101042900",
"city_name": "璧山区"
},
{
"_id": 2554,
"id": 3349,
"pid": 31,
"city_code": "101042200",
"city_name": "垫江县"
},
{
"_id": 2555,
"id": 3350,
"pid": 31,
"city_code": "101043100",
"city_name": "武隆县"
},
{
"_id": 2556,
"id": 3351,
"pid": 31,
"city_code": "101043000",
"city_name": "丰都县"
},
{
"_id": 2557,
"id": 3352,
"pid": 31,
"city_code": "101041600",
"city_name": "城口县"
},
{
"_id": 2558,
"id": 3353,
"pid": 31,
"city_code": "101042300",
"city_name": "梁平县"
},
{
"_id": 2559,
"id": 3354,
"pid": 31,
"city_code": "101041500",
"city_name": "开县"
},
{
"_id": 2560,
"id": 3355,
"pid": 31,
"city_code": "101041800",
"city_name": "巫溪县"
},
{
"_id": 2561,
"id": 3356,
"pid": 31,
"city_code": "101042000",
"city_name": "巫山县"
},
{
"_id": 2562,
"id": 3357,
"pid": 31,
"city_code": "101041900",
"city_name": "奉节县"
},
{
"_id": 2563,
"id": 3358,
"pid": 31,
"city_code": "101041700",
"city_name": "云阳县"
},
{
"_id": 2564,
"id": 3359,
"pid": 31,
"city_code": "101042400",
"city_name": "忠县"
},
{
"_id": 2565,
"id": 3360,
"pid": 31,
"city_code": "101042500",
"city_name": "石柱县"
},
{
"_id": 2566,
"id": 3361,
"pid": 31,
"city_code": "101043200",
"city_name": "彭水县"
},
{
"_id": 2567,
"id": 3362,
"pid": 31,
"city_code": "101043400",
"city_name": "酉阳县"
},
{
"_id": 2568,
"id": 3363,
"pid": 31,
"city_code": "101043600",
"city_name": "秀山县"
},
{
"_id": 2569,
"id": 3368,
"pid": 32,
"city_code": "101320102",
"city_name": "九龙城区"
},
{
"_id": 2570,
"id": 3383,
"pid": 34,
"city_code": "101340101",
"city_name": "台北"
},
{
"_id": 2571,
"id": 3384,
"pid": 34,
"city_code": "101340201",
"city_name": "高雄"
},
{
"_id": 2572,
"id": 3385,
"pid": 34,
"city_code": "CHTW0006",
"city_name": "基隆"
},
{
"_id": 2573,
"id": 3386,
"pid": 34,
"city_code": "101340401",
"city_name": "台中"
},
{
"_id": 2574,
"id": 3387,
"pid": 34,
"city_code": "101340301",
"city_name": "台南"
},
{
"_id": 2575,
"id": 3388,
"pid": 34,
"city_code": "101340103",
"city_name": "新竹"
},
{
"_id": 2576,
"id": 3389,
"pid": 34,
"city_code": "101340901",
"city_name": "嘉义"
},
{
"_id": 2577,
"id": 3390,
"pid": 34,
"city_code": "101340701",
"city_name": "宜兰县"
},
{
"_id": 2578,
"id": 3391,
"pid": 34,
"city_code": "101340102",
"city_name": "桃园县"
},
{
"_id": 2579,
"id": 3392,
"pid": 34,
"city_code": "CHTW0016",
"city_name": "苗栗县"
},
{
"_id": 2580,
"id": 3393,
"pid": 34,
"city_code": "CHTW0017",
"city_name": "彰化县"
},
{
"_id": 2581,
"id": 3394,
"pid": 34,
"city_code": "101340404",
"city_name": "南投县"
},
{
"_id": 2582,
"id": 3395,
"pid": 34,
"city_code": "101340406",
"city_name": "云林县"
},
{
"_id": 2583,
"id": 3396,
"pid": 34,
"city_code": "101340205",
"city_name": "屏东县"
},
{
"_id": 2584,
"id": 3397,
"pid": 34,
"city_code": "101341101",
"city_name": "台东县"
},
{
"_id": 2585,
"id": 3398,
"pid": 34,
"city_code": "101340405",
"city_name": "花莲县"
},
{
"_id": 2586,
"id": 3400,
"pid": 2,
"city_code": "101220101",
"city_name": "合肥"
},
{
"_id": 2587,
"id": 3405,
"pid": 3400,
"city_code": "101220102",
"city_name": "长丰县"
},
{
"_id": 2588,
"id": 3406,
"pid": 3400,
"city_code": "101220103",
"city_name": "肥东县"
},
{
"_id": 2589,
"id": 3407,
"pid": 3400,
"city_code": "101220104",
"city_name": "肥西县"
},
{
"_id": 2590,
"id": 3259,
"pid": 168,
"city_code": "101050708",
"city_name": "加格达奇区"
},
{
"_id": 2591,
"id": 3261,
"pid": 168,
"city_code": "101050706",
"city_name": "新林区"
},
{
"_id": 2592,
"id": 3262,
"pid": 168,
"city_code": "101050705",
"city_name": "呼中区"
},
{
"_id": 2593,
"id": 1856,
"pid": 365,
"city_code": "101131101",
"city_name": "塔城市"
},
{
"_id": 2594,
"id": 3657,
"pid": 28,
"city_code": "",
"city_name": "北屯"
},
{
"_id": 2595,
"id": 3661,
"pid": 8,
"city_code": "",
"city_name": "三沙"
}
]
"""
def print_c():
    """Decode the module-level JSON city table.

    Parses the triple-quoted string constant ``a`` (a JSON array of
    city/region records with ``_id``, ``id``, ``pid``, ``city_code`` and
    ``city_name`` fields) and returns it as Python objects.

    Returns:
        list[dict]: the city records decoded from ``a``.

    Note:
        The string is re-parsed on every call; callers receive a fresh,
        independently mutable list each time.
    """
    decoded = json.loads(a)
    return decoded
a="""
[
{
"_id": 1,
"id": 1,
"pid": 0,
"city_code": "101010100",
"city_name": "北京"
},
{
"_id": 2,
"id": 2,
"pid": 0,
"city_code": "",
"city_name": "安徽"
},
{
"_id": 3,
"id": 3,
"pid": 0,
"city_code": "",
"city_name": "福建"
},
{
"_id": 4,
"id": 4,
"pid": 0,
"city_code": "",
"city_name": "甘肃"
},
{
"_id": 5,
"id": 5,
"pid": 0,
"city_code": "",
"city_name": "广东"
},
{
"_id": 6,
"id": 6,
"pid": 0,
"city_code": "",
"city_name": "广西"
},
{
"_id": 7,
"id": 7,
"pid": 0,
"city_code": "",
"city_name": "贵州"
},
{
"_id": 8,
"id": 8,
"pid": 0,
"city_code": "",
"city_name": "海南"
},
{
"_id": 9,
"id": 9,
"pid": 0,
"city_code": "",
"city_name": "河北"
},
{
"_id": 10,
"id": 10,
"pid": 0,
"city_code": "",
"city_name": "河南"
},
{
"_id": 11,
"id": 11,
"pid": 0,
"city_code": "",
"city_name": "黑龙江"
},
{
"_id": 12,
"id": 12,
"pid": 0,
"city_code": "",
"city_name": "湖北"
},
{
"_id": 13,
"id": 13,
"pid": 0,
"city_code": "",
"city_name": "湖南"
},
{
"_id": 14,
"id": 14,
"pid": 0,
"city_code": "",
"city_name": "吉林"
},
{
"_id": 15,
"id": 15,
"pid": 0,
"city_code": "",
"city_name": "江苏"
},
{
"_id": 16,
"id": 16,
"pid": 0,
"city_code": "",
"city_name": "江西"
},
{
"_id": 17,
"id": 17,
"pid": 0,
"city_code": "",
"city_name": "辽宁"
},
{
"_id": 18,
"id": 18,
"pid": 0,
"city_code": "",
"city_name": "内蒙古"
},
{
"_id": 19,
"id": 19,
"pid": 0,
"city_code": "",
"city_name": "宁夏"
},
{
"_id": 20,
"id": 20,
"pid": 0,
"city_code": "",
"city_name": "青海"
},
{
"_id": 21,
"id": 21,
"pid": 0,
"city_code": "",
"city_name": "山东"
},
{
"_id": 22,
"id": 22,
"pid": 0,
"city_code": "",
"city_name": "山西"
},
{
"_id": 23,
"id": 23,
"pid": 0,
"city_code": "",
"city_name": "陕西"
},
{
"_id": 24,
"id": 24,
"pid": 0,
"city_code": "101020100",
"city_name": "上海"
},
{
"_id": 25,
"id": 25,
"pid": 0,
"city_code": "",
"city_name": "四川"
},
{
"_id": 26,
"id": 26,
"pid": 0,
"city_code": "101030100",
"city_name": "天津"
},
{
"_id": 27,
"id": 27,
"pid": 0,
"city_code": "",
"city_name": "西藏"
},
{
"_id": 28,
"id": 28,
"pid": 0,
"city_code": "",
"city_name": "新疆"
},
{
"_id": 29,
"id": 29,
"pid": 0,
"city_code": "",
"city_name": "云南"
},
{
"_id": 30,
"id": 30,
"pid": 0,
"city_code": "",
"city_name": "浙江"
},
{
"_id": 31,
"id": 31,
"pid": 0,
"city_code": "101040100",
"city_name": "重庆"
},
{
"_id": 32,
"id": 32,
"pid": 0,
"city_code": "101320101",
"city_name": "香港"
},
{
"_id": 33,
"id": 33,
"pid": 0,
"city_code": "101330101",
"city_name": "澳门"
},
{
"_id": 34,
"id": 34,
"pid": 0,
"city_code": "",
"city_name": "台湾"
},
{
"_id": 35,
"id": 35,
"pid": 2,
"city_code": "101220601",
"city_name": "安庆"
},
{
"_id": 36,
"id": 36,
"pid": 2,
"city_code": "101220201",
"city_name": "蚌埠"
},
{
"_id": 37,
"id": 37,
"pid": 3400,
"city_code": "101220105",
"city_name": "巢湖市"
},
{
"_id": 38,
"id": 38,
"pid": 2,
"city_code": "101221701",
"city_name": "池州"
},
{
"_id": 39,
"id": 39,
"pid": 2,
"city_code": "101221101",
"city_name": "滁州"
},
{
"_id": 40,
"id": 40,
"pid": 2,
"city_code": "101220801",
"city_name": "阜阳"
},
{
"_id": 41,
"id": 41,
"pid": 2,
"city_code": "101221201",
"city_name": "淮北"
},
{
"_id": 42,
"id": 42,
"pid": 2,
"city_code": "101220401",
"city_name": "淮南"
},
{
"_id": 43,
"id": 43,
"pid": 2,
"city_code": "101221001",
"city_name": "黄山"
},
{
"_id": 44,
"id": 44,
"pid": 2,
"city_code": "101221501",
"city_name": "六安"
},
{
"_id": 45,
"id": 45,
"pid": 2,
"city_code": "101220501",
"city_name": "马鞍山"
},
{
"_id": 46,
"id": 46,
"pid": 2,
"city_code": "101220701",
"city_name": "宿州"
},
{
"_id": 47,
"id": 47,
"pid": 2,
"city_code": "101221301",
"city_name": "铜陵"
},
{
"_id": 48,
"id": 48,
"pid": 2,
"city_code": "101220301",
"city_name": "芜湖"
},
{
"_id": 49,
"id": 49,
"pid": 2,
"city_code": "101221401",
"city_name": "宣城"
},
{
"_id": 50,
"id": 50,
"pid": 2,
"city_code": "101220901",
"city_name": "亳州"
},
{
"_id": 51,
"id": 52,
"pid": 3,
"city_code": "101230101",
"city_name": "福州"
},
{
"_id": 52,
"id": 53,
"pid": 3,
"city_code": "101230701",
"city_name": "龙岩"
},
{
"_id": 53,
"id": 54,
"pid": 3,
"city_code": "101230901",
"city_name": "南平"
},
{
"_id": 54,
"id": 55,
"pid": 3,
"city_code": "101230301",
"city_name": "宁德"
},
{
"_id": 55,
"id": 56,
"pid": 3,
"city_code": "101230401",
"city_name": "莆田"
},
{
"_id": 56,
"id": 57,
"pid": 3,
"city_code": "101230501",
"city_name": "泉州"
},
{
"_id": 57,
"id": 58,
"pid": 3,
"city_code": "101230801",
"city_name": "三明"
},
{
"_id": 58,
"id": 59,
"pid": 3,
"city_code": "101230201",
"city_name": "厦门"
},
{
"_id": 59,
"id": 60,
"pid": 3,
"city_code": "101230601",
"city_name": "漳州"
},
{
"_id": 60,
"id": 61,
"pid": 4,
"city_code": "101160101",
"city_name": "兰州"
},
{
"_id": 61,
"id": 62,
"pid": 4,
"city_code": "101161301",
"city_name": "白银"
},
{
"_id": 62,
"id": 63,
"pid": 4,
"city_code": "101160201",
"city_name": "定西"
},
{
"_id": 63,
"id": 64,
"pid": 4,
"city_code": "",
"city_name": "甘南州"
},
{
"_id": 64,
"id": 65,
"pid": 4,
"city_code": "101161401",
"city_name": "嘉峪关"
},
{
"_id": 65,
"id": 66,
"pid": 4,
"city_code": "101160601",
"city_name": "金昌"
},
{
"_id": 66,
"id": 67,
"pid": 4,
"city_code": "101160801",
"city_name": "酒泉"
},
{
"_id": 67,
"id": 68,
"pid": 4,
"city_code": "101161101",
"city_name": "临夏"
},
{
"_id": 68,
"id": 69,
"pid": 4,
"city_code": "101161010",
"city_name": "陇南市"
},
{
"_id": 69,
"id": 70,
"pid": 4,
"city_code": "101160301",
"city_name": "平凉"
},
{
"_id": 70,
"id": 71,
"pid": 4,
"city_code": "101160401",
"city_name": "庆阳"
},
{
"_id": 71,
"id": 72,
"pid": 4,
"city_code": "101160901",
"city_name": "天水"
},
{
"_id": 72,
"id": 73,
"pid": 4,
"city_code": "101160501",
"city_name": "武威"
},
{
"_id": 73,
"id": 74,
"pid": 4,
"city_code": "101160701",
"city_name": "张掖"
},
{
"_id": 74,
"id": 75,
"pid": 5,
"city_code": "101280101",
"city_name": "广州"
},
{
"_id": 75,
"id": 76,
"pid": 5,
"city_code": "101280601",
"city_name": "深圳"
},
{
"_id": 76,
"id": 77,
"pid": 5,
"city_code": "101281501",
"city_name": "潮州"
},
{
"_id": 77,
"id": 78,
"pid": 5,
"city_code": "101281601",
"city_name": "东莞"
},
{
"_id": 78,
"id": 79,
"pid": 5,
"city_code": "101280800",
"city_name": "佛山"
},
{
"_id": 79,
"id": 80,
"pid": 5,
"city_code": "101281201",
"city_name": "河源"
},
{
"_id": 80,
"id": 81,
"pid": 5,
"city_code": "101280301",
"city_name": "惠州"
},
{
"_id": 81,
"id": 82,
"pid": 5,
"city_code": "101281101",
"city_name": "江门"
},
{
"_id": 82,
"id": 83,
"pid": 5,
"city_code": "101281901",
"city_name": "揭阳"
},
{
"_id": 83,
"id": 84,
"pid": 5,
"city_code": "101282001",
"city_name": "茂名"
},
{
"_id": 84,
"id": 85,
"pid": 5,
"city_code": "101280401",
"city_name": "梅州"
},
{
"_id": 85,
"id": 86,
"pid": 5,
"city_code": "101281301",
"city_name": "清远"
},
{
"_id": 86,
"id": 87,
"pid": 5,
"city_code": "101280501",
"city_name": "汕头"
},
{
"_id": 87,
"id": 88,
"pid": 5,
"city_code": "101282101",
"city_name": "汕尾"
},
{
"_id": 88,
"id": 89,
"pid": 5,
"city_code": "101280201",
"city_name": "韶关"
},
{
"_id": 89,
"id": 90,
"pid": 5,
"city_code": "101281801",
"city_name": "阳江"
},
{
"_id": 90,
"id": 91,
"pid": 5,
"city_code": "101281401",
"city_name": "云浮"
},
{
"_id": 91,
"id": 92,
"pid": 5,
"city_code": "101281001",
"city_name": "湛江"
},
{
"_id": 92,
"id": 93,
"pid": 5,
"city_code": "101280901",
"city_name": "肇庆"
},
{
"_id": 93,
"id": 94,
"pid": 5,
"city_code": "101281701",
"city_name": "中山"
},
{
"_id": 94,
"id": 95,
"pid": 5,
"city_code": "101280701",
"city_name": "珠海"
},
{
"_id": 95,
"id": 96,
"pid": 6,
"city_code": "101300101",
"city_name": "南宁"
},
{
"_id": 96,
"id": 97,
"pid": 6,
"city_code": "101300501",
"city_name": "桂林"
},
{
"_id": 97,
"id": 98,
"pid": 6,
"city_code": "101301001",
"city_name": "百色"
},
{
"_id": 98,
"id": 99,
"pid": 6,
"city_code": "101301301",
"city_name": "北海"
},
{
"_id": 99,
"id": 100,
"pid": 6,
"city_code": "101300201",
"city_name": "崇左"
},
{
"_id": 100,
"id": 101,
"pid": 6,
"city_code": "101301401",
"city_name": "防城港"
},
{
"_id": 101,
"id": 102,
"pid": 6,
"city_code": "101300801",
"city_name": "贵港"
},
{
"_id": 102,
"id": 103,
"pid": 6,
"city_code": "101301201",
"city_name": "河池"
},
{
"_id": 103,
"id": 104,
"pid": 6,
"city_code": "101300701",
"city_name": "贺州"
},
{
"_id": 104,
"id": 105,
"pid": 6,
"city_code": "101300401",
"city_name": "来宾"
},
{
"_id": 105,
"id": 106,
"pid": 6,
"city_code": "101300301",
"city_name": "柳州"
},
{
"_id": 106,
"id": 107,
"pid": 6,
"city_code": "101301101",
"city_name": "钦州"
},
{
"_id": 107,
"id": 108,
"pid": 6,
"city_code": "101300601",
"city_name": "梧州"
},
{
"_id": 108,
"id": 109,
"pid": 6,
"city_code": "101300901",
"city_name": "玉林"
},
{
"_id": 109,
"id": 110,
"pid": 7,
"city_code": "101260101",
"city_name": "贵阳"
},
{
"_id": 110,
"id": 111,
"pid": 7,
"city_code": "101260301",
"city_name": "安顺"
},
{
"_id": 111,
"id": 112,
"pid": 7,
"city_code": "101260701",
"city_name": "毕节"
},
{
"_id": 112,
"id": 113,
"pid": 7,
"city_code": "101260801",
"city_name": "六盘水"
},
{
"_id": 113,
"id": 114,
"pid": 7,
"city_code": "101260506",
"city_name": "黔东南"
},
{
"_id": 114,
"id": 115,
"pid": 7,
"city_code": "101260413",
"city_name": "黔南"
},
{
"_id": 115,
"id": 116,
"pid": 7,
"city_code": "101260906",
"city_name": "黔西南"
},
{
"_id": 116,
"id": 117,
"pid": 7,
"city_code": "101260601",
"city_name": "铜仁"
},
{
"_id": 117,
"id": 118,
"pid": 7,
"city_code": "101260201",
"city_name": "遵义"
},
{
"_id": 118,
"id": 119,
"pid": 8,
"city_code": "101310101",
"city_name": "海口"
},
{
"_id": 119,
"id": 120,
"pid": 8,
"city_code": "101310201",
"city_name": "三亚"
},
{
"_id": 120,
"id": 121,
"pid": 8,
"city_code": "101310207",
"city_name": "白沙县"
},
{
"_id": 121,
"id": 122,
"pid": 8,
"city_code": "101310214",
"city_name": "保亭县"
},
{
"_id": 122,
"id": 123,
"pid": 8,
"city_code": "101310206",
"city_name": "昌江县"
},
{
"_id": 123,
"id": 124,
"pid": 8,
"city_code": "101310204",
"city_name": "澄迈县"
},
{
"_id": 124,
"id": 125,
"pid": 8,
"city_code": "101310209",
"city_name": "定安县"
},
{
"_id": 125,
"id": 126,
"pid": 8,
"city_code": "101310202",
"city_name": "东方"
},
{
"_id": 126,
"id": 127,
"pid": 8,
"city_code": "101310221",
"city_name": "乐东县"
},
{
"_id": 127,
"id": 128,
"pid": 8,
"city_code": "101310203",
"city_name": "临高县"
},
{
"_id": 128,
"id": 129,
"pid": 8,
"city_code": "101310216",
"city_name": "陵水县"
},
{
"_id": 129,
"id": 130,
"pid": 8,
"city_code": "101310211",
"city_name": "琼海"
},
{
"_id": 130,
"id": 131,
"pid": 8,
"city_code": "101310208",
"city_name": "琼中"
},
{
"_id": 131,
"id": 132,
"pid": 8,
"city_code": "101310210",
"city_name": "屯昌县"
},
{
"_id": 132,
"id": 133,
"pid": 8,
"city_code": "101310215",
"city_name": "万宁"
},
{
"_id": 133,
"id": 134,
"pid": 8,
"city_code": "101310212",
"city_name": "文昌"
},
{
"_id": 134,
"id": 135,
"pid": 8,
"city_code": "101310222",
"city_name": "五指山"
},
{
"_id": 135,
"id": 136,
"pid": 8,
"city_code": "101310205",
"city_name": "儋州"
},
{
"_id": 136,
"id": 137,
"pid": 9,
"city_code": "101090101",
"city_name": "石家庄"
},
{
"_id": 137,
"id": 138,
"pid": 9,
"city_code": "101090201",
"city_name": "保定"
},
{
"_id": 138,
"id": 139,
"pid": 9,
"city_code": "101090701",
"city_name": "沧州"
},
{
"_id": 139,
"id": 140,
"pid": 9,
"city_code": "101090402",
"city_name": "承德"
},
{
"_id": 140,
"id": 141,
"pid": 9,
"city_code": "101091001",
"city_name": "邯郸"
},
{
"_id": 141,
"id": 142,
"pid": 9,
"city_code": "101090801",
"city_name": "衡水"
},
{
"_id": 142,
"id": 143,
"pid": 9,
"city_code": "101090601",
"city_name": "廊坊"
},
{
"_id": 143,
"id": 144,
"pid": 9,
"city_code": "101091101",
"city_name": "秦皇岛"
},
{
"_id": 144,
"id": 145,
"pid": 9,
"city_code": "101090501",
"city_name": "唐山"
},
{
"_id": 145,
"id": 146,
"pid": 9,
"city_code": "101090901",
"city_name": "邢台"
},
{
"_id": 146,
"id": 147,
"pid": 9,
"city_code": "101090301",
"city_name": "张家口"
},
{
"_id": 147,
"id": 148,
"pid": 10,
"city_code": "101180101",
"city_name": "郑州"
},
{
"_id": 148,
"id": 149,
"pid": 10,
"city_code": "101180901",
"city_name": "洛阳"
},
{
"_id": 149,
"id": 150,
"pid": 10,
"city_code": "101180801",
"city_name": "开封"
},
{
"_id": 150,
"id": 151,
"pid": 10,
"city_code": "101180201",
"city_name": "安阳"
},
{
"_id": 151,
"id": 152,
"pid": 10,
"city_code": "101181201",
"city_name": "鹤壁"
},
{
"_id": 152,
"id": 153,
"pid": 10,
"city_code": "101181801",
"city_name": "济源"
},
{
"_id": 153,
"id": 154,
"pid": 10,
"city_code": "101181101",
"city_name": "焦作"
},
{
"_id": 154,
"id": 155,
"pid": 10,
"city_code": "101180701",
"city_name": "南阳"
},
{
"_id": 155,
"id": 156,
"pid": 10,
"city_code": "101180501",
"city_name": "平顶山"
},
{
"_id": 156,
"id": 157,
"pid": 10,
"city_code": "101181701",
"city_name": "三门峡"
},
{
"_id": 157,
"id": 158,
"pid": 10,
"city_code": "101181001",
"city_name": "商丘"
},
{
"_id": 158,
"id": 159,
"pid": 10,
"city_code": "101180301",
"city_name": "新乡"
},
{
"_id": 159,
"id": 160,
"pid": 10,
"city_code": "101180601",
"city_name": "信阳"
},
{
"_id": 160,
"id": 161,
"pid": 10,
"city_code": "101180401",
"city_name": "许昌"
},
{
"_id": 161,
"id": 162,
"pid": 10,
"city_code": "101181401",
"city_name": "周口"
},
{
"_id": 162,
"id": 163,
"pid": 10,
"city_code": "101181601",
"city_name": "驻马店"
},
{
"_id": 163,
"id": 164,
"pid": 10,
"city_code": "101181501",
"city_name": "漯河"
},
{
"_id": 164,
"id": 165,
"pid": 10,
"city_code": "101181301",
"city_name": "濮阳"
},
{
"_id": 165,
"id": 166,
"pid": 11,
"city_code": "101050101",
"city_name": "哈尔滨"
},
{
"_id": 166,
"id": 167,
"pid": 11,
"city_code": "101050901",
"city_name": "大庆"
},
{
"_id": 167,
"id": 168,
"pid": 11,
"city_code": "101050701",
"city_name": "大兴安岭"
},
{
"_id": 168,
"id": 169,
"pid": 11,
"city_code": "101051201",
"city_name": "鹤岗"
},
{
"_id": 169,
"id": 170,
"pid": 11,
"city_code": "101050601",
"city_name": "黑河"
},
{
"_id": 170,
"id": 171,
"pid": 11,
"city_code": "101051101",
"city_name": "鸡西"
},
{
"_id": 171,
"id": 172,
"pid": 11,
"city_code": "101050401",
"city_name": "佳木斯"
},
{
"_id": 172,
"id": 173,
"pid": 11,
"city_code": "101050301",
"city_name": "牡丹江"
},
{
"_id": 173,
"id": 174,
"pid": 11,
"city_code": "101051002",
"city_name": "七台河"
},
{
"_id": 174,
"id": 175,
"pid": 11,
"city_code": "101050201",
"city_name": "齐齐哈尔"
},
{
"_id": 175,
"id": 176,
"pid": 11,
"city_code": "101051301",
"city_name": "双鸭山"
},
{
"_id": 176,
"id": 177,
"pid": 11,
"city_code": "101050501",
"city_name": "绥化"
},
{
"_id": 177,
"id": 178,
"pid": 11,
"city_code": "101050801",
"city_name": "伊春"
},
{
"_id": 178,
"id": 179,
"pid": 12,
"city_code": "101200101",
"city_name": "武汉"
},
{
"_id": 179,
"id": 180,
"pid": 12,
"city_code": "101201601",
"city_name": "仙桃"
},
{
"_id": 180,
"id": 181,
"pid": 12,
"city_code": "101200301",
"city_name": "鄂州"
},
{
"_id": 181,
"id": 182,
"pid": 12,
"city_code": "101200501",
"city_name": "黄冈"
},
{
"_id": 182,
"id": 183,
"pid": 12,
"city_code": "101200601",
"city_name": "黄石"
},
{
"_id": 183,
"id": 184,
"pid": 12,
"city_code": "101201401",
"city_name": "荆门"
},
{
"_id": 184,
"id": 185,
"pid": 12,
"city_code": "101200801",
"city_name": "荆州"
},
{
"_id": 185,
"id": 186,
"pid": 12,
"city_code": "101201701",
"city_name": "潜江"
},
{
"_id": 186,
"id": 187,
"pid": 12,
"city_code": "101201201",
"city_name": "神农架林区"
},
{
"_id": 187,
"id": 188,
"pid": 12,
"city_code": "101201101",
"city_name": "十堰"
},
{
"_id": 188,
"id": 189,
"pid": 12,
"city_code": "101201301",
"city_name": "随州"
},
{
"_id": 189,
"id": 190,
"pid": 12,
"city_code": "101201501",
"city_name": "天门"
},
{
"_id": 190,
"id": 191,
"pid": 12,
"city_code": "101200701",
"city_name": "咸宁"
},
{
"_id": 191,
"id": 192,
"pid": 12,
"city_code": "101200202",
"city_name": "襄阳"
},
{
"_id": 192,
"id": 193,
"pid": 12,
"city_code": "101200401",
"city_name": "孝感"
},
{
"_id": 193,
"id": 194,
"pid": 12,
"city_code": "101200901",
"city_name": "宜昌"
},
{
"_id": 194,
"id": 195,
"pid": 12,
"city_code": "101201001",
"city_name": "恩施"
},
{
"_id": 195,
"id": 196,
"pid": 13,
"city_code": "101250101",
"city_name": "长沙"
},
{
"_id": 196,
"id": 197,
"pid": 13,
"city_code": "101251101",
"city_name": "张家界"
},
{
"_id": 197,
"id": 198,
"pid": 13,
"city_code": "101250601",
"city_name": "常德"
},
{
"_id": 198,
"id": 199,
"pid": 13,
"city_code": "101250501",
"city_name": "郴州"
},
{
"_id": 199,
"id": 200,
"pid": 13,
"city_code": "101250401",
"city_name": "衡阳"
},
{
"_id": 200,
"id": 201,
"pid": 13,
"city_code": "101251201",
"city_name": "怀化"
},
{
"_id": 201,
"id": 202,
"pid": 13,
"city_code": "101250801",
"city_name": "娄底"
},
{
"_id": 202,
"id": 203,
"pid": 13,
"city_code": "101250901",
"city_name": "邵阳"
},
{
"_id": 203,
"id": 204,
"pid": 13,
"city_code": "101250201",
"city_name": "湘潭"
},
{
"_id": 204,
"id": 205,
"pid": 13,
"city_code": "101251509",
"city_name": "湘西"
},
{
"_id": 205,
"id": 206,
"pid": 13,
"city_code": "101250700",
"city_name": "益阳"
},
{
"_id": 206,
"id": 207,
"pid": 13,
"city_code": "101251401",
"city_name": "永州"
},
{
"_id": 207,
"id": 208,
"pid": 13,
"city_code": "101251001",
"city_name": "岳阳"
},
{
"_id": 208,
"id": 209,
"pid": 13,
"city_code": "101250301",
"city_name": "株洲"
},
{
"_id": 209,
"id": 210,
"pid": 14,
"city_code": "101060101",
"city_name": "长春"
},
{
"_id": 210,
"id": 211,
"pid": 14,
"city_code": "101060201",
"city_name": "吉林市"
},
{
"_id": 211,
"id": 212,
"pid": 14,
"city_code": "101060601",
"city_name": "白城"
},
{
"_id": 212,
"id": 213,
"pid": 14,
"city_code": "101060901",
"city_name": "白山"
},
{
"_id": 213,
"id": 214,
"pid": 14,
"city_code": "101060701",
"city_name": "辽源"
},
{
"_id": 214,
"id": 215,
"pid": 14,
"city_code": "101060401",
"city_name": "四平"
},
{
"_id": 215,
"id": 216,
"pid": 14,
"city_code": "101060801",
"city_name": "松原"
},
{
"_id": 216,
"id": 217,
"pid": 14,
"city_code": "101060501",
"city_name": "通化"
},
{
"_id": 217,
"id": 218,
"pid": 14,
"city_code": "101060312",
"city_name": "延边"
},
{
"_id": 218,
"id": 219,
"pid": 15,
"city_code": "101190101",
"city_name": "南京"
},
{
"_id": 219,
"id": 220,
"pid": 15,
"city_code": "101190401",
"city_name": "苏州"
},
{
"_id": 220,
"id": 221,
"pid": 15,
"city_code": "101190201",
"city_name": "无锡"
},
{
"_id": 221,
"id": 222,
"pid": 15,
"city_code": "101191101",
"city_name": "常州"
},
{
"_id": 222,
"id": 223,
"pid": 15,
"city_code": "101190901",
"city_name": "淮安"
},
{
"_id": 223,
"id": 224,
"pid": 15,
"city_code": "101191001",
"city_name": "连云港"
},
{
"_id": 224,
"id": 225,
"pid": 15,
"city_code": "101190501",
"city_name": "南通"
},
{
"_id": 225,
"id": 226,
"pid": 15,
"city_code": "101191301",
"city_name": "宿迁"
},
{
"_id": 226,
"id": 227,
"pid": 15,
"city_code": "101191201",
"city_name": "泰州"
},
{
"_id": 227,
"id": 228,
"pid": 15,
"city_code": "101190801",
"city_name": "徐州"
},
{
"_id": 228,
"id": 229,
"pid": 15,
"city_code": "101190701",
"city_name": "盐城"
},
{
"_id": 229,
"id": 230,
"pid": 15,
"city_code": "101190601",
"city_name": "扬州"
},
{
"_id": 230,
"id": 231,
"pid": 15,
"city_code": "101190301",
"city_name": "镇江"
},
{
"_id": 231,
"id": 232,
"pid": 16,
"city_code": "101240101",
"city_name": "南昌"
},
{
"_id": 232,
"id": 233,
"pid": 16,
"city_code": "101240401",
"city_name": "抚州"
},
{
"_id": 233,
"id": 234,
"pid": 16,
"city_code": "101240701",
"city_name": "赣州"
},
{
"_id": 234,
"id": 235,
"pid": 16,
"city_code": "101240601",
"city_name": "吉安"
},
{
"_id": 235,
"id": 236,
"pid": 16,
"city_code": "101240801",
"city_name": "景德镇"
},
{
"_id": 236,
"id": 237,
"pid": 16,
"city_code": "101240201",
"city_name": "九江"
},
{
"_id": 237,
"id": 238,
"pid": 16,
"city_code": "101240901",
"city_name": "萍乡"
},
{
"_id": 238,
"id": 239,
"pid": 16,
"city_code": "101240301",
"city_name": "上饶"
},
{
"_id": 239,
"id": 240,
"pid": 16,
"city_code": "101241001",
"city_name": "新余"
},
{
"_id": 240,
"id": 241,
"pid": 16,
"city_code": "101240501",
"city_name": "宜春"
},
{
"_id": 241,
"id": 242,
"pid": 16,
"city_code": "101241101",
"city_name": "鹰潭"
},
{
"_id": 242,
"id": 243,
"pid": 17,
"city_code": "101070101",
"city_name": "沈阳"
},
{
"_id": 243,
"id": 244,
"pid": 17,
"city_code": "101070201",
"city_name": "大连"
},
{
"_id": 244,
"id": 245,
"pid": 17,
"city_code": "101070301",
"city_name": "鞍山"
},
{
"_id": 245,
"id": 246,
"pid": 17,
"city_code": "101070501",
"city_name": "本溪"
},
{
"_id": 246,
"id": 247,
"pid": 17,
"city_code": "101071201",
"city_name": "朝阳"
},
{
"_id": 247,
"id": 248,
"pid": 17,
"city_code": "101070601",
"city_name": "丹东"
},
{
"_id": 248,
"id": 249,
"pid": 17,
"city_code": "101070401",
"city_name": "抚顺"
},
{
"_id": 249,
"id": 250,
"pid": 17,
"city_code": "101070901",
"city_name": "阜新"
},
{
"_id": 250,
"id": 251,
"pid": 17,
"city_code": "101071401",
"city_name": "葫芦岛"
},
{
"_id": 251,
"id": 252,
"pid": 17,
"city_code": "101070701",
"city_name": "锦州"
},
{
"_id": 252,
"id": 253,
"pid": 17,
"city_code": "101071001",
"city_name": "辽阳"
},
{
"_id": 253,
"id": 254,
"pid": 17,
"city_code": "101071301",
"city_name": "盘锦"
},
{
"_id": 254,
"id": 255,
"pid": 17,
"city_code": "101071101",
"city_name": "铁岭"
},
{
"_id": 255,
"id": 256,
"pid": 17,
"city_code": "101070801",
"city_name": "营口"
},
{
"_id": 256,
"id": 257,
"pid": 18,
"city_code": "101080101",
"city_name": "呼和浩特"
},
{
"_id": 257,
"id": 258,
"pid": 18,
"city_code": "101081213",
"city_name": "阿拉善盟"
},
{
"_id": 258,
"id": 259,
"pid": 18,
"city_code": "101080801",
"city_name": "巴彦淖尔"
},
{
"_id": 259,
"id": 260,
"pid": 18,
"city_code": "101080201",
"city_name": "包头"
},
{
"_id": 260,
"id": 261,
"pid": 18,
"city_code": "101080601",
"city_name": "赤峰"
},
{
"_id": 261,
"id": 262,
"pid": 18,
"city_code": "101080701",
"city_name": "鄂尔多斯"
},
{
"_id": 262,
"id": 263,
"pid": 18,
"city_code": "101081001",
"city_name": "呼伦贝尔"
},
{
"_id": 263,
"id": 264,
"pid": 18,
"city_code": "101080501",
"city_name": "通辽"
},
{
"_id": 264,
"id": 265,
"pid": 18,
"city_code": "101080301",
"city_name": "乌海"
},
{
"_id": 265,
"id": 266,
"pid": 18,
"city_code": "101080405",
"city_name": "乌兰察布"
},
{
"_id": 266,
"id": 267,
"pid": 18,
"city_code": "101080902",
"city_name": "锡林郭勒"
},
{
"_id": 267,
"id": 268,
"pid": 18,
"city_code": "101081108",
"city_name": "兴安盟"
},
{
"_id": 268,
"id": 269,
"pid": 19,
"city_code": "101170101",
"city_name": "银川"
},
{
"_id": 269,
"id": 270,
"pid": 19,
"city_code": "101170401",
"city_name": "固原"
},
{
"_id": 270,
"id": 271,
"pid": 19,
"city_code": "101170201",
"city_name": "石嘴山"
},
{
"_id": 271,
"id": 272,
"pid": 19,
"city_code": "101170301",
"city_name": "吴忠"
},
{
"_id": 272,
"id": 273,
"pid": 19,
"city_code": "101170501",
"city_name": "中卫"
},
{
"_id": 273,
"id": 274,
"pid": 20,
"city_code": "101150101",
"city_name": "西宁"
},
{
"_id": 274,
"id": 275,
"pid": 20,
"city_code": "101150501",
"city_name": "果洛"
},
{
"_id": 275,
"id": 276,
"pid": 20,
"city_code": "101150801",
"city_name": "海北"
},
{
"_id": 276,
"id": 277,
"pid": 20,
"city_code": "101150201",
"city_name": "海东"
},
{
"_id": 277,
"id": 278,
"pid": 20,
"city_code": "101150401",
"city_name": "海南州"
},
{
"_id": 278,
"id": 279,
"pid": 20,
"city_code": "101150701",
"city_name": "海西"
},
{
"_id": 279,
"id": 280,
"pid": 20,
"city_code": "101150301",
"city_name": "黄南"
},
{
"_id": 280,
"id": 281,
"pid": 20,
"city_code": "101150601",
"city_name": "玉树"
},
{
"_id": 281,
"id": 282,
"pid": 21,
"city_code": "101120101",
"city_name": "济南"
},
{
"_id": 282,
"id": 283,
"pid": 21,
"city_code": "101120201",
"city_name": "青岛"
},
{
"_id": 283,
"id": 284,
"pid": 21,
"city_code": "101121101",
"city_name": "滨州"
},
{
"_id": 284,
"id": 285,
"pid": 21,
"city_code": "101120401",
"city_name": "德州"
},
{
"_id": 285,
"id": 286,
"pid": 21,
"city_code": "101121201",
"city_name": "东营"
},
{
"_id": 286,
"id": 287,
"pid": 21,
"city_code": "101121001",
"city_name": "菏泽"
},
{
"_id": 287,
"id": 288,
"pid": 21,
"city_code": "101120701",
"city_name": "济宁"
},
{
"_id": 288,
"id": 289,
"pid": 21,
"city_code": "101121601",
"city_name": "莱芜"
},
{
"_id": 289,
"id": 290,
"pid": 21,
"city_code": "101121701",
"city_name": "聊城"
},
{
"_id": 290,
"id": 291,
"pid": 21,
"city_code": "101120901",
"city_name": "临沂"
},
{
"_id": 291,
"id": 292,
"pid": 21,
"city_code": "101121501",
"city_name": "日照"
},
{
"_id": 292,
"id": 293,
"pid": 21,
"city_code": "101120801",
"city_name": "泰安"
},
{
"_id": 293,
"id": 294,
"pid": 21,
"city_code": "101121301",
"city_name": "威海"
},
{
"_id": 294,
"id": 295,
"pid": 21,
"city_code": "101120601",
"city_name": "潍坊"
},
{
"_id": 295,
"id": 296,
"pid": 21,
"city_code": "101120501",
"city_name": "烟台"
},
{
"_id": 296,
"id": 297,
"pid": 21,
"city_code": "101121401",
"city_name": "枣庄"
},
{
"_id": 297,
"id": 298,
"pid": 21,
"city_code": "101120301",
"city_name": "淄博"
},
{
"_id": 298,
"id": 299,
"pid": 22,
"city_code": "101100101",
"city_name": "太原"
},
{
"_id": 299,
"id": 300,
"pid": 22,
"city_code": "101100501",
"city_name": "长治"
},
{
"_id": 300,
"id": 301,
"pid": 22,
"city_code": "101100201",
"city_name": "大同"
},
{
"_id": 301,
"id": 302,
"pid": 22,
"city_code": "101100601",
"city_name": "晋城"
},
{
"_id": 302,
"id": 303,
"pid": 22,
"city_code": "101100401",
"city_name": "晋中"
},
{
"_id": 303,
"id": 304,
"pid": 22,
"city_code": "101100701",
"city_name": "临汾"
},
{
"_id": 304,
"id": 305,
"pid": 22,
"city_code": "101101100",
"city_name": "吕梁"
},
{
"_id": 305,
"id": 306,
"pid": 22,
"city_code": "101100901",
"city_name": "朔州"
},
{
"_id": 306,
"id": 307,
"pid": 22,
"city_code": "101101001",
"city_name": "忻州"
},
{
"_id": 307,
"id": 308,
"pid": 22,
"city_code": "101100301",
"city_name": "阳泉"
},
{
"_id": 308,
"id": 309,
"pid": 22,
"city_code": "101100801",
"city_name": "运城"
},
{
"_id": 309,
"id": 310,
"pid": 23,
"city_code": "101110101",
"city_name": "西安"
},
{
"_id": 310,
"id": 311,
"pid": 23,
"city_code": "101110701",
"city_name": "安康"
},
{
"_id": 311,
"id": 312,
"pid": 23,
"city_code": "101110901",
"city_name": "宝鸡"
},
{
"_id": 312,
"id": 313,
"pid": 23,
"city_code": "101110801",
"city_name": "汉中"
},
{
"_id": 313,
"id": 314,
"pid": 23,
"city_code": "101110601",
"city_name": "商洛"
},
{
"_id": 314,
"id": 315,
"pid": 23,
"city_code": "101111001",
"city_name": "铜川"
},
{
"_id": 315,
"id": 316,
"pid": 23,
"city_code": "101110501",
"city_name": "渭南"
},
{
"_id": 316,
"id": 317,
"pid": 23,
"city_code": "101110200",
"city_name": "咸阳"
},
{
"_id": 317,
"id": 318,
"pid": 23,
"city_code": "101110300",
"city_name": "延安"
},
{
"_id": 318,
"id": 319,
"pid": 23,
"city_code": "101110401",
"city_name": "榆林"
},
{
"_id": 319,
"id": 321,
"pid": 25,
"city_code": "101270101",
"city_name": "成都"
},
{
"_id": 320,
"id": 322,
"pid": 25,
"city_code": "101270401",
"city_name": "绵阳"
},
{
"_id": 321,
"id": 323,
"pid": 25,
"city_code": "101271901",
"city_name": "阿坝"
},
{
"_id": 322,
"id": 324,
"pid": 25,
"city_code": "101270901",
"city_name": "巴中"
},
{
"_id": 323,
"id": 325,
"pid": 25,
"city_code": "101270601",
"city_name": "达州"
},
{
"_id": 324,
"id": 326,
"pid": 25,
"city_code": "101272001",
"city_name": "德阳"
},
{
"_id": 325,
"id": 327,
"pid": 25,
"city_code": "101271801",
"city_name": "甘孜"
},
{
"_id": 326,
"id": 328,
"pid": 25,
"city_code": "101270801",
"city_name": "广安"
},
{
"_id": 327,
"id": 329,
"pid": 25,
"city_code": "101272101",
"city_name": "广元"
},
{
"_id": 328,
"id": 330,
"pid": 25,
"city_code": "101271401",
"city_name": "乐山"
},
{
"_id": 329,
"id": 331,
"pid": 25,
"city_code": "101271601",
"city_name": "凉山"
},
{
"_id": 330,
"id": 332,
"pid": 25,
"city_code": "101271501",
"city_name": "眉山"
},
{
"_id": 331,
"id": 333,
"pid": 25,
"city_code": "101270501",
"city_name": "南充"
},
{
"_id": 332,
"id": 334,
"pid": 25,
"city_code": "101271201",
"city_name": "内江"
},
{
"_id": 333,
"id": 335,
"pid": 25,
"city_code": "101270201",
"city_name": "攀枝花"
},
{
"_id": 334,
"id": 336,
"pid": 25,
"city_code": "101270701",
"city_name": "遂宁"
},
{
"_id": 335,
"id": 337,
"pid": 25,
"city_code": "101271701",
"city_name": "雅安"
},
{
"_id": 336,
"id": 338,
"pid": 25,
"city_code": "101271101",
"city_name": "宜宾"
},
{
"_id": 337,
"id": 339,
"pid": 25,
"city_code": "101271301",
"city_name": "资阳"
},
{
"_id": 338,
"id": 340,
"pid": 25,
"city_code": "101270301",
"city_name": "自贡"
},
{
"_id": 339,
"id": 341,
"pid": 25,
"city_code": "101271001",
"city_name": "泸州"
},
{
"_id": 340,
"id": 343,
"pid": 27,
"city_code": "101140101",
"city_name": "拉萨"
},
{
"_id": 341,
"id": 344,
"pid": 27,
"city_code": "101140701",
"city_name": "阿里"
},
{
"_id": 342,
"id": 345,
"pid": 27,
"city_code": "101140501",
"city_name": "昌都"
},
{
"_id": 343,
"id": 346,
"pid": 27,
"city_code": "101140401",
"city_name": "林芝"
},
{
"_id": 344,
"id": 347,
"pid": 27,
"city_code": "101140601",
"city_name": "那曲"
},
{
"_id": 345,
"id": 348,
"pid": 27,
"city_code": "101140201",
"city_name": "日喀则"
},
{
"_id": 346,
"id": 349,
"pid": 27,
"city_code": "101140301",
"city_name": "山南"
},
{
"_id": 347,
"id": 350,
"pid": 28,
"city_code": "101130101",
"city_name": "乌鲁木齐"
},
{
"_id": 348,
"id": 351,
"pid": 28,
"city_code": "101130801",
"city_name": "阿克苏"
},
{
"_id": 349,
"id": 352,
"pid": 28,
"city_code": "101130701",
"city_name": "阿拉尔"
},
{
"_id": 350,
"id": 353,
"pid": 28,
"city_code": "101130609",
"city_name": "巴音郭楞"
},
{
"_id": 351,
"id": 354,
"pid": 28,
"city_code": "101131604",
"city_name": "博尔塔拉"
},
{
"_id": 352,
"id": 355,
"pid": 28,
"city_code": "101130401",
"city_name": "昌吉"
},
{
"_id": 353,
"id": 356,
"pid": 28,
"city_code": "101131201",
"city_name": "哈密"
},
{
"_id": 354,
"id": 357,
"pid": 28,
"city_code": "101131301",
"city_name": "和田"
},
{
"_id": 355,
"id": 358,
"pid": 28,
"city_code": "101130901",
"city_name": "喀什"
},
{
"_id": 356,
"id": 359,
"pid": 28,
"city_code": "101130201",
"city_name": "克拉玛依"
},
{
"_id": 357,
"id": 360,
"pid": 28,
"city_code": "",
"city_name": "克孜勒苏"
},
{
"_id": 358,
"id": 361,
"pid": 28,
"city_code": "101130301",
"city_name": "石河子"
},
{
"_id": 359,
"id": 362,
"pid": 28,
"city_code": "",
"city_name": "图木舒克"
},
{
"_id": 360,
"id": 363,
"pid": 28,
"city_code": "101130501",
"city_name": "吐鲁番"
},
{
"_id": 361,
"id": 364,
"pid": 28,
"city_code": "",
"city_name": "五家渠"
},
{
"_id": 362,
"id": 365,
"pid": 28,
"city_code": "101131012",
"city_name": "伊犁"
},
{
"_id": 363,
"id": 366,
"pid": 29,
"city_code": "101290101",
"city_name": "昆明"
},
{
"_id": 364,
"id": 367,
"pid": 29,
"city_code": "101291201",
"city_name": "怒江"
},
{
"_id": 365,
"id": 368,
"pid": 29,
"city_code": "101290901",
"city_name": "普洱"
},
{
"_id": 366,
"id": 369,
"pid": 29,
"city_code": "101291401",
"city_name": "丽江"
},
{
"_id": 367,
"id": 370,
"pid": 29,
"city_code": "101290501",
"city_name": "保山"
},
{
"_id": 368,
"id": 371,
"pid": 29,
"city_code": "101290801",
"city_name": "楚雄"
},
{
"_id": 369,
"id": 372,
"pid": 29,
"city_code": "101290201",
"city_name": "大理"
},
{
"_id": 370,
"id": 373,
"pid": 29,
"city_code": "101291501",
"city_name": "德宏"
},
{
"_id": 371,
"id": 374,
"pid": 29,
"city_code": "101291305",
"city_name": "迪庆"
},
{
"_id": 372,
"id": 375,
"pid": 29,
"city_code": "101290301",
"city_name": "红河"
},
{
"_id": 373,
"id": 376,
"pid": 29,
"city_code": "101291101",
"city_name": "临沧"
},
{
"_id": 374,
"id": 377,
"pid": 29,
"city_code": "101290401",
"city_name": "曲靖"
},
{
"_id": 375,
"id": 378,
"pid": 29,
"city_code": "101290601",
"city_name": "文山"
},
{
"_id": 376,
"id": 379,
"pid": 29,
"city_code": "101291602",
"city_name": "西双版纳"
},
{
"_id": 377,
"id": 380,
"pid": 29,
"city_code": "101290701",
"city_name": "玉溪"
},
{
"_id": 378,
"id": 381,
"pid": 29,
"city_code": "101291001",
"city_name": "昭通"
},
{
"_id": 379,
"id": 382,
"pid": 30,
"city_code": "101210101",
"city_name": "杭州"
},
{
"_id": 380,
"id": 383,
"pid": 30,
"city_code": "101210201",
"city_name": "湖州"
},
{
"_id": 381,
"id": 384,
"pid": 30,
"city_code": "101210301",
"city_name": "嘉兴"
},
{
"_id": 382,
"id": 385,
"pid": 30,
"city_code": "101210901",
"city_name": "金华"
},
{
"_id": 383,
"id": 386,
"pid": 30,
"city_code": "101210801",
"city_name": "丽水"
},
{
"_id": 384,
"id": 387,
"pid": 30,
"city_code": "101210401",
"city_name": "宁波"
},
{
"_id": 385,
"id": 388,
"pid": 30,
"city_code": "101210501",
"city_name": "绍兴"
},
{
"_id": 386,
"id": 389,
"pid": 30,
"city_code": "101210601",
"city_name": "台州"
},
{
"_id": 387,
"id": 390,
"pid": 30,
"city_code": "101210701",
"city_name": "温州"
},
{
"_id": 388,
"id": 391,
"pid": 30,
"city_code": "101211101",
"city_name": "舟山"
},
{
"_id": 389,
"id": 392,
"pid": 30,
"city_code": "101211001",
"city_name": "衢州"
},
{
"_id": 390,
"id": 400,
"pid": 35,
"city_code": "101220609",
"city_name": "桐城市"
},
{
"_id": 391,
"id": 401,
"pid": 35,
"city_code": "101220605",
"city_name": "怀宁县"
},
{
"_id": 392,
"id": 402,
"pid": 47,
"city_code": "101220602",
"city_name": "枞阳县"
},
{
"_id": 393,
"id": 403,
"pid": 35,
"city_code": "101220604",
"city_name": "潜山县"
},
{
"_id": 394,
"id": 404,
"pid": 35,
"city_code": "101220603",
"city_name": "太湖县"
},
{
"_id": 395,
"id": 405,
"pid": 35,
"city_code": "101220606",
"city_name": "宿松县"
},
{
"_id": 396,
"id": 406,
"pid": 35,
"city_code": "101220607",
"city_name": "望江县"
},
{
"_id": 397,
"id": 407,
"pid": 35,
"city_code": "101220608",
"city_name": "岳西县"
},
{
"_id": 398,
"id": 412,
"pid": 36,
"city_code": "101220202",
"city_name": "怀远县"
},
{
"_id": 399,
"id": 413,
"pid": 36,
"city_code": "101220204",
"city_name": "五河县"
},
{
"_id": 400,
"id": 414,
"pid": 36,
"city_code": "101220203",
"city_name": "固镇县"
},
{
"_id": 401,
"id": 416,
"pid": 3400,
"city_code": "101220106",
"city_name": "庐江县"
},
{
"_id": 402,
"id": 417,
"pid": 48,
"city_code": "101220305",
"city_name": "无为县"
},
{
"_id": 403,
"id": 418,
"pid": 45,
"city_code": "101220503",
"city_name": "含山县"
},
{
"_id": 404,
"id": 419,
"pid": 45,
"city_code": "101220504",
"city_name": "和县"
},
{
"_id": 405,
"id": 421,
"pid": 38,
"city_code": "101221702",
"city_name": "东至县"
},
{
"_id": 406,
"id": 422,
"pid": 38,
"city_code": "101221705",
"city_name": "石台县"
},
{
"_id": 407,
"id": 423,
"pid": 38,
"city_code": "101221703",
"city_name": "青阳县"
},
{
"_id": 408,
"id": 426,
"pid": 39,
"city_code": "101221107",
"city_name": "天长市"
},
{
"_id": 409,
"id": 427,
"pid": 39,
"city_code": "101221103",
"city_name": "明光市"
},
{
"_id": 410,
"id": 428,
"pid": 39,
"city_code": "101221106",
"city_name": "来安县"
},
{
"_id": 411,
"id": 429,
"pid": 39,
"city_code": "101221105",
"city_name": "全椒县"
},
{
"_id": 412,
"id": 430,
"pid": 39,
"city_code": "101221104",
"city_name": "定远县"
},
{
"_id": 413,
"id": 431,
"pid": 39,
"city_code": "101221102",
"city_name": "凤阳县"
},
{
"_id": 414,
"id": 439,
"pid": 40,
"city_code": "101220805",
"city_name": "界首市"
},
{
"_id": 415,
"id": 440,
"pid": 40,
"city_code": "101220804",
"city_name": "临泉县"
},
{
"_id": 416,
"id": 441,
"pid": 40,
"city_code": "101220806",
"city_name": "太和县"
},
{
"_id": 417,
"id": 442,
"pid": 40,
"city_code": "101220802",
"city_name": "阜南县"
},
{
"_id": 418,
"id": 443,
"pid": 40,
"city_code": "101220803",
"city_name": "颍上县"
},
{
"_id": 419,
"id": 447,
"pid": 41,
"city_code": "101221202",
"city_name": "濉溪县"
},
{
"_id": 420,
"id": 452,
"pid": 42,
"city_code": "101220403",
"city_name": "潘集区"
},
{
"_id": 421,
"id": 453,
"pid": 42,
"city_code": "101220402",
"city_name": "凤台县"
},
{
"_id": 422,
"id": 454,
"pid": 43,
"city_code": "101221003",
"city_name": "屯溪区"
},
{
"_id": 423,
"id": 455,
"pid": 43,
"city_code": "101221002",
"city_name": "黄山区"
},
{
"_id": 424,
"id": 457,
"pid": 43,
"city_code": "101221006",
"city_name": "歙县"
},
{
"_id": 425,
"id": 458,
"pid": 43,
"city_code": "101221007",
"city_name": "休宁县"
},
{
"_id": 426,
"id": 459,
"pid": 43,
"city_code": "101221005",
"city_name": "黟县"
},
{
"_id": 427,
"id": 460,
"pid": 43,
"city_code": "101221004",
"city_name": "祁门县"
},
{
"_id": 428,
"id": 463,
"pid": 44,
"city_code": "101221503",
"city_name": "寿县"
},
{
"_id": 429,
"id": 464,
"pid": 44,
"city_code": "101221502",
"city_name": "霍邱县"
},
{
"_id": 430,
"id": 465,
"pid": 44,
"city_code": "101221507",
"city_name": "舒城县"
},
{
"_id": 431,
"id": 466,
"pid": 44,
"city_code": "101221505",
"city_name": "金寨县"
},
{
"_id": 432,
"id": 467,
"pid": 44,
"city_code": "101221506",
"city_name": "霍山县"
},
{
"_id": 433,
"id": 471,
"pid": 45,
"city_code": "101220502",
"city_name": "当涂县"
},
{
"_id": 434,
"id": 473,
"pid": 46,
"city_code": "101220702",
"city_name": "砀山县"
},
{
"_id": 435,
"id": 474,
"pid": 46,
"city_code": "101220705",
"city_name": "萧县"
},
{
"_id": 436,
"id": 475,
"pid": 46,
"city_code": "101220703",
"city_name": "灵璧县"
},
{
"_id": 437,
"id": 476,
"pid": 46,
"city_code": "101220704",
"city_name": "泗县"
},
{
"_id": 438,
"id": 480,
"pid": 47,
"city_code": "101221301",
"city_name": "义安区"
},
{
"_id": 439,
"id": 485,
"pid": 48,
"city_code": "101220303",
"city_name": "芜湖县"
},
{
"_id": 440,
"id": 486,
"pid": 48,
"city_code": "101220302",
"city_name": "繁昌县"
},
{
"_id": 441,
"id": 487,
"pid": 48,
"city_code": "101220304",
"city_name": "南陵县"
},
{
"_id": 442,
"id": 489,
"pid": 49,
"city_code": "101221404",
"city_name": "宁国市"
},
{
"_id": 443,
"id": 490,
"pid": 49,
"city_code": "101221407",
"city_name": "郎溪县"
},
{
"_id": 444,
"id": 491,
"pid": 49,
"city_code": "101221406",
"city_name": "广德县"
},
{
"_id": 445,
"id": 492,
"pid": 49,
"city_code": "101221402",
"city_name": "泾县"
},
{
"_id": 446,
"id": 493,
"pid": 49,
"city_code": "101221405",
"city_name": "绩溪县"
},
{
"_id": 447,
"id": 494,
"pid": 49,
"city_code": "101221403",
"city_name": "旌德县"
},
{
"_id": 448,
"id": 495,
"pid": 50,
"city_code": "101220902",
"city_name": "涡阳县"
},
{
"_id": 449,
"id": 496,
"pid": 50,
"city_code": "101220904",
"city_name": "蒙城县"
},
{
"_id": 450,
"id": 497,
"pid": 50,
"city_code": "101220903",
"city_name": "利辛县"
},
{
"_id": 451,
"id": 501,
"pid": 1,
"city_code": "101010200",
"city_name": "海淀区"
},
{
"_id": 452,
"id": 502,
"pid": 1,
"city_code": "101010300",
"city_name": "朝阳区"
},
{
"_id": 453,
"id": 505,
"pid": 1,
"city_code": "101010900",
"city_name": "丰台区"
},
{
"_id": 454,
"id": 506,
"pid": 1,
"city_code": "101011000",
"city_name": "石景山区"
},
{
"_id": 455,
"id": 507,
"pid": 1,
"city_code": "101011200",
"city_name": "房山区"
},
{
"_id": 456,
"id": 508,
"pid": 1,
"city_code": "101011400",
"city_name": "门头沟区"
},
{
"_id": 457,
"id": 509,
"pid": 1,
"city_code": "101010600",
"city_name": "通州区"
},
{
"_id": 458,
"id": 510,
"pid": 1,
"city_code": "101010400",
"city_name": "顺义区"
},
{
"_id": 459,
"id": 511,
"pid": 1,
"city_code": "101010700",
"city_name": "昌平区"
},
{
"_id": 460,
"id": 512,
"pid": 1,
"city_code": "101010500",
"city_name": "怀柔区"
},
{
"_id": 461,
"id": 513,
"pid": 1,
"city_code": "101011500",
"city_name": "平谷区"
},
{
"_id": 462,
"id": 514,
"pid": 1,
"city_code": "101011100",
"city_name": "大兴区"
},
{
"_id": 463,
"id": 515,
"pid": 1,
"city_code": "101011300",
"city_name": "密云县"
},
{
"_id": 464,
"id": 516,
"pid": 1,
"city_code": "101010800",
"city_name": "延庆县"
},
{
"_id": 465,
"id": 522,
"pid": 52,
"city_code": "101230111",
"city_name": "福清市"
},
{
"_id": 466,
"id": 523,
"pid": 52,
"city_code": "101230110",
"city_name": "长乐市"
},
{
"_id": 467,
"id": 524,
"pid": 52,
"city_code": "101230103",
"city_name": "闽侯县"
},
{
"_id": 468,
"id": 525,
"pid": 52,
"city_code": "101230105",
"city_name": "连江县"
},
{
"_id": 469,
"id": 526,
"pid": 52,
"city_code": "101230104",
"city_name": "罗源县"
},
{
"_id": 470,
"id": 527,
"pid": 52,
"city_code": "101230102",
"city_name": "闽清县"
},
{
"_id": 471,
"id": 528,
"pid": 52,
"city_code": "101230107",
"city_name": "永泰县"
},
{
"_id": 472,
"id": 529,
"pid": 52,
"city_code": "101230108",
"city_name": "平潭县"
},
{
"_id": 473,
"id": 531,
"pid": 53,
"city_code": "101230707",
"city_name": "漳平市"
},
{
"_id": 474,
"id": 532,
"pid": 53,
"city_code": "101230702",
"city_name": "长汀县"
},
{
"_id": 475,
"id": 533,
"pid": 53,
"city_code": "101230706",
"city_name": "永定县"
},
{
"_id": 476,
"id": 534,
"pid": 53,
"city_code": "101230705",
"city_name": "上杭县"
},
{
"_id": 477,
"id": 535,
"pid": 53,
"city_code": "101230704",
"city_name": "武平县"
},
{
"_id": 478,
"id": 536,
"pid": 53,
"city_code": "101230703",
"city_name": "连城县"
},
{
"_id": 479,
"id": 538,
"pid": 54,
"city_code": "101230904",
"city_name": "邵武市"
},
{
"_id": 480,
"id": 539,
"pid": 54,
"city_code": "101230905",
"city_name": "武夷山市"
},
{
"_id": 481,
"id": 540,
"pid": 54,
"city_code": "101230910",
"city_name": "建瓯市"
},
{
"_id": 482,
"id": 541,
"pid": 54,
"city_code": "101230907",
"city_name": "建阳市"
},
{
"_id": 483,
"id": 542,
"pid": 54,
"city_code": "101230902",
"city_name": "顺昌县"
},
{
"_id": 484,
"id": 543,
"pid": 54,
"city_code": "101230906",
"city_name": "浦城县"
},
{
"_id": 485,
"id": 544,
"pid": 54,
"city_code": "101230903",
"city_name": "光泽县"
},
{
"_id": 486,
"id": 545,
"pid": 54,
"city_code": "101230908",
"city_name": "松溪县"
},
{
"_id": 487,
"id": 546,
"pid": 54,
"city_code": "101230909",
"city_name": "政和县"
},
{
"_id": 488,
"id": 548,
"pid": 55,
"city_code": "101230306",
"city_name": "福安市"
},
{
"_id": 489,
"id": 549,
"pid": 55,
"city_code": "101230308",
"city_name": "福鼎市"
},
{
"_id": 490,
"id": 550,
"pid": 55,
"city_code": "101230303",
"city_name": "霞浦县"
},
{
"_id": 491,
"id": 551,
"pid": 55,
"city_code": "101230302",
"city_name": "古田县"
},
{
"_id": 492,
"id": 552,
"pid": 55,
"city_code": "101230309",
"city_name": "屏南县"
},
{
"_id": 493,
"id": 553,
"pid": 55,
"city_code": "101230304",
"city_name": "寿宁县"
},
{
"_id": 494,
"id": 554,
"pid": 55,
"city_code": "101230305",
"city_name": "周宁县"
},
{
"_id": 495,
"id": 555,
"pid": 55,
"city_code": "101230307",
"city_name": "柘荣县"
},
{
"_id": 496,
"id": 556,
"pid": 56,
"city_code": "101230407",
"city_name": "城厢区"
},
{
"_id": 497,
"id": 557,
"pid": 56,
"city_code": "101230404",
"city_name": "涵江区"
},
{
"_id": 498,
"id": 558,
"pid": 56,
"city_code": "101230406",
"city_name": "荔城区"
},
{
"_id": 499,
"id": 559,
"pid": 56,
"city_code": "101230405",
"city_name": "秀屿区"
},
{
"_id": 500,
"id": 560,
"pid": 56,
"city_code": "101230402",
"city_name": "仙游县"
},
{
"_id": 501,
"id": 566,
"pid": 57,
"city_code": "101230510",
"city_name": "石狮市"
},
{
"_id": 502,
"id": 567,
"pid": 57,
"city_code": "101230509",
"city_name": "晋江市"
},
{
"_id": 503,
"id": 568,
"pid": 57,
"city_code": "101230506",
"city_name": "南安市"
},
{
"_id": 504,
"id": 569,
"pid": 57,
"city_code": "101230508",
"city_name": "惠安县"
},
{
"_id": 505,
"id": 570,
"pid": 57,
"city_code": "101230502",
"city_name": "安溪县"
},
{
"_id": 506,
"id": 571,
"pid": 57,
"city_code": "101230504",
"city_name": "永春县"
},
{
"_id": 507,
"id": 572,
"pid": 57,
"city_code": "101230505",
"city_name": "德化县"
},
{
"_id": 508,
"id": 576,
"pid": 58,
"city_code": "101230810",
"city_name": "永安市"
},
{
"_id": 509,
"id": 577,
"pid": 58,
"city_code": "101230807",
"city_name": "明溪县"
},
{
"_id": 510,
"id": 578,
"pid": 58,
"city_code": "101230803",
"city_name": "清流县"
},
{
"_id": 511,
"id": 579,
"pid": 58,
"city_code": "101230802",
"city_name": "宁化县"
},
{
"_id": 512,
"id": 580,
"pid": 58,
"city_code": "101230811",
"city_name": "大田县"
},
{
"_id": 513,
"id": 581,
"pid": 58,
"city_code": "101230809",
"city_name": "尤溪县"
},
{
"_id": 514,
"id": 582,
"pid": 58,
"city_code": "101230808",
"city_name": "沙县"
},
{
"_id": 515,
"id": 583,
"pid": 58,
"city_code": "101230805",
"city_name": "将乐县"
},
{
"_id": 516,
"id": 584,
"pid": 58,
"city_code": "101230804",
"city_name": "泰宁县"
},
{
"_id": 517,
"id": 585,
"pid": 58,
"city_code": "101230806",
"city_name": "建宁县"
},
{
"_id": 518,
"id": 590,
"pid": 59,
"city_code": "101230202",
"city_name": "同安区"
},
{
"_id": 519,
"id": 594,
"pid": 60,
"city_code": "101230605",
"city_name": "龙海市"
},
{
"_id": 520,
"id": 595,
"pid": 60,
"city_code": "101230609",
"city_name": "云霄县"
},
{
"_id": 521,
"id": 596,
"pid": 60,
"city_code": "101230606",
"city_name": "漳浦县"
},
{
"_id": 522,
"id": 597,
"pid": 60,
"city_code": "101230607",
"city_name": "诏安县"
},
{
"_id": 523,
"id": 598,
"pid": 60,
"city_code": "101230602",
"city_name": "长泰县"
},
{
"_id": 524,
"id": 599,
"pid": 60,
"city_code": "101230608",
"city_name": "东山县"
},
{
"_id": 525,
"id": 600,
"pid": 60,
"city_code": "101230603",
"city_name": "南靖县"
},
{
"_id": 526,
"id": 601,
"pid": 60,
"city_code": "101230604",
"city_name": "平和县"
},
{
"_id": 527,
"id": 602,
"pid": 60,
"city_code": "101230610",
"city_name": "华安县"
},
{
"_id": 528,
"id": 603,
"pid": 61,
"city_code": "101160102",
"city_name": "皋兰县"
},
{
"_id": 529,
"id": 609,
"pid": 61,
"city_code": "101160103",
"city_name": "永登县"
},
{
"_id": 530,
"id": 610,
"pid": 61,
"city_code": "101160104",
"city_name": "榆中县"
},
{
"_id": 531,
"id": 611,
"pid": 62,
"city_code": "101161301",
"city_name": "白银区"
},
{
"_id": 532,
"id": 612,
"pid": 62,
"city_code": "101161304",
"city_name": "平川区"
},
{
"_id": 533,
"id": 613,
"pid": 62,
"city_code": "101161303",
"city_name": "会宁县"
},
{
"_id": 534,
"id": 614,
"pid": 62,
"city_code": "101161305",
"city_name": "景泰县"
},
{
"_id": 535,
"id": 615,
"pid": 62,
"city_code": "101161302",
"city_name": "靖远县"
},
{
"_id": 536,
"id": 616,
"pid": 63,
"city_code": "101160205",
"city_name": "临洮县"
},
{
"_id": 537,
"id": 617,
"pid": 63,
"city_code": "101160203",
"city_name": "陇西县"
},
{
"_id": 538,
"id": 618,
"pid": 63,
"city_code": "101160202",
"city_name": "通渭县"
},
{
"_id": 539,
"id": 619,
"pid": 63,
"city_code": "101160204",
"city_name": "渭源县"
},
{
"_id": 540,
"id": 620,
"pid": 63,
"city_code": "101160206",
"city_name": "漳县"
},
{
"_id": 541,
"id": 621,
"pid": 63,
"city_code": "101160207",
"city_name": "岷县"
},
{
"_id": 542,
"id": 624,
"pid": 64,
"city_code": "101161201",
"city_name": "合作市"
},
{
"_id": 543,
"id": 625,
"pid": 64,
"city_code": "101161202",
"city_name": "临潭县"
},
{
"_id": 544,
"id": 626,
"pid": 64,
"city_code": "101161203",
"city_name": "卓尼县"
},
{
"_id": 545,
"id": 627,
"pid": 64,
"city_code": "101161204",
"city_name": "舟曲县"
},
{
"_id": 546,
"id": 628,
"pid": 64,
"city_code": "101161205",
"city_name": "迭部县"
},
{
"_id": 547,
"id": 629,
"pid": 64,
"city_code": "101161206",
"city_name": "玛曲县"
},
{
"_id": 548,
"id": 630,
"pid": 64,
"city_code": "101161207",
"city_name": "碌曲县"
},
{
"_id": 549,
"id": 631,
"pid": 64,
"city_code": "101161208",
"city_name": "夏河县"
},
{
"_id": 550,
"id": 634,
"pid": 66,
"city_code": "101160602",
"city_name": "永昌县"
},
{
"_id": 551,
"id": 636,
"pid": 67,
"city_code": "101160807",
"city_name": "玉门市"
},
{
"_id": 552,
"id": 637,
"pid": 67,
"city_code": "101160808",
"city_name": "敦煌市"
},
{
"_id": 553,
"id": 638,
"pid": 67,
"city_code": "101160803",
"city_name": "金塔县"
},
{
"_id": 554,
"id": 639,
"pid": 67,
"city_code": "101160805",
"city_name": "瓜州县"
},
{
"_id": 555,
"id": 640,
"pid": 67,
"city_code": "101160806",
"city_name": "肃北县"
},
{
"_id": 556,
"id": 641,
"pid": 67,
"city_code": "101160804",
"city_name": "阿克塞"
},
{
"_id": 557,
"id": 642,
"pid": 68,
"city_code": "101161101",
"city_name": "临夏市"
},
{
"_id": 558,
"id": 643,
"pid": 68,
"city_code": "101161101",
"city_name": "临夏县"
},
{
"_id": 559,
"id": 644,
"pid": 68,
"city_code": "101161102",
"city_name": "康乐县"
},
{
"_id": 560,
"id": 645,
"pid": 68,
"city_code": "101161103",
"city_name": "永靖县"
},
{
"_id": 561,
"id": 646,
"pid": 68,
"city_code": "101161104",
"city_name": "广河县"
},
{
"_id": 562,
"id": 647,
"pid": 68,
"city_code": "101161105",
"city_name": "和政县"
},
{
"_id": 563,
"id": 648,
"pid": 68,
"city_code": "101161106",
"city_name": "东乡族自治县"
},
{
"_id": 564,
"id": 649,
"pid": 68,
"city_code": "101161107",
"city_name": "积石山"
},
{
"_id": 565,
"id": 650,
"pid": 69,
"city_code": "101161002",
"city_name": "成县"
},
{
"_id": 566,
"id": 651,
"pid": 69,
"city_code": "101161008",
"city_name": "徽县"
},
{
"_id": 567,
"id": 652,
"pid": 69,
"city_code": "101161005",
"city_name": "康县"
},
{
"_id": 568,
"id": 653,
"pid": 69,
"city_code": "101161007",
"city_name": "礼县"
},
{
"_id": 569,
"id": 654,
"pid": 69,
"city_code": "101161009",
"city_name": "两当县"
},
{
"_id": 570,
"id": 655,
"pid": 69,
"city_code": "101161003",
"city_name": "文县"
},
{
"_id": 571,
"id": 656,
"pid": 69,
"city_code": "101161006",
"city_name": "西和县"
},
{
"_id": 572,
"id": 657,
"pid": 69,
"city_code": "101161004",
"city_name": "宕昌县"
},
{
"_id": 573,
"id": 658,
"pid": 69,
"city_code": "101161001",
"city_name": "武都区"
},
{
"_id": 574,
"id": 659,
"pid": 70,
"city_code": "101160304",
"city_name": "崇信县"
},
{
"_id": 575,
"id": 660,
"pid": 70,
"city_code": "101160305",
"city_name": "华亭县"
},
{
"_id": 576,
"id": 661,
"pid": 70,
"city_code": "101160307",
"city_name": "静宁县"
},
{
"_id": 577,
"id": 662,
"pid": 70,
"city_code": "101160303",
"city_name": "灵台县"
},
{
"_id": 578,
"id": 663,
"pid": 70,
"city_code": "101160308",
"city_name": "崆峒区"
},
{
"_id": 579,
"id": 664,
"pid": 70,
"city_code": "101160306",
"city_name": "庄浪县"
},
{
"_id": 580,
"id": 665,
"pid": 70,
"city_code": "101160302",
"city_name": "泾川县"
},
{
"_id": 581,
"id": 666,
"pid": 71,
"city_code": "101160405",
"city_name": "合水县"
},
{
"_id": 582,
"id": 667,
"pid": 71,
"city_code": "101160404",
"city_name": "华池县"
},
{
"_id": 583,
"id": 668,
"pid": 71,
"city_code": "101160403",
"city_name": "环县"
},
{
"_id": 584,
"id": 669,
"pid": 71,
"city_code": "101160407",
"city_name": "宁县"
},
{
"_id": 585,
"id": 670,
"pid": 71,
"city_code": "101160409",
"city_name": "庆城县"
},
{
"_id": 586,
"id": 671,
"pid": 71,
"city_code": "101160402",
"city_name": "西峰区"
},
{
"_id": 587,
"id": 672,
"pid": 71,
"city_code": "101160408",
"city_name": "镇原县"
},
{
"_id": 588,
"id": 673,
"pid": 71,
"city_code": "101160406",
"city_name": "正宁县"
},
{
"_id": 589,
"id": 674,
"pid": 72,
"city_code": "101160905",
"city_name": "甘谷县"
},
{
"_id": 590,
"id": 675,
"pid": 72,
"city_code": "101160904",
"city_name": "秦安县"
},
{
"_id": 591,
"id": 676,
"pid": 72,
"city_code": "101160903",
"city_name": "清水县"
},
{
"_id": 592,
"id": 678,
"pid": 72,
"city_code": "101160908",
"city_name": "麦积区"
},
{
"_id": 593,
"id": 679,
"pid": 72,
"city_code": "101160906",
"city_name": "武山县"
},
{
"_id": 594,
"id": 680,
"pid": 72,
"city_code": "101160907",
"city_name": "张家川"
},
{
"_id": 595,
"id": 681,
"pid": 73,
"city_code": "101160503",
"city_name": "古浪县"
},
{
"_id": 596,
"id": 682,
"pid": 73,
"city_code": "101160502",
"city_name": "民勤县"
},
{
"_id": 597,
"id": 683,
"pid": 73,
"city_code": "101160505",
"city_name": "天祝县"
},
{
"_id": 598,
"id": 685,
"pid": 74,
"city_code": "101160705",
"city_name": "高台县"
},
{
"_id": 599,
"id": 686,
"pid": 74,
"city_code": "101160704",
"city_name": "临泽县"
},
{
"_id": 600,
"id": 687,
"pid": 74,
"city_code": "101160703",
"city_name": "民乐县"
},
{
"_id": 601,
"id": 688,
"pid": 74,
"city_code": "101160706",
"city_name": "山丹县"
},
{
"_id": 602,
"id": 689,
"pid": 74,
"city_code": "101160702",
"city_name": "肃南县"
},
{
"_id": 603,
"id": 691,
"pid": 75,
"city_code": "101280103",
"city_name": "从化区"
},
{
"_id": 604,
"id": 692,
"pid": 75,
"city_code": "101280106",
"city_name": "天河区"
},
{
"_id": 605,
"id": 699,
"pid": 75,
"city_code": "101280102",
"city_name": "番禺区"
},
{
"_id": 606,
"id": 700,
"pid": 75,
"city_code": "101280105",
"city_name": "花都区"
},
{
"_id": 607,
"id": 701,
"pid": 75,
"city_code": "101280104",
"city_name": "增城区"
},
{
"_id": 608,
"id": 706,
"pid": 76,
"city_code": "101280604",
"city_name": "南山区"
},
{
"_id": 609,
"id": 711,
"pid": 77,
"city_code": "101281503",
"city_name": "潮安县"
},
{
"_id": 610,
"id": 712,
"pid": 77,
"city_code": "101281502",
"city_name": "饶平县"
},
{
"_id": 611,
"id": 746,
"pid": 79,
"city_code": "101280803",
"city_name": "南海区"
},
{
"_id": 612,
"id": 747,
"pid": 79,
"city_code": "101280801",
"city_name": "顺德区"
},
{
"_id": 613,
"id": 748,
"pid": 79,
"city_code": "101280802",
"city_name": "三水区"
},
{
"_id": 614,
"id": 749,
"pid": 79,
"city_code": "101280804",
"city_name": "高明区"
},
{
"_id": 615,
"id": 750,
"pid": 80,
"city_code": "101281206",
"city_name": "东源县"
},
{
"_id": 616,
"id": 751,
"pid": 80,
"city_code": "101281204",
"city_name": "和平县"
},
{
"_id": 617,
"id": 753,
"pid": 80,
"city_code": "101281203",
"city_name": "连平县"
},
{
"_id": 618,
"id": 754,
"pid": 80,
"city_code": "101281205",
"city_name": "龙川县"
},
{
"_id": 619,
"id": 755,
"pid": 80,
"city_code": "101281202",
"city_name": "紫金县"
},
{
"_id": 620,
"id": 756,
"pid": 81,
"city_code": "101280303",
"city_name": "惠阳区"
},
{
"_id": 621,
"id": 759,
"pid": 81,
"city_code": "101280302",
"city_name": "博罗县"
},
{
"_id": 622,
"id": 760,
"pid": 81,
"city_code": "101280304",
"city_name": "惠东县"
},
{
"_id": 623,
"id": 761,
"pid": 81,
"city_code": "101280305",
"city_name": "龙门县"
},
{
"_id": 624,
"id": 762,
"pid": 82,
"city_code": "101281109",
"city_name": "江海区"
},
{
"_id": 625,
"id": 763,
"pid": 82,
"city_code": "101281107",
"city_name": "蓬江区"
},
{
"_id": 626,
"id": 764,
"pid": 82,
"city_code": "101281104",
"city_name": "新会区"
},
{
"_id": 627,
"id": 765,
"pid": 82,
"city_code": "101281106",
"city_name": "台山市"
},
{
"_id": 628,
"id": 766,
"pid": 82,
"city_code": "101281103",
"city_name": "开平市"
},
{
"_id": 629,
"id": 767,
"pid": 82,
"city_code": "101281108",
"city_name": "鹤山市"
},
{
"_id": 630,
"id": 768,
"pid": 82,
"city_code": "101281105",
"city_name": "恩平市"
},
{
"_id": 631,
"id": 770,
"pid": 83,
"city_code": "101281903",
"city_name": "普宁市"
},
{
"_id": 632,
"id": 771,
"pid": 83,
"city_code": "101281905",
"city_name": "揭东县"
},
{
"_id": 633,
"id": 772,
"pid": 83,
"city_code": "101281902",
"city_name": "揭西县"
},
{
"_id": 634,
"id": 773,
"pid": 83,
"city_code": "101281904",
"city_name": "惠来县"
},
{
"_id": 635,
"id": 775,
"pid": 84,
"city_code": "101282006",
"city_name": "茂港区"
},
{
"_id": 636,
"id": 776,
"pid": 84,
"city_code": "101282002",
"city_name": "高州市"
},
{
"_id": 637,
"id": 777,
"pid": 84,
"city_code": "101282003",
"city_name": "化州市"
},
{
"_id": 638,
"id": 778,
"pid": 84,
"city_code": "101282005",
"city_name": "信宜市"
},
{
"_id": 639,
"id": 779,
"pid": 84,
"city_code": "101282004",
"city_name": "电白县"
},
{
"_id": 640,
"id": 780,
"pid": 85,
"city_code": "101280409",
"city_name": "梅县"
},
{
"_id": 641,
"id": 782,
"pid": 85,
"city_code": "101280402",
"city_name": "兴宁市"
},
{
"_id": 642,
"id": 783,
"pid": 85,
"city_code": "101280404",
"city_name": "大埔县"
},
{
"_id": 643,
"id": 784,
"pid": 85,
"city_code": "101280406",
"city_name": "丰顺县"
},
{
"_id": 644,
"id": 785,
"pid": 85,
"city_code": "101280408",
"city_name": "五华县"
},
{
"_id": 645,
"id": 786,
"pid": 85,
"city_code": "101280407",
"city_name": "平远县"
},
{
"_id": 646,
"id": 787,
"pid": 85,
"city_code": "101280403",
"city_name": "蕉岭县"
},
{
"_id": 647,
"id": 789,
"pid": 86,
"city_code": "101281307",
"city_name": "英德市"
},
{
"_id": 648,
"id": 790,
"pid": 86,
"city_code": "101281303",
"city_name": "连州市"
},
{
"_id": 649,
"id": 791,
"pid": 86,
"city_code": "101281306",
"city_name": "佛冈县"
},
{
"_id": 650,
"id": 792,
"pid": 86,
"city_code": "101281305",
"city_name": "阳山县"
},
{
"_id": 651,
"id": 793,
"pid": 86,
"city_code": "101281308",
"city_name": "清新县"
},
{
"_id": 652,
"id": 794,
"pid": 86,
"city_code": "101281304",
"city_name": "连山县"
},
{
"_id": 653,
"id": 795,
"pid": 86,
"city_code": "101281302",
"city_name": "连南县"
},
{
"_id": 654,
"id": 796,
"pid": 87,
"city_code": "101280504",
"city_name": "南澳县"
},
{
"_id": 655,
"id": 797,
"pid": 87,
"city_code": "101280502",
"city_name": "潮阳区"
},
{
"_id": 656,
"id": 798,
"pid": 87,
"city_code": "101280503",
"city_name": "澄海区"
},
{
"_id": 657,
"id": 804,
"pid": 88,
"city_code": "101282103",
"city_name": "陆丰市"
},
{
"_id": 658,
"id": 805,
"pid": 88,
"city_code": "101282102",
"city_name": "海丰县"
},
{
"_id": 659,
"id": 806,
"pid": 88,
"city_code": "101282104",
"city_name": "陆河县"
},
{
"_id": 660,
"id": 807,
"pid": 89,
"city_code": "101280209",
"city_name": "曲江区"
},
{
"_id": 661,
"id": 808,
"pid": 89,
"city_code": "101280210",
"city_name": "浈江区"
},
{
"_id": 662,
"id": 809,
"pid": 89,
"city_code": "101280211",
"city_name": "武江区"
},
{
"_id": 663,
"id": 810,
"pid": 89,
"city_code": "101280209",
"city_name": "曲江区"
},
{
"_id": 664,
"id": 811,
"pid": 89,
"city_code": "101280205",
"city_name": "乐昌市"
},
{
"_id": 665,
"id": 812,
"pid": 89,
"city_code": "101280207",
"city_name": "南雄市"
},
{
"_id": 666,
"id": 813,
"pid": 89,
"city_code": "101280203",
"city_name": "始兴县"
},
{
"_id": 667,
"id": 814,
"pid": 89,
"city_code": "101280206",
"city_name": "仁化县"
},
{
"_id": 668,
"id": 815,
"pid": 89,
"city_code": "101280204",
"city_name": "翁源县"
},
{
"_id": 669,
"id": 816,
"pid": 89,
"city_code": "101280208",
"city_name": "新丰县"
},
{
"_id": 670,
"id": 817,
"pid": 89,
"city_code": "101280202",
"city_name": "乳源县"
},
{
"_id": 671,
"id": 819,
"pid": 90,
"city_code": "101281802",
"city_name": "阳春市"
},
{
"_id": 672,
"id": 820,
"pid": 90,
"city_code": "101281804",
"city_name": "阳西县"
},
{
"_id": 673,
"id": 821,
"pid": 90,
"city_code": "101281803",
"city_name": "阳东县"
},
{
"_id": 674,
"id": 823,
"pid": 91,
"city_code": "101281402",
"city_name": "罗定市"
},
{
"_id": 675,
"id": 824,
"pid": 91,
"city_code": "101281403",
"city_name": "新兴县"
},
{
"_id": 676,
"id": 825,
"pid": 91,
"city_code": "101281404",
"city_name": "郁南县"
},
{
"_id": 677,
"id": 826,
"pid": 91,
"city_code": "101281406",
"city_name": "云安县"
},
{
"_id": 678,
"id": 827,
"pid": 92,
"city_code": "101281006",
"city_name": "赤坎区"
},
{
"_id": 679,
"id": 828,
"pid": 92,
"city_code": "101281009",
"city_name": "霞山区"
},
{
"_id": 680,
"id": 829,
"pid": 92,
"city_code": "101281008",
"city_name": "坡头区"
},
{
"_id": 681,
"id": 830,
"pid": 92,
"city_code": "101281010",
"city_name": "麻章区"
},
{
"_id": 682,
"id": 831,
"pid": 92,
"city_code": "101281005",
"city_name": "廉江市"
},
{
"_id": 683,
"id": 832,
"pid": 92,
"city_code": "101281003",
"city_name": "雷州市"
},
{
"_id": 684,
"id": 833,
"pid": 92,
"city_code": "101281002",
"city_name": "吴川市"
},
{
"_id": 685,
"id": 834,
"pid": 92,
"city_code": "101281007",
"city_name": "遂溪县"
},
{
"_id": 686,
"id": 835,
"pid": 92,
"city_code": "101281004",
"city_name": "徐闻县"
},
{
"_id": 687,
"id": 837,
"pid": 93,
"city_code": "101280908",
"city_name": "高要区"
},
{
"_id": 688,
"id": 838,
"pid": 93,
"city_code": "101280903",
"city_name": "四会市"
},
{
"_id": 689,
"id": 839,
"pid": 93,
"city_code": "101280902",
"city_name": "广宁县"
},
{
"_id": 690,
"id": 840,
"pid": 93,
"city_code": "101280906",
"city_name": "怀集县"
},
{
"_id": 691,
"id": 841,
"pid": 93,
"city_code": "101280907",
"city_name": "封开县"
},
{
"_id": 692,
"id": 842,
"pid": 93,
"city_code": "101280905",
"city_name": "德庆县"
},
{
"_id": 693,
"id": 850,
"pid": 95,
"city_code": "101280702",
"city_name": "斗门区"
},
{
"_id": 694,
"id": 851,
"pid": 95,
"city_code": "101280703",
"city_name": "金湾区"
},
{
"_id": 695,
"id": 852,
"pid": 96,
"city_code": "101300103",
"city_name": "邕宁区"
},
{
"_id": 696,
"id": 858,
"pid": 96,
"city_code": "101300108",
"city_name": "武鸣县"
},
{
"_id": 697,
"id": 859,
"pid": 96,
"city_code": "101300105",
"city_name": "隆安县"
},
{
"_id": 698,
"id": 860,
"pid": 96,
"city_code": "101300106",
"city_name": "马山县"
},
{
"_id": 699,
"id": 861,
"pid": 96,
"city_code": "101300107",
"city_name": "上林县"
},
{
"_id": 700,
"id": 862,
"pid": 96,
"city_code": "101300109",
"city_name": "宾阳县"
},
{
"_id": 701,
"id": 863,
"pid": 96,
"city_code": "101300104",
"city_name": "横县"
},
{
"_id": 702,
"id": 869,
"pid": 97,
"city_code": "101300510",
"city_name": "阳朔县"
},
{
"_id": 703,
"id": 870,
"pid": 97,
"city_code": "101300505",
"city_name": "临桂县"
},
{
"_id": 704,
"id": 871,
"pid": 97,
"city_code": "101300507",
"city_name": "灵川县"
},
{
"_id": 705,
"id": 872,
"pid": 97,
"city_code": "101300508",
"city_name": "全州县"
},
{
"_id": 706,
"id": 873,
"pid": 97,
"city_code": "101300512",
"city_name": "平乐县"
},
{
"_id": 707,
"id": 874,
"pid": 97,
"city_code": "101300506",
"city_name": "兴安县"
},
{
"_id": 708,
"id": 875,
"pid": 97,
"city_code": "101300509",
"city_name": "灌阳县"
},
{
"_id": 709,
"id": 876,
"pid": 97,
"city_code": "101300513",
"city_name": "荔浦县"
},
{
"_id": 710,
"id": 877,
"pid": 97,
"city_code": "101300514",
"city_name": "资源县"
},
{
"_id": 711,
"id": 878,
"pid": 97,
"city_code": "101300504",
"city_name": "永福县"
},
{
"_id": 712,
"id": 879,
"pid": 97,
"city_code": "101300503",
"city_name": "龙胜县"
},
{
"_id": 713,
"id": 880,
"pid": 97,
"city_code": "101300511",
"city_name": "恭城县"
},
{
"_id": 714,
"id": 882,
"pid": 98,
"city_code": "101301011",
"city_name": "凌云县"
},
{
"_id": 715,
"id": 883,
"pid": 98,
"city_code": "101301007",
"city_name": "平果县"
},
{
"_id": 716,
"id": 884,
"pid": 98,
"city_code": "101301009",
"city_name": "西林县"
},
{
"_id": 717,
"id": 885,
"pid": 98,
"city_code": "101301010",
"city_name": "乐业县"
},
{
"_id": 718,
"id": 886,
"pid": 98,
"city_code": "101301004",
"city_name": "德保县"
},
{
"_id": 719,
"id": 887,
"pid": 98,
"city_code": "101301012",
"city_name": "田林县"
},
{
"_id": 720,
"id": 888,
"pid": 98,
"city_code": "101301003",
"city_name": "田阳县"
},
{
"_id": 721,
"id": 889,
"pid": 98,
"city_code": "101301005",
"city_name": "靖西县"
},
{
"_id": 722,
"id": 890,
"pid": 98,
"city_code": "101301006",
"city_name": "田东县"
},
{
"_id": 723,
"id": 891,
"pid": 98,
"city_code": "101301002",
"city_name": "那坡县"
},
{
"_id": 724,
"id": 892,
"pid": 98,
"city_code": "101301008",
"city_name": "隆林县"
},
{
"_id": 725,
"id": 896,
"pid": 99,
"city_code": "101301302",
"city_name": "合浦县"
},
{
"_id": 726,
"id": 898,
"pid": 100,
"city_code": "101300204",
"city_name": "凭祥市"
},
{
"_id": 727,
"id": 899,
"pid": 100,
"city_code": "101300207",
"city_name": "宁明县"
},
{
"_id": 728,
"id": 900,
"pid": 100,
"city_code": "101300206",
"city_name": "扶绥县"
},
{
"_id": 729,
"id": 901,
"pid": 100,
"city_code": "101300203",
"city_name": "龙州县"
},
{
"_id": 730,
"id": 902,
"pid": 100,
"city_code": "101300205",
"city_name": "大新县"
},
{
"_id": 731,
"id": 903,
"pid": 100,
"city_code": "101300202",
"city_name": "天等县"
},
{
"_id": 732,
"id": 905,
"pid": 101,
"city_code": "101301405",
"city_name": "防城区"
},
{
"_id": 733,
"id": 906,
"pid": 101,
"city_code": "101301403",
"city_name": "东兴市"
},
{
"_id": 734,
"id": 907,
"pid": 101,
"city_code": "101301402",
"city_name": "上思县"
},
{
"_id": 735,
"id": 911,
"pid": 102,
"city_code": "101300802",
"city_name": "桂平市"
},
{
"_id": 736,
"id": 912,
"pid": 102,
"city_code": "101300803",
"city_name": "平南县"
},
{
"_id": 737,
"id": 914,
"pid": 103,
"city_code": "101301207",
"city_name": "宜州市"
},
{
"_id": 738,
"id": 915,
"pid": 103,
"city_code": "101301202",
"city_name": "天峨县"
},
{
"_id": 739,
"id": 916,
"pid": 103,
"city_code": "101301208",
"city_name": "凤山县"
},
{
"_id": 740,
"id": 917,
"pid": 103,
"city_code": "101301209",
"city_name": "南丹县"
},
{
"_id": 741,
"id": 918,
"pid": 103,
"city_code": "101301203",
"city_name": "东兰县"
},
{
"_id": 742,
"id": 919,
"pid": 103,
"city_code": "101301210",
"city_name": "都安县"
},
{
"_id": 743,
"id": 920,
"pid": 103,
"city_code": "101301206",
"city_name": "罗城县"
},
{
"_id": 744,
"id": 921,
"pid": 103,
"city_code": "101301204",
"city_name": "巴马县"
},
{
"_id": 745,
"id": 922,
"pid": 103,
"city_code": "101301205",
"city_name": "环江县"
},
{
"_id": 746,
"id": 923,
"pid": 103,
"city_code": "101301211",
"city_name": "大化县"
},
{
"_id": 747,
"id": 925,
"pid": 104,
"city_code": "101300704",
"city_name": "钟山县"
},
{
"_id": 748,
"id": 926,
"pid": 104,
"city_code": "101300702",
"city_name": "昭平县"
},
{
"_id": 749,
"id": 927,
"pid": 104,
"city_code": "101300703",
"city_name": "富川县"
},
{
"_id": 750,
"id": 929,
"pid": 105,
"city_code": "101300406",
"city_name": "合山市"
},
{
"_id": 751,
"id": 930,
"pid": 105,
"city_code": "101300404",
"city_name": "象州县"
},
{
"_id": 752,
"id": 931,
"pid": 105,
"city_code": "101300405",
"city_name": "武宣县"
},
{
"_id": 753,
"id": 932,
"pid": 105,
"city_code": "101300402",
"city_name": "忻城县"
},
{
"_id": 754,
"id": 933,
"pid": 105,
"city_code": "101300403",
"city_name": "金秀县"
},
{
"_id": 755,
"id": 938,
"pid": 106,
"city_code": "101300305",
"city_name": "柳江县"
},
{
"_id": 756,
"id": 939,
"pid": 106,
"city_code": "101300302",
"city_name": "柳城县"
},
{
"_id": 757,
"id": 940,
"pid": 106,
"city_code": "101300304",
"city_name": "鹿寨县"
},
{
"_id": 758,
"id": 941,
"pid": 106,
"city_code": "101300306",
"city_name": "融安县"
},
{
"_id": 759,
"id": 942,
"pid": 106,
"city_code": "101300307",
"city_name": "融水县"
},
{
"_id": 760,
"id": 943,
"pid": 106,
"city_code": "101300308",
"city_name": "三江县"
},
{
"_id": 761,
"id": 946,
"pid": 107,
"city_code": "101301103",
"city_name": "灵山县"
},
{
"_id": 762,
"id": 947,
"pid": 107,
"city_code": "101301102",
"city_name": "浦北县"
},
{
"_id": 763,
"id": 950,
"pid": 108,
"city_code": "101300607",
"city_name": "长洲区"
},
{
"_id": 764,
"id": 951,
"pid": 108,
"city_code": "101300606",
"city_name": "岑溪市"
},
{
"_id": 765,
"id": 952,
"pid": 108,
"city_code": "101300604",
"city_name": "苍梧县"
},
{
"_id": 766,
"id": 953,
"pid": 108,
"city_code": "101300602",
"city_name": "藤县"
},
{
"_id": 767,
"id": 954,
"pid": 108,
"city_code": "101300605",
"city_name": "蒙山县"
},
{
"_id": 768,
"id": 956,
"pid": 109,
"city_code": "101300903",
"city_name": "北流市"
},
{
"_id": 769,
"id": 957,
"pid": 109,
"city_code": "101300904",
"city_name": "容县"
},
{
"_id": 770,
"id": 958,
"pid": 109,
"city_code": "101300905",
"city_name": "陆川县"
},
{
"_id": 771,
"id": 959,
"pid": 109,
"city_code": "101300902",
"city_name": "博白县"
},
{
"_id": 772,
"id": 960,
"pid": 109,
"city_code": "101300906",
"city_name": "兴业县"
},
{
"_id": 773,
"id": 961,
"pid": 110,
"city_code": "101260111",
"city_name": "南明区"
},
{
"_id": 774,
"id": 962,
"pid": 110,
"city_code": "101260110",
"city_name": "云岩区"
},
{
"_id": 775,
"id": 963,
"pid": 110,
"city_code": "101260103",
"city_name": "花溪区"
},
{
"_id": 776,
"id": 964,
"pid": 110,
"city_code": "101260104",
"city_name": "乌当区"
},
{
"_id": 777,
"id": 965,
"pid": 110,
"city_code": "101260102",
"city_name": "白云区"
},
{
"_id": 778,
"id": 966,
"pid": 110,
"city_code": "101260109",
"city_name": "小河区"
},
{
"_id": 779,
"id": 969,
"pid": 110,
"city_code": "101260108",
"city_name": "清镇市"
},
{
"_id": 780,
"id": 970,
"pid": 110,
"city_code": "101260106",
"city_name": "开阳县"
},
{
"_id": 781,
"id": 971,
"pid": 110,
"city_code": "101260107",
"city_name": "修文县"
},
{
"_id": 782,
"id": 972,
"pid": 110,
"city_code": "101260105",
"city_name": "息烽县"
},
{
"_id": 783,
"id": 974,
"pid": 111,
"city_code": "101260306",
"city_name": "关岭县"
},
{
"_id": 784,
"id": 976,
"pid": 111,
"city_code": "101260305",
"city_name": "紫云县"
},
{
"_id": 785,
"id": 977,
"pid": 111,
"city_code": "101260304",
"city_name": "平坝县"
},
{
"_id": 786,
"id": 978,
"pid": 111,
"city_code": "101260302",
"city_name": "普定县"
},
{
"_id": 787,
"id": 980,
"pid": 112,
"city_code": "101260705",
"city_name": "大方县"
},
{
"_id": 788,
"id": 981,
"pid": 112,
"city_code": "101260708",
"city_name": "黔西县"
},
{
"_id": 789,
"id": 982,
"pid": 112,
"city_code": "101260703",
"city_name": "金沙县"
},
{
"_id": 790,
"id": 983,
"pid": 112,
"city_code": "101260707",
"city_name": "织金县"
},
{
"_id": 791,
"id": 984,
"pid": 112,
"city_code": "101260706",
"city_name": "纳雍县"
},
{
"_id": 792,
"id": 985,
"pid": 112,
"city_code": "101260702",
"city_name": "赫章县"
},
{
"_id": 793,
"id": 986,
"pid": 112,
"city_code": "101260704",
"city_name": "威宁县"
},
{
"_id": 794,
"id": 989,
"pid": 113,
"city_code": "101260801",
"city_name": "水城县"
},
{
"_id": 795,
"id": 990,
"pid": 113,
"city_code": "101260804",
"city_name": "盘县"
},
{
"_id": 796,
"id": 991,
"pid": 114,
"city_code": "101260501",
"city_name": "凯里市"
},
{
"_id": 797,
"id": 992,
"pid": 114,
"city_code": "101260505",
"city_name": "黄平县"
},
{
"_id": 798,
"id": 993,
"pid": 114,
"city_code": "101260503",
"city_name": "施秉县"
},
{
"_id": 799,
"id": 994,
"pid": 114,
"city_code": "101260509",
"city_name": "三穗县"
},
{
"_id": 800,
"id": 995,
"pid": 114,
"city_code": "101260504",
"city_name": "镇远县"
},
{
"_id": 801,
"id": 996,
"pid": 114,
"city_code": "101260502",
"city_name": "岑巩县"
},
{
"_id": 802,
"id": 997,
"pid": 114,
"city_code": "101260514",
"city_name": "天柱县"
},
{
"_id": 803,
"id": 998,
"pid": 114,
"city_code": "101260515",
"city_name": "锦屏县"
},
{
"_id": 804,
"id": 999,
"pid": 114,
"city_code": "101260511",
"city_name": "剑河县"
},
{
"_id": 805,
"id": 1000,
"pid": 114,
"city_code": "101260510",
"city_name": "台江县"
},
{
"_id": 806,
"id": 1001,
"pid": 114,
"city_code": "101260513",
"city_name": "黎平县"
},
{
"_id": 807,
"id": 1002,
"pid": 114,
"city_code": "101260516",
"city_name": "榕江县"
},
{
"_id": 808,
"id": 1003,
"pid": 114,
"city_code": "101260517",
"city_name": "从江县"
},
{
"_id": 809,
"id": 1004,
"pid": 114,
"city_code": "101260512",
"city_name": "雷山县"
},
{
"_id": 810,
"id": 1005,
"pid": 114,
"city_code": "101260507",
"city_name": "麻江县"
},
{
"_id": 811,
"id": 1006,
"pid": 114,
"city_code": "101260508",
"city_name": "丹寨县"
},
{
"_id": 812,
"id": 1007,
"pid": 115,
"city_code": "101260401",
"city_name": "都匀市"
},
{
"_id": 813,
"id": 1008,
"pid": 115,
"city_code": "101260405",
"city_name": "福泉市"
},
{
"_id": 814,
"id": 1009,
"pid": 115,
"city_code": "101260412",
"city_name": "荔波县"
},
{
"_id": 815,
"id": 1010,
"pid": 115,
"city_code": "101260402",
"city_name": "贵定县"
},
{
"_id": 816,
"id": 1011,
"pid": 115,
"city_code": "101260403",
"city_name": "瓮安县"
},
{
"_id": 817,
"id": 1012,
"pid": 115,
"city_code": "101260410",
"city_name": "独山县"
},
{
"_id": 818,
"id": 1013,
"pid": 115,
"city_code": "101260409",
"city_name": "平塘县"
},
{
"_id": 819,
"id": 1014,
"pid": 115,
"city_code": "101260408",
"city_name": "罗甸县"
},
{
"_id": 820,
"id": 1015,
"pid": 115,
"city_code": "101260404",
"city_name": "长顺县"
},
{
"_id": 821,
"id": 1016,
"pid": 115,
"city_code": "101260407",
"city_name": "龙里县"
},
{
"_id": 822,
"id": 1017,
"pid": 115,
"city_code": "101260406",
"city_name": "惠水县"
},
{
"_id": 823,
"id": 1018,
"pid": 115,
"city_code": "101260411",
"city_name": "三都县"
},
{
"_id": 824,
"id": 1019,
"pid": 116,
"city_code": "101260906",
"city_name": "兴义市"
},
{
"_id": 825,
"id": 1020,
"pid": 116,
"city_code": "101260903",
"city_name": "兴仁县"
},
{
"_id": 826,
"id": 1021,
"pid": 116,
"city_code": "101260909",
"city_name": "普安县"
},
{
"_id": 827,
"id": 1022,
"pid": 116,
"city_code": "101260902",
"city_name": "晴隆县"
},
{
"_id": 828,
"id": 1023,
"pid": 116,
"city_code": "101260904",
"city_name": "贞丰县"
},
{
"_id": 829,
"id": 1024,
"pid": 116,
"city_code": "101260905",
"city_name": "望谟县"
},
{
"_id": 830,
"id": 1025,
"pid": 116,
"city_code": "101260908",
"city_name": "册亨县"
},
{
"_id": 831,
"id": 1026,
"pid": 116,
"city_code": "101260907",
"city_name": "安龙县"
},
{
"_id": 832,
"id": 1027,
"pid": 117,
"city_code": "101260601",
"city_name": "铜仁市"
},
{
"_id": 833,
"id": 1028,
"pid": 117,
"city_code": "101260602",
"city_name": "江口县"
},
{
"_id": 834,
"id": 1029,
"pid": 117,
"city_code": "101260608",
"city_name": "石阡县"
},
{
"_id": 835,
"id": 1030,
"pid": 117,
"city_code": "101260605",
"city_name": "思南县"
},
{
"_id": 836,
"id": 1031,
"pid": 117,
"city_code": "101260610",
"city_name": "德江县"
},
{
"_id": 837,
"id": 1032,
"pid": 117,
"city_code": "101260603",
"city_name": "玉屏县"
},
{
"_id": 838,
"id": 1033,
"pid": 117,
"city_code": "101260607",
"city_name": "印江县"
},
{
"_id": 839,
"id": 1034,
"pid": 117,
"city_code": "101260609",
"city_name": "沿河县"
},
{
"_id": 840,
"id": 1035,
"pid": 117,
"city_code": "101260611",
"city_name": "松桃县"
},
{
"_id": 841,
"id": 1037,
"pid": 118,
"city_code": "101260215",
"city_name": "红花岗区"
},
{
"_id": 842,
"id": 1038,
"pid": 118,
"city_code": "101260212",
"city_name": "务川县"
},
{
"_id": 843,
"id": 1039,
"pid": 118,
"city_code": "101260210",
"city_name": "道真县"
},
{
"_id": 844,
"id": 1040,
"pid": 118,
"city_code": "101260214",
"city_name": "汇川区"
},
{
"_id": 845,
"id": 1041,
"pid": 118,
"city_code": "101260208",
"city_name": "赤水市"
},
{
"_id": 846,
"id": 1042,
"pid": 118,
"city_code": "101260203",
"city_name": "仁怀市"
},
{
"_id": 847,
"id": 1043,
"pid": 118,
"city_code": "101260202",
"city_name": "遵义县"
},
{
"_id": 848,
"id": 1044,
"pid": 118,
"city_code": "101260207",
"city_name": "桐梓县"
},
{
"_id": 849,
"id": 1045,
"pid": 118,
"city_code": "101260204",
"city_name": "绥阳县"
},
{
"_id": 850,
"id": 1046,
"pid": 118,
"city_code": "101260211",
"city_name": "正安县"
},
{
"_id": 851,
"id": 1047,
"pid": 118,
"city_code": "101260206",
"city_name": "凤冈县"
},
{
"_id": 852,
"id": 1048,
"pid": 118,
"city_code": "101260205",
"city_name": "湄潭县"
},
{
"_id": 853,
"id": 1049,
"pid": 118,
"city_code": "101260213",
"city_name": "余庆县"
},
{
"_id": 854,
"id": 1050,
"pid": 118,
"city_code": "101260209",
"city_name": "习水县"
},
{
"_id": 855,
"id": 1055,
"pid": 119,
"city_code": "101310102",
"city_name": "琼山区"
},
{
"_id": 856,
"id": 1082,
"pid": 137,
"city_code": "101090102",
"city_name": "井陉矿区"
},
{
"_id": 857,
"id": 1084,
"pid": 137,
"city_code": "101090114",
"city_name": "辛集市"
},
{
"_id": 858,
"id": 1085,
"pid": 137,
"city_code": "101090115",
"city_name": "藁城市"
},
{
"_id": 859,
"id": 1086,
"pid": 137,
"city_code": "101090116",
"city_name": "晋州市"
},
{
"_id": 860,
"id": 1087,
"pid": 137,
"city_code": "101090117",
"city_name": "新乐市"
},
{
"_id": 861,
"id": 1088,
"pid": 137,
"city_code": "101090118",
"city_name": "鹿泉区"
},
{
"_id": 862,
"id": 1089,
"pid": 137,
"city_code": "101090102",
"city_name": "井陉县"
},
{
"_id": 863,
"id": 1090,
"pid": 137,
"city_code": "101090103",
"city_name": "正定县"
},
{
"_id": 864,
"id": 1091,
"pid": 137,
"city_code": "101090104",
"city_name": "栾城区"
},
{
"_id": 865,
"id": 1092,
"pid": 137,
"city_code": "101090105",
"city_name": "行唐县"
},
{
"_id": 866,
"id": 1093,
"pid": 137,
"city_code": "101090106",
"city_name": "灵寿县"
},
{
"_id": 867,
"id": 1094,
"pid": 137,
"city_code": "101090107",
"city_name": "高邑县"
},
{
"_id": 868,
"id": 1095,
"pid": 137,
"city_code": "101090108",
"city_name": "深泽县"
},
{
"_id": 869,
"id": 1096,
"pid": 137,
"city_code": "101090109",
"city_name": "赞皇县"
},
{
"_id": 870,
"id": 1097,
"pid": 137,
"city_code": "101090110",
"city_name": "无极县"
},
{
"_id": 871,
"id": 1098,
"pid": 137,
"city_code": "101090111",
"city_name": "平山县"
},
{
"_id": 872,
"id": 1099,
"pid": 137,
"city_code": "101090112",
"city_name": "元氏县"
},
{
"_id": 873,
"id": 1100,
"pid": 137,
"city_code": "101090113",
"city_name": "赵县"
},
{
"_id": 874,
"id": 1104,
"pid": 138,
"city_code": "101090218",
"city_name": "涿州市"
},
{
"_id": 875,
"id": 1105,
"pid": 138,
"city_code": "101090219",
"city_name": "定州市"
},
{
"_id": 876,
"id": 1106,
"pid": 138,
"city_code": "101090220",
"city_name": "安国市"
},
{
"_id": 877,
"id": 1107,
"pid": 138,
"city_code": "101090221",
"city_name": "高碑店市"
},
{
"_id": 878,
"id": 1108,
"pid": 138,
"city_code": "101090202",
"city_name": "满城县"
},
{
"_id": 879,
"id": 1109,
"pid": 138,
"city_code": "101090224",
"city_name": "清苑县"
},
{
"_id": 880,
"id": 1110,
"pid": 138,
"city_code": "101090213",
"city_name": "涞水县"
},
{
"_id": 881,
"id": 1111,
"pid": 138,
"city_code": "101090203",
"city_name": "阜平县"
},
{
"_id": 882,
"id": 1112,
"pid": 138,
"city_code": "101090204",
"city_name": "徐水县"
},
{
"_id": 883,
"id": 1113,
"pid": 138,
"city_code": "101090223",
"city_name": "定兴县"
},
{
"_id": 884,
"id": 1114,
"pid": 138,
"city_code": "101090205",
"city_name": "唐县"
},
{
"_id": 885,
"id": 1115,
"pid": 138,
"city_code": "101090206",
"city_name": "高阳县"
},
{
"_id": 886,
"id": 1116,
"pid": 138,
"city_code": "101090207",
"city_name": "容城县"
},
{
"_id": 887,
"id": 1117,
"pid": 138,
"city_code": "101090209",
"city_name": "涞源县"
},
{
"_id": 888,
"id": 1118,
"pid": 138,
"city_code": "101090210",
"city_name": "望都县"
},
{
"_id": 889,
"id": 1119,
"pid": 138,
"city_code": "101090211",
"city_name": "安新县"
},
{
"_id": 890,
"id": 1120,
"pid": 138,
"city_code": "101090212",
"city_name": "易县"
},
{
"_id": 891,
"id": 1121,
"pid": 138,
"city_code": "101090214",
"city_name": "曲阳县"
},
{
"_id": 892,
"id": 1122,
"pid": 138,
"city_code": "101090215",
"city_name": "蠡县"
},
{
"_id": 893,
"id": 1123,
"pid": 138,
"city_code": "101090216",
"city_name": "顺平县"
},
{
"_id": 894,
"id": 1124,
"pid": 138,
"city_code": "101090225",
"city_name": "博野县"
},
{
"_id": 895,
"id": 1125,
"pid": 138,
"city_code": "101090217",
"city_name": "雄县"
},
{
"_id": 896,
"id": 1128,
"pid": 139,
"city_code": "101090711",
"city_name": "泊头市"
},
{
"_id": 897,
"id": 1129,
"pid": 139,
"city_code": "101090712",
"city_name": "任丘市"
},
{
"_id": 898,
"id": 1130,
"pid": 139,
"city_code": "101090713",
"city_name": "黄骅市"
},
{
"_id": 899,
"id": 1131,
"pid": 139,
"city_code": "101090714",
"city_name": "河间市"
},
{
"_id": 900,
"id": 1132,
"pid": 139,
"city_code": "101090716",
"city_name": "沧县"
},
{
"_id": 901,
"id": 1133,
"pid": 139,
"city_code": "101090702",
"city_name": "青县"
},
{
"_id": 902,
"id": 1134,
"pid": 139,
"city_code": "101090703",
"city_name": "东光县"
},
{
"_id": 903,
"id": 1135,
"pid": 139,
"city_code": "101090704",
"city_name": "海兴县"
},
{
"_id": 904,
"id": 1136,
"pid": 139,
"city_code": "101090705",
"city_name": "盐山县"
},
{
"_id": 905,
"id": 1137,
"pid": 139,
"city_code": "101090706",
"city_name": "肃宁县"
},
{
"_id": 906,
"id": 1138,
"pid": 139,
"city_code": "101090707",
"city_name": "南皮县"
},
{
"_id": 907,
"id": 1139,
"pid": 139,
"city_code": "101090708",
"city_name": "吴桥县"
},
{
"_id": 908,
"id": 1140,
"pid": 139,
"city_code": "101090709",
"city_name": "献县"
},
{
"_id": 909,
"id": 1141,
"pid": 139,
"city_code": "101090710",
"city_name": "孟村县"
},
{
"_id": 910,
"id": 1145,
"pid": 140,
"city_code": "101090403",
"city_name": "承德县"
},
{
"_id": 911,
"id": 1146,
"pid": 140,
"city_code": "101090404",
"city_name": "兴隆县"
},
{
"_id": 912,
"id": 1147,
"pid": 140,
"city_code": "101090405",
"city_name": "平泉县"
},
{
"_id": 913,
"id": 1148,
"pid": 140,
"city_code": "101090406",
"city_name": "滦平县"
},
{
"_id": 914,
"id": 1149,
"pid": 140,
"city_code": "101090407",
"city_name": "隆化县"
},
{
"_id": 915,
"id": 1150,
"pid": 140,
"city_code": "101090408",
"city_name": "丰宁县"
},
{
"_id": 916,
"id": 1151,
"pid": 140,
"city_code": "101090409",
"city_name": "宽城县"
},
{
"_id": 917,
"id": 1152,
"pid": 140,
"city_code": "101090410",
"city_name": "围场县"
},
{
"_id": 918,
"id": 1156,
"pid": 141,
"city_code": "101091002",
"city_name": "峰峰矿区"
},
{
"_id": 919,
"id": 1157,
"pid": 141,
"city_code": "101091016",
"city_name": "武安市"
},
{
"_id": 920,
"id": 1158,
"pid": 141,
"city_code": "101091001",
"city_name": "邯郸县"
},
{
"_id": 921,
"id": 1159,
"pid": 141,
"city_code": "101091003",
"city_name": "临漳县"
},
{
"_id": 922,
"id": 1160,
"pid": 141,
"city_code": "101091004",
"city_name": "成安县"
},
{
"_id": 923,
"id": 1161,
"pid": 141,
"city_code": "101091005",
"city_name": "大名县"
},
{
"_id": 924,
"id": 1162,
"pid": 141,
"city_code": "101091006",
"city_name": "涉县"
},
{
"_id": 925,
"id": 1163,
"pid": 141,
"city_code": "101091007",
"city_name": "磁县"
},
{
"_id": 926,
"id": 1164,
"pid": 141,
"city_code": "101091008",
"city_name": "肥乡县"
},
{
"_id": 927,
"id": 1165,
"pid": 141,
"city_code": "101091009",
"city_name": "永年县"
},
{
"_id": 928,
"id": 1166,
"pid": 141,
"city_code": "101091010",
"city_name": "邱县"
},
{
"_id": 929,
"id": 1167,
"pid": 141,
"city_code": "101091011",
"city_name": "鸡泽县"
},
{
"_id": 930,
"id": 1168,
"pid": 141,
"city_code": "101091012",
"city_name": "广平县"
},
{
"_id": 931,
"id": 1169,
"pid": 141,
"city_code": "101091013",
"city_name": "馆陶县"
},
{
"_id": 932,
"id": 1170,
"pid": 141,
"city_code": "101091014",
"city_name": "魏县"
},
{
"_id": 933,
"id": 1171,
"pid": 141,
"city_code": "101091015",
"city_name": "曲周县"
},
{
"_id": 934,
"id": 1173,
"pid": 142,
"city_code": "101090810",
"city_name": "冀州市"
},
{
"_id": 935,
"id": 1174,
"pid": 142,
"city_code": "101090811",
"city_name": "深州市"
},
{
"_id": 936,
"id": 1175,
"pid": 142,
"city_code": "101090802",
"city_name": "枣强县"
},
{
"_id": 937,
"id": 1176,
"pid": 142,
"city_code": "101090803",
"city_name": "武邑县"
},
{
"_id": 938,
"id": 1177,
"pid": 142,
"city_code": "101090804",
"city_name": "武强县"
},
{
"_id": 939,
"id": 1178,
"pid": 142,
"city_code": "101090805",
"city_name": "饶阳县"
},
{
"_id": 940,
"id": 1179,
"pid": 142,
"city_code": "101090806",
"city_name": "安平县"
},
{
"_id": 941,
"id": 1180,
"pid": 142,
"city_code": "101090807",
"city_name": "故城县"
},
{
"_id": 942,
"id": 1181,
"pid": 142,
"city_code": "101090808",
"city_name": "景县"
},
{
"_id": 943,
"id": 1182,
"pid": 142,
"city_code": "101090809",
"city_name": "阜城县"
},
{
"_id": 944,
"id": 1185,
"pid": 143,
"city_code": "101090608",
"city_name": "霸州市"
},
{
"_id": 945,
"id": 1186,
"pid": 143,
"city_code": "101090609",
"city_name": "三河市"
},
{
"_id": 946,
"id": 1187,
"pid": 143,
"city_code": "101090602",
"city_name": "固安县"
},
{
"_id": 947,
"id": 1188,
"pid": 143,
"city_code": "101090603",
"city_name": "永清县"
},
{
"_id": 948,
"id": 1189,
"pid": 143,
"city_code": "101090604",
"city_name": "香河县"
},
{
"_id": 949,
"id": 1190,
"pid": 143,
"city_code": "101090605",
"city_name": "大城县"
},
{
"_id": 950,
"id": 1191,
"pid": 143,
"city_code": "101090606",
"city_name": "文安县"
},
{
"_id": 951,
"id": 1192,
"pid": 143,
"city_code": "101090607",
"city_name": "大厂县"
},
{
"_id": 952,
"id": 1195,
"pid": 144,
"city_code": "101091106",
"city_name": "北戴河区"
},
{
"_id": 953,
"id": 1196,
"pid": 144,
"city_code": "101091103",
"city_name": "昌黎县"
},
{
"_id": 954,
"id": 1197,
"pid": 144,
"city_code": "101091104",
"city_name": "抚宁县"
},
{
"_id": 955,
"id": 1198,
"pid": 144,
"city_code": "101091105",
"city_name": "卢龙县"
},
{
"_id": 956,
"id": 1199,
"pid": 144,
"city_code": "101091102",
"city_name": "青龙县"
},
{
"_id": 957,
"id": 1204,
"pid": 145,
"city_code": "101090502",
"city_name": "丰南区"
},
{
"_id": 958,
"id": 1205,
"pid": 145,
"city_code": "101090503",
"city_name": "丰润区"
},
{
"_id": 959,
"id": 1206,
"pid": 145,
"city_code": "101090510",
"city_name": "遵化市"
},
{
"_id": 960,
"id": 1207,
"pid": 145,
"city_code": "101090511",
"city_name": "迁安市"
},
{
"_id": 961,
"id": 1208,
"pid": 145,
"city_code": "101090504",
"city_name": "滦县"
},
{
"_id": 962,
"id": 1209,
"pid": 145,
"city_code": "101090505",
"city_name": "滦南县"
},
{
"_id": 963,
"id": 1210,
"pid": 145,
"city_code": "101090506",
"city_name": "乐亭县"
},
{
"_id": 964,
"id": 1211,
"pid": 145,
"city_code": "101090507",
"city_name": "迁西县"
},
{
"_id": 965,
"id": 1212,
"pid": 145,
"city_code": "101090508",
"city_name": "玉田县"
},
{
"_id": 966,
"id": 1213,
"pid": 145,
"city_code": "101090509",
"city_name": "唐海县"
},
{
"_id": 967,
"id": 1216,
"pid": 146,
"city_code": "101090916",
"city_name": "南宫市"
},
{
"_id": 968,
"id": 1217,
"pid": 146,
"city_code": "101090917",
"city_name": "沙河市"
},
{
"_id": 969,
"id": 1218,
"pid": 146,
"city_code": "101090901",
"city_name": "邢台县"
},
{
"_id": 970,
"id": 1219,
"pid": 146,
"city_code": "101090902",
"city_name": "临城县"
},
{
"_id": 971,
"id": 1220,
"pid": 146,
"city_code": "101090904",
"city_name": "内丘县"
},
{
"_id": 972,
"id": 1221,
"pid": 146,
"city_code": "101090905",
"city_name": "柏乡县"
},
{
"_id": 973,
"id": 1222,
"pid": 146,
"city_code": "101090906",
"city_name": "隆尧县"
},
{
"_id": 974,
"id": 1223,
"pid": 146,
"city_code": "101090918",
"city_name": "任县"
},
{
"_id": 975,
"id": 1224,
"pid": 146,
"city_code": "101090907",
"city_name": "南和县"
},
{
"_id": 976,
"id": 1225,
"pid": 146,
"city_code": "101090908",
"city_name": "宁晋县"
},
{
"_id": 977,
"id": 1226,
"pid": 146,
"city_code": "101090909",
"city_name": "巨鹿县"
},
{
"_id": 978,
"id": 1227,
"pid": 146,
"city_code": "101090910",
"city_name": "新河县"
},
{
"_id": 979,
"id": 1228,
"pid": 146,
"city_code": "101090911",
"city_name": "广宗县"
},
{
"_id": 980,
"id": 1229,
"pid": 146,
"city_code": "101090912",
"city_name": "平乡县"
},
{
"_id": 981,
"id": 1230,
"pid": 146,
"city_code": "101090913",
"city_name": "威县"
},
{
"_id": 982,
"id": 1231,
"pid": 146,
"city_code": "101090914",
"city_name": "清河县"
},
{
"_id": 983,
"id": 1232,
"pid": 146,
"city_code": "101090915",
"city_name": "临西县"
},
{
"_id": 984,
"id": 1235,
"pid": 147,
"city_code": "101090302",
"city_name": "宣化区"
},
{
"_id": 985,
"id": 1237,
"pid": 147,
"city_code": "101090302",
"city_name": "宣化县"
},
{
"_id": 986,
"id": 1238,
"pid": 147,
"city_code": "101090303",
"city_name": "张北县"
},
{
"_id": 987,
"id": 1239,
"pid": 147,
"city_code": "101090304",
"city_name": "康保县"
},
{
"_id": 988,
"id": 1240,
"pid": 147,
"city_code": "101090305",
"city_name": "沽源县"
},
{
"_id": 989,
"id": 1241,
"pid": 147,
"city_code": "101090306",
"city_name": "尚义县"
},
{
"_id": 990,
"id": 1242,
"pid": 147,
"city_code": "101090307",
"city_name": "蔚县"
},
{
"_id": 991,
"id": 1243,
"pid": 147,
"city_code": "101090308",
"city_name": "阳原县"
},
{
"_id": 992,
"id": 1244,
"pid": 147,
"city_code": "101090309",
"city_name": "怀安县"
},
{
"_id": 993,
"id": 1245,
"pid": 147,
"city_code": "101090310",
"city_name": "万全县"
},
{
"_id": 994,
"id": 1246,
"pid": 147,
"city_code": "101090311",
"city_name": "怀来县"
},
{
"_id": 995,
"id": 1247,
"pid": 147,
"city_code": "101090312",
"city_name": "涿鹿县"
},
{
"_id": 996,
"id": 1248,
"pid": 147,
"city_code": "101090313",
"city_name": "赤城县"
},
{
"_id": 997,
"id": 1249,
"pid": 147,
"city_code": "101090314",
"city_name": "崇礼县"
},
{
"_id": 998,
"id": 1255,
"pid": 148,
"city_code": "101180108",
"city_name": "上街区"
},
{
"_id": 999,
"id": 1261,
"pid": 148,
"city_code": "101180102",
"city_name": "巩义市"
},
{
"_id": 1000,
"id": 1262,
"pid": 148,
"city_code": "101180103",
"city_name": "荥阳市"
},
{
"_id": 1001,
"id": 1263,
"pid": 148,
"city_code": "101180105",
"city_name": "新密市"
},
{
"_id": 1002,
"id": 1264,
"pid": 148,
"city_code": "101180106",
"city_name": "新郑市"
},
{
"_id": 1003,
"id": 1265,
"pid": 148,
"city_code": "101180104",
"city_name": "登封市"
},
{
"_id": 1004,
"id": 1266,
"pid": 148,
"city_code": "101180107",
"city_name": "中牟县"
},
{
"_id": 1005,
"id": 1272,
"pid": 149,
"city_code": "101180911",
"city_name": "吉利区"
},
{
"_id": 1006,
"id": 1273,
"pid": 149,
"city_code": "101180908",
"city_name": "偃师市"
},
{
"_id": 1007,
"id": 1274,
"pid": 149,
"city_code": "101180903",
"city_name": "孟津县"
},
{
"_id": 1008,
"id": 1275,
"pid": 149,
"city_code": "101180902",
"city_name": "新安县"
},
{
"_id": 1009,
"id": 1276,
"pid": 149,
"city_code": "101180909",
"city_name": "栾川县"
},
{
"_id": 1010,
"id": 1277,
"pid": 149,
"city_code": "101180907",
"city_name": "嵩县"
},
{
"_id": 1011,
"id": 1278,
"pid": 149,
"city_code": "101180910",
"city_name": "汝阳县"
},
{
"_id": 1012,
"id": 1279,
"pid": 149,
"city_code": "101180904",
"city_name": "宜阳县"
},
{
"_id": 1013,
"id": 1280,
"pid": 149,
"city_code": "101180905",
"city_name": "洛宁县"
},
{
"_id": 1014,
"id": 1281,
"pid": 149,
"city_code": "101180906",
"city_name": "伊川县"
},
{
"_id": 1015,
"id": 1287,
"pid": 150,
"city_code": "101180802",
"city_name": "杞县"
},
{
"_id": 1016,
"id": 1288,
"pid": 150,
"city_code": "101180804",
"city_name": "通许县"
},
{
"_id": 1017,
"id": 1289,
"pid": 150,
"city_code": "101180803",
"city_name": "尉氏县"
},
{
"_id": 1018,
"id": 1290,
"pid": 150,
"city_code": "101180801",
"city_name": "开封县"
},
{
"_id": 1019,
"id": 1291,
"pid": 150,
"city_code": "101180805",
"city_name": "兰考县"
},
{
"_id": 1020,
"id": 1296,
"pid": 151,
"city_code": "101180205",
"city_name": "林州市"
},
{
"_id": 1021,
"id": 1297,
"pid": 151,
"city_code": "101180201",
"city_name": "安阳县"
},
{
"_id": 1022,
"id": 1298,
"pid": 151,
"city_code": "101180202",
"city_name": "汤阴县"
},
{
"_id": 1023,
"id": 1299,
"pid": 151,
"city_code": "101180203",
"city_name": "滑县"
},
{
"_id": 1024,
"id": 1300,
"pid": 151,
"city_code": "101180204",
"city_name": "内黄县"
},
{
"_id": 1025,
"id": 1304,
"pid": 152,
"city_code": "101181202",
"city_name": "浚县"
},
{
"_id": 1026,
"id": 1305,
"pid": 152,
"city_code": "101181203",
"city_name": "淇县"
},
{
"_id": 1027,
"id": 1306,
"pid": 153,
"city_code": "101181801",
"city_name": "济源市"
},
{
"_id": 1028,
"id": 1311,
"pid": 154,
"city_code": "101181104",
"city_name": "沁阳市"
},
{
"_id": 1029,
"id": 1312,
"pid": 154,
"city_code": "101181108",
"city_name": "孟州市"
},
{
"_id": 1030,
"id": 1313,
"pid": 154,
"city_code": "101181102",
"city_name": "修武县"
},
{
"_id": 1031,
"id": 1314,
"pid": 154,
"city_code": "101181106",
"city_name": "博爱县"
},
{
"_id": 1032,
"id": 1315,
"pid": 154,
"city_code": "101181103",
"city_name": "武陟县"
},
{
"_id": 1033,
"id": 1316,
"pid": 154,
"city_code": "101181107",
"city_name": "温县"
},
{
"_id": 1034,
"id": 1319,
"pid": 155,
"city_code": "101180711",
"city_name": "邓州市"
},
{
"_id": 1035,
"id": 1320,
"pid": 155,
"city_code": "101180702",
"city_name": "南召县"
},
{
"_id": 1036,
"id": 1321,
"pid": 155,
"city_code": "101180703",
"city_name": "方城县"
},
{
"_id": 1037,
"id": 1322,
"pid": 155,
"city_code": "101180705",
"city_name": "西峡县"
},
{
"_id": 1038,
"id": 1323,
"pid": 155,
"city_code": "101180707",
"city_name": "镇平县"
},
{
"_id": 1039,
"id": 1324,
"pid": 155,
"city_code": "101180706",
"city_name": "内乡县"
},
{
"_id": 1040,
"id": 1325,
"pid": 155,
"city_code": "101180708",
"city_name": "淅川县"
},
{
"_id": 1041,
"id": 1326,
"pid": 155,
"city_code": "101180704",
"city_name": "社旗县"
},
{
"_id": 1042,
"id": 1327,
"pid": 155,
"city_code": "101180710",
"city_name": "唐河县"
},
{
"_id": 1043,
"id": 1328,
"pid": 155,
"city_code": "101180709",
"city_name": "新野县"
},
{
"_id": 1044,
"id": 1329,
"pid": 155,
"city_code": "101180712",
"city_name": "桐柏县"
},
{
"_id": 1045,
"id": 1333,
"pid": 156,
"city_code": "101180508",
"city_name": "石龙区"
},
{
"_id": 1046,
"id": 1334,
"pid": 156,
"city_code": "101180506",
"city_name": "舞钢市"
},
{
"_id": 1047,
"id": 1335,
"pid": 156,
"city_code": "101180504",
"city_name": "汝州市"
},
{
"_id": 1048,
"id": 1336,
"pid": 156,
"city_code": "101180503",
"city_name": "宝丰县"
},
{
"_id": 1049,
"id": 1337,
"pid": 156,
"city_code": "101180505",
"city_name": "叶县"
},
{
"_id": 1050,
"id": 1338,
"pid": 156,
"city_code": "101180507",
"city_name": "鲁山县"
},
{
"_id": 1051,
"id": 1339,
"pid": 156,
"city_code": "101180502",
"city_name": "郏县"
},
{
"_id": 1052,
"id": 1341,
"pid": 157,
"city_code": "101181705",
"city_name": "义马市"
},
{
"_id": 1053,
"id": 1342,
"pid": 157,
"city_code": "101181702",
"city_name": "灵宝市"
},
{
"_id": 1054,
"id": 1343,
"pid": 157,
"city_code": "101181703",
"city_name": "渑池县"
},
{
"_id": 1055,
"id": 1344,
"pid": 157,
"city_code": "101181706",
"city_name": "陕县"
},
{
"_id": 1056,
"id": 1345,
"pid": 157,
"city_code": "101181704",
"city_name": "卢氏县"
},
{
"_id": 1057,
"id": 1347,
"pid": 158,
"city_code": "101181002",
"city_name": "睢阳区"
},
{
"_id": 1058,
"id": 1348,
"pid": 158,
"city_code": "101181009",
"city_name": "永城市"
},
{
"_id": 1059,
"id": 1349,
"pid": 158,
"city_code": "101181004",
"city_name": "民权县"
},
{
"_id": 1060,
"id": 1350,
"pid": 158,
"city_code": "101181003",
"city_name": "睢县"
},
{
"_id": 1061,
"id": 1351,
"pid": 158,
"city_code": "101181007",
"city_name": "宁陵县"
},
{
"_id": 1062,
"id": 1352,
"pid": 158,
"city_code": "101181005",
"city_name": "虞城县"
},
{
"_id": 1063,
"id": 1353,
"pid": 158,
"city_code": "101181006",
"city_name": "柘城县"
},
{
"_id": 1064,
"id": 1354,
"pid": 158,
"city_code": "101181008",
"city_name": "夏邑县"
},
{
"_id": 1065,
"id": 1359,
"pid": 159,
"city_code": "101180305",
"city_name": "卫辉市"
},
{
"_id": 1066,
"id": 1360,
"pid": 159,
"city_code": "101180304",
"city_name": "辉县市"
},
{
"_id": 1067,
"id": 1361,
"pid": 159,
"city_code": "101180301",
"city_name": "新乡县"
},
{
"_id": 1068,
"id": 1362,
"pid": 159,
"city_code": "101180302",
"city_name": "获嘉县"
},
{
"_id": 1069,
"id": 1363,
"pid": 159,
"city_code": "101180303",
"city_name": "原阳县"
},
{
"_id": 1070,
"id": 1364,
"pid": 159,
"city_code": "101180306",
"city_name": "延津县"
},
{
"_id": 1071,
"id": 1365,
"pid": 159,
"city_code": "101180307",
"city_name": "封丘县"
},
{
"_id": 1072,
"id": 1366,
"pid": 159,
"city_code": "101180308",
"city_name": "长垣县"
},
{
"_id": 1073,
"id": 1369,
"pid": 160,
"city_code": "101180603",
"city_name": "罗山县"
},
{
"_id": 1074,
"id": 1370,
"pid": 160,
"city_code": "101180604",
"city_name": "光山县"
},
{
"_id": 1075,
"id": 1371,
"pid": 160,
"city_code": "101180605",
"city_name": "新县"
},
{
"_id": 1076,
"id": 1372,
"pid": 160,
"city_code": "101180609",
"city_name": "商城县"
},
{
"_id": 1077,
"id": 1373,
"pid": 160,
"city_code": "101180608",
"city_name": "固始县"
},
{
"_id": 1078,
"id": 1374,
"pid": 160,
"city_code": "101180607",
"city_name": "潢川县"
},
{
"_id": 1079,
"id": 1375,
"pid": 160,
"city_code": "101180606",
"city_name": "淮滨县"
},
{
"_id": 1080,
"id": 1376,
"pid": 160,
"city_code": "101180602",
"city_name": "息县"
},
{
"_id": 1081,
"id": 1378,
"pid": 161,
"city_code": "101180405",
"city_name": "禹州市"
},
{
"_id": 1082,
"id": 1379,
"pid": 161,
"city_code": "101180404",
"city_name": "长葛市"
},
{
"_id": 1083,
"id": 1380,
"pid": 161,
"city_code": "101180401",
"city_name": "许昌县"
},
{
"_id": 1084,
"id": 1381,
"pid": 161,
"city_code": "101180402",
"city_name": "鄢陵县"
},
{
"_id": 1085,
"id": 1382,
"pid": 161,
"city_code": "101180403",
"city_name": "襄城县"
},
{
"_id": 1086,
"id": 1384,
"pid": 162,
"city_code": "101181407",
"city_name": "项城市"
},
{
"_id": 1087,
"id": 1385,
"pid": 162,
"city_code": "101181402",
"city_name": "扶沟县"
},
{
"_id": 1088,
"id": 1386,
"pid": 162,
"city_code": "101181405",
"city_name": "西华县"
},
{
"_id": 1089,
"id": 1387,
"pid": 162,
"city_code": "101181406",
"city_name": "商水县"
},
{
"_id": 1090,
"id": 1388,
"pid": 162,
"city_code": "101181410",
"city_name": "沈丘县"
},
{
"_id": 1091,
"id": 1389,
"pid": 162,
"city_code": "101181408",
"city_name": "郸城县"
},
{
"_id": 1092,
"id": 1390,
"pid": 162,
"city_code": "101181404",
"city_name": "淮阳县"
},
{
"_id": 1093,
"id": 1391,
"pid": 162,
"city_code": "101181403",
"city_name": "太康县"
},
{
"_id": 1094,
"id": 1392,
"pid": 162,
"city_code": "101181409",
"city_name": "鹿邑县"
},
{
"_id": 1095,
"id": 1394,
"pid": 163,
"city_code": "101181602",
"city_name": "西平县"
},
{
"_id": 1096,
"id": 1395,
"pid": 163,
"city_code": "101181604",
"city_name": "上蔡县"
},
{
"_id": 1097,
"id": 1396,
"pid": 163,
"city_code": "101181607",
"city_name": "平舆县"
},
{
"_id": 1098,
"id": 1397,
"pid": 163,
"city_code": "101181610",
"city_name": "正阳县"
},
{
"_id": 1099,
"id": 1398,
"pid": 163,
"city_code": "101181609",
"city_name": "确山县"
},
{
"_id": 1100,
"id": 1399,
"pid": 163,
"city_code": "101181606",
"city_name": "泌阳县"
},
{
"_id": 1101,
"id": 1400,
"pid": 163,
"city_code": "101181605",
"city_name": "汝南县"
},
{
"_id": 1102,
"id": 1401,
"pid": 163,
"city_code": "101181603",
"city_name": "遂平县"
},
{
"_id": 1103,
"id": 1402,
"pid": 163,
"city_code": "101181608",
"city_name": "新蔡县"
},
{
"_id": 1104,
"id": 1406,
"pid": 164,
"city_code": "101181503",
"city_name": "舞阳县"
},
{
"_id": 1105,
"id": 1407,
"pid": 164,
"city_code": "101181502",
"city_name": "临颍县"
},
{
"_id": 1106,
"id": 1409,
"pid": 165,
"city_code": "101181304",
"city_name": "清丰县"
},
{
"_id": 1107,
"id": 1410,
"pid": 165,
"city_code": "101181303",
"city_name": "南乐县"
},
{
"_id": 1108,
"id": 1411,
"pid": 165,
"city_code": "101181305",
"city_name": "范县"
},
{
"_id": 1109,
"id": 1412,
"pid": 165,
"city_code": "101181302",
"city_name": "台前县"
},
{
"_id": 1110,
"id": 1413,
"pid": 165,
"city_code": "101181301",
"city_name": "濮阳县"
},
{
"_id": 1111,
"id": 1421,
"pid": 166,
"city_code": "101050104",
"city_name": "阿城区"
},
{
"_id": 1112,
"id": 1422,
"pid": 166,
"city_code": "101050103",
"city_name": "呼兰区"
},
{
"_id": 1113,
"id": 1424,
"pid": 166,
"city_code": "101050111",
"city_name": "尚志市"
},
{
"_id": 1114,
"id": 1425,
"pid": 166,
"city_code": "101050102",
"city_name": "双城市"
},
{
"_id": 1115,
"id": 1426,
"pid": 166,
"city_code": "101050112",
"city_name": "五常市"
},
{
"_id": 1116,
"id": 1427,
"pid": 166,
"city_code": "101050109",
"city_name": "方正县"
},
{
"_id": 1117,
"id": 1428,
"pid": 166,
"city_code": "101050105",
"city_name": "宾县"
},
{
"_id": 1118,
"id": 1429,
"pid": 166,
"city_code": "101050106",
"city_name": "依兰县"
},
{
"_id": 1119,
"id": 1430,
"pid": 166,
"city_code": "101050107",
"city_name": "巴彦县"
},
{
"_id": 1120,
"id": 1431,
"pid": 166,
"city_code": "101050108",
"city_name": "通河县"
},
{
"_id": 1121,
"id": 1432,
"pid": 166,
"city_code": "101050113",
"city_name": "木兰县"
},
{
"_id": 1122,
"id": 1433,
"pid": 166,
"city_code": "101050110",
"city_name": "延寿县"
},
{
"_id": 1123,
"id": 1439,
"pid": 167,
"city_code": "101050903",
"city_name": "肇州县"
},
{
"_id": 1124,
"id": 1440,
"pid": 167,
"city_code": "101050904",
"city_name": "肇源县"
},
{
"_id": 1125,
"id": 1441,
"pid": 167,
"city_code": "101050902",
"city_name": "林甸县"
},
{
"_id": 1126,
"id": 1442,
"pid": 167,
"city_code": "101050905",
"city_name": "杜尔伯特"
},
{
"_id": 1127,
"id": 1443,
"pid": 168,
"city_code": "101050704",
"city_name": "呼玛县"
},
{
"_id": 1128,
"id": 1444,
"pid": 168,
"city_code": "101050703",
"city_name": "漠河县"
},
{
"_id": 1129,
"id": 1445,
"pid": 168,
"city_code": "101050702",
"city_name": "塔河县"
},
{
"_id": 1130,
"id": 1448,
"pid": 169,
"city_code": "101051206",
"city_name": "南山区"
},
{
"_id": 1131,
"id": 1452,
"pid": 169,
"city_code": "101051203",
"city_name": "萝北县"
},
{
"_id": 1132,
"id": 1453,
"pid": 169,
"city_code": "101051202",
"city_name": "绥滨县"
},
{
"_id": 1133,
"id": 1455,
"pid": 170,
"city_code": "101050605",
"city_name": "五大连池市"
},
{
"_id": 1134,
"id": 1456,
"pid": 170,
"city_code": "101050606",
"city_name": "北安市"
},
{
"_id": 1135,
"id": 1457,
"pid": 170,
"city_code": "101050602",
"city_name": "嫩江县"
},
{
"_id": 1136,
"id": 1458,
"pid": 170,
"city_code": "101050604",
"city_name": "逊克县"
},
{
"_id": 1137,
"id": 1459,
"pid": 170,
"city_code": "101050603",
"city_name": "孙吴县"
},
{
"_id": 1138,
"id": 1465,
"pid": 171,
"city_code": "101051102",
"city_name": "虎林市"
},
{
"_id": 1139,
"id": 1466,
"pid": 171,
"city_code": "101051103",
"city_name": "密山市"
},
{
"_id": 1140,
"id": 1467,
"pid": 171,
"city_code": "101051104",
"city_name": "鸡东县"
},
{
"_id": 1141,
"id": 1472,
"pid": 172,
"city_code": "101050406",
"city_name": "同江市"
},
{
"_id": 1142,
"id": 1473,
"pid": 172,
"city_code": "101050407",
"city_name": "富锦市"
},
{
"_id": 1143,
"id": 1474,
"pid": 172,
"city_code": "101050405",
"city_name": "桦南县"
},
{
"_id": 1144,
"id": 1475,
"pid": 172,
"city_code": "101050404",
"city_name": "桦川县"
},
{
"_id": 1145,
"id": 1476,
"pid": 172,
"city_code": "101050402",
"city_name": "汤原县"
},
{
"_id": 1146,
"id": 1477,
"pid": 172,
"city_code": "101050403",
"city_name": "抚远县"
},
{
"_id": 1147,
"id": 1482,
"pid": 173,
"city_code": "101050305",
"city_name": "绥芬河市"
},
{
"_id": 1148,
"id": 1483,
"pid": 173,
"city_code": "101050302",
"city_name": "海林市"
},
{
"_id": 1149,
"id": 1484,
"pid": 173,
"city_code": "101050306",
"city_name": "宁安市"
},
{
"_id": 1150,
"id": 1485,
"pid": 173,
"city_code": "101050303",
"city_name": "穆棱市"
},
{
"_id": 1151,
"id": 1486,
"pid": 173,
"city_code": "101050307",
"city_name": "东宁县"
},
{
"_id": 1152,
"id": 1487,
"pid": 173,
"city_code": "101050304",
"city_name": "林口县"
},
{
"_id": 1153,
"id": 1491,
"pid": 174,
"city_code": "101051002",
"city_name": "勃利县"
},
{
"_id": 1154,
"id": 1499,
"pid": 175,
"city_code": "101050202",
"city_name": "讷河市"
},
{
"_id": 1155,
"id": 1500,
"pid": 175,
"city_code": "101050203",
"city_name": "龙江县"
},
{
"_id": 1156,
"id": 1501,
"pid": 175,
"city_code": "101050206",
"city_name": "依安县"
},
{
"_id": 1157,
"id": 1502,
"pid": 175,
"city_code": "101050210",
"city_name": "泰来县"
},
{
"_id": 1158,
"id": 1503,
"pid": 175,
"city_code": "101050204",
"city_name": "甘南县"
},
{
"_id": 1159,
"id": 1504,
"pid": 175,
"city_code": "101050205",
"city_name": "富裕县"
},
{
"_id": 1160,
"id": 1505,
"pid": 175,
"city_code": "101050208",
"city_name": "克山县"
},
{
"_id": 1161,
"id": 1506,
"pid": 175,
"city_code": "101050209",
"city_name": "克东县"
},
{
"_id": 1162,
"id": 1507,
"pid": 175,
"city_code": "101050207",
"city_name": "拜泉县"
},
{
"_id": 1163,
"id": 1512,
"pid": 176,
"city_code": "101051302",
"city_name": "集贤县"
},
{
"_id": 1164,
"id": 1513,
"pid": 176,
"city_code": "101051305",
"city_name": "友谊县"
},
{
"_id": 1165,
"id": 1514,
"pid": 176,
"city_code": "101051303",
"city_name": "宝清县"
},
{
"_id": 1166,
"id": 1515,
"pid": 176,
"city_code": "101051304",
"city_name": "饶河县"
},
{
"_id": 1167,
"id": 1517,
"pid": 177,
"city_code": "101050503",
"city_name": "安达市"
},
{
"_id": 1168,
"id": 1518,
"pid": 177,
"city_code": "101050502",
"city_name": "肇东市"
},
{
"_id": 1169,
"id": 1519,
"pid": 177,
"city_code": "101050504",
"city_name": "海伦市"
},
{
"_id": 1170,
"id": 1520,
"pid": 177,
"city_code": "101050506",
"city_name": "望奎县"
},
{
"_id": 1171,
"id": 1521,
"pid": 177,
"city_code": "101050507",
"city_name": "兰西县"
},
{
"_id": 1172,
"id": 1522,
"pid": 177,
"city_code": "101050508",
"city_name": "青冈县"
},
{
"_id": 1173,
"id": 1523,
"pid": 177,
"city_code": "101050509",
"city_name": "庆安县"
},
{
"_id": 1174,
"id": 1524,
"pid": 177,
"city_code": "101050505",
"city_name": "明水县"
},
{
"_id": 1175,
"id": 1525,
"pid": 177,
"city_code": "101050510",
"city_name": "绥棱县"
},
{
"_id": 1176,
"id": 1526,
"pid": 178,
"city_code": "101050801",
"city_name": "伊春区"
},
{
"_id": 1177,
"id": 1536,
"pid": 178,
"city_code": "101050803",
"city_name": "五营区"
},
{
"_id": 1178,
"id": 1540,
"pid": 178,
"city_code": "101050802",
"city_name": "乌伊岭区"
},
{
"_id": 1179,
"id": 1541,
"pid": 178,
"city_code": "101050804",
"city_name": "铁力市"
},
{
"_id": 1180,
"id": 1542,
"pid": 178,
"city_code": "101050805",
"city_name": "嘉荫县"
},
{
"_id": 1181,
"id": 1550,
"pid": 179,
"city_code": "101200106",
"city_name": "东西湖区"
},
{
"_id": 1182,
"id": 1552,
"pid": 179,
"city_code": "101200102",
"city_name": "蔡甸区"
},
{
"_id": 1183,
"id": 1553,
"pid": 179,
"city_code": "101200105",
"city_name": "江夏区"
},
{
"_id": 1184,
"id": 1554,
"pid": 179,
"city_code": "101200103",
"city_name": "黄陂区"
},
{
"_id": 1185,
"id": 1555,
"pid": 179,
"city_code": "101200104",
"city_name": "新洲区"
},
{
"_id": 1186,
"id": 1560,
"pid": 181,
"city_code": "101200302",
"city_name": "梁子湖区"
},
{
"_id": 1187,
"id": 1562,
"pid": 182,
"city_code": "101200503",
"city_name": "麻城市"
},
{
"_id": 1188,
"id": 1563,
"pid": 182,
"city_code": "101200509",
"city_name": "武穴市"
},
{
"_id": 1189,
"id": 1564,
"pid": 182,
"city_code": "101200510",
"city_name": "团风县"
},
{
"_id": 1190,
"id": 1565,
"pid": 182,
"city_code": "101200502",
"city_name": "红安县"
},
{
"_id": 1191,
"id": 1566,
"pid": 182,
"city_code": "101200504",
"city_name": "罗田县"
},
{
"_id": 1192,
"id": 1567,
"pid": 182,
"city_code": "101200505",
"city_name": "英山县"
},
{
"_id": 1193,
"id": 1568,
"pid": 182,
"city_code": "101200506",
"city_name": "浠水县"
},
{
"_id": 1194,
"id": 1569,
"pid": 182,
"city_code": "101200507",
"city_name": "蕲春县"
},
{
"_id": 1195,
"id": 1570,
"pid": 182,
"city_code": "101200508",
"city_name": "黄梅县"
},
{
"_id": 1196,
"id": 1572,
"pid": 183,
"city_code": "101200606",
"city_name": "西塞山区"
},
{
"_id": 1197,
"id": 1573,
"pid": 183,
"city_code": "101200605",
"city_name": "下陆区"
},
{
"_id": 1198,
"id": 1574,
"pid": 183,
"city_code": "101200604",
"city_name": "铁山区"
},
{
"_id": 1199,
"id": 1575,
"pid": 183,
"city_code": "101200602",
"city_name": "大冶市"
},
{
"_id": 1200,
"id": 1576,
"pid": 183,
"city_code": "101200603",
"city_name": "阳新县"
},
{
"_id": 1201,
"id": 1578,
"pid": 184,
"city_code": "101201404",
"city_name": "掇刀区"
},
{
"_id": 1202,
"id": 1579,
"pid": 184,
"city_code": "101201402",
"city_name": "钟祥市"
},
{
"_id": 1203,
"id": 1580,
"pid": 184,
"city_code": "101201403",
"city_name": "京山县"
},
{
"_id": 1204,
"id": 1581,
"pid": 184,
"city_code": "101201405",
"city_name": "沙洋县"
},
{
"_id": 1205,
"id": 1583,
"pid": 185,
"city_code": "101200801",
"city_name": "荆州区"
},
{
"_id": 1206,
"id": 1584,
"pid": 185,
"city_code": "101200804",
"city_name": "石首市"
},
{
"_id": 1207,
"id": 1585,
"pid": 185,
"city_code": "101200806",
"city_name": "洪湖市"
},
{
"_id": 1208,
"id": 1586,
"pid": 185,
"city_code": "101200807",
"city_name": "松滋市"
},
{
"_id": 1209,
"id": 1587,
"pid": 185,
"city_code": "101200803",
"city_name": "公安县"
},
{
"_id": 1210,
"id": 1588,
"pid": 185,
"city_code": "101200805",
"city_name": "监利县"
},
{
"_id": 1211,
"id": 1589,
"pid": 185,
"city_code": "101200802",
"city_name": "江陵县"
},
{
"_id": 1212,
"id": 1590,
"pid": 186,
"city_code": "101201701",
"city_name": "潜江市"
},
{
"_id": 1213,
"id": 1592,
"pid": 188,
"city_code": "101201109",
"city_name": "张湾区"
},
{
"_id": 1214,
"id": 1593,
"pid": 188,
"city_code": "101201108",
"city_name": "茅箭区"
},
{
"_id": 1215,
"id": 1594,
"pid": 188,
"city_code": "101201107",
"city_name": "丹江口市"
},
{
"_id": 1216,
"id": 1595,
"pid": 188,
"city_code": "101201104",
"city_name": "郧县"
},
{
"_id": 1217,
"id": 1596,
"pid": 188,
"city_code": "101201103",
"city_name": "郧西县"
},
{
"_id": 1218,
"id": 1597,
"pid": 188,
"city_code": "101201105",
"city_name": "竹山县"
},
{
"_id": 1219,
"id": 1598,
"pid": 188,
"city_code": "101201102",
"city_name": "竹溪县"
},
{
"_id": 1220,
"id": 1599,
"pid": 188,
"city_code": "101201106",
"city_name": "房县"
},
{
"_id": 1221,
"id": 1601,
"pid": 189,
"city_code": "101201302",
"city_name": "广水市"
},
{
"_id": 1222,
"id": 1602,
"pid": 190,
"city_code": "101201501",
"city_name": "天门市"
},
{
"_id": 1223,
"id": 1604,
"pid": 191,
"city_code": "101200702",
"city_name": "赤壁市"
},
{
"_id": 1224,
"id": 1605,
"pid": 191,
"city_code": "101200703",
"city_name": "嘉鱼县"
},
{
"_id": 1225,
"id": 1606,
"pid": 191,
"city_code": "101200705",
"city_name": "通城县"
},
{
"_id": 1226,
"id": 1607,
"pid": 191,
"city_code": "101200704",
"city_name": "崇阳县"
},
{
"_id": 1227,
"id": 1608,
"pid": 191,
"city_code": "101200706",
"city_name": "通山县"
},
{
"_id": 1228,
"id": 1611,
"pid": 192,
"city_code": "101200202",
"city_name": "襄州区"
},
{
"_id": 1229,
"id": 1612,
"pid": 192,
"city_code": "101200206",
"city_name": "老河口市"
},
{
"_id": 1230,
"id": 1613,
"pid": 192,
"city_code": "101200208",
"city_name": "枣阳市"
},
{
"_id": 1231,
"id": 1614,
"pid": 192,
"city_code": "101200205",
"city_name": "宜城市"
},
{
"_id": 1232,
"id": 1615,
"pid": 192,
"city_code": "101200204",
"city_name": "南漳县"
},
{
"_id": 1233,
"id": 1616,
"pid": 192,
"city_code": "101200207",
"city_name": "谷城县"
},
{
"_id": 1234,
"id": 1617,
"pid": 192,
"city_code": "101200203",
"city_name": "保康县"
},
{
"_id": 1235,
"id": 1619,
"pid": 193,
"city_code": "101200405",
"city_name": "应城市"
},
{
"_id": 1236,
"id": 1620,
"pid": 193,
"city_code": "101200402",
"city_name": "安陆市"
},
{
"_id": 1237,
"id": 1621,
"pid": 193,
"city_code": "101200406",
"city_name": "汉川市"
},
{
"_id": 1238,
"id": 1622,
"pid": 193,
"city_code": "101200407",
"city_name": "孝昌县"
},
{
"_id": 1239,
"id": 1623,
"pid": 193,
"city_code": "101200404",
"city_name": "大悟县"
},
{
"_id": 1240,
"id": 1624,
"pid": 193,
"city_code": "101200403",
"city_name": "云梦县"
},
{
"_id": 1241,
"id": 1625,
"pid": 194,
"city_code": "101200908",
"city_name": "长阳县"
},
{
"_id": 1242,
"id": 1626,
"pid": 194,
"city_code": "101200906",
"city_name": "五峰县"
},
{
"_id": 1243,
"id": 1631,
"pid": 194,
"city_code": "101200912",
"city_name": "夷陵区"
},
{
"_id": 1244,
"id": 1632,
"pid": 194,
"city_code": "101200909",
"city_name": "宜都市"
},
{
"_id": 1245,
"id": 1633,
"pid": 194,
"city_code": "101200907",
"city_name": "当阳市"
},
{
"_id": 1246,
"id": 1634,
"pid": 194,
"city_code": "101200910",
"city_name": "枝江市"
},
{
"_id": 1247,
"id": 1635,
"pid": 194,
"city_code": "101200902",
"city_name": "远安县"
},
{
"_id": 1248,
"id": 1636,
"pid": 194,
"city_code": "101200904",
"city_name": "兴山县"
},
{
"_id": 1249,
"id": 1637,
"pid": 194,
"city_code": "101200903",
"city_name": "秭归县"
},
{
"_id": 1250,
"id": 1638,
"pid": 195,
"city_code": "101201001",
"city_name": "恩施市"
},
{
"_id": 1251,
"id": 1639,
"pid": 195,
"city_code": "101201002",
"city_name": "利川市"
},
{
"_id": 1252,
"id": 1640,
"pid": 195,
"city_code": "101201003",
"city_name": "建始县"
},
{
"_id": 1253,
"id": 1641,
"pid": 195,
"city_code": "101201008",
"city_name": "巴东县"
},
{
"_id": 1254,
"id": 1642,
"pid": 195,
"city_code": "101201005",
"city_name": "宣恩县"
},
{
"_id": 1255,
"id": 1643,
"pid": 195,
"city_code": "101201004",
"city_name": "咸丰县"
},
{
"_id": 1256,
"id": 1644,
"pid": 195,
"city_code": "101201007",
"city_name": "来凤县"
},
{
"_id": 1257,
"id": 1645,
"pid": 195,
"city_code": "101201006",
"city_name": "鹤峰县"
},
{
"_id": 1258,
"id": 1652,
"pid": 196,
"city_code": "101250103",
"city_name": "浏阳市"
},
{
"_id": 1259,
"id": 1653,
"pid": 196,
"city_code": "101250101",
"city_name": "长沙县"
},
{
"_id": 1260,
"id": 1654,
"pid": 196,
"city_code": "101250105",
"city_name": "望城县"
},
{
"_id": 1261,
"id": 1655,
"pid": 196,
"city_code": "101250102",
"city_name": "宁乡县"
},
{
"_id": 1262,
"id": 1657,
"pid": 197,
"city_code": "101251104",
"city_name": "武陵源区"
},
{
"_id": 1263,
"id": 1658,
"pid": 197,
"city_code": "101251103",
"city_name": "慈利县"
},
{
"_id": 1264,
"id": 1659,
"pid": 197,
"city_code": "101251102",
"city_name": "桑植县"
},
{
"_id": 1265,
"id": 1662,
"pid": 198,
"city_code": "101250608",
"city_name": "津市市"
},
{
"_id": 1266,
"id": 1663,
"pid": 198,
"city_code": "101250602",
"city_name": "安乡县"
},
{
"_id": 1267,
"id": 1664,
"pid": 198,
"city_code": "101250604",
"city_name": "汉寿县"
},
{
"_id": 1268,
"id": 1665,
"pid": 198,
"city_code": "101250605",
"city_name": "澧县"
},
{
"_id": 1269,
"id": 1666,
"pid": 198,
"city_code": "101250606",
"city_name": "临澧县"
},
{
"_id": 1270,
"id": 1667,
"pid": 198,
"city_code": "101250603",
"city_name": "桃源县"
},
{
"_id": 1271,
"id": 1668,
"pid": 198,
"city_code": "101250607",
"city_name": "石门县"
},
{
"_id": 1272,
"id": 1670,
"pid": 199,
"city_code": "101250512",
"city_name": "苏仙区"
},
{
"_id": 1273,
"id": 1671,
"pid": 199,
"city_code": "101250507",
"city_name": "资兴市"
},
{
"_id": 1274,
"id": 1672,
"pid": 199,
"city_code": "101250502",
"city_name": "桂阳县"
},
{
"_id": 1275,
"id": 1673,
"pid": 199,
"city_code": "101250504",
"city_name": "宜章县"
},
{
"_id": 1276,
"id": 1674,
"pid": 199,
"city_code": "101250510",
"city_name": "永兴县"
},
{
"_id": 1277,
"id": 1675,
"pid": 199,
"city_code": "101250503",
"city_name": "嘉禾县"
},
{
"_id": 1278,
"id": 1676,
"pid": 199,
"city_code": "101250505",
"city_name": "临武县"
},
{
"_id": 1279,
"id": 1677,
"pid": 199,
"city_code": "101250508",
"city_name": "汝城县"
},
{
"_id": 1280,
"id": 1678,
"pid": 199,
"city_code": "101250511",
"city_name": "桂东县"
},
{
"_id": 1281,
"id": 1679,
"pid": 199,
"city_code": "101250509",
"city_name": "安仁县"
},
{
"_id": 1282,
"id": 1684,
"pid": 200,
"city_code": "101250409",
"city_name": "南岳区"
},
{
"_id": 1283,
"id": 1685,
"pid": 200,
"city_code": "101250408",
"city_name": "耒阳市"
},
{
"_id": 1284,
"id": 1686,
"pid": 200,
"city_code": "101250406",
"city_name": "常宁市"
},
{
"_id": 1285,
"id": 1687,
"pid": 200,
"city_code": "101250405",
"city_name": "衡阳县"
},
{
"_id": 1286,
"id": 1688,
"pid": 200,
"city_code": "101250407",
"city_name": "衡南县"
},
{
"_id": 1287,
"id": 1689,
"pid": 200,
"city_code": "101250402",
"city_name": "衡山县"
},
{
"_id": 1288,
"id": 1690,
"pid": 200,
"city_code": "101250403",
"city_name": "衡东县"
},
{
"_id": 1289,
"id": 1691,
"pid": 200,
"city_code": "101250404",
"city_name": "祁东县"
},
{
"_id": 1290,
"id": 1692,
"pid": 201,
"city_code": "101251202",
"city_name": "鹤城区"
},
{
"_id": 1291,
"id": 1693,
"pid": 201,
"city_code": "101251205",
"city_name": "靖州县"
},
{
"_id": 1292,
"id": 1694,
"pid": 201,
"city_code": "101251208",
"city_name": "麻阳县"
},
{
"_id": 1293,
"id": 1695,
"pid": 201,
"city_code": "101251207",
"city_name": "通道县"
},
{
"_id": 1294,
"id": 1696,
"pid": 201,
"city_code": "101251209",
"city_name": "新晃县"
},
{
"_id": 1295,
"id": 1697,
"pid": 201,
"city_code": "101251210",
"city_name": "芷江县"
},
{
"_id": 1296,
"id": 1698,
"pid": 201,
"city_code": "101251203",
"city_name": "沅陵县"
},
{
"_id": 1297,
"id": 1699,
"pid": 201,
"city_code": "101251204",
"city_name": "辰溪县"
},
{
"_id": 1298,
"id": 1700,
"pid": 201,
"city_code": "101251211",
"city_name": "溆浦县"
},
{
"_id": 1299,
"id": 1701,
"pid": 201,
"city_code": "101251212",
"city_name": "中方县"
},
{
"_id": 1300,
"id": 1702,
"pid": 201,
"city_code": "101251206",
"city_name": "会同县"
},
{
"_id": 1301,
"id": 1703,
"pid": 201,
"city_code": "101251213",
"city_name": "洪江市"
},
{
"_id": 1302,
"id": 1705,
"pid": 202,
"city_code": "101250803",
"city_name": "冷水江市"
},
{
"_id": 1303,
"id": 1706,
"pid": 202,
"city_code": "101250806",
"city_name": "涟源市"
},
{
"_id": 1304,
"id": 1707,
"pid": 202,
"city_code": "101250802",
"city_name": "双峰县"
},
{
"_id": 1305,
"id": 1708,
"pid": 202,
"city_code": "101250805",
"city_name": "新化县"
},
{
"_id": 1306,
"id": 1709,
"pid": 203,
"city_code": "101250909",
"city_name": "城步县"
},
{
"_id": 1307,
"id": 1713,
"pid": 203,
"city_code": "101250908",
"city_name": "武冈市"
},
{
"_id": 1308,
"id": 1714,
"pid": 203,
"city_code": "101250905",
"city_name": "邵东县"
},
{
"_id": 1309,
"id": 1715,
"pid": 203,
"city_code": "101250904",
"city_name": "新邵县"
},
{
"_id": 1310,
"id": 1716,
"pid": 203,
"city_code": "101250910",
"city_name": "邵阳县"
},
{
"_id": 1311,
"id": 1717,
"pid": 203,
"city_code": "101250902",
"city_name": "隆回县"
},
{
"_id": 1312,
"id": 1718,
"pid": 203,
"city_code": "101250903",
"city_name": "洞口县"
},
{
"_id": 1313,
"id": 1719,
"pid": 203,
"city_code": "101250906",
"city_name": "绥宁县"
},
{
"_id": 1314,
"id": 1720,
"pid": 203,
"city_code": "101250907",
"city_name": "新宁县"
},
{
"_id": 1315,
"id": 1723,
"pid": 204,
"city_code": "101250203",
"city_name": "湘乡市"
},
{
"_id": 1316,
"id": 1724,
"pid": 204,
"city_code": "101250202",
"city_name": "韶山市"
},
{
"_id": 1317,
"id": 1725,
"pid": 204,
"city_code": "101250201",
"city_name": "湘潭县"
},
{
"_id": 1318,
"id": 1726,
"pid": 205,
"city_code": "101251501",
"city_name": "吉首市"
},
{
"_id": 1319,
"id": 1727,
"pid": 205,
"city_code": "101251506",
"city_name": "泸溪县"
},
{
"_id": 1320,
"id": 1728,
"pid": 205,
"city_code": "101251505",
"city_name": "凤凰县"
},
{
"_id": 1321,
"id": 1729,
"pid": 205,
"city_code": "101251508",
"city_name": "花垣县"
},
{
"_id": 1322,
"id": 1730,
"pid": 205,
"city_code": "101251502",
"city_name": "保靖县"
},
{
"_id": 1323,
"id": 1731,
"pid": 205,
"city_code": "101251504",
"city_name": "古丈县"
},
{
"_id": 1324,
"id": 1732,
"pid": 205,
"city_code": "101251503",
"city_name": "永顺县"
},
{
"_id": 1325,
"id": 1733,
"pid": 205,
"city_code": "101251507",
"city_name": "龙山县"
},
{
"_id": 1326,
"id": 1734,
"pid": 206,
"city_code": "101250701",
"city_name": "赫山区"
},
{
"_id": 1327,
"id": 1736,
"pid": 206,
"city_code": "101250705",
"city_name": "沅江市"
},
{
"_id": 1328,
"id": 1737,
"pid": 206,
"city_code": "101250702",
"city_name": "南县"
},
{
"_id": 1329,
"id": 1738,
"pid": 206,
"city_code": "101250703",
"city_name": "桃江县"
},
{
"_id": 1330,
"id": 1739,
"pid": 206,
"city_code": "101250704",
"city_name": "安化县"
},
{
"_id": 1331,
"id": 1740,
"pid": 207,
"city_code": "101251410",
"city_name": "江华县"
},
{
"_id": 1332,
"id": 1743,
"pid": 207,
"city_code": "101251402",
"city_name": "祁阳县"
},
{
"_id": 1333,
"id": 1744,
"pid": 207,
"city_code": "101251403",
"city_name": "东安县"
},
{
"_id": 1334,
"id": 1745,
"pid": 207,
"city_code": "101251404",
"city_name": "双牌县"
},
{
"_id": 1335,
"id": 1746,
"pid": 207,
"city_code": "101251405",
"city_name": "道县"
},
{
"_id": 1336,
"id": 1747,
"pid": 207,
"city_code": "101251407",
"city_name": "江永县"
},
{
"_id": 1337,
"id": 1748,
"pid": 207,
"city_code": "101251406",
"city_name": "宁远县"
},
{
"_id": 1338,
"id": 1749,
"pid": 207,
"city_code": "101251408",
"city_name": "蓝山县"
},
{
"_id": 1339,
"id": 1750,
"pid": 207,
"city_code": "101251409",
"city_name": "新田县"
},
{
"_id": 1340,
"id": 1754,
"pid": 208,
"city_code": "101251004",
"city_name": "汨罗市"
},
{
"_id": 1341,
"id": 1755,
"pid": 208,
"city_code": "101251006",
"city_name": "临湘市"
},
{
"_id": 1342,
"id": 1756,
"pid": 208,
"city_code": "101251001",
"city_name": "岳阳县"
},
{
"_id": 1343,
"id": 1757,
"pid": 208,
"city_code": "101251002",
"city_name": "华容县"
},
{
"_id": 1344,
"id": 1758,
"pid": 208,
"city_code": "101251003",
"city_name": "湘阴县"
},
{
"_id": 1345,
"id": 1759,
"pid": 208,
"city_code": "101251005",
"city_name": "平江县"
},
{
"_id": 1346,
"id": 1764,
"pid": 209,
"city_code": "101250303",
"city_name": "醴陵市"
},
{
"_id": 1347,
"id": 1765,
"pid": 209,
"city_code": "101250304",
"city_name": "株洲县"
},
{
"_id": 1348,
"id": 1766,
"pid": 209,
"city_code": "101250302",
"city_name": "攸县"
},
{
"_id": 1349,
"id": 1767,
"pid": 209,
"city_code": "101250305",
"city_name": "茶陵县"
},
{
"_id": 1350,
"id": 1768,
"pid": 209,
"city_code": "101250306",
"city_name": "炎陵县"
},
{
"_id": 1351,
"id": 1774,
"pid": 210,
"city_code": "101060106",
"city_name": "双阳区"
},
{
"_id": 1352,
"id": 1779,
"pid": 210,
"city_code": "101060103",
"city_name": "德惠市"
},
{
"_id": 1353,
"id": 1780,
"pid": 210,
"city_code": "101060104",
"city_name": "九台市"
},
{
"_id": 1354,
"id": 1781,
"pid": 210,
"city_code": "101060105",
"city_name": "榆树市"
},
{
"_id": 1355,
"id": 1782,
"pid": 210,
"city_code": "101060102",
"city_name": "农安县"
},
{
"_id": 1356,
"id": 1787,
"pid": 211,
"city_code": "101060204",
"city_name": "蛟河市"
},
{
"_id": 1357,
"id": 1788,
"pid": 211,
"city_code": "101060206",
"city_name": "桦甸市"
},
{
"_id": 1358,
"id": 1789,
"pid": 211,
"city_code": "101060202",
"city_name": "舒兰市"
},
{
"_id": 1359,
"id": 1790,
"pid": 211,
"city_code": "101060205",
"city_name": "磐石市"
},
{
"_id": 1360,
"id": 1791,
"pid": 211,
"city_code": "101060203",
"city_name": "永吉县"
},
{
"_id": 1361,
"id": 1793,
"pid": 212,
"city_code": "101060602",
"city_name": "洮南市"
},
{
"_id": 1362,
"id": 1794,
"pid": 212,
"city_code": "101060603",
"city_name": "大安市"
},
{
"_id": 1363,
"id": 1795,
"pid": 212,
"city_code": "101060604",
"city_name": "镇赉县"
},
{
"_id": 1364,
"id": 1796,
"pid": 212,
"city_code": "101060605",
"city_name": "通榆县"
},
{
"_id": 1365,
"id": 1797,
"pid": 213,
"city_code": "101060907",
"city_name": "江源区"
},
{
"_id": 1366,
"id": 1799,
"pid": 213,
"city_code": "101060905",
"city_name": "长白县"
},
{
"_id": 1367,
"id": 1800,
"pid": 213,
"city_code": "101060903",
"city_name": "临江市"
},
{
"_id": 1368,
"id": 1801,
"pid": 213,
"city_code": "101060906",
"city_name": "抚松县"
},
{
"_id": 1369,
"id": 1802,
"pid": 213,
"city_code": "101060902",
"city_name": "靖宇县"
},
{
"_id": 1370,
"id": 1805,
"pid": 214,
"city_code": "101060702",
"city_name": "东丰县"
},
{
"_id": 1371,
"id": 1806,
"pid": 214,
"city_code": "101060703",
"city_name": "东辽县"
},
{
"_id": 1372,
"id": 1809,
"pid": 215,
"city_code": "101060405",
"city_name": "伊通县"
},
{
"_id": 1373,
"id": 1810,
"pid": 215,
"city_code": "101060404",
"city_name": "公主岭市"
},
{
"_id": 1374,
"id": 1811,
"pid": 215,
"city_code": "101060402",
"city_name": "双辽市"
},
{
"_id": 1375,
"id": 1812,
"pid": 215,
"city_code": "101060403",
"city_name": "梨树县"
},
{
"_id": 1376,
"id": 1813,
"pid": 216,
"city_code": "101060803",
"city_name": "前郭尔罗斯"
},
{
"_id": 1377,
"id": 1815,
"pid": 216,
"city_code": "101060804",
"city_name": "长岭县"
},
{
"_id": 1378,
"id": 1816,
"pid": 216,
"city_code": "101060802",
"city_name": "乾安县"
},
{
"_id": 1379,
"id": 1817,
"pid": 216,
"city_code": "101060805",
"city_name": "扶余市"
},
{
"_id": 1380,
"id": 1820,
"pid": 217,
"city_code": "101060502",
"city_name": "梅河口市"
},
{
"_id": 1381,
"id": 1821,
"pid": 217,
"city_code": "101060505",
"city_name": "集安市"
},
{
"_id": 1382,
"id": 1822,
"pid": 217,
"city_code": "101060506",
"city_name": "通化县"
},
{
"_id": 1383,
"id": 1823,
"pid": 217,
"city_code": "101060504",
"city_name": "辉南县"
},
{
"_id": 1384,
"id": 1824,
"pid": 217,
"city_code": "101060503",
"city_name": "柳河县"
},
{
"_id": 1385,
"id": 1825,
"pid": 218,
"city_code": "101060301",
"city_name": "延吉市"
},
{
"_id": 1386,
"id": 1826,
"pid": 218,
"city_code": "101060309",
"city_name": "图们市"
},
{
"_id": 1387,
"id": 1827,
"pid": 218,
"city_code": "101060302",
"city_name": "敦化市"
},
{
"_id": 1388,
"id": 1828,
"pid": 218,
"city_code": "101060308",
"city_name": "珲春市"
},
{
"_id": 1389,
"id": 1829,
"pid": 218,
"city_code": "101060307",
"city_name": "龙井市"
},
{
"_id": 1390,
"id": 1830,
"pid": 218,
"city_code": "101060305",
"city_name": "和龙市"
},
{
"_id": 1391,
"id": 1831,
"pid": 218,
"city_code": "101060303",
"city_name": "安图县"
},
{
"_id": 1392,
"id": 1832,
"pid": 218,
"city_code": "101060304",
"city_name": "汪清县"
},
{
"_id": 1393,
"id": 1841,
"pid": 219,
"city_code": "101190107",
"city_name": "浦口区"
},
{
"_id": 1394,
"id": 1842,
"pid": 219,
"city_code": "101190104",
"city_name": "江宁区"
},
{
"_id": 1395,
"id": 1843,
"pid": 219,
"city_code": "101190105",
"city_name": "六合区"
},
{
"_id": 1396,
"id": 1844,
"pid": 219,
"city_code": "101190102",
"city_name": "溧水区"
},
{
"_id": 1397,
"id": 1845,
"pid": 219,
"city_code": "101190103",
"city_name": "高淳县"
},
{
"_id": 1398,
"id": 1850,
"pid": 220,
"city_code": "101190405",
"city_name": "吴中区"
},
{
"_id": 1399,
"id": 1853,
"pid": 220,
"city_code": "101190404",
"city_name": "昆山市"
},
{
"_id": 1400,
"id": 1854,
"pid": 220,
"city_code": "101190402",
"city_name": "常熟市"
},
{
"_id": 1401,
"id": 1855,
"pid": 220,
"city_code": "101190403",
"city_name": "张家港市"
},
{
"_id": 1402,
"id": 1867,
"pid": 220,
"city_code": "101190407",
"city_name": "吴江区"
},
{
"_id": 1403,
"id": 1868,
"pid": 220,
"city_code": "101190408",
"city_name": "太仓市"
},
{
"_id": 1404,
"id": 1872,
"pid": 221,
"city_code": "101190204",
"city_name": "锡山区"
},
{
"_id": 1405,
"id": 1876,
"pid": 221,
"city_code": "101190202",
"city_name": "江阴市"
},
{
"_id": 1406,
"id": 1877,
"pid": 221,
"city_code": "101190203",
"city_name": "宜兴市"
},
{
"_id": 1407,
"id": 1883,
"pid": 222,
"city_code": "101191104",
"city_name": "武进区"
},
{
"_id": 1408,
"id": 1884,
"pid": 222,
"city_code": "101191102",
"city_name": "溧阳市"
},
{
"_id": 1409,
"id": 1885,
"pid": 222,
"city_code": "101191103",
"city_name": "金坛区"
},
{
"_id": 1410,
"id": 1888,
"pid": 223,
"city_code": "101190908",
"city_name": "楚州区"
},
{
"_id": 1411,
"id": 1889,
"pid": 223,
"city_code": "101190907",
"city_name": "淮阴区"
},
{
"_id": 1412,
"id": 1890,
"pid": 223,
"city_code": "101190905",
"city_name": "涟水县"
},
{
"_id": 1413,
"id": 1891,
"pid": 223,
"city_code": "101190904",
"city_name": "洪泽县"
},
{
"_id": 1414,
"id": 1892,
"pid": 223,
"city_code": "101190903",
"city_name": "盱眙县"
},
{
"_id": 1415,
"id": 1893,
"pid": 223,
"city_code": "101190902",
"city_name": "金湖县"
},
{
"_id": 1416,
"id": 1897,
"pid": 224,
"city_code": "101191003",
"city_name": "赣榆县"
},
{
"_id": 1417,
"id": 1898,
"pid": 224,
"city_code": "101191002",
"city_name": "东海县"
},
{
"_id": 1418,
"id": 1899,
"pid": 224,
"city_code": "101191004",
"city_name": "灌云县"
},
{
"_id": 1419,
"id": 1900,
"pid": 224,
"city_code": "101191005",
"city_name": "灌南县"
},
{
"_id": 1420,
"id": 1904,
"pid": 225,
"city_code": "101190507",
"city_name": "启东市"
},
{
"_id": 1421,
"id": 1905,
"pid": 225,
"city_code": "101190503",
"city_name": "如皋市"
},
{
"_id": 1422,
"id": 1906,
"pid": 225,
"city_code": "101190509",
"city_name": "通州区"
},
{
"_id": 1423,
"id": 1907,
"pid": 225,
"city_code": "101190508",
"city_name": "海门市"
},
{
"_id": 1424,
"id": 1908,
"pid": 225,
"city_code": "101190502",
"city_name": "海安县"
},
{
"_id": 1425,
"id": 1909,
"pid": 225,
"city_code": "101190504",
"city_name": "如东县"
},
{
"_id": 1426,
"id": 1911,
"pid": 226,
"city_code": "101191305",
"city_name": "宿豫区"
},
{
"_id": 1427,
"id": 1912,
"pid": 226,
"city_code": "101191305",
"city_name": "宿豫县"
},
{
"_id": 1428,
"id": 1913,
"pid": 226,
"city_code": "101191302",
"city_name": "沭阳县"
},
{
"_id": 1429,
"id": 1914,
"pid": 226,
"city_code": "101191303",
"city_name": "泗阳县"
},
{
"_id": 1430,
"id": 1915,
"pid": 226,
"city_code": "101191304",
"city_name": "泗洪县"
},
{
"_id": 1431,
"id": 1918,
"pid": 227,
"city_code": "101191202",
"city_name": "兴化市"
},
{
"_id": 1432,
"id": 1919,
"pid": 227,
"city_code": "101191205",
"city_name": "靖江市"
},
{
"_id": 1433,
"id": 1920,
"pid": 227,
"city_code": "101191203",
"city_name": "泰兴市"
},
{
"_id": 1434,
"id": 1921,
"pid": 227,
"city_code": "101191204",
"city_name": "姜堰区"
},
{
"_id": 1435,
"id": 1927,
"pid": 228,
"city_code": "101190807",
"city_name": "新沂市"
},
{
"_id": 1436,
"id": 1928,
"pid": 228,
"city_code": "101190805",
"city_name": "邳州市"
},
{
"_id": 1437,
"id": 1929,
"pid": 228,
"city_code": "101190803",
"city_name": "丰县"
},
{
"_id": 1438,
"id": 1930,
"pid": 228,
"city_code": "101190804",
"city_name": "沛县"
},
{
"_id": 1439,
"id": 1931,
"pid": 228,
"city_code": "101190802",
"city_name": "铜山区"
},
{
"_id": 1440,
"id": 1932,
"pid": 228,
"city_code": "101190806",
"city_name": "睢宁县"
},
{
"_id": 1441,
"id": 1935,
"pid": 229,
"city_code": "101190709",
"city_name": "盐都区"
},
{
"_id": 1442,
"id": 1937,
"pid": 229,
"city_code": "101190707",
"city_name": "东台市"
},
{
"_id": 1443,
"id": 1938,
"pid": 229,
"city_code": "101190708",
"city_name": "大丰区"
},
{
"_id": 1444,
"id": 1939,
"pid": 229,
"city_code": "101190702",
"city_name": "响水县"
},
{
"_id": 1445,
"id": 1940,
"pid": 229,
"city_code": "101190703",
"city_name": "滨海县"
},
{
"_id": 1446,
"id": 1941,
"pid": 229,
"city_code": "101190704",
"city_name": "阜宁县"
},
{
"_id": 1447,
"id": 1942,
"pid": 229,
"city_code": "101190705",
"city_name": "射阳县"
},
{
"_id": 1448,
"id": 1943,
"pid": 229,
"city_code": "101190706",
"city_name": "建湖县"
},
{
"_id": 1449,
"id": 1946,
"pid": 230,
"city_code": "101190606",
"city_name": "邗江区"
},
{
"_id": 1450,
"id": 1947,
"pid": 230,
"city_code": "101190603",
"city_name": "仪征市"
},
{
"_id": 1451,
"id": 1948,
"pid": 230,
"city_code": "101190604",
"city_name": "高邮市"
},
{
"_id": 1452,
"id": 1949,
"pid": 230,
"city_code": "101190605",
"city_name": "江都市"
},
{
"_id": 1453,
"id": 1950,
"pid": 230,
"city_code": "101190602",
"city_name": "宝应县"
},
{
"_id": 1454,
"id": 1953,
"pid": 231,
"city_code": "101190305",
"city_name": "丹徒区"
},
{
"_id": 1455,
"id": 1954,
"pid": 231,
"city_code": "101190302",
"city_name": "丹阳市"
},
{
"_id": 1456,
"id": 1955,
"pid": 231,
"city_code": "101190303",
"city_name": "扬中市"
},
{
"_id": 1457,
"id": 1956,
"pid": 231,
"city_code": "101190304",
"city_name": "句容市"
},
{
"_id": 1458,
"id": 1965,
"pid": 232,
"city_code": "101240103",
"city_name": "南昌县"
},
{
"_id": 1459,
"id": 1966,
"pid": 232,
"city_code": "101240102",
"city_name": "新建县"
},
{
"_id": 1460,
"id": 1967,
"pid": 232,
"city_code": "101240104",
"city_name": "安义县"
},
{
"_id": 1461,
"id": 1968,
"pid": 232,
"city_code": "101240105",
"city_name": "进贤县"
},
{
"_id": 1462,
"id": 1970,
"pid": 233,
"city_code": "101240408",
"city_name": "南城县"
},
{
"_id": 1463,
"id": 1971,
"pid": 233,
"city_code": "101240410",
"city_name": "黎川县"
},
{
"_id": 1464,
"id": 1972,
"pid": 233,
"city_code": "101240409",
"city_name": "南丰县"
},
{
"_id": 1465,
"id": 1973,
"pid": 233,
"city_code": "101240404",
"city_name": "崇仁县"
},
{
"_id": 1466,
"id": 1974,
"pid": 233,
"city_code": "101240403",
"city_name": "乐安县"
},
{
"_id": 1467,
"id": 1975,
"pid": 233,
"city_code": "101240407",
"city_name": "宜黄县"
},
{
"_id": 1468,
"id": 1976,
"pid": 233,
"city_code": "101240405",
"city_name": "金溪县"
},
{
"_id": 1469,
"id": 1977,
"pid": 233,
"city_code": "101240406",
"city_name": "资溪县"
},
{
"_id": 1470,
"id": 1978,
"pid": 233,
"city_code": "101240411",
"city_name": "东乡县"
},
{
"_id": 1471,
"id": 1979,
"pid": 233,
"city_code": "101240402",
"city_name": "广昌县"
},
{
"_id": 1472,
"id": 1981,
"pid": 234,
"city_code": "101240710",
"city_name": "于都县"
},
{
"_id": 1473,
"id": 1982,
"pid": 234,
"city_code": "101240709",
"city_name": "瑞金市"
},
{
"_id": 1474,
"id": 1983,
"pid": 234,
"city_code": "101240704",
"city_name": "南康市"
},
{
"_id": 1475,
"id": 1984,
"pid": 234,
"city_code": "101240718",
"city_name": "赣县"
},
{
"_id": 1476,
"id": 1985,
"pid": 234,
"city_code": "101240706",
"city_name": "信丰县"
},
{
"_id": 1477,
"id": 1986,
"pid": 234,
"city_code": "101240705",
"city_name": "大余县"
},
{
"_id": 1478,
"id": 1987,
"pid": 234,
"city_code": "101240703",
"city_name": "上犹县"
},
{
"_id": 1479,
"id": 1988,
"pid": 234,
"city_code": "101240702",
"city_name": "崇义县"
},
{
"_id": 1480,
"id": 1989,
"pid": 234,
"city_code": "101240712",
"city_name": "安远县"
},
{
"_id": 1481,
"id": 1990,
"pid": 234,
"city_code": "101240714",
"city_name": "龙南县"
},
{
"_id": 1482,
"id": 1991,
"pid": 234,
"city_code": "101240715",
"city_name": "定南县"
},
{
"_id": 1483,
"id": 1992,
"pid": 234,
"city_code": "101240713",
"city_name": "全南县"
},
{
"_id": 1484,
"id": 1993,
"pid": 234,
"city_code": "101240707",
"city_name": "宁都县"
},
{
"_id": 1485,
"id": 1994,
"pid": 234,
"city_code": "101240717",
"city_name": "兴国县"
},
{
"_id": 1486,
"id": 1995,
"pid": 234,
"city_code": "101240711",
"city_name": "会昌县"
},
{
"_id": 1487,
"id": 1996,
"pid": 234,
"city_code": "101240716",
"city_name": "寻乌县"
},
{
"_id": 1488,
"id": 1997,
"pid": 234,
"city_code": "101240708",
"city_name": "石城县"
},
{
"_id": 1489,
"id": 1998,
"pid": 235,
"city_code": "101240612",
"city_name": "安福县"
},
{
"_id": 1490,
"id": 2001,
"pid": 235,
"city_code": "101240608",
"city_name": "井冈山市"
},
{
"_id": 1491,
"id": 2002,
"pid": 235,
"city_code": "101240602",
"city_name": "吉安县"
},
{
"_id": 1492,
"id": 2003,
"pid": 235,
"city_code": "101240603",
"city_name": "吉水县"
},
{
"_id": 1493,
"id": 2004,
"pid": 235,
"city_code": "101240605",
"city_name": "峡江县"
},
{
"_id": 1494,
"id": 2005,
"pid": 235,
"city_code": "101240604",
"city_name": "新干县"
},
{
"_id": 1495,
"id": 2006,
"pid": 235,
"city_code": "101240606",
"city_name": "永丰县"
},
{
"_id": 1496,
"id": 2007,
"pid": 235,
"city_code": "101240611",
"city_name": "泰和县"
},
{
"_id": 1497,
"id": 2008,
"pid": 235,
"city_code": "101240610",
"city_name": "遂川县"
},
{
"_id": 1498,
"id": 2009,
"pid": 235,
"city_code": "101240609",
"city_name": "万安县"
},
{
"_id": 1499,
"id": 2010,
"pid": 235,
"city_code": "101240607",
"city_name": "永新县"
},
{
"_id": 1500,
"id": 2013,
"pid": 236,
"city_code": "101240802",
"city_name": "乐平市"
},
{
"_id": 1501,
"id": 2014,
"pid": 236,
"city_code": "101240803",
"city_name": "浮梁县"
},
{
"_id": 1502,
"id": 2016,
"pid": 237,
"city_code": "101240203",
"city_name": "庐山区"
},
{
"_id": 1503,
"id": 2017,
"pid": 237,
"city_code": "101240202",
"city_name": "瑞昌市"
},
{
"_id": 1504,
"id": 2018,
"pid": 237,
"city_code": "101240201",
"city_name": "九江县"
},
{
"_id": 1505,
"id": 2019,
"pid": 237,
"city_code": "101240204",
"city_name": "武宁县"
},
{
"_id": 1506,
"id": 2020,
"pid": 237,
"city_code": "101240212",
"city_name": "修水县"
},
{
"_id": 1507,
"id": 2021,
"pid": 237,
"city_code": "101240206",
"city_name": "永修县"
},
{
"_id": 1508,
"id": 2022,
"pid": 237,
"city_code": "101240205",
"city_name": "德安县"
},
{
"_id": 1509,
"id": 2023,
"pid": 237,
"city_code": "101240209",
"city_name": "星子县"
},
{
"_id": 1510,
"id": 2024,
"pid": 237,
"city_code": "101240210",
"city_name": "都昌县"
},
{
"_id": 1511,
"id": 2025,
"pid": 237,
"city_code": "101240207",
"city_name": "湖口县"
},
{
"_id": 1512,
"id": 2026,
"pid": 237,
"city_code": "101240208",
"city_name": "彭泽县"
},
{
"_id": 1513,
"id": 2027,
"pid": 238,
"city_code": "101240904",
"city_name": "安源区"
},
{
"_id": 1514,
"id": 2028,
"pid": 238,
"city_code": "101240906",
"city_name": "湘东区"
},
{
"_id": 1515,
"id": 2029,
"pid": 238,
"city_code": "101240902",
"city_name": "莲花县"
},
{
"_id": 1516,
"id": 2030,
"pid": 238,
"city_code": "101240905",
"city_name": "芦溪县"
},
{
"_id": 1517,
"id": 2031,
"pid": 238,
"city_code": "101240903",
"city_name": "上栗县"
},
{
"_id": 1518,
"id": 2033,
"pid": 239,
"city_code": "101240307",
"city_name": "德兴市"
},
{
"_id": 1519,
"id": 2034,
"pid": 239,
"city_code": "101240308",
"city_name": "上饶县"
},
{
"_id": 1520,
"id": 2035,
"pid": 239,
"city_code": "101240313",
"city_name": "广丰县"
},
{
"_id": 1521,
"id": 2036,
"pid": 239,
"city_code": "101240312",
"city_name": "玉山县"
},
{
"_id": 1522,
"id": 2037,
"pid": 239,
"city_code": "101240311",
"city_name": "铅山县"
},
{
"_id": 1523,
"id": 2038,
"pid": 239,
"city_code": "101240310",
"city_name": "横峰县"
},
{
"_id": 1524,
"id": 2039,
"pid": 239,
"city_code": "101240309",
"city_name": "弋阳县"
},
{
"_id": 1525,
"id": 2040,
"pid": 239,
"city_code": "101240305",
"city_name": "余干县"
},
{
"_id": 1526,
"id": 2041,
"pid": 239,
"city_code": "101240302",
"city_name": "鄱阳县"
},
{
"_id": 1527,
"id": 2042,
"pid": 239,
"city_code": "101240306",
"city_name": "万年县"
},
{
"_id": 1528,
"id": 2043,
"pid": 239,
"city_code": "101240303",
"city_name": "婺源县"
},
{
"_id": 1529,
"id": 2045,
"pid": 240,
"city_code": "101241002",
"city_name": "分宜县"
},
{
"_id": 1530,
"id": 2047,
"pid": 241,
"city_code": "101240510",
"city_name": "丰城市"
},
{
"_id": 1531,
"id": 2048,
"pid": 241,
"city_code": "101240509",
"city_name": "樟树市"
},
{
"_id": 1532,
"id": 2049,
"pid": 241,
"city_code": "101240508",
"city_name": "高安市"
},
{
"_id": 1533,
"id": 2050,
"pid": 241,
"city_code": "101240507",
"city_name": "奉新县"
},
{
"_id": 1534,
"id": 2051,
"pid": 241,
"city_code": "101240504",
"city_name": "万载县"
},
{
"_id": 1535,
"id": 2052,
"pid": 241,
"city_code": "101240505",
"city_name": "上高县"
},
{
"_id": 1536,
"id": 2053,
"pid": 241,
"city_code": "101240503",
"city_name": "宜丰县"
},
{
"_id": 1537,
"id": 2054,
"pid": 241,
"city_code": "101240506",
"city_name": "靖安县"
},
{
"_id": 1538,
"id": 2055,
"pid": 241,
"city_code": "101240502",
"city_name": "铜鼓县"
},
{
"_id": 1539,
"id": 2057,
"pid": 242,
"city_code": "101241103",
"city_name": "贵溪市"
},
{
"_id": 1540,
"id": 2058,
"pid": 242,
"city_code": "101241102",
"city_name": "余江县"
},
{
"_id": 1541,
"id": 2064,
"pid": 243,
"city_code": "101070102",
"city_name": "苏家屯区"
},
{
"_id": 1542,
"id": 2067,
"pid": 243,
"city_code": "101070107",
"city_name": "于洪区"
},
{
"_id": 1543,
"id": 2069,
"pid": 243,
"city_code": "101070106",
"city_name": "新民市"
},
{
"_id": 1544,
"id": 2070,
"pid": 243,
"city_code": "101070103",
"city_name": "辽中县"
},
{
"_id": 1545,
"id": 2071,
"pid": 243,
"city_code": "101070104",
"city_name": "康平县"
},
{
"_id": 1546,
"id": 2072,
"pid": 243,
"city_code": "101070105",
"city_name": "法库县"
},
{
"_id": 1547,
"id": 2077,
"pid": 244,
"city_code": "101070205",
"city_name": "旅顺口区"
},
{
"_id": 1548,
"id": 2078,
"pid": 244,
"city_code": "101070203",
"city_name": "金州区"
},
{
"_id": 1549,
"id": 2080,
"pid": 244,
"city_code": "101070202",
"city_name": "瓦房店市"
},
{
"_id": 1550,
"id": 2081,
"pid": 244,
"city_code": "101070204",
"city_name": "普兰店市"
},
{
"_id": 1551,
"id": 2082,
"pid": 244,
"city_code": "101070207",
"city_name": "庄河市"
},
{
"_id": 1552,
"id": 2083,
"pid": 244,
"city_code": "101070206",
"city_name": "长海县"
},
{
"_id": 1553,
"id": 2088,
"pid": 245,
"city_code": "101070303",
"city_name": "岫岩县"
},
{
"_id": 1554,
"id": 2089,
"pid": 245,
"city_code": "101070304",
"city_name": "海城市"
},
{
"_id": 1555,
"id": 2090,
"pid": 245,
"city_code": "101070302",
"city_name": "台安县"
},
{
"_id": 1556,
"id": 2091,
"pid": 246,
"city_code": "101070502",
"city_name": "本溪县"
},
{
"_id": 1557,
"id": 2096,
"pid": 246,
"city_code": "101070504",
"city_name": "桓仁县"
},
{
"_id": 1558,
"id": 2099,
"pid": 247,
"city_code": "101071204",
"city_name": "喀喇沁左翼蒙古族自治县"
},
{
"_id": 1559,
"id": 2100,
"pid": 247,
"city_code": "101071205",
"city_name": "北票市"
},
{
"_id": 1560,
"id": 2101,
"pid": 247,
"city_code": "101071203",
"city_name": "凌源市"
},
{
"_id": 1561,
"id": 2103,
"pid": 247,
"city_code": "101071207",
"city_name": "建平县"
},
{
"_id": 1562,
"id": 2107,
"pid": 248,
"city_code": "101070603",
"city_name": "宽甸县"
},
{
"_id": 1563,
"id": 2108,
"pid": 248,
"city_code": "101070604",
"city_name": "东港市"
},
{
"_id": 1564,
"id": 2109,
"pid": 248,
"city_code": "101070602",
"city_name": "凤城市"
},
{
"_id": 1565,
"id": 2114,
"pid": 249,
"city_code": "101070403",
"city_name": "清原县"
},
{
"_id": 1566,
"id": 2115,
"pid": 249,
"city_code": "101070402",
"city_name": "新宾县"
},
{
"_id": 1567,
"id": 2116,
"pid": 249,
"city_code": "101070401",
"city_name": "抚顺县"
},
{
"_id": 1568,
"id": 2123,
"pid": 250,
"city_code": "101070902",
"city_name": "彰武县"
},
{
"_id": 1569,
"id": 2127,
"pid": 251,
"city_code": "101071404",
"city_name": "兴城市"
},
{
"_id": 1570,
"id": 2128,
"pid": 251,
"city_code": "101071403",
"city_name": "绥中县"
},
{
"_id": 1571,
"id": 2129,
"pid": 251,
"city_code": "101071402",
"city_name": "建昌县"
},
{
"_id": 1572,
"id": 2133,
"pid": 252,
"city_code": "101070702",
"city_name": "凌海市"
},
{
"_id": 1573,
"id": 2134,
"pid": 252,
"city_code": "101070706",
"city_name": "北镇市"
},
{
"_id": 1574,
"id": 2135,
"pid": 252,
"city_code": "101070705",
"city_name": "黑山县"
},
{
"_id": 1575,
"id": 2136,
"pid": 252,
"city_code": "101070704",
"city_name": "义县"
},
{
"_id": 1576,
"id": 2141,
"pid": 253,
"city_code": "101071004",
"city_name": "弓长岭区"
},
{
"_id": 1577,
"id": 2142,
"pid": 253,
"city_code": "101071003",
"city_name": "灯塔市"
},
{
"_id": 1578,
"id": 2143,
"pid": 253,
"city_code": "101071002",
"city_name": "辽阳县"
},
{
"_id": 1579,
"id": 2146,
"pid": 254,
"city_code": "101071302",
"city_name": "大洼县"
},
{
"_id": 1580,
"id": 2147,
"pid": 254,
"city_code": "101071303",
"city_name": "盘山县"
},
{
"_id": 1581,
"id": 2150,
"pid": 255,
"city_code": "101071105",
"city_name": "调兵山市"
},
{
"_id": 1582,
"id": 2151,
"pid": 255,
"city_code": "101071102",
"city_name": "开原市"
},
{
"_id": 1583,
"id": 2152,
"pid": 255,
"city_code": "101071101",
"city_name": "铁岭县"
},
{
"_id": 1584,
"id": 2153,
"pid": 255,
"city_code": "101071104",
"city_name": "西丰县"
},
{
"_id": 1585,
"id": 2154,
"pid": 255,
"city_code": "101071103",
"city_name": "昌图县"
},
{
"_id": 1586,
"id": 2159,
"pid": 256,
"city_code": "101070803",
"city_name": "盖州市"
},
{
"_id": 1587,
"id": 2160,
"pid": 256,
"city_code": "101070802",
"city_name": "大石桥市"
},
{
"_id": 1588,
"id": 2165,
"pid": 257,
"city_code": "101080105",
"city_name": "清水河县"
},
{
"_id": 1589,
"id": 2166,
"pid": 257,
"city_code": "101080102",
"city_name": "土默特左旗"
},
{
"_id": 1590,
"id": 2167,
"pid": 257,
"city_code": "101080103",
"city_name": "托克托县"
},
{
"_id": 1591,
"id": 2168,
"pid": 257,
"city_code": "101080104",
"city_name": "和林格尔县"
},
{
"_id": 1592,
"id": 2169,
"pid": 257,
"city_code": "101080107",
"city_name": "武川县"
},
{
"_id": 1593,
"id": 2170,
"pid": 258,
"city_code": "101081201",
"city_name": "阿拉善左旗"
},
{
"_id": 1594,
"id": 2171,
"pid": 258,
"city_code": "101081202",
"city_name": "阿拉善右旗"
},
{
"_id": 1595,
"id": 2172,
"pid": 258,
"city_code": "101081203",
"city_name": "额济纳旗"
},
{
"_id": 1596,
"id": 2173,
"pid": 259,
"city_code": "101080801",
"city_name": "临河区"
},
{
"_id": 1597,
"id": 2174,
"pid": 259,
"city_code": "101080802",
"city_name": "五原县"
},
{
"_id": 1598,
"id": 2175,
"pid": 259,
"city_code": "101080803",
"city_name": "磴口县"
},
{
"_id": 1599,
"id": 2176,
"pid": 259,
"city_code": "101080804",
"city_name": "乌拉特前旗"
},
{
"_id": 1600,
"id": 2177,
"pid": 259,
"city_code": "101080806",
"city_name": "乌拉特中旗"
},
{
"_id": 1601,
"id": 2178,
"pid": 259,
"city_code": "101080807",
"city_name": "乌拉特后旗"
},
{
"_id": 1602,
"id": 2179,
"pid": 259,
"city_code": "101080810",
"city_name": "杭锦后旗"
},
{
"_id": 1603,
"id": 2184,
"pid": 260,
"city_code": "101080207",
"city_name": "石拐区"
},
{
"_id": 1604,
"id": 2185,
"pid": 260,
"city_code": "101080202",
"city_name": "白云鄂博"
},
{
"_id": 1605,
"id": 2186,
"pid": 260,
"city_code": "101080204",
"city_name": "土默特右旗"
},
{
"_id": 1606,
"id": 2187,
"pid": 260,
"city_code": "101080205",
"city_name": "固阳县"
},
{
"_id": 1607,
"id": 2188,
"pid": 260,
"city_code": "101080206",
"city_name": "达尔罕茂明安联合旗"
},
{
"_id": 1608,
"id": 2192,
"pid": 261,
"city_code": "101080603",
"city_name": "阿鲁科尔沁旗"
},
{
"_id": 1609,
"id": 2193,
"pid": 261,
"city_code": "101080605",
"city_name": "巴林左旗"
},
{
"_id": 1610,
"id": 2194,
"pid": 261,
"city_code": "101080606",
"city_name": "巴林右旗"
},
{
"_id": 1611,
"id": 2195,
"pid": 261,
"city_code": "101080607",
"city_name": "林西县"
},
{
"_id": 1612,
"id": 2196,
"pid": 261,
"city_code": "101080608",
"city_name": "克什克腾旗"
},
{
"_id": 1613,
"id": 2197,
"pid": 261,
"city_code": "101080609",
"city_name": "翁牛特旗"
},
{
"_id": 1614,
"id": 2198,
"pid": 261,
"city_code": "101080611",
"city_name": "喀喇沁旗"
},
{
"_id": 1615,
"id": 2199,
"pid": 261,
"city_code": "101080613",
"city_name": "宁城县"
},
{
"_id": 1616,
"id": 2200,
"pid": 261,
"city_code": "101080614",
"city_name": "敖汉旗"
},
{
"_id": 1617,
"id": 2201,
"pid": 262,
"city_code": "101080713",
"city_name": "东胜区"
},
{
"_id": 1618,
"id": 2202,
"pid": 262,
"city_code": "101080703",
"city_name": "达拉特旗"
},
{
"_id": 1619,
"id": 2203,
"pid": 262,
"city_code": "101080704",
"city_name": "准格尔旗"
},
{
"_id": 1620,
"id": 2204,
"pid": 262,
"city_code": "101080705",
"city_name": "鄂托克前旗"
},
{
"_id": 1621,
"id": 2205,
"pid": 262,
"city_code": "101080708",
"city_name": "鄂托克旗"
},
{
"_id": 1622,
"id": 2206,
"pid": 262,
"city_code": "101080709",
"city_name": "杭锦旗"
},
{
"_id": 1623,
"id": 2207,
"pid": 262,
"city_code": "101080710",
"city_name": "乌审旗"
},
{
"_id": 1624,
"id": 2208,
"pid": 262,
"city_code": "101080711",
"city_name": "伊金霍洛旗"
},
{
"_id": 1625,
"id": 2209,
"pid": 263,
"city_code": "101081001",
"city_name": "海拉尔区"
},
{
"_id": 1626,
"id": 2210,
"pid": 263,
"city_code": "101081004",
"city_name": "莫力达瓦"
},
{
"_id": 1627,
"id": 2211,
"pid": 263,
"city_code": "101081010",
"city_name": "满洲里市"
},
{
"_id": 1628,
"id": 2212,
"pid": 263,
"city_code": "101081011",
"city_name": "牙克石市"
},
{
"_id": 1629,
"id": 2213,
"pid": 263,
"city_code": "101081012",
"city_name": "扎兰屯市"
},
{
"_id": 1630,
"id": 2214,
"pid": 263,
"city_code": "101081014",
"city_name": "额尔古纳市"
},
{
"_id": 1631,
"id": 2215,
"pid": 263,
"city_code": "101081015",
"city_name": "根河市"
},
{
"_id": 1632,
"id": 2216,
"pid": 263,
"city_code": "101081003",
"city_name": "阿荣旗"
},
{
"_id": 1633,
"id": 2217,
"pid": 263,
"city_code": "101081005",
"city_name": "鄂伦春自治旗"
},
{
"_id": 1634,
"id": 2218,
"pid": 263,
"city_code": "101081006",
"city_name": "鄂温克族自治旗"
},
{
"_id": 1635,
"id": 2219,
"pid": 263,
"city_code": "101081007",
"city_name": "陈巴尔虎旗"
},
{
"_id": 1636,
"id": 2220,
"pid": 263,
"city_code": "101081008",
"city_name": "新巴尔虎左旗"
},
{
"_id": 1637,
"id": 2221,
"pid": 263,
"city_code": "101081009",
"city_name": "新巴尔虎右旗"
},
{
"_id": 1638,
"id": 2223,
"pid": 264,
"city_code": "101080512",
"city_name": "霍林郭勒市"
},
{
"_id": 1639,
"id": 2224,
"pid": 264,
"city_code": "101080503",
"city_name": "科尔沁左翼中旗"
},
{
"_id": 1640,
"id": 2225,
"pid": 264,
"city_code": "101080504",
"city_name": "科尔沁左翼后旗"
},
{
"_id": 1641,
"id": 2226,
"pid": 264,
"city_code": "101080506",
"city_name": "开鲁县"
},
{
"_id": 1642,
"id": 2227,
"pid": 264,
"city_code": "101080507",
"city_name": "库伦旗"
},
{
"_id": 1643,
"id": 2228,
"pid": 264,
"city_code": "101080508",
"city_name": "奈曼旗"
},
{
"_id": 1644,
"id": 2229,
"pid": 264,
"city_code": "101080509",
"city_name": "扎鲁特旗"
},
{
"_id": 1645,
"id": 2233,
"pid": 266,
"city_code": "101080403",
"city_name": "化德县"
},
{
"_id": 1646,
"id": 2234,
"pid": 266,
"city_code": "101080401",
"city_name": "集宁区"
},
{
"_id": 1647,
"id": 2235,
"pid": 266,
"city_code": "101080412",
"city_name": "丰镇市"
},
{
"_id": 1648,
"id": 2236,
"pid": 266,
"city_code": "101080402",
"city_name": "卓资县"
},
{
"_id": 1649,
"id": 2237,
"pid": 266,
"city_code": "101080404",
"city_name": "商都县"
},
{
"_id": 1650,
"id": 2238,
"pid": 266,
"city_code": "101080406",
"city_name": "兴和县"
},
{
"_id": 1651,
"id": 2239,
"pid": 266,
"city_code": "101080407",
"city_name": "凉城县"
},
{
"_id": 1652,
"id": 2240,
"pid": 266,
"city_code": "101080408",
"city_name": "察哈尔右翼前旗"
},
{
"_id": 1653,
"id": 2241,
"pid": 266,
"city_code": "101080409",
"city_name": "察哈尔右翼中旗"
},
{
"_id": 1654,
"id": 2242,
"pid": 266,
"city_code": "101080410",
"city_name": "察哈尔右翼后旗"
},
{
"_id": 1655,
"id": 2243,
"pid": 266,
"city_code": "101080411",
"city_name": "四子王旗"
},
{
"_id": 1656,
"id": 2244,
"pid": 267,
"city_code": "101080903",
"city_name": "二连浩特市"
},
{
"_id": 1657,
"id": 2245,
"pid": 267,
"city_code": "101080901",
"city_name": "锡林浩特市"
},
{
"_id": 1658,
"id": 2246,
"pid": 267,
"city_code": "101080904",
"city_name": "阿巴嘎旗"
},
{
"_id": 1659,
"id": 2247,
"pid": 267,
"city_code": "101080906",
"city_name": "苏尼特左旗"
},
{
"_id": 1660,
"id": 2248,
"pid": 267,
"city_code": "101080907",
"city_name": "苏尼特右旗"
},
{
"_id": 1661,
"id": 2249,
"pid": 267,
"city_code": "101080909",
"city_name": "东乌珠穆沁旗"
},
{
"_id": 1662,
"id": 2250,
"pid": 267,
"city_code": "101080910",
"city_name": "西乌珠穆沁旗"
},
{
"_id": 1663,
"id": 2251,
"pid": 267,
"city_code": "101080911",
"city_name": "太仆寺旗"
},
{
"_id": 1664,
"id": 2252,
"pid": 267,
"city_code": "101080912",
"city_name": "镶黄旗"
},
{
"_id": 1665,
"id": 2253,
"pid": 267,
"city_code": "101080913",
"city_name": "正镶白旗"
},
{
"_id": 1666,
"id": 2255,
"pid": 267,
"city_code": "101080915",
"city_name": "多伦县"
},
{
"_id": 1667,
"id": 2256,
"pid": 268,
"city_code": "101081101",
"city_name": "乌兰浩特市"
},
{
"_id": 1668,
"id": 2257,
"pid": 268,
"city_code": "101081102",
"city_name": "阿尔山市"
},
{
"_id": 1669,
"id": 2258,
"pid": 268,
"city_code": "101081109",
"city_name": "科尔沁右翼前旗"
},
{
"_id": 1670,
"id": 2259,
"pid": 268,
"city_code": "101081103",
"city_name": "科尔沁右翼中旗"
},
{
"_id": 1671,
"id": 2260,
"pid": 268,
"city_code": "101081105",
"city_name": "扎赉特旗"
},
{
"_id": 1672,
"id": 2261,
"pid": 268,
"city_code": "101081107",
"city_name": "突泉县"
},
{
"_id": 1673,
"id": 2265,
"pid": 269,
"city_code": "101170103",
"city_name": "灵武市"
},
{
"_id": 1674,
"id": 2266,
"pid": 269,
"city_code": "101170102",
"city_name": "永宁县"
},
{
"_id": 1675,
"id": 2267,
"pid": 269,
"city_code": "101170104",
"city_name": "贺兰县"
},
{
"_id": 1676,
"id": 2270,
"pid": 270,
"city_code": "101170402",
"city_name": "西吉县"
},
{
"_id": 1677,
"id": 2271,
"pid": 270,
"city_code": "101170403",
"city_name": "隆德县"
},
{
"_id": 1678,
"id": 2272,
"pid": 270,
"city_code": "101170404",
"city_name": "泾源县"
},
{
"_id": 1679,
"id": 2273,
"pid": 270,
"city_code": "101170406",
"city_name": "彭阳县"
},
{
"_id": 1680,
"id": 2274,
"pid": 271,
"city_code": "101170202",
"city_name": "惠农区"
},
{
"_id": 1681,
"id": 2275,
"pid": 271,
"city_code": "101170206",
"city_name": "大武口区"
},
{
"_id": 1682,
"id": 2276,
"pid": 271,
"city_code": "101170202",
"city_name": "惠农区"
},
{
"_id": 1683,
"id": 2277,
"pid": 271,
"city_code": "101170204",
"city_name": "陶乐县"
},
{
"_id": 1684,
"id": 2278,
"pid": 271,
"city_code": "101170203",
"city_name": "平罗县"
},
{
"_id": 1685,
"id": 2281,
"pid": 272,
"city_code": "101170306",
"city_name": "青铜峡市"
},
{
"_id": 1686,
"id": 2283,
"pid": 272,
"city_code": "101170303",
"city_name": "盐池县"
},
{
"_id": 1687,
"id": 2284,
"pid": 272,
"city_code": "101170302",
"city_name": "同心县"
},
{
"_id": 1688,
"id": 2286,
"pid": 273,
"city_code": "101170504",
"city_name": "海原县"
},
{
"_id": 1689,
"id": 2287,
"pid": 273,
"city_code": "101170502",
"city_name": "中宁县"
},
{
"_id": 1690,
"id": 2292,
"pid": 274,
"city_code": "101150104",
"city_name": "湟中县"
},
{
"_id": 1691,
"id": 2293,
"pid": 274,
"city_code": "101150103",
"city_name": "湟源县"
},
{
"_id": 1692,
"id": 2294,
"pid": 274,
"city_code": "101150102",
"city_name": "大通县"
},
{
"_id": 1693,
"id": 2295,
"pid": 275,
"city_code": "101150508",
"city_name": "玛沁县"
},
{
"_id": 1694,
"id": 2296,
"pid": 275,
"city_code": "101150502",
"city_name": "班玛县"
},
{
"_id": 1695,
"id": 2297,
"pid": 275,
"city_code": "101150503",
"city_name": "甘德县"
},
{
"_id": 1696,
"id": 2298,
"pid": 275,
"city_code": "101150504",
"city_name": "达日县"
},
{
"_id": 1697,
"id": 2299,
"pid": 275,
"city_code": "101150505",
"city_name": "久治县"
},
{
"_id": 1698,
"id": 2300,
"pid": 275,
"city_code": "101150506",
"city_name": "玛多县"
},
{
"_id": 1699,
"id": 2301,
"pid": 276,
"city_code": "101150804",
"city_name": "海晏县"
},
{
"_id": 1700,
"id": 2302,
"pid": 276,
"city_code": "101150803",
"city_name": "祁连县"
},
{
"_id": 1701,
"id": 2303,
"pid": 276,
"city_code": "101150806",
"city_name": "刚察县"
},
{
"_id": 1702,
"id": 2304,
"pid": 276,
"city_code": "101150802",
"city_name": "门源县"
},
{
"_id": 1703,
"id": 2305,
"pid": 277,
"city_code": "101150208",
"city_name": "平安县"
},
{
"_id": 1704,
"id": 2306,
"pid": 277,
"city_code": "101150202",
"city_name": "乐都县"
},
{
"_id": 1705,
"id": 2307,
"pid": 277,
"city_code": "101150203",
"city_name": "民和县"
},
{
"_id": 1706,
"id": 2308,
"pid": 277,
"city_code": "101150204",
"city_name": "互助县"
},
{
"_id": 1707,
"id": 2309,
"pid": 277,
"city_code": "101150205",
"city_name": "化隆县"
},
{
"_id": 1708,
"id": 2310,
"pid": 277,
"city_code": "101150206",
"city_name": "循化县"
},
{
"_id": 1709,
"id": 2311,
"pid": 278,
"city_code": "101150409",
"city_name": "共和县"
},
{
"_id": 1710,
"id": 2312,
"pid": 278,
"city_code": "101150408",
"city_name": "同德县"
},
{
"_id": 1711,
"id": 2313,
"pid": 278,
"city_code": "101150404",
"city_name": "贵德县"
},
{
"_id": 1712,
"id": 2314,
"pid": 278,
"city_code": "101150406",
"city_name": "兴海县"
},
{
"_id": 1713,
"id": 2315,
"pid": 278,
"city_code": "101150407",
"city_name": "贵南县"
},
{
"_id": 1714,
"id": 2316,
"pid": 279,
"city_code": "101150716",
"city_name": "德令哈市"
},
{
"_id": 1715,
"id": 2317,
"pid": 279,
"city_code": "101150702",
"city_name": "格尔木市"
},
{
"_id": 1716,
"id": 2318,
"pid": 279,
"city_code": "101150709",
"city_name": "乌兰县"
},
{
"_id": 1717,
"id": 2319,
"pid": 279,
"city_code": "101150710",
"city_name": "都兰县"
},
{
"_id": 1718,
"id": 2320,
"pid": 279,
"city_code": "101150708",
"city_name": "天峻县"
},
{
"_id": 1719,
"id": 2321,
"pid": 280,
"city_code": "101150305",
"city_name": "同仁县"
},
{
"_id": 1720,
"id": 2322,
"pid": 280,
"city_code": "101150302",
"city_name": "尖扎县"
},
{
"_id": 1721,
"id": 2323,
"pid": 280,
"city_code": "101150303",
"city_name": "泽库县"
},
{
"_id": 1722,
"id": 2324,
"pid": 280,
"city_code": "101150304",
"city_name": "河南蒙古族自治县"
},
{
"_id": 1723,
"id": 2325,
"pid": 281,
"city_code": "101150601",
"city_name": "玉树县"
},
{
"_id": 1724,
"id": 2326,
"pid": 281,
"city_code": "101150604",
"city_name": "杂多县"
},
{
"_id": 1725,
"id": 2327,
"pid": 281,
"city_code": "101150602",
"city_name": "称多县"
},
{
"_id": 1726,
"id": 2328,
"pid": 281,
"city_code": "101150603",
"city_name": "治多县"
},
{
"_id": 1727,
"id": 2329,
"pid": 281,
"city_code": "101150605",
"city_name": "囊谦县"
},
{
"_id": 1728,
"id": 2330,
"pid": 281,
"city_code": "101150606",
"city_name": "曲麻莱县"
},
{
"_id": 1729,
"id": 2336,
"pid": 282,
"city_code": "101120102",
"city_name": "长清区"
},
{
"_id": 1730,
"id": 2337,
"pid": 282,
"city_code": "101120104",
"city_name": "章丘市"
},
{
"_id": 1731,
"id": 2338,
"pid": 282,
"city_code": "101120105",
"city_name": "平阴县"
},
{
"_id": 1732,
"id": 2339,
"pid": 282,
"city_code": "101120106",
"city_name": "济阳县"
},
{
"_id": 1733,
"id": 2340,
"pid": 282,
"city_code": "101120103",
"city_name": "商河县"
},
{
"_id": 1734,
"id": 2347,
"pid": 283,
"city_code": "101120202",
"city_name": "崂山区"
},
{
"_id": 1735,
"id": 2348,
"pid": 283,
"city_code": "101120205",
"city_name": "胶州市"
},
{
"_id": 1736,
"id": 2349,
"pid": 283,
"city_code": "101120204",
"city_name": "即墨市"
},
{
"_id": 1737,
"id": 2350,
"pid": 283,
"city_code": "101120208",
"city_name": "平度市"
},
{
"_id": 1738,
"id": 2351,
"pid": 283,
"city_code": "101120206",
"city_name": "胶南市"
},
{
"_id": 1739,
"id": 2352,
"pid": 283,
"city_code": "101120207",
"city_name": "莱西市"
},
{
"_id": 1740,
"id": 2354,
"pid": 284,
"city_code": "101121105",
"city_name": "惠民县"
},
{
"_id": 1741,
"id": 2355,
"pid": 284,
"city_code": "101121104",
"city_name": "阳信县"
},
{
"_id": 1742,
"id": 2356,
"pid": 284,
"city_code": "101121103",
"city_name": "无棣县"
},
{
"_id": 1743,
"id": 2357,
"pid": 284,
"city_code": "101121106",
"city_name": "沾化县"
},
{
"_id": 1744,
"id": 2358,
"pid": 284,
"city_code": "101121102",
"city_name": "博兴县"
},
{
"_id": 1745,
"id": 2359,
"pid": 284,
"city_code": "101121107",
"city_name": "邹平县"
},
{
"_id": 1746,
"id": 2361,
"pid": 285,
"city_code": "101120404",
"city_name": "陵县"
},
{
"_id": 1747,
"id": 2362,
"pid": 285,
"city_code": "101120406",
"city_name": "乐陵市"
},
{
"_id": 1748,
"id": 2363,
"pid": 285,
"city_code": "101120411",
"city_name": "禹城市"
},
{
"_id": 1749,
"id": 2364,
"pid": 285,
"city_code": "101120409",
"city_name": "宁津县"
},
{
"_id": 1750,
"id": 2365,
"pid": 285,
"city_code": "101120407",
"city_name": "庆云县"
},
{
"_id": 1751,
"id": 2366,
"pid": 285,
"city_code": "101120403",
"city_name": "临邑县"
},
{
"_id": 1752,
"id": 2367,
"pid": 285,
"city_code": "101120405",
"city_name": "齐河县"
},
{
"_id": 1753,
"id": 2368,
"pid": 285,
"city_code": "101120408",
"city_name": "平原县"
},
{
"_id": 1754,
"id": 2369,
"pid": 285,
"city_code": "101120410",
"city_name": "夏津县"
},
{
"_id": 1755,
"id": 2370,
"pid": 285,
"city_code": "101120402",
"city_name": "武城县"
},
{
"_id": 1756,
"id": 2371,
"pid": 286,
"city_code": "101121201",
"city_name": "东营区"
},
{
"_id": 1757,
"id": 2372,
"pid": 286,
"city_code": "101121202",
"city_name": "河口区"
},
{
"_id": 1758,
"id": 2373,
"pid": 286,
"city_code": "101121203",
"city_name": "垦利县"
},
{
"_id": 1759,
"id": 2374,
"pid": 286,
"city_code": "101121204",
"city_name": "利津县"
},
{
"_id": 1760,
"id": 2375,
"pid": 286,
"city_code": "101121205",
"city_name": "广饶县"
},
{
"_id": 1761,
"id": 2377,
"pid": 287,
"city_code": "101121007",
"city_name": "曹县"
},
{
"_id": 1762,
"id": 2378,
"pid": 287,
"city_code": "101121009",
"city_name": "单县"
},
{
"_id": 1763,
"id": 2379,
"pid": 287,
"city_code": "101121008",
"city_name": "成武县"
},
{
"_id": 1764,
"id": 2380,
"pid": 287,
"city_code": "101121006",
"city_name": "巨野县"
},
{
"_id": 1765,
"id": 2381,
"pid": 287,
"city_code": "101121003",
"city_name": "郓城县"
},
{
"_id": 1766,
"id": 2382,
"pid": 287,
"city_code": "101121002",
"city_name": "鄄城县"
},
{
"_id": 1767,
"id": 2383,
"pid": 287,
"city_code": "101121005",
"city_name": "定陶县"
},
{
"_id": 1768,
"id": 2384,
"pid": 287,
"city_code": "101121004",
"city_name": "东明县"
},
{
"_id": 1769,
"id": 2387,
"pid": 288,
"city_code": "101120710",
"city_name": "曲阜市"
},
{
"_id": 1770,
"id": 2388,
"pid": 288,
"city_code": "101120705",
"city_name": "兖州市"
},
{
"_id": 1771,
"id": 2389,
"pid": 288,
"city_code": "101120711",
"city_name": "邹城市"
},
{
"_id": 1772,
"id": 2390,
"pid": 288,
"city_code": "101120703",
"city_name": "微山县"
},
{
"_id": 1773,
"id": 2391,
"pid": 288,
"city_code": "101120704",
"city_name": "鱼台县"
},
{
"_id": 1774,
"id": 2392,
"pid": 288,
"city_code": "101120706",
"city_name": "金乡县"
},
{
"_id": 1775,
"id": 2393,
"pid": 288,
"city_code": "101120702",
"city_name": "嘉祥县"
},
{
"_id": 1776,
"id": 2394,
"pid": 288,
"city_code": "101120707",
"city_name": "汶上县"
},
{
"_id": 1777,
"id": 2395,
"pid": 288,
"city_code": "101120708",
"city_name": "泗水县"
},
{
"_id": 1778,
"id": 2396,
"pid": 288,
"city_code": "101120709",
"city_name": "梁山县"
},
{
"_id": 1779,
"id": 2400,
"pid": 290,
"city_code": "101121707",
"city_name": "临清市"
},
{
"_id": 1780,
"id": 2401,
"pid": 290,
"city_code": "101121703",
"city_name": "阳谷县"
},
{
"_id": 1781,
"id": 2402,
"pid": 290,
"city_code": "101121709",
"city_name": "莘县"
},
{
"_id": 1782,
"id": 2403,
"pid": 290,
"city_code": "101121705",
"city_name": "茌平县"
},
{
"_id": 1783,
"id": 2404,
"pid": 290,
"city_code": "101121706",
"city_name": "东阿县"
},
{
"_id": 1784,
"id": 2405,
"pid": 290,
"city_code": "101121702",
"city_name": "冠县"
},
{
"_id": 1785,
"id": 2406,
"pid": 290,
"city_code": "101121704",
"city_name": "高唐县"
},
{
"_id": 1786,
"id": 2410,
"pid": 291,
"city_code": "101120903",
"city_name": "沂南县"
},
{
"_id": 1787,
"id": 2411,
"pid": 291,
"city_code": "101120906",
"city_name": "郯城县"
},
{
"_id": 1788,
"id": 2412,
"pid": 291,
"city_code": "101120910",
"city_name": "沂水县"
},
{
"_id": 1789,
"id": 2413,
"pid": 291,
"city_code": "101120904",
"city_name": "兰陵县"
},
{
"_id": 1790,
"id": 2414,
"pid": 291,
"city_code": "101120909",
"city_name": "费县"
},
{
"_id": 1791,
"id": 2415,
"pid": 291,
"city_code": "101120908",
"city_name": "平邑县"
},
{
"_id": 1792,
"id": 2416,
"pid": 291,
"city_code": "101120902",
"city_name": "莒南县"
},
{
"_id": 1793,
"id": 2417,
"pid": 291,
"city_code": "101120907",
"city_name": "蒙阴县"
},
{
"_id": 1794,
"id": 2418,
"pid": 291,
"city_code": "101120905",
"city_name": "临沭县"
},
{
"_id": 1795,
"id": 2421,
"pid": 292,
"city_code": "101121502",
"city_name": "五莲县"
},
{
"_id": 1796,
"id": 2422,
"pid": 292,
"city_code": "101121503",
"city_name": "莒县"
},
{
"_id": 1797,
"id": 2423,
"pid": 293,
"city_code": "101120803",
"city_name": "泰山区"
},
{
"_id": 1798,
"id": 2425,
"pid": 293,
"city_code": "101120802",
"city_name": "新泰市"
},
{
"_id": 1799,
"id": 2426,
"pid": 293,
"city_code": "101120804",
"city_name": "肥城市"
},
{
"_id": 1800,
"id": 2427,
"pid": 293,
"city_code": "101120806",
"city_name": "宁阳县"
},
{
"_id": 1801,
"id": 2428,
"pid": 293,
"city_code": "101120805",
"city_name": "东平县"
},
{
"_id": 1802,
"id": 2429,
"pid": 294,
"city_code": "101121303",
"city_name": "荣成市"
},
{
"_id": 1803,
"id": 2430,
"pid": 294,
"city_code": "101121304",
"city_name": "乳山市"
},
{
"_id": 1804,
"id": 2432,
"pid": 294,
"city_code": "101121302",
"city_name": "文登市"
},
{
"_id": 1805,
"id": 2437,
"pid": 295,
"city_code": "101120602",
"city_name": "青州市"
},
{
"_id": 1806,
"id": 2438,
"pid": 295,
"city_code": "101120609",
"city_name": "诸城市"
},
{
"_id": 1807,
"id": 2439,
"pid": 295,
"city_code": "101120603",
"city_name": "寿光市"
},
{
"_id": 1808,
"id": 2440,
"pid": 295,
"city_code": "101120607",
"city_name": "安丘市"
},
{
"_id": 1809,
"id": 2441,
"pid": 295,
"city_code": "101120608",
"city_name": "高密市"
},
{
"_id": 1810,
"id": 2442,
"pid": 295,
"city_code": "101120606",
"city_name": "昌邑市"
},
{
"_id": 1811,
"id": 2443,
"pid": 295,
"city_code": "101120604",
"city_name": "临朐县"
},
{
"_id": 1812,
"id": 2444,
"pid": 295,
"city_code": "101120605",
"city_name": "昌乐县"
},
{
"_id": 1813,
"id": 2446,
"pid": 296,
"city_code": "101120508",
"city_name": "福山区"
},
{
"_id": 1814,
"id": 2447,
"pid": 296,
"city_code": "101120509",
"city_name": "牟平区"
},
{
"_id": 1815,
"id": 2450,
"pid": 296,
"city_code": "101120505",
"city_name": "龙口市"
},
{
"_id": 1816,
"id": 2451,
"pid": 296,
"city_code": "101120510",
"city_name": "莱阳市"
},
{
"_id": 1817,
"id": 2452,
"pid": 296,
"city_code": "101120502",
"city_name": "莱州市"
},
{
"_id": 1818,
"id": 2453,
"pid": 296,
"city_code": "101120504",
"city_name": "蓬莱市"
},
{
"_id": 1819,
"id": 2454,
"pid": 296,
"city_code": "101120506",
"city_name": "招远市"
},
{
"_id": 1820,
"id": 2455,
"pid": 296,
"city_code": "101120507",
"city_name": "栖霞市"
},
{
"_id": 1821,
"id": 2456,
"pid": 296,
"city_code": "101120511",
"city_name": "海阳市"
},
{
"_id": 1822,
"id": 2457,
"pid": 296,
"city_code": "101120503",
"city_name": "长岛县"
},
{
"_id": 1823,
"id": 2460,
"pid": 297,
"city_code": "101121403",
"city_name": "峄城区"
},
{
"_id": 1824,
"id": 2461,
"pid": 297,
"city_code": "101121404",
"city_name": "台儿庄区"
},
{
"_id": 1825,
"id": 2462,
"pid": 297,
"city_code": "101121402",
"city_name": "薛城区"
},
{
"_id": 1826,
"id": 2463,
"pid": 297,
"city_code": "101121405",
"city_name": "滕州市"
},
{
"_id": 1827,
"id": 2465,
"pid": 298,
"city_code": "101120308",
"city_name": "临淄区"
},
{
"_id": 1828,
"id": 2466,
"pid": 298,
"city_code": "101120302",
"city_name": "淄川区"
},
{
"_id": 1829,
"id": 2467,
"pid": 298,
"city_code": "101120303",
"city_name": "博山区"
},
{
"_id": 1830,
"id": 2468,
"pid": 298,
"city_code": "101120305",
"city_name": "周村区"
},
{
"_id": 1831,
"id": 2469,
"pid": 298,
"city_code": "101120307",
"city_name": "桓台县"
},
{
"_id": 1832,
"id": 2470,
"pid": 298,
"city_code": "101120304",
"city_name": "高青县"
},
{
"_id": 1833,
"id": 2471,
"pid": 298,
"city_code": "101120306",
"city_name": "沂源县"
},
{
"_id": 1834,
"id": 2481,
"pid": 299,
"city_code": "101100102",
"city_name": "清徐县"
},
{
"_id": 1835,
"id": 2482,
"pid": 299,
"city_code": "101100103",
"city_name": "阳曲县"
},
{
"_id": 1836,
"id": 2483,
"pid": 299,
"city_code": "101100104",
"city_name": "娄烦县"
},
{
"_id": 1837,
"id": 2484,
"pid": 299,
"city_code": "101100105",
"city_name": "古交市"
},
{
"_id": 1838,
"id": 2487,
"pid": 300,
"city_code": "101100508",
"city_name": "沁县"
},
{
"_id": 1839,
"id": 2488,
"pid": 300,
"city_code": "101100504",
"city_name": "潞城市"
},
{
"_id": 1840,
"id": 2489,
"pid": 300,
"city_code": "101100501",
"city_name": "长治县"
},
{
"_id": 1841,
"id": 2490,
"pid": 300,
"city_code": "101100505",
"city_name": "襄垣县"
},
{
"_id": 1842,
"id": 2491,
"pid": 300,
"city_code": "101100503",
"city_name": "屯留县"
},
{
"_id": 1843,
"id": 2492,
"pid": 300,
"city_code": "101100506",
"city_name": "平顺县"
},
{
"_id": 1844,
"id": 2493,
"pid": 300,
"city_code": "101100502",
"city_name": "黎城县"
},
{
"_id": 1845,
"id": 2494,
"pid": 300,
"city_code": "101100511",
"city_name": "壶关县"
},
{
"_id": 1846,
"id": 2495,
"pid": 300,
"city_code": "101100509",
"city_name": "长子县"
},
{
"_id": 1847,
"id": 2496,
"pid": 300,
"city_code": "101100507",
"city_name": "武乡县"
},
{
"_id": 1848,
"id": 2497,
"pid": 300,
"city_code": "101100510",
"city_name": "沁源县"
},
{
"_id": 1849,
"id": 2502,
"pid": 301,
"city_code": "101100202",
"city_name": "阳高县"
},
{
"_id": 1850,
"id": 2503,
"pid": 301,
"city_code": "101100204",
"city_name": "天镇县"
},
{
"_id": 1851,
"id": 2504,
"pid": 301,
"city_code": "101100205",
"city_name": "广灵县"
},
{
"_id": 1852,
"id": 2505,
"pid": 301,
"city_code": "101100206",
"city_name": "灵丘县"
},
{
"_id": 1853,
"id": 2506,
"pid": 301,
"city_code": "101100207",
"city_name": "浑源县"
},
{
"_id": 1854,
"id": 2507,
"pid": 301,
"city_code": "101100208",
"city_name": "左云县"
},
{
"_id": 1855,
"id": 2508,
"pid": 301,
"city_code": "101100203",
"city_name": "大同县"
},
{
"_id": 1856,
"id": 2510,
"pid": 302,
"city_code": "101100605",
"city_name": "高平市"
},
{
"_id": 1857,
"id": 2511,
"pid": 302,
"city_code": "101100602",
"city_name": "沁水县"
},
{
"_id": 1858,
"id": 2512,
"pid": 302,
"city_code": "101100603",
"city_name": "阳城县"
},
{
"_id": 1859,
"id": 2513,
"pid": 302,
"city_code": "101100604",
"city_name": "陵川县"
},
{
"_id": 1860,
"id": 2514,
"pid": 302,
"city_code": "101100606",
"city_name": "泽州县"
},
{
"_id": 1861,
"id": 2515,
"pid": 303,
"city_code": "101100402",
"city_name": "榆次区"
},
{
"_id": 1862,
"id": 2516,
"pid": 303,
"city_code": "101100412",
"city_name": "介休市"
},
{
"_id": 1863,
"id": 2517,
"pid": 303,
"city_code": "101100403",
"city_name": "榆社县"
},
{
"_id": 1864,
"id": 2518,
"pid": 303,
"city_code": "101100404",
"city_name": "左权县"
},
{
"_id": 1865,
"id": 2519,
"pid": 303,
"city_code": "101100405",
"city_name": "和顺县"
},
{
"_id": 1866,
"id": 2520,
"pid": 303,
"city_code": "101100406",
"city_name": "昔阳县"
},
{
"_id": 1867,
"id": 2521,
"pid": 303,
"city_code": "101100407",
"city_name": "寿阳县"
},
{
"_id": 1868,
"id": 2522,
"pid": 303,
"city_code": "101100408",
"city_name": "太谷县"
},
{
"_id": 1869,
"id": 2523,
"pid": 303,
"city_code": "101100409",
"city_name": "祁县"
},
{
"_id": 1870,
"id": 2524,
"pid": 303,
"city_code": "101100410",
"city_name": "平遥县"
},
{
"_id": 1871,
"id": 2525,
"pid": 303,
"city_code": "101100411",
"city_name": "灵石县"
},
{
"_id": 1872,
"id": 2527,
"pid": 304,
"city_code": "101100714",
"city_name": "侯马市"
},
{
"_id": 1873,
"id": 2528,
"pid": 304,
"city_code": "101100711",
"city_name": "霍州市"
},
{
"_id": 1874,
"id": 2529,
"pid": 304,
"city_code": "101100702",
"city_name": "曲沃县"
},
{
"_id": 1875,
"id": 2530,
"pid": 304,
"city_code": "101100713",
"city_name": "翼城县"
},
{
"_id": 1876,
"id": 2531,
"pid": 304,
"city_code": "101100707",
"city_name": "襄汾县"
},
{
"_id": 1877,
"id": 2532,
"pid": 304,
"city_code": "101100710",
"city_name": "洪洞县"
},
{
"_id": 1878,
"id": 2533,
"pid": 304,
"city_code": "101100706",
"city_name": "吉县"
},
{
"_id": 1879,
"id": 2534,
"pid": 304,
"city_code": "101100716",
"city_name": "安泽县"
},
{
"_id": 1880,
"id": 2535,
"pid": 304,
"city_code": "101100715",
"city_name": "浮山县"
},
{
"_id": 1881,
"id": 2536,
"pid": 304,
"city_code": "101100717",
"city_name": "古县"
},
{
"_id": 1882,
"id": 2537,
"pid": 304,
"city_code": "101100712",
"city_name": "乡宁县"
},
{
"_id": 1883,
"id": 2538,
"pid": 304,
"city_code": "101100705",
"city_name": "大宁县"
},
{
"_id": 1884,
"id": 2539,
"pid": 304,
"city_code": "101100704",
"city_name": "隰县"
},
{
"_id": 1885,
"id": 2540,
"pid": 304,
"city_code": "101100703",
"city_name": "永和县"
},
{
"_id": 1886,
"id": 2541,
"pid": 304,
"city_code": "101100708",
"city_name": "蒲县"
},
{
"_id": 1887,
"id": 2542,
"pid": 304,
"city_code": "101100709",
"city_name": "汾西县"
},
{
"_id": 1888,
"id": 2543,
"pid": 305,
"city_code": "101101101",
"city_name": "离石市"
},
{
"_id": 1889,
"id": 2544,
"pid": 305,
"city_code": "101101101",
"city_name": "离石区"
},
{
"_id": 1890,
"id": 2545,
"pid": 305,
"city_code": "101101110",
"city_name": "孝义市"
},
{
"_id": 1891,
"id": 2546,
"pid": 305,
"city_code": "101101111",
"city_name": "汾阳市"
},
{
"_id": 1892,
"id": 2547,
"pid": 305,
"city_code": "101101112",
"city_name": "文水县"
},
{
"_id": 1893,
"id": 2548,
"pid": 305,
"city_code": "101101113",
"city_name": "交城县"
},
{
"_id": 1894,
"id": 2549,
"pid": 305,
"city_code": "101101103",
"city_name": "兴县"
},
{
"_id": 1895,
"id": 2550,
"pid": 305,
"city_code": "101101102",
"city_name": "临县"
},
{
"_id": 1896,
"id": 2551,
"pid": 305,
"city_code": "101101105",
"city_name": "柳林县"
},
{
"_id": 1897,
"id": 2552,
"pid": 305,
"city_code": "101101106",
"city_name": "石楼县"
},
{
"_id": 1898,
"id": 2553,
"pid": 305,
"city_code": "101101104",
"city_name": "岚县"
},
{
"_id": 1899,
"id": 2554,
"pid": 305,
"city_code": "101101107",
"city_name": "方山县"
},
{
"_id": 1900,
"id": 2555,
"pid": 305,
"city_code": "101101109",
"city_name": "中阳县"
},
{
"_id": 1901,
"id": 2556,
"pid": 305,
"city_code": "101101108",
"city_name": "交口县"
},
{
"_id": 1902,
"id": 2558,
"pid": 306,
"city_code": "101100902",
"city_name": "平鲁区"
},
{
"_id": 1903,
"id": 2559,
"pid": 306,
"city_code": "101100903",
"city_name": "山阴县"
},
{
"_id": 1904,
"id": 2560,
"pid": 306,
"city_code": "101100905",
"city_name": "应县"
},
{
"_id": 1905,
"id": 2561,
"pid": 306,
"city_code": "101100904",
"city_name": "右玉县"
},
{
"_id": 1906,
"id": 2562,
"pid": 306,
"city_code": "101100906",
"city_name": "怀仁县"
},
{
"_id": 1907,
"id": 2564,
"pid": 307,
"city_code": "101101015",
"city_name": "原平市"
},
{
"_id": 1908,
"id": 2565,
"pid": 307,
"city_code": "101101002",
"city_name": "定襄县"
},
{
"_id": 1909,
"id": 2566,
"pid": 307,
"city_code": "101101003",
"city_name": "五台县"
},
{
"_id": 1910,
"id": 2567,
"pid": 307,
"city_code": "101101008",
"city_name": "代县"
},
{
"_id": 1911,
"id": 2568,
"pid": 307,
"city_code": "101101009",
"city_name": "繁峙县"
},
{
"_id": 1912,
"id": 2569,
"pid": 307,
"city_code": "101101007",
"city_name": "宁武县"
},
{
"_id": 1913,
"id": 2570,
"pid": 307,
"city_code": "101101012",
"city_name": "静乐县"
},
{
"_id": 1914,
"id": 2571,
"pid": 307,
"city_code": "101101006",
"city_name": "神池县"
},
{
"_id": 1915,
"id": 2572,
"pid": 307,
"city_code": "101101014",
"city_name": "五寨县"
},
{
"_id": 1916,
"id": 2573,
"pid": 307,
"city_code": "101101013",
"city_name": "岢岚县"
},
{
"_id": 1917,
"id": 2574,
"pid": 307,
"city_code": "101101004",
"city_name": "河曲县"
},
{
"_id": 1918,
"id": 2575,
"pid": 307,
"city_code": "101101011",
"city_name": "保德县"
},
{
"_id": 1919,
"id": 2576,
"pid": 307,
"city_code": "101101005",
"city_name": "偏关县"
},
{
"_id": 1920,
"id": 2580,
"pid": 308,
"city_code": "101100303",
"city_name": "平定县"
},
{
"_id": 1921,
"id": 2581,
"pid": 308,
"city_code": "101100302",
"city_name": "盂县"
},
{
"_id": 1922,
"id": 2583,
"pid": 309,
"city_code": "101100810",
"city_name": "永济市"
},
{
"_id": 1923,
"id": 2584,
"pid": 309,
"city_code": "101100805",
"city_name": "河津市"
},
{
"_id": 1924,
"id": 2585,
"pid": 309,
"city_code": "101100802",
"city_name": "临猗县"
},
{
"_id": 1925,
"id": 2586,
"pid": 309,
"city_code": "101100804",
"city_name": "万荣县"
},
{
"_id": 1926,
"id": 2587,
"pid": 309,
"city_code": "101100808",
"city_name": "闻喜县"
},
{
"_id": 1927,
"id": 2588,
"pid": 309,
"city_code": "101100803",
"city_name": "稷山县"
},
{
"_id": 1928,
"id": 2589,
"pid": 309,
"city_code": "101100806",
"city_name": "新绛县"
},
{
"_id": 1929,
"id": 2590,
"pid": 309,
"city_code": "101100807",
"city_name": "绛县"
},
{
"_id": 1930,
"id": 2591,
"pid": 309,
"city_code": "101100809",
"city_name": "垣曲县"
},
{
"_id": 1931,
"id": 2592,
"pid": 309,
"city_code": "101100812",
"city_name": "夏县"
},
{
"_id": 1932,
"id": 2593,
"pid": 309,
"city_code": "101100813",
"city_name": "平陆县"
},
{
"_id": 1933,
"id": 2594,
"pid": 309,
"city_code": "101100811",
"city_name": "芮城县"
},
{
"_id": 1934,
"id": 2602,
"pid": 310,
"city_code": "101110103",
"city_name": "临潼区"
},
{
"_id": 1935,
"id": 2603,
"pid": 310,
"city_code": "101110102",
"city_name": "长安区"
},
{
"_id": 1936,
"id": 2604,
"pid": 310,
"city_code": "101110104",
"city_name": "蓝田县"
},
{
"_id": 1937,
"id": 2605,
"pid": 310,
"city_code": "101110105",
"city_name": "周至县"
},
{
"_id": 1938,
"id": 2606,
"pid": 310,
"city_code": "101110106",
"city_name": "户县"
},
{
"_id": 1939,
"id": 2607,
"pid": 310,
"city_code": "101110107",
"city_name": "高陵县"
},
{
"_id": 1940,
"id": 2609,
"pid": 311,
"city_code": "101110704",
"city_name": "汉阴县"
},
{
"_id": 1941,
"id": 2610,
"pid": 311,
"city_code": "101110703",
"city_name": "石泉县"
},
{
"_id": 1942,
"id": 2611,
"pid": 311,
"city_code": "101110710",
"city_name": "宁陕县"
},
{
"_id": 1943,
"id": 2612,
"pid": 311,
"city_code": "101110702",
"city_name": "紫阳县"
},
{
"_id": 1944,
"id": 2613,
"pid": 311,
"city_code": "101110706",
"city_name": "岚皋县"
},
{
"_id": 1945,
"id": 2614,
"pid": 311,
"city_code": "101110707",
"city_name": "平利县"
},
{
"_id": 1946,
"id": 2615,
"pid": 311,
"city_code": "101110709",
"city_name": "镇坪县"
},
{
"_id": 1947,
"id": 2616,
"pid": 311,
"city_code": "101110705",
"city_name": "旬阳县"
},
{
"_id": 1948,
"id": 2617,
"pid": 311,
"city_code": "101110708",
"city_name": "白河县"
},
{
"_id": 1949,
"id": 2618,
"pid": 312,
"city_code": "101110912",
"city_name": "陈仓区"
},
{
"_id": 1950,
"id": 2621,
"pid": 312,
"city_code": "101110906",
"city_name": "凤翔县"
},
{
"_id": 1951,
"id": 2622,
"pid": 312,
"city_code": "101110905",
"city_name": "岐山县"
},
{
"_id": 1952,
"id": 2623,
"pid": 312,
"city_code": "101110907",
"city_name": "扶风县"
},
{
"_id": 1953,
"id": 2624,
"pid": 312,
"city_code": "101110908",
"city_name": "眉县"
},
{
"_id": 1954,
"id": 2625,
"pid": 312,
"city_code": "101110911",
"city_name": "陇县"
},
{
"_id": 1955,
"id": 2626,
"pid": 312,
"city_code": "101110903",
"city_name": "千阳县"
},
{
"_id": 1956,
"id": 2627,
"pid": 312,
"city_code": "101110904",
"city_name": "麟游县"
},
{
"_id": 1957,
"id": 2628,
"pid": 312,
"city_code": "101110910",
"city_name": "凤县"
},
{
"_id": 1958,
"id": 2629,
"pid": 312,
"city_code": "101110909",
"city_name": "太白县"
},
{
"_id": 1959,
"id": 2631,
"pid": 313,
"city_code": "101110810",
"city_name": "南郑县"
},
{
"_id": 1960,
"id": 2632,
"pid": 313,
"city_code": "101110806",
"city_name": "城固县"
},
{
"_id": 1961,
"id": 2633,
"pid": 313,
"city_code": "101110805",
"city_name": "洋县"
},
{
"_id": 1962,
"id": 2634,
"pid": 313,
"city_code": "101110807",
"city_name": "西乡县"
},
{
"_id": 1963,
"id": 2635,
"pid": 313,
"city_code": "101110803",
"city_name": "勉县"
},
{
"_id": 1964,
"id": 2636,
"pid": 313,
"city_code": "101110809",
"city_name": "宁强县"
},
{
"_id": 1965,
"id": 2637,
"pid": 313,
"city_code": "101110802",
"city_name": "略阳县"
},
{
"_id": 1966,
"id": 2638,
"pid": 313,
"city_code": "101110811",
"city_name": "镇巴县"
},
{
"_id": 1967,
"id": 2639,
"pid": 313,
"city_code": "101110804",
"city_name": "留坝县"
},
{
"_id": 1968,
"id": 2640,
"pid": 313,
"city_code": "101110808",
"city_name": "佛坪县"
},
{
"_id": 1969,
"id": 2641,
"pid": 314,
"city_code": "101110604",
"city_name": "商州区"
},
{
"_id": 1970,
"id": 2642,
"pid": 314,
"city_code": "101110602",
"city_name": "洛南县"
},
{
"_id": 1971,
"id": 2643,
"pid": 314,
"city_code": "101110606",
"city_name": "丹凤县"
},
{
"_id": 1972,
"id": 2644,
"pid": 314,
"city_code": "101110607",
"city_name": "商南县"
},
{
"_id": 1973,
"id": 2645,
"pid": 314,
"city_code": "101110608",
"city_name": "山阳县"
},
{
"_id": 1974,
"id": 2646,
"pid": 314,
"city_code": "101110605",
"city_name": "镇安县"
},
{
"_id": 1975,
"id": 2647,
"pid": 314,
"city_code": "101110603",
"city_name": "柞水县"
},
{
"_id": 1976,
"id": 2648,
"pid": 315,
"city_code": "101111004",
"city_name": "耀州区"
},
{
"_id": 1977,
"id": 2651,
"pid": 315,
"city_code": "101111003",
"city_name": "宜君县"
},
{
"_id": 1978,
"id": 2653,
"pid": 316,
"city_code": "101110510",
"city_name": "韩城市"
},
{
"_id": 1979,
"id": 2654,
"pid": 316,
"city_code": "101110511",
"city_name": "华阴市"
},
{
"_id": 1980,
"id": 2655,
"pid": 316,
"city_code": "101110502",
"city_name": "华县"
},
{
"_id": 1981,
"id": 2656,
"pid": 316,
"city_code": "101110503",
"city_name": "潼关县"
},
{
"_id": 1982,
"id": 2657,
"pid": 316,
"city_code": "101110504",
"city_name": "大荔县"
},
{
"_id": 1983,
"id": 2658,
"pid": 316,
"city_code": "101110509",
"city_name": "合阳县"
},
{
"_id": 1984,
"id": 2659,
"pid": 316,
"city_code": "101110508",
"city_name": "澄城县"
},
{
"_id": 1985,
"id": 2660,
"pid": 316,
"city_code": "101110507",
"city_name": "蒲城县"
},
{
"_id": 1986,
"id": 2661,
"pid": 316,
"city_code": "101110505",
"city_name": "白水县"
},
{
"_id": 1987,
"id": 2662,
"pid": 316,
"city_code": "101110506",
"city_name": "富平县"
},
{
"_id": 1988,
"id": 2666,
"pid": 317,
"city_code": "101110211",
"city_name": "兴平市"
},
{
"_id": 1989,
"id": 2667,
"pid": 317,
"city_code": "101110201",
"city_name": "三原县"
},
{
"_id": 1990,
"id": 2668,
"pid": 317,
"city_code": "101110205",
"city_name": "泾阳县"
},
{
"_id": 1991,
"id": 2669,
"pid": 317,
"city_code": "101110207",
"city_name": "乾县"
},
{
"_id": 1992,
"id": 2670,
"pid": 317,
"city_code": "101110202",
"city_name": "礼泉县"
},
{
"_id": 1993,
"id": 2671,
"pid": 317,
"city_code": "101110203",
"city_name": "永寿县"
},
{
"_id": 1994,
"id": 2672,
"pid": 317,
"city_code": "101110208",
"city_name": "彬县"
},
{
"_id": 1995,
"id": 2673,
"pid": 317,
"city_code": "101110209",
"city_name": "长武县"
},
{
"_id": 1996,
"id": 2674,
"pid": 317,
"city_code": "101110210",
"city_name": "旬邑县"
},
{
"_id": 1997,
"id": 2675,
"pid": 317,
"city_code": "101110204",
"city_name": "淳化县"
},
{
"_id": 1998,
"id": 2676,
"pid": 317,
"city_code": "101110206",
"city_name": "武功县"
},
{
"_id": 1999,
"id": 2677,
"pid": 318,
"city_code": "101110312",
"city_name": "吴起县"
},
{
"_id": 2000,
"id": 2679,
"pid": 318,
"city_code": "101110301",
"city_name": "延长县"
},
{
"_id": 2001,
"id": 2680,
"pid": 318,
"city_code": "101110302",
"city_name": "延川县"
},
{
"_id": 2002,
"id": 2681,
"pid": 318,
"city_code": "101110303",
"city_name": "子长县"
},
{
"_id": 2003,
"id": 2682,
"pid": 318,
"city_code": "101110307",
"city_name": "安塞县"
},
{
"_id": 2004,
"id": 2683,
"pid": 318,
"city_code": "101110306",
"city_name": "志丹县"
},
{
"_id": 2005,
"id": 2684,
"pid": 318,
"city_code": "101110308",
"city_name": "甘泉县"
},
{
"_id": 2006,
"id": 2685,
"pid": 318,
"city_code": "101110305",
"city_name": "富县"
},
{
"_id": 2007,
"id": 2686,
"pid": 318,
"city_code": "101110309",
"city_name": "洛川县"
},
{
"_id": 2008,
"id": 2687,
"pid": 318,
"city_code": "101110304",
"city_name": "宜川县"
},
{
"_id": 2009,
"id": 2688,
"pid": 318,
"city_code": "101110311",
"city_name": "黄龙县"
},
{
"_id": 2010,
"id": 2689,
"pid": 318,
"city_code": "101110310",
"city_name": "黄陵县"
},
{
"_id": 2011,
"id": 2690,
"pid": 319,
"city_code": "101110413",
"city_name": "榆阳区"
},
{
"_id": 2012,
"id": 2691,
"pid": 319,
"city_code": "101110403",
"city_name": "神木县"
},
{
"_id": 2013,
"id": 2692,
"pid": 319,
"city_code": "101110402",
"city_name": "府谷县"
},
{
"_id": 2014,
"id": 2693,
"pid": 319,
"city_code": "101110407",
"city_name": "横山县"
},
{
"_id": 2015,
"id": 2694,
"pid": 319,
"city_code": "101110406",
"city_name": "靖边县"
},
{
"_id": 2016,
"id": 2695,
"pid": 319,
"city_code": "101110405",
"city_name": "定边县"
},
{
"_id": 2017,
"id": 2696,
"pid": 319,
"city_code": "101110410",
"city_name": "绥德县"
},
{
"_id": 2018,
"id": 2697,
"pid": 319,
"city_code": "101110408",
"city_name": "米脂县"
},
{
"_id": 2019,
"id": 2698,
"pid": 319,
"city_code": "101110404",
"city_name": "佳县"
},
{
"_id": 2020,
"id": 2699,
"pid": 319,
"city_code": "101110411",
"city_name": "吴堡县"
},
{
"_id": 2021,
"id": 2700,
"pid": 319,
"city_code": "101110412",
"city_name": "清涧县"
},
{
"_id": 2022,
"id": 2701,
"pid": 319,
"city_code": "101110409",
"city_name": "子洲县"
},
{
"_id": 2023,
"id": 2704,
"pid": 24,
"city_code": "101020200",
"city_name": "闵行区"
},
{
"_id": 2024,
"id": 2706,
"pid": 24,
"city_code": "101021300",
"city_name": "浦东新区"
},
{
"_id": 2025,
"id": 2714,
"pid": 24,
"city_code": "101020900",
"city_name": "松江区"
},
{
"_id": 2026,
"id": 2715,
"pid": 24,
"city_code": "101020500",
"city_name": "嘉定区"
},
{
"_id": 2027,
"id": 2716,
"pid": 24,
"city_code": "101020300",
"city_name": "宝山区"
},
{
"_id": 2028,
"id": 2717,
"pid": 24,
"city_code": "101020800",
"city_name": "青浦区"
},
{
"_id": 2029,
"id": 2718,
"pid": 24,
"city_code": "101020700",
"city_name": "金山区"
},
{
"_id": 2030,
"id": 2719,
"pid": 24,
"city_code": "101021000",
"city_name": "奉贤区"
},
{
"_id": 2031,
"id": 2720,
"pid": 24,
"city_code": "101021100",
"city_name": "崇明区"
},
{
"_id": 2032,
"id": 2726,
"pid": 321,
"city_code": "101270102",
"city_name": "龙泉驿区"
},
{
"_id": 2033,
"id": 2727,
"pid": 321,
"city_code": "101270115",
"city_name": "青白江区"
},
{
"_id": 2034,
"id": 2728,
"pid": 321,
"city_code": "101270103",
"city_name": "新都区"
},
{
"_id": 2035,
"id": 2729,
"pid": 321,
"city_code": "101270104",
"city_name": "温江区"
},
{
"_id": 2036,
"id": 2732,
"pid": 321,
"city_code": "101270111",
"city_name": "都江堰市"
},
{
"_id": 2037,
"id": 2733,
"pid": 321,
"city_code": "101270112",
"city_name": "彭州市"
},
{
"_id": 2038,
"id": 2734,
"pid": 321,
"city_code": "101270113",
"city_name": "邛崃市"
},
{
"_id": 2039,
"id": 2735,
"pid": 321,
"city_code": "101270114",
"city_name": "崇州市"
},
{
"_id": 2040,
"id": 2736,
"pid": 321,
"city_code": "101270105",
"city_name": "金堂县"
},
{
"_id": 2041,
"id": 2737,
"pid": 321,
"city_code": "101270106",
"city_name": "双流县"
},
{
"_id": 2042,
"id": 2738,
"pid": 321,
"city_code": "101270107",
"city_name": "郫县"
},
{
"_id": 2043,
"id": 2739,
"pid": 321,
"city_code": "101270108",
"city_name": "大邑县"
},
{
"_id": 2044,
"id": 2740,
"pid": 321,
"city_code": "101270109",
"city_name": "蒲江县"
},
{
"_id": 2045,
"id": 2741,
"pid": 321,
"city_code": "101270110",
"city_name": "新津县"
},
{
"_id": 2046,
"id": 2754,
"pid": 322,
"city_code": "101270408",
"city_name": "江油市"
},
{
"_id": 2047,
"id": 2755,
"pid": 322,
"city_code": "101270403",
"city_name": "盐亭县"
},
{
"_id": 2048,
"id": 2756,
"pid": 322,
"city_code": "101270402",
"city_name": "三台县"
},
{
"_id": 2049,
"id": 2757,
"pid": 322,
"city_code": "101270407",
"city_name": "平武县"
},
{
"_id": 2050,
"id": 2758,
"pid": 322,
"city_code": "101270404",
"city_name": "安县"
},
{
"_id": 2051,
"id": 2759,
"pid": 322,
"city_code": "101270405",
"city_name": "梓潼县"
},
{
"_id": 2052,
"id": 2760,
"pid": 322,
"city_code": "101270406",
"city_name": "北川县"
},
{
"_id": 2053,
"id": 2761,
"pid": 323,
"city_code": "101271910",
"city_name": "马尔康县"
},
{
"_id": 2054,
"id": 2762,
"pid": 323,
"city_code": "101271902",
"city_name": "汶川县"
},
{
"_id": 2055,
"id": 2763,
"pid": 323,
"city_code": "101271903",
"city_name": "理县"
},
{
"_id": 2056,
"id": 2764,
"pid": 323,
"city_code": "101271904",
"city_name": "茂县"
},
{
"_id": 2057,
"id": 2765,
"pid": 323,
"city_code": "101271905",
"city_name": "松潘县"
},
{
"_id": 2058,
"id": 2766,
"pid": 323,
"city_code": "101271906",
"city_name": "九寨沟县"
},
{
"_id": 2059,
"id": 2767,
"pid": 323,
"city_code": "101271907",
"city_name": "金川县"
},
{
"_id": 2060,
"id": 2768,
"pid": 323,
"city_code": "101271908",
"city_name": "小金县"
},
{
"_id": 2061,
"id": 2769,
"pid": 323,
"city_code": "101271909",
"city_name": "黑水县"
},
{
"_id": 2062,
"id": 2770,
"pid": 323,
"city_code": "101271911",
"city_name": "壤塘县"
},
{
"_id": 2063,
"id": 2771,
"pid": 323,
"city_code": "101271901",
"city_name": "阿坝县"
},
{
"_id": 2064,
"id": 2772,
"pid": 323,
"city_code": "101271912",
"city_name": "若尔盖县"
},
{
"_id": 2065,
"id": 2773,
"pid": 323,
"city_code": "101271913",
"city_name": "红原县"
},
{
"_id": 2066,
"id": 2775,
"pid": 324,
"city_code": "101270902",
"city_name": "通江县"
},
{
"_id": 2067,
"id": 2776,
"pid": 324,
"city_code": "101270903",
"city_name": "南江县"
},
{
"_id": 2068,
"id": 2777,
"pid": 324,
"city_code": "101270904",
"city_name": "平昌县"
},
{
"_id": 2069,
"id": 2779,
"pid": 325,
"city_code": "101270606",
"city_name": "万源市"
},
{
"_id": 2070,
"id": 2780,
"pid": 325,
"city_code": "101270608",
"city_name": "达川区"
},
{
"_id": 2071,
"id": 2781,
"pid": 325,
"city_code": "101270602",
"city_name": "宣汉县"
},
{
"_id": 2072,
"id": 2782,
"pid": 325,
"city_code": "101270603",
"city_name": "开江县"
},
{
"_id": 2073,
"id": 2783,
"pid": 325,
"city_code": "101270604",
"city_name": "大竹县"
},
{
"_id": 2074,
"id": 2784,
"pid": 325,
"city_code": "101270605",
"city_name": "渠县"
},
{
"_id": 2075,
"id": 2786,
"pid": 326,
"city_code": "101272003",
"city_name": "广汉市"
},
{
"_id": 2076,
"id": 2787,
"pid": 326,
"city_code": "101272004",
"city_name": "什邡市"
},
{
"_id": 2077,
"id": 2788,
"pid": 326,
"city_code": "101272005",
"city_name": "绵竹市"
},
{
"_id": 2078,
"id": 2789,
"pid": 326,
"city_code": "101272006",
"city_name": "罗江县"
},
{
"_id": 2079,
"id": 2790,
"pid": 326,
"city_code": "101272002",
"city_name": "中江县"
},
{
"_id": 2080,
"id": 2791,
"pid": 327,
"city_code": "101271802",
"city_name": "康定县"
},
{
"_id": 2081,
"id": 2792,
"pid": 327,
"city_code": "101271804",
"city_name": "丹巴县"
},
{
"_id": 2082,
"id": 2793,
"pid": 327,
"city_code": "101271803",
"city_name": "泸定县"
},
{
"_id": 2083,
"id": 2794,
"pid": 327,
"city_code": "101271808",
"city_name": "炉霍县"
},
{
"_id": 2084,
"id": 2795,
"pid": 327,
"city_code": "101271805",
"city_name": "九龙县"
},
{
"_id": 2085,
"id": 2796,
"pid": 327,
"city_code": "101271801",
"city_name": "甘孜县"
},
{
"_id": 2086,
"id": 2797,
"pid": 327,
"city_code": "101271806",
"city_name": "雅江县"
},
{
"_id": 2087,
"id": 2798,
"pid": 327,
"city_code": "101271809",
"city_name": "新龙县"
},
{
"_id": 2088,
"id": 2799,
"pid": 327,
"city_code": "101271807",
"city_name": "道孚县"
},
{
"_id": 2089,
"id": 2800,
"pid": 327,
"city_code": "101271811",
"city_name": "白玉县"
},
{
"_id": 2090,
"id": 2801,
"pid": 327,
"city_code": "101271814",
"city_name": "理塘县"
},
{
"_id": 2091,
"id": 2802,
"pid": 327,
"city_code": "101271810",
"city_name": "德格县"
},
{
"_id": 2092,
"id": 2803,
"pid": 327,
"city_code": "101271816",
"city_name": "乡城县"
},
{
"_id": 2093,
"id": 2804,
"pid": 327,
"city_code": "101271812",
"city_name": "石渠县"
},
{
"_id": 2094,
"id": 2805,
"pid": 327,
"city_code": "101271817",
"city_name": "稻城县"
},
{
"_id": 2095,
"id": 2806,
"pid": 327,
"city_code": "101271813",
"city_name": "色达县"
},
{
"_id": 2096,
"id": 2807,
"pid": 327,
"city_code": "101271815",
"city_name": "巴塘县"
},
{
"_id": 2097,
"id": 2808,
"pid": 327,
"city_code": "101271818",
"city_name": "得荣县"
},
{
"_id": 2098,
"id": 2809,
"pid": 328,
"city_code": "101270801",
"city_name": "广安区"
},
{
"_id": 2099,
"id": 2810,
"pid": 328,
"city_code": "101270805",
"city_name": "华蓥市"
},
{
"_id": 2100,
"id": 2811,
"pid": 328,
"city_code": "101270802",
"city_name": "岳池县"
},
{
"_id": 2101,
"id": 2812,
"pid": 328,
"city_code": "101270803",
"city_name": "武胜县"
},
{
"_id": 2102,
"id": 2813,
"pid": 328,
"city_code": "101270804",
"city_name": "邻水县"
},
{
"_id": 2103,
"id": 2817,
"pid": 329,
"city_code": "101272102",
"city_name": "旺苍县"
},
{
"_id": 2104,
"id": 2818,
"pid": 329,
"city_code": "101272103",
"city_name": "青川县"
},
{
"_id": 2105,
"id": 2819,
"pid": 329,
"city_code": "101272104",
"city_name": "剑阁县"
},
{
"_id": 2106,
"id": 2820,
"pid": 329,
"city_code": "101272105",
"city_name": "苍溪县"
},
{
"_id": 2107,
"id": 2821,
"pid": 330,
"city_code": "101271409",
"city_name": "峨眉山市"
},
{
"_id": 2108,
"id": 2823,
"pid": 330,
"city_code": "101271402",
"city_name": "犍为县"
},
{
"_id": 2109,
"id": 2824,
"pid": 330,
"city_code": "101271403",
"city_name": "井研县"
},
{
"_id": 2110,
"id": 2825,
"pid": 330,
"city_code": "101271404",
"city_name": "夹江县"
},
{
"_id": 2111,
"id": 2826,
"pid": 330,
"city_code": "101271405",
"city_name": "沐川县"
},
{
"_id": 2112,
"id": 2827,
"pid": 330,
"city_code": "101271406",
"city_name": "峨边县"
},
{
"_id": 2113,
"id": 2828,
"pid": 330,
"city_code": "101271407",
"city_name": "马边县"
},
{
"_id": 2114,
"id": 2829,
"pid": 331,
"city_code": "101271610",
"city_name": "西昌市"
},
{
"_id": 2115,
"id": 2830,
"pid": 331,
"city_code": "101271604",
"city_name": "盐源县"
},
{
"_id": 2116,
"id": 2831,
"pid": 331,
"city_code": "101271605",
"city_name": "德昌县"
},
{
"_id": 2117,
"id": 2832,
"pid": 331,
"city_code": "101271606",
"city_name": "会理县"
},
{
"_id": 2118,
"id": 2833,
"pid": 331,
"city_code": "101271607",
"city_name": "会东县"
},
{
"_id": 2119,
"id": 2834,
"pid": 331,
"city_code": "101271608",
"city_name": "宁南县"
},
{
"_id": 2120,
"id": 2835,
"pid": 331,
"city_code": "101271609",
"city_name": "普格县"
},
{
"_id": 2121,
"id": 2836,
"pid": 331,
"city_code": "101271619",
"city_name": "布拖县"
},
{
"_id": 2122,
"id": 2837,
"pid": 331,
"city_code": "101271611",
"city_name": "金阳县"
},
{
"_id": 2123,
"id": 2838,
"pid": 331,
"city_code": "101271612",
"city_name": "昭觉县"
},
{
"_id": 2124,
"id": 2839,
"pid": 331,
"city_code": "101271613",
"city_name": "喜德县"
},
{
"_id": 2125,
"id": 2840,
"pid": 331,
"city_code": "101271614",
"city_name": "冕宁县"
},
{
"_id": 2126,
"id": 2841,
"pid": 331,
"city_code": "101271615",
"city_name": "越西县"
},
{
"_id": 2127,
"id": 2842,
"pid": 331,
"city_code": "101271616",
"city_name": "甘洛县"
},
{
"_id": 2128,
"id": 2843,
"pid": 331,
"city_code": "101271618",
"city_name": "美姑县"
},
{
"_id": 2129,
"id": 2844,
"pid": 331,
"city_code": "101271617",
"city_name": "雷波县"
},
{
"_id": 2130,
"id": 2845,
"pid": 331,
"city_code": "101271603",
"city_name": "木里县"
},
{
"_id": 2131,
"id": 2847,
"pid": 332,
"city_code": "101271502",
"city_name": "仁寿县"
},
{
"_id": 2132,
"id": 2848,
"pid": 332,
"city_code": "101271503",
"city_name": "彭山县"
},
{
"_id": 2133,
"id": 2849,
"pid": 332,
"city_code": "101271504",
"city_name": "洪雅县"
},
{
"_id": 2134,
"id": 2850,
"pid": 332,
"city_code": "101271505",
"city_name": "丹棱县"
},
{
"_id": 2135,
"id": 2851,
"pid": 332,
"city_code": "101271506",
"city_name": "青神县"
},
{
"_id": 2136,
"id": 2852,
"pid": 333,
"city_code": "101270507",
"city_name": "阆中市"
},
{
"_id": 2137,
"id": 2853,
"pid": 333,
"city_code": "101270502",
"city_name": "南部县"
},
{
"_id": 2138,
"id": 2854,
"pid": 333,
"city_code": "101270503",
"city_name": "营山县"
},
{
"_id": 2139,
"id": 2855,
"pid": 333,
"city_code": "101270504",
"city_name": "蓬安县"
},
{
"_id": 2140,
"id": 2856,
"pid": 333,
"city_code": "101270505",
"city_name": "仪陇县"
},
{
"_id": 2141,
"id": 2860,
"pid": 333,
"city_code": "101270506",
"city_name": "西充县"
},
{
"_id": 2142,
"id": 2862,
"pid": 334,
"city_code": "101271202",
"city_name": "东兴区"
},
{
"_id": 2143,
"id": 2863,
"pid": 334,
"city_code": "101271203",
"city_name": "威远县"
},
{
"_id": 2144,
"id": 2864,
"pid": 334,
"city_code": "101271204",
"city_name": "资中县"
},
{
"_id": 2145,
"id": 2865,
"pid": 334,
"city_code": "101271205",
"city_name": "隆昌县"
},
{
"_id": 2146,
"id": 2868,
"pid": 335,
"city_code": "101270202",
"city_name": "仁和区"
},
{
"_id": 2147,
"id": 2869,
"pid": 335,
"city_code": "101270203",
"city_name": "米易县"
},
{
"_id": 2148,
"id": 2870,
"pid": 335,
"city_code": "101270204",
"city_name": "盐边县"
},
{
"_id": 2149,
"id": 2873,
"pid": 336,
"city_code": "101270702",
"city_name": "蓬溪县"
},
{
"_id": 2150,
"id": 2874,
"pid": 336,
"city_code": "101270703",
"city_name": "射洪县"
},
{
"_id": 2151,
"id": 2877,
"pid": 337,
"city_code": "101271702",
"city_name": "名山县"
},
{
"_id": 2152,
"id": 2878,
"pid": 337,
"city_code": "101271703",
"city_name": "荥经县"
},
{
"_id": 2153,
"id": 2879,
"pid": 337,
"city_code": "101271704",
"city_name": "汉源县"
},
{
"_id": 2154,
"id": 2880,
"pid": 337,
"city_code": "101271705",
"city_name": "石棉县"
},
{
"_id": 2155,
"id": 2881,
"pid": 337,
"city_code": "101271706",
"city_name": "天全县"
},
{
"_id": 2156,
"id": 2882,
"pid": 337,
"city_code": "101271707",
"city_name": "芦山县"
},
{
"_id": 2157,
"id": 2883,
"pid": 337,
"city_code": "101271708",
"city_name": "宝兴县"
},
{
"_id": 2158,
"id": 2885,
"pid": 338,
"city_code": "101271103",
"city_name": "宜宾县"
},
{
"_id": 2159,
"id": 2886,
"pid": 338,
"city_code": "101271104",
"city_name": "南溪县"
},
{
"_id": 2160,
"id": 2887,
"pid": 338,
"city_code": "101271105",
"city_name": "江安县"
},
{
"_id": 2161,
"id": 2888,
"pid": 338,
"city_code": "101271106",
"city_name": "长宁县"
},
{
"_id": 2162,
"id": 2889,
"pid": 338,
"city_code": "101271107",
"city_name": "高县"
},
{
"_id": 2163,
"id": 2890,
"pid": 338,
"city_code": "101271108",
"city_name": "珙县"
},
{
"_id": 2164,
"id": 2891,
"pid": 338,
"city_code": "101271109",
"city_name": "筠连县"
},
{
"_id": 2165,
"id": 2892,
"pid": 338,
"city_code": "101271110",
"city_name": "兴文县"
},
{
"_id": 2166,
"id": 2893,
"pid": 338,
"city_code": "101271111",
"city_name": "屏山县"
},
{
"_id": 2167,
"id": 2895,
"pid": 321,
"city_code": "101271304",
"city_name": "简阳市"
},
{
"_id": 2168,
"id": 2896,
"pid": 339,
"city_code": "101271302",
"city_name": "安岳县"
},
{
"_id": 2169,
"id": 2897,
"pid": 339,
"city_code": "101271303",
"city_name": "乐至县"
},
{
"_id": 2170,
"id": 2902,
"pid": 340,
"city_code": "101270303",
"city_name": "荣县"
},
{
"_id": 2171,
"id": 2903,
"pid": 340,
"city_code": "101270302",
"city_name": "富顺县"
},
{
"_id": 2172,
"id": 2905,
"pid": 341,
"city_code": "101271007",
"city_name": "纳溪区"
},
{
"_id": 2173,
"id": 2907,
"pid": 341,
"city_code": "101271003",
"city_name": "泸县"
},
{
"_id": 2174,
"id": 2908,
"pid": 341,
"city_code": "101271004",
"city_name": "合江县"
},
{
"_id": 2175,
"id": 2909,
"pid": 341,
"city_code": "101271005",
"city_name": "叙永县"
},
{
"_id": 2176,
"id": 2910,
"pid": 341,
"city_code": "101271006",
"city_name": "古蔺县"
},
{
"_id": 2177,
"id": 2917,
"pid": 26,
"city_code": "101030400",
"city_name": "东丽区"
},
{
"_id": 2178,
"id": 2918,
"pid": 26,
"city_code": "101031000",
"city_name": "津南区"
},
{
"_id": 2179,
"id": 2919,
"pid": 26,
"city_code": "101030500",
"city_name": "西青区"
},
{
"_id": 2180,
"id": 2920,
"pid": 26,
"city_code": "101030600",
"city_name": "北辰区"
},
{
"_id": 2181,
"id": 2921,
"pid": 26,
"city_code": "101031100",
"city_name": "塘沽区"
},
{
"_id": 2182,
"id": 2922,
"pid": 26,
"city_code": "101030800",
"city_name": "汉沽区"
},
{
"_id": 2183,
"id": 2923,
"pid": 26,
"city_code": "101031200",
"city_name": "大港区"
},
{
"_id": 2184,
"id": 2924,
"pid": 26,
"city_code": "101030200",
"city_name": "武清区"
},
{
"_id": 2185,
"id": 2925,
"pid": 26,
"city_code": "101030300",
"city_name": "宝坻区"
},
{
"_id": 2186,
"id": 2927,
"pid": 26,
"city_code": "101030700",
"city_name": "宁河区"
},
{
"_id": 2187,
"id": 2928,
"pid": 26,
"city_code": "101030900",
"city_name": "静海区"
},
{
"_id": 2188,
"id": 2929,
"pid": 26,
"city_code": "101031400",
"city_name": "蓟州区"
},
{
"_id": 2189,
"id": 2931,
"pid": 343,
"city_code": "101140104",
"city_name": "林周县"
},
{
"_id": 2190,
"id": 2932,
"pid": 343,
"city_code": "101140102",
"city_name": "当雄县"
},
{
"_id": 2191,
"id": 2933,
"pid": 343,
"city_code": "101140103",
"city_name": "尼木县"
},
{
"_id": 2192,
"id": 2934,
"pid": 343,
"city_code": "101140106",
"city_name": "曲水县"
},
{
"_id": 2193,
"id": 2935,
"pid": 343,
"city_code": "101140105",
"city_name": "堆龙德庆县"
},
{
"_id": 2194,
"id": 2936,
"pid": 343,
"city_code": "101140107",
"city_name": "达孜县"
},
{
"_id": 2195,
"id": 2937,
"pid": 343,
"city_code": "101140108",
"city_name": "墨竹工卡县"
},
{
"_id": 2196,
"id": 2938,
"pid": 344,
"city_code": "101140707",
"city_name": "噶尔县"
},
{
"_id": 2197,
"id": 2939,
"pid": 344,
"city_code": "101140705",
"city_name": "普兰县"
},
{
"_id": 2198,
"id": 2940,
"pid": 344,
"city_code": "101140706",
"city_name": "札达县"
},
{
"_id": 2199,
"id": 2941,
"pid": 344,
"city_code": "101140708",
"city_name": "日土县"
},
{
"_id": 2200,
"id": 2942,
"pid": 344,
"city_code": "101140709",
"city_name": "革吉县"
},
{
"_id": 2201,
"id": 2943,
"pid": 344,
"city_code": "101140702",
"city_name": "改则县"
},
{
"_id": 2202,
"id": 2944,
"pid": 344,
"city_code": "101140710",
"city_name": "措勤县"
},
{
"_id": 2203,
"id": 2945,
"pid": 345,
"city_code": "101140501",
"city_name": "昌都县"
},
{
"_id": 2204,
"id": 2946,
"pid": 345,
"city_code": "101140509",
"city_name": "江达县"
},
{
"_id": 2205,
"id": 2947,
"pid": 345,
"city_code": "101140511",
"city_name": "贡觉县"
},
{
"_id": 2206,
"id": 2948,
"pid": 345,
"city_code": "101140503",
"city_name": "类乌齐县"
},
{
"_id": 2207,
"id": 2949,
"pid": 345,
"city_code": "101140502",
"city_name": "丁青县"
},
{
"_id": 2208,
"id": 2950,
"pid": 345,
"city_code": "101140510",
"city_name": "察雅县"
},
{
"_id": 2209,
"id": 2951,
"pid": 345,
"city_code": "101140507",
"city_name": "八宿县"
},
{
"_id": 2210,
"id": 2952,
"pid": 345,
"city_code": "101140505",
"city_name": "左贡县"
},
{
"_id": 2211,
"id": 2953,
"pid": 345,
"city_code": "101140506",
"city_name": "芒康县"
},
{
"_id": 2212,
"id": 2954,
"pid": 345,
"city_code": "101140504",
"city_name": "洛隆县"
},
{
"_id": 2213,
"id": 2955,
"pid": 345,
"city_code": "101140503",
"city_name": "边坝县"
},
{
"_id": 2214,
"id": 2956,
"pid": 346,
"city_code": "101140401",
"city_name": "林芝县"
},
{
"_id": 2215,
"id": 2957,
"pid": 346,
"city_code": "101140405",
"city_name": "工布江达县"
},
{
"_id": 2216,
"id": 2958,
"pid": 346,
"city_code": "101140403",
"city_name": "米林县"
},
{
"_id": 2217,
"id": 2959,
"pid": 346,
"city_code": "101140407",
"city_name": "墨脱县"
},
{
"_id": 2218,
"id": 2960,
"pid": 346,
"city_code": "101140402",
"city_name": "波密县"
},
{
"_id": 2219,
"id": 2961,
"pid": 346,
"city_code": "101140404",
"city_name": "察隅县"
},
{
"_id": 2220,
"id": 2962,
"pid": 346,
"city_code": "101140406",
"city_name": "朗县"
},
{
"_id": 2221,
"id": 2963,
"pid": 347,
"city_code": "101140601",
"city_name": "那曲县"
},
{
"_id": 2222,
"id": 2964,
"pid": 347,
"city_code": "101140603",
"city_name": "嘉黎县"
},
{
"_id": 2223,
"id": 2965,
"pid": 347,
"city_code": "101140607",
"city_name": "比如县"
},
{
"_id": 2224,
"id": 2966,
"pid": 347,
"city_code": "101140607",
"city_name": "聂荣县"
},
{
"_id": 2225,
"id": 2967,
"pid": 347,
"city_code": "101140605",
"city_name": "安多县"
},
{
"_id": 2226,
"id": 2968,
"pid": 347,
"city_code": "101140703",
"city_name": "申扎县"
},
{
"_id": 2227,
"id": 2969,
"pid": 347,
"city_code": "101140606",
"city_name": "索县"
},
{
"_id": 2228,
"id": 2970,
"pid": 347,
"city_code": "101140604",
"city_name": "班戈县"
},
{
"_id": 2229,
"id": 2971,
"pid": 347,
"city_code": "101140608",
"city_name": "巴青县"
},
{
"_id": 2230,
"id": 2972,
"pid": 347,
"city_code": "101140602",
"city_name": "尼玛县"
},
{
"_id": 2231,
"id": 2973,
"pid": 348,
"city_code": "101140201",
"city_name": "日喀则市"
},
{
"_id": 2232,
"id": 2974,
"pid": 348,
"city_code": "101140203",
"city_name": "南木林县"
},
{
"_id": 2233,
"id": 2975,
"pid": 348,
"city_code": "101140206",
"city_name": "江孜县"
},
{
"_id": 2234,
"id": 2976,
"pid": 348,
"city_code": "101140205",
"city_name": "定日县"
},
{
"_id": 2235,
"id": 2977,
"pid": 348,
"city_code": "101140213",
"city_name": "萨迦县"
},
{
"_id": 2236,
"id": 2978,
"pid": 348,
"city_code": "101140202",
"city_name": "拉孜县"
},
{
"_id": 2237,
"id": 2979,
"pid": 348,
"city_code": "101140211",
"city_name": "昂仁县"
},
{
"_id": 2238,
"id": 2980,
"pid": 348,
"city_code": "101140214",
"city_name": "谢通门县"
},
{
"_id": 2239,
"id": 2981,
"pid": 348,
"city_code": "101140217",
"city_name": "白朗县"
},
{
"_id": 2240,
"id": 2982,
"pid": 348,
"city_code": "101140220",
"city_name": "仁布县"
},
{
"_id": 2241,
"id": 2983,
"pid": 348,
"city_code": "101140219",
"city_name": "康马县"
},
{
"_id": 2242,
"id": 2984,
"pid": 348,
"city_code": "101140212",
"city_name": "定结县"
},
{
"_id": 2243,
"id": 2985,
"pid": 348,
"city_code": "101140208",
"city_name": "仲巴县"
},
{
"_id": 2244,
"id": 2986,
"pid": 348,
"city_code": "101140218",
"city_name": "亚东县"
},
{
"_id": 2245,
"id": 2987,
"pid": 348,
"city_code": "101140210",
"city_name": "吉隆县"
},
{
"_id": 2246,
"id": 2988,
"pid": 348,
"city_code": "101140204",
"city_name": "聂拉木县"
},
{
"_id": 2247,
"id": 2989,
"pid": 348,
"city_code": "101140209",
"city_name": "萨嘎县"
},
{
"_id": 2248,
"id": 2990,
"pid": 348,
"city_code": "101140216",
"city_name": "岗巴县"
},
{
"_id": 2249,
"id": 2991,
"pid": 349,
"city_code": "101140309",
"city_name": "乃东县"
},
{
"_id": 2250,
"id": 2992,
"pid": 349,
"city_code": "101140303",
"city_name": "扎囊县"
},
{
"_id": 2251,
"id": 2993,
"pid": 349,
"city_code": "101140302",
"city_name": "贡嘎县"
},
{
"_id": 2252,
"id": 2994,
"pid": 349,
"city_code": "101140310",
"city_name": "桑日县"
},
{
"_id": 2253,
"id": 2995,
"pid": 349,
"city_code": "101140303",
"city_name": "琼结县"
},
{
"_id": 2254,
"id": 2996,
"pid": 349,
"city_code": "101140314",
"city_name": "曲松县"
},
{
"_id": 2255,
"id": 2997,
"pid": 349,
"city_code": "101140312",
"city_name": "措美县"
},
{
"_id": 2256,
"id": 2998,
"pid": 349,
"city_code": "101140311",
"city_name": "洛扎县"
},
{
"_id": 2257,
"id": 2999,
"pid": 349,
"city_code": "101140304",
"city_name": "加查县"
},
{
"_id": 2258,
"id": 3000,
"pid": 349,
"city_code": "101140307",
"city_name": "隆子县"
},
{
"_id": 2259,
"id": 3001,
"pid": 349,
"city_code": "101140306",
"city_name": "错那县"
},
{
"_id": 2260,
"id": 3002,
"pid": 349,
"city_code": "101140305",
"city_name": "浪卡子县"
},
{
"_id": 2261,
"id": 3008,
"pid": 350,
"city_code": "101130105",
"city_name": "达坂城区"
},
{
"_id": 2262,
"id": 3010,
"pid": 350,
"city_code": "101130101",
"city_name": "乌鲁木齐县"
},
{
"_id": 2263,
"id": 3011,
"pid": 351,
"city_code": "101130801",
"city_name": "阿克苏市"
},
{
"_id": 2264,
"id": 3012,
"pid": 351,
"city_code": "101130803",
"city_name": "温宿县"
},
{
"_id": 2265,
"id": 3013,
"pid": 351,
"city_code": "101130807",
"city_name": "库车县"
},
{
"_id": 2266,
"id": 3014,
"pid": 351,
"city_code": "101130806",
"city_name": "沙雅县"
},
{
"_id": 2267,
"id": 3015,
"pid": 351,
"city_code": "101130805",
"city_name": "新和县"
},
{
"_id": 2268,
"id": 3016,
"pid": 351,
"city_code": "101130804",
"city_name": "拜城县"
},
{
"_id": 2269,
"id": 3017,
"pid": 351,
"city_code": "101130802",
"city_name": "乌什县"
},
{
"_id": 2270,
"id": 3018,
"pid": 351,
"city_code": "101130809",
"city_name": "阿瓦提县"
},
{
"_id": 2271,
"id": 3019,
"pid": 351,
"city_code": "101130808",
"city_name": "柯坪县"
},
{
"_id": 2272,
"id": 3020,
"pid": 352,
"city_code": "101130701",
"city_name": "阿拉尔市"
},
{
"_id": 2273,
"id": 3021,
"pid": 353,
"city_code": "101130601",
"city_name": "库尔勒"
},
{
"_id": 2274,
"id": 3022,
"pid": 353,
"city_code": "101130602",
"city_name": "轮台县"
},
{
"_id": 2275,
"id": 3023,
"pid": 353,
"city_code": "101130603",
"city_name": "尉犁县"
},
{
"_id": 2276,
"id": 3024,
"pid": 353,
"city_code": "101130604",
"city_name": "若羌县"
},
{
"_id": 2277,
"id": 3025,
"pid": 353,
"city_code": "101130605",
"city_name": "且末县"
},
{
"_id": 2278,
"id": 3026,
"pid": 353,
"city_code": "101130607",
"city_name": "焉耆县"
},
{
"_id": 2279,
"id": 3027,
"pid": 353,
"city_code": "101130606",
"city_name": "和静县"
},
{
"_id": 2280,
"id": 3028,
"pid": 353,
"city_code": "101130608",
"city_name": "和硕县"
},
{
"_id": 2281,
"id": 3029,
"pid": 353,
"city_code": "101130612",
"city_name": "博湖县"
},
{
"_id": 2282,
"id": 3030,
"pid": 354,
"city_code": "101131601",
"city_name": "博乐市"
},
{
"_id": 2283,
"id": 3031,
"pid": 354,
"city_code": "101131603",
"city_name": "精河县"
},
{
"_id": 2284,
"id": 3032,
"pid": 354,
"city_code": "101131602",
"city_name": "温泉县"
},
{
"_id": 2285,
"id": 3033,
"pid": 355,
"city_code": "101130402",
"city_name": "呼图壁县"
},
{
"_id": 2286,
"id": 3034,
"pid": 355,
"city_code": "101130403",
"city_name": "米泉市"
},
{
"_id": 2287,
"id": 3035,
"pid": 355,
"city_code": "101130401",
"city_name": "昌吉市"
},
{
"_id": 2288,
"id": 3036,
"pid": 355,
"city_code": "101130404",
"city_name": "阜康市"
},
{
"_id": 2289,
"id": 3037,
"pid": 355,
"city_code": "101130407",
"city_name": "玛纳斯县"
},
{
"_id": 2290,
"id": 3038,
"pid": 355,
"city_code": "101130406",
"city_name": "奇台县"
},
{
"_id": 2291,
"id": 3039,
"pid": 355,
"city_code": "101130405",
"city_name": "吉木萨尔县"
},
{
"_id": 2292,
"id": 3040,
"pid": 355,
"city_code": "101130408",
"city_name": "木垒县"
},
{
"_id": 2293,
"id": 3041,
"pid": 356,
"city_code": "101131201",
"city_name": "哈密市"
},
{
"_id": 2294,
"id": 3042,
"pid": 356,
"city_code": "101131204",
"city_name": "伊吾县"
},
{
"_id": 2295,
"id": 3043,
"pid": 356,
"city_code": "101131203",
"city_name": "巴里坤"
},
{
"_id": 2296,
"id": 3044,
"pid": 357,
"city_code": "101131301",
"city_name": "和田市"
},
{
"_id": 2297,
"id": 3045,
"pid": 357,
"city_code": "101131301",
"city_name": "和田县"
},
{
"_id": 2298,
"id": 3046,
"pid": 357,
"city_code": "101131304",
"city_name": "墨玉县"
},
{
"_id": 2299,
"id": 3047,
"pid": 357,
"city_code": "101131302",
"city_name": "皮山县"
},
{
"_id": 2300,
"id": 3048,
"pid": 357,
"city_code": "101131305",
"city_name": "洛浦县"
},
{
"_id": 2301,
"id": 3049,
"pid": 357,
"city_code": "101131303",
"city_name": "策勒县"
},
{
"_id": 2302,
"id": 3050,
"pid": 357,
"city_code": "101131307",
"city_name": "于田县"
},
{
"_id": 2303,
"id": 3051,
"pid": 357,
"city_code": "101131306",
"city_name": "民丰县"
},
{
"_id": 2304,
"id": 3052,
"pid": 358,
"city_code": "101130901",
"city_name": "喀什市"
},
{
"_id": 2305,
"id": 3053,
"pid": 358,
"city_code": "101130911",
"city_name": "疏附县"
},
{
"_id": 2306,
"id": 3054,
"pid": 358,
"city_code": "101130912",
"city_name": "疏勒县"
},
{
"_id": 2307,
"id": 3055,
"pid": 358,
"city_code": "101130902",
"city_name": "英吉沙县"
},
{
"_id": 2308,
"id": 3056,
"pid": 358,
"city_code": "101130907",
"city_name": "泽普县"
},
{
"_id": 2309,
"id": 3057,
"pid": 358,
"city_code": "101130905",
"city_name": "莎车县"
},
{
"_id": 2310,
"id": 3058,
"pid": 358,
"city_code": "101130906",
"city_name": "叶城县"
},
{
"_id": 2311,
"id": 3059,
"pid": 358,
"city_code": "101130904",
"city_name": "麦盖提县"
},
{
"_id": 2312,
"id": 3060,
"pid": 358,
"city_code": "101130909",
"city_name": "岳普湖县"
},
{
"_id": 2313,
"id": 3061,
"pid": 358,
"city_code": "101130910",
"city_name": "伽师县"
},
{
"_id": 2314,
"id": 3062,
"pid": 358,
"city_code": "101130908",
"city_name": "巴楚县"
},
{
"_id": 2315,
"id": 3063,
"pid": 358,
"city_code": "101130903",
"city_name": "塔什库尔干"
},
{
"_id": 2316,
"id": 3064,
"pid": 359,
"city_code": "101130201",
"city_name": "克拉玛依市"
},
{
"_id": 2317,
"id": 3065,
"pid": 360,
"city_code": "101131501",
"city_name": "阿图什市"
},
{
"_id": 2318,
"id": 3066,
"pid": 360,
"city_code": "101131503",
"city_name": "阿克陶县"
},
{
"_id": 2319,
"id": 3067,
"pid": 360,
"city_code": "101131504",
"city_name": "阿合奇县"
},
{
"_id": 2320,
"id": 3068,
"pid": 360,
"city_code": "101131502",
"city_name": "乌恰县"
},
{
"_id": 2321,
"id": 3069,
"pid": 361,
"city_code": "101130301",
"city_name": "石河子市"
},
{
"_id": 2322,
"id": 3071,
"pid": 363,
"city_code": "101130501",
"city_name": "吐鲁番市"
},
{
"_id": 2323,
"id": 3072,
"pid": 363,
"city_code": "101130504",
"city_name": "鄯善县"
},
{
"_id": 2324,
"id": 3073,
"pid": 363,
"city_code": "101130502",
"city_name": "托克逊县"
},
{
"_id": 2325,
"id": 3075,
"pid": 365,
"city_code": "101131401",
"city_name": "阿勒泰"
},
{
"_id": 2326,
"id": 3076,
"pid": 365,
"city_code": "101131104",
"city_name": "和布克赛尔"
},
{
"_id": 2327,
"id": 3077,
"pid": 365,
"city_code": "101131001",
"city_name": "伊宁市"
},
{
"_id": 2328,
"id": 3078,
"pid": 365,
"city_code": "101131406",
"city_name": "布尔津县"
},
{
"_id": 2329,
"id": 3079,
"pid": 365,
"city_code": "101131011",
"city_name": "奎屯市"
},
{
"_id": 2330,
"id": 3080,
"pid": 365,
"city_code": "101131106",
"city_name": "乌苏市"
},
{
"_id": 2331,
"id": 3081,
"pid": 365,
"city_code": "101131103",
"city_name": "额敏县"
},
{
"_id": 2332,
"id": 3082,
"pid": 365,
"city_code": "101131408",
"city_name": "富蕴县"
},
{
"_id": 2333,
"id": 3083,
"pid": 365,
"city_code": "101131004",
"city_name": "伊宁县"
},
{
"_id": 2334,
"id": 3084,
"pid": 365,
"city_code": "101131407",
"city_name": "福海县"
},
{
"_id": 2335,
"id": 3085,
"pid": 365,
"city_code": "101131009",
"city_name": "霍城县"
},
{
"_id": 2336,
"id": 3086,
"pid": 365,
"city_code": "101131107",
"city_name": "沙湾县"
},
{
"_id": 2337,
"id": 3087,
"pid": 365,
"city_code": "101131005",
"city_name": "巩留县"
},
{
"_id": 2338,
"id": 3088,
"pid": 365,
"city_code": "101131402",
"city_name": "哈巴河县"
},
{
"_id": 2339,
"id": 3089,
"pid": 365,
"city_code": "101131105",
"city_name": "托里县"
},
{
"_id": 2340,
"id": 3090,
"pid": 365,
"city_code": "101131409",
"city_name": "青河县"
},
{
"_id": 2341,
"id": 3091,
"pid": 365,
"city_code": "101131006",
"city_name": "新源县"
},
{
"_id": 2342,
"id": 3092,
"pid": 365,
"city_code": "101131102",
"city_name": "裕民县"
},
{
"_id": 2343,
"id": 3094,
"pid": 365,
"city_code": "101131405",
"city_name": "吉木乃县"
},
{
"_id": 2344,
"id": 3095,
"pid": 365,
"city_code": "101131007",
"city_name": "昭苏县"
},
{
"_id": 2345,
"id": 3096,
"pid": 365,
"city_code": "101131008",
"city_name": "特克斯县"
},
{
"_id": 2346,
"id": 3097,
"pid": 365,
"city_code": "101131003",
"city_name": "尼勒克县"
},
{
"_id": 2347,
"id": 3098,
"pid": 365,
"city_code": "101131002",
"city_name": "察布查尔"
},
{
"_id": 2348,
"id": 3103,
"pid": 366,
"city_code": "101290103",
"city_name": "东川区"
},
{
"_id": 2349,
"id": 3104,
"pid": 366,
"city_code": "101290112",
"city_name": "安宁市"
},
{
"_id": 2350,
"id": 3105,
"pid": 366,
"city_code": "101290108",
"city_name": "呈贡县"
},
{
"_id": 2351,
"id": 3106,
"pid": 366,
"city_code": "101290105",
"city_name": "晋宁县"
},
{
"_id": 2352,
"id": 3107,
"pid": 366,
"city_code": "101290109",
"city_name": "富民县"
},
{
"_id": 2353,
"id": 3108,
"pid": 366,
"city_code": "101290106",
"city_name": "宜良县"
},
{
"_id": 2354,
"id": 3109,
"pid": 366,
"city_code": "101290110",
"city_name": "嵩明县"
},
{
"_id": 2355,
"id": 3110,
"pid": 366,
"city_code": "101290107",
"city_name": "石林县"
},
{
"_id": 2356,
"id": 3111,
"pid": 366,
"city_code": "101290111",
"city_name": "禄劝县"
},
{
"_id": 2357,
"id": 3112,
"pid": 366,
"city_code": "101290104",
"city_name": "寻甸县"
},
{
"_id": 2358,
"id": 3113,
"pid": 367,
"city_code": "101291204",
"city_name": "兰坪县"
},
{
"_id": 2359,
"id": 3114,
"pid": 367,
"city_code": "101291205",
"city_name": "泸水县"
},
{
"_id": 2360,
"id": 3115,
"pid": 367,
"city_code": "101291203",
"city_name": "福贡县"
},
{
"_id": 2361,
"id": 3116,
"pid": 367,
"city_code": "101291207",
"city_name": "贡山县"
},
{
"_id": 2362,
"id": 3117,
"pid": 368,
"city_code": "101290912",
"city_name": "宁洱县"
},
{
"_id": 2363,
"id": 3118,
"pid": 368,
"city_code": "101290901",
"city_name": "思茅区"
},
{
"_id": 2364,
"id": 3119,
"pid": 368,
"city_code": "101290906",
"city_name": "墨江县"
},
{
"_id": 2365,
"id": 3120,
"pid": 368,
"city_code": "101290903",
"city_name": "景东县"
},
{
"_id": 2366,
"id": 3121,
"pid": 368,
"city_code": "101290902",
"city_name": "景谷县"
},
{
"_id": 2367,
"id": 3122,
"pid": 368,
"city_code": "101290911",
"city_name": "镇沅县"
},
{
"_id": 2368,
"id": 3123,
"pid": 368,
"city_code": "101290907",
"city_name": "江城县"
},
{
"_id": 2369,
"id": 3124,
"pid": 368,
"city_code": "101290908",
"city_name": "孟连县"
},
{
"_id": 2370,
"id": 3125,
"pid": 368,
"city_code": "101290904",
"city_name": "澜沧县"
},
{
"_id": 2371,
"id": 3126,
"pid": 368,
"city_code": "101290909",
"city_name": "西盟县"
},
{
"_id": 2372,
"id": 3128,
"pid": 369,
"city_code": "101291404",
"city_name": "宁蒗县"
},
{
"_id": 2373,
"id": 3130,
"pid": 369,
"city_code": "101291402",
"city_name": "永胜县"
},
{
"_id": 2374,
"id": 3131,
"pid": 369,
"city_code": "101291403",
"city_name": "华坪县"
},
{
"_id": 2375,
"id": 3133,
"pid": 370,
"city_code": "101290504",
"city_name": "施甸县"
},
{
"_id": 2376,
"id": 3134,
"pid": 370,
"city_code": "101290506",
"city_name": "腾冲县"
},
{
"_id": 2377,
"id": 3135,
"pid": 370,
"city_code": "101290503",
"city_name": "龙陵县"
},
{
"_id": 2378,
"id": 3136,
"pid": 370,
"city_code": "101290505",
"city_name": "昌宁县"
},
{
"_id": 2379,
"id": 3137,
"pid": 371,
"city_code": "101290801",
"city_name": "楚雄市"
},
{
"_id": 2380,
"id": 3138,
"pid": 371,
"city_code": "101290809",
"city_name": "双柏县"
},
{
"_id": 2381,
"id": 3139,
"pid": 371,
"city_code": "101290805",
"city_name": "牟定县"
},
{
"_id": 2382,
"id": 3140,
"pid": 371,
"city_code": "101290806",
"city_name": "南华县"
},
{
"_id": 2383,
"id": 3141,
"pid": 371,
"city_code": "101290804",
"city_name": "姚安县"
},
{
"_id": 2384,
"id": 3142,
"pid": 371,
"city_code": "101290802",
"city_name": "大姚县"
},
{
"_id": 2385,
"id": 3143,
"pid": 371,
"city_code": "101290810",
"city_name": "永仁县"
},
{
"_id": 2386,
"id": 3144,
"pid": 371,
"city_code": "101290803",
"city_name": "元谋县"
},
{
"_id": 2387,
"id": 3145,
"pid": 371,
"city_code": "101290807",
"city_name": "武定县"
},
{
"_id": 2388,
"id": 3146,
"pid": 371,
"city_code": "101290808",
"city_name": "禄丰县"
},
{
"_id": 2389,
"id": 3147,
"pid": 372,
"city_code": "101290201",
"city_name": "大理市"
},
{
"_id": 2390,
"id": 3148,
"pid": 372,
"city_code": "101290207",
"city_name": "祥云县"
},
{
"_id": 2391,
"id": 3149,
"pid": 372,
"city_code": "101290205",
"city_name": "宾川县"
},
{
"_id": 2392,
"id": 3150,
"pid": 372,
"city_code": "101290206",
"city_name": "弥渡县"
},
{
"_id": 2393,
"id": 3151,
"pid": 372,
"city_code": "101290204",
"city_name": "永平县"
},
{
"_id": 2394,
"id": 3152,
"pid": 372,
"city_code": "101290202",
"city_name": "云龙县"
},
{
"_id": 2395,
"id": 3153,
"pid": 372,
"city_code": "101290210",
"city_name": "洱源县"
},
{
"_id": 2396,
"id": 3154,
"pid": 372,
"city_code": "101290209",
"city_name": "剑川县"
},
{
"_id": 2397,
"id": 3155,
"pid": 372,
"city_code": "101290211",
"city_name": "鹤庆县"
},
{
"_id": 2398,
"id": 3156,
"pid": 372,
"city_code": "101290203",
"city_name": "漾濞县"
},
{
"_id": 2399,
"id": 3157,
"pid": 372,
"city_code": "101290212",
"city_name": "南涧县"
},
{
"_id": 2400,
"id": 3158,
"pid": 372,
"city_code": "101290208",
"city_name": "巍山县"
},
{
"_id": 2401,
"id": 3159,
"pid": 373,
"city_code": "101291508",
"city_name": "潞西市"
},
{
"_id": 2402,
"id": 3160,
"pid": 373,
"city_code": "101291506",
"city_name": "瑞丽市"
},
{
"_id": 2403,
"id": 3161,
"pid": 373,
"city_code": "101291507",
"city_name": "梁河县"
},
{
"_id": 2404,
"id": 3162,
"pid": 373,
"city_code": "101291504",
"city_name": "盈江县"
},
{
"_id": 2405,
"id": 3163,
"pid": 373,
"city_code": "101291503",
"city_name": "陇川县"
},
{
"_id": 2406,
"id": 3164,
"pid": 374,
"city_code": "101291301",
"city_name": "香格里拉县"
},
{
"_id": 2407,
"id": 3165,
"pid": 374,
"city_code": "101291302",
"city_name": "德钦县"
},
{
"_id": 2408,
"id": 3166,
"pid": 374,
"city_code": "101291303",
"city_name": "维西县"
},
{
"_id": 2409,
"id": 3167,
"pid": 375,
"city_code": "101290311",
"city_name": "泸西县"
},
{
"_id": 2410,
"id": 3168,
"pid": 375,
"city_code": "101290309",
"city_name": "蒙自市"
},
{
"_id": 2411,
"id": 3169,
"pid": 375,
"city_code": "101290308",
"city_name": "个旧市"
},
{
"_id": 2412,
"id": 3170,
"pid": 375,
"city_code": "101290307",
"city_name": "开远市"
},
{
"_id": 2413,
"id": 3171,
"pid": 375,
"city_code": "101290306",
"city_name": "绿春县"
},
{
"_id": 2414,
"id": 3172,
"pid": 375,
"city_code": "101290303",
"city_name": "建水县"
},
{
"_id": 2415,
"id": 3173,
"pid": 375,
"city_code": "101290302",
"city_name": "石屏县"
},
{
"_id": 2416,
"id": 3174,
"pid": 375,
"city_code": "101290304",
"city_name": "弥勒县"
},
{
"_id": 2417,
"id": 3175,
"pid": 375,
"city_code": "101290305",
"city_name": "元阳县"
},
{
"_id": 2418,
"id": 3176,
"pid": 375,
"city_code": "101290301",
"city_name": "红河县"
},
{
"_id": 2419,
"id": 3177,
"pid": 375,
"city_code": "101290312",
"city_name": "金平县"
},
{
"_id": 2420,
"id": 3178,
"pid": 375,
"city_code": "101290313",
"city_name": "河口县"
},
{
"_id": 2421,
"id": 3179,
"pid": 375,
"city_code": "101290310",
"city_name": "屏边县"
},
{
"_id": 2422,
"id": 3181,
"pid": 376,
"city_code": "101291105",
"city_name": "凤庆县"
},
{
"_id": 2423,
"id": 3182,
"pid": 376,
"city_code": "101291107",
"city_name": "云县"
},
{
"_id": 2424,
"id": 3183,
"pid": 376,
"city_code": "101291106",
"city_name": "永德县"
},
{
"_id": 2425,
"id": 3184,
"pid": 376,
"city_code": "101291108",
"city_name": "镇康县"
},
{
"_id": 2426,
"id": 3185,
"pid": 376,
"city_code": "101291104",
"city_name": "双江县"
},
{
"_id": 2427,
"id": 3186,
"pid": 376,
"city_code": "101291103",
"city_name": "耿马县"
},
{
"_id": 2428,
"id": 3187,
"pid": 376,
"city_code": "101291102",
"city_name": "沧源县"
},
{
"_id": 2429,
"id": 3189,
"pid": 377,
"city_code": "101290409",
"city_name": "宣威市"
},
{
"_id": 2430,
"id": 3190,
"pid": 377,
"city_code": "101290405",
"city_name": "马龙县"
},
{
"_id": 2431,
"id": 3191,
"pid": 377,
"city_code": "101290403",
"city_name": "陆良县"
},
{
"_id": 2432,
"id": 3192,
"pid": 377,
"city_code": "101290406",
"city_name": "师宗县"
},
{
"_id": 2433,
"id": 3193,
"pid": 377,
"city_code": "101290407",
"city_name": "罗平县"
},
{
"_id": 2434,
"id": 3194,
"pid": 377,
"city_code": "101290404",
"city_name": "富源县"
},
{
"_id": 2435,
"id": 3195,
"pid": 377,
"city_code": "101290408",
"city_name": "会泽县"
},
{
"_id": 2436,
"id": 3196,
"pid": 377,
"city_code": "101290402",
"city_name": "沾益县"
},
{
"_id": 2437,
"id": 3197,
"pid": 378,
"city_code": "101290601",
"city_name": "文山县"
},
{
"_id": 2438,
"id": 3198,
"pid": 378,
"city_code": "101290605",
"city_name": "砚山县"
},
{
"_id": 2439,
"id": 3199,
"pid": 378,
"city_code": "101290602",
"city_name": "西畴县"
},
{
"_id": 2440,
"id": 3200,
"pid": 378,
"city_code": "101290604",
"city_name": "麻栗坡县"
},
{
"_id": 2441,
"id": 3201,
"pid": 378,
"city_code": "101290603",
"city_name": "马关县"
},
{
"_id": 2442,
"id": 3202,
"pid": 378,
"city_code": "101290606",
"city_name": "丘北县"
},
{
"_id": 2443,
"id": 3203,
"pid": 378,
"city_code": "101290607",
"city_name": "广南县"
},
{
"_id": 2444,
"id": 3204,
"pid": 378,
"city_code": "101290608",
"city_name": "富宁县"
},
{
"_id": 2445,
"id": 3205,
"pid": 379,
"city_code": "101291601",
"city_name": "景洪市"
},
{
"_id": 2446,
"id": 3206,
"pid": 379,
"city_code": "101291603",
"city_name": "勐海县"
},
{
"_id": 2447,
"id": 3207,
"pid": 379,
"city_code": "101291605",
"city_name": "勐腊县"
},
{
"_id": 2448,
"id": 3209,
"pid": 380,
"city_code": "101290703",
"city_name": "江川县"
},
{
"_id": 2449,
"id": 3210,
"pid": 380,
"city_code": "101290702",
"city_name": "澄江县"
},
{
"_id": 2450,
"id": 3211,
"pid": 380,
"city_code": "101290704",
"city_name": "通海县"
},
{
"_id": 2451,
"id": 3212,
"pid": 380,
"city_code": "101290705",
"city_name": "华宁县"
},
{
"_id": 2452,
"id": 3213,
"pid": 380,
"city_code": "101290707",
"city_name": "易门县"
},
{
"_id": 2453,
"id": 3214,
"pid": 380,
"city_code": "101290708",
"city_name": "峨山县"
},
{
"_id": 2454,
"id": 3215,
"pid": 380,
"city_code": "101290706",
"city_name": "新平县"
},
{
"_id": 2455,
"id": 3216,
"pid": 380,
"city_code": "101290709",
"city_name": "元江县"
},
{
"_id": 2456,
"id": 3218,
"pid": 381,
"city_code": "101291002",
"city_name": "鲁甸县"
},
{
"_id": 2457,
"id": 3219,
"pid": 381,
"city_code": "101291006",
"city_name": "巧家县"
},
{
"_id": 2458,
"id": 3220,
"pid": 381,
"city_code": "101291009",
"city_name": "盐津县"
},
{
"_id": 2459,
"id": 3221,
"pid": 381,
"city_code": "101291010",
"city_name": "大关县"
},
{
"_id": 2460,
"id": 3222,
"pid": 381,
"city_code": "101291008",
"city_name": "永善县"
},
{
"_id": 2461,
"id": 3223,
"pid": 381,
"city_code": "101291007",
"city_name": "绥江县"
},
{
"_id": 2462,
"id": 3224,
"pid": 381,
"city_code": "101291004",
"city_name": "镇雄县"
},
{
"_id": 2463,
"id": 3225,
"pid": 381,
"city_code": "101291003",
"city_name": "彝良县"
},
{
"_id": 2464,
"id": 3226,
"pid": 381,
"city_code": "101291005",
"city_name": "威信县"
},
{
"_id": 2465,
"id": 3227,
"pid": 381,
"city_code": "101291011",
"city_name": "水富县"
},
{
"_id": 2466,
"id": 3234,
"pid": 382,
"city_code": "101210102",
"city_name": "萧山区"
},
{
"_id": 2467,
"id": 3235,
"pid": 382,
"city_code": "101210106",
"city_name": "余杭区"
},
{
"_id": 2468,
"id": 3237,
"pid": 382,
"city_code": "101210105",
"city_name": "建德市"
},
{
"_id": 2469,
"id": 3238,
"pid": 382,
"city_code": "101210108",
"city_name": "富阳区"
},
{
"_id": 2470,
"id": 3239,
"pid": 382,
"city_code": "101210107",
"city_name": "临安市"
},
{
"_id": 2471,
"id": 3240,
"pid": 382,
"city_code": "101210103",
"city_name": "桐庐县"
},
{
"_id": 2472,
"id": 3241,
"pid": 382,
"city_code": "101210104",
"city_name": "淳安县"
},
{
"_id": 2473,
"id": 3244,
"pid": 383,
"city_code": "101210204",
"city_name": "德清县"
},
{
"_id": 2474,
"id": 3245,
"pid": 383,
"city_code": "101210202",
"city_name": "长兴县"
},
{
"_id": 2475,
"id": 3246,
"pid": 383,
"city_code": "101210203",
"city_name": "安吉县"
},
{
"_id": 2476,
"id": 3249,
"pid": 384,
"city_code": "101210303",
"city_name": "海宁市"
},
{
"_id": 2477,
"id": 3250,
"pid": 384,
"city_code": "101210302",
"city_name": "嘉善县"
},
{
"_id": 2478,
"id": 3251,
"pid": 384,
"city_code": "101210305",
"city_name": "平湖市"
},
{
"_id": 2479,
"id": 3252,
"pid": 384,
"city_code": "101210304",
"city_name": "桐乡市"
},
{
"_id": 2480,
"id": 3253,
"pid": 384,
"city_code": "101210306",
"city_name": "海盐县"
},
{
"_id": 2481,
"id": 3256,
"pid": 385,
"city_code": "101210903",
"city_name": "兰溪市"
},
{
"_id": 2482,
"id": 3257,
"pid": 385,
"city_code": "101210904",
"city_name": "义乌市"
},
{
"_id": 2483,
"id": 3264,
"pid": 385,
"city_code": "101210905",
"city_name": "东阳市"
},
{
"_id": 2484,
"id": 3265,
"pid": 385,
"city_code": "101210907",
"city_name": "永康市"
},
{
"_id": 2485,
"id": 3266,
"pid": 385,
"city_code": "101210906",
"city_name": "武义县"
},
{
"_id": 2486,
"id": 3267,
"pid": 385,
"city_code": "101210902",
"city_name": "浦江县"
},
{
"_id": 2487,
"id": 3268,
"pid": 385,
"city_code": "101210908",
"city_name": "磐安县"
},
{
"_id": 2488,
"id": 3270,
"pid": 386,
"city_code": "101210803",
"city_name": "龙泉市"
},
{
"_id": 2489,
"id": 3271,
"pid": 386,
"city_code": "101210805",
"city_name": "青田县"
},
{
"_id": 2490,
"id": 3272,
"pid": 386,
"city_code": "101210804",
"city_name": "缙云县"
},
{
"_id": 2491,
"id": 3273,
"pid": 386,
"city_code": "101210802",
"city_name": "遂昌县"
},
{
"_id": 2492,
"id": 3274,
"pid": 386,
"city_code": "101210808",
"city_name": "松阳县"
},
{
"_id": 2493,
"id": 3275,
"pid": 386,
"city_code": "101210806",
"city_name": "云和县"
},
{
"_id": 2494,
"id": 3276,
"pid": 386,
"city_code": "101210807",
"city_name": "庆元县"
},
{
"_id": 2495,
"id": 3277,
"pid": 386,
"city_code": "101210809",
"city_name": "景宁县"
},
{
"_id": 2496,
"id": 3281,
"pid": 387,
"city_code": "101210412",
"city_name": "镇海区"
},
{
"_id": 2497,
"id": 3282,
"pid": 387,
"city_code": "101210410",
"city_name": "北仑区"
},
{
"_id": 2498,
"id": 3283,
"pid": 387,
"city_code": "101210411",
"city_name": "鄞州区"
},
{
"_id": 2499,
"id": 3284,
"pid": 387,
"city_code": "101210404",
"city_name": "余姚市"
},
{
"_id": 2500,
"id": 3285,
"pid": 387,
"city_code": "101210403",
"city_name": "慈溪市"
},
{
"_id": 2501,
"id": 3286,
"pid": 387,
"city_code": "101210405",
"city_name": "奉化区"
},
{
"_id": 2502,
"id": 3287,
"pid": 387,
"city_code": "101210406",
"city_name": "象山县"
},
{
"_id": 2503,
"id": 3288,
"pid": 387,
"city_code": "101210408",
"city_name": "宁海县"
},
{
"_id": 2504,
"id": 3290,
"pid": 388,
"city_code": "101210503",
"city_name": "上虞区"
},
{
"_id": 2505,
"id": 3291,
"pid": 388,
"city_code": "101210505",
"city_name": "嵊州市"
},
{
"_id": 2506,
"id": 3292,
"pid": 388,
"city_code": "101210501",
"city_name": "绍兴县"
},
{
"_id": 2507,
"id": 3293,
"pid": 388,
"city_code": "101210504",
"city_name": "新昌县"
},
{
"_id": 2508,
"id": 3294,
"pid": 388,
"city_code": "101210502",
"city_name": "诸暨市"
},
{
"_id": 2509,
"id": 3295,
"pid": 389,
"city_code": "101210611",
"city_name": "椒江区"
},
{
"_id": 2510,
"id": 3296,
"pid": 389,
"city_code": "101210612",
"city_name": "黄岩区"
},
{
"_id": 2511,
"id": 3297,
"pid": 389,
"city_code": "101210613",
"city_name": "路桥区"
},
{
"_id": 2512,
"id": 3298,
"pid": 389,
"city_code": "101210607",
"city_name": "温岭市"
},
{
"_id": 2513,
"id": 3299,
"pid": 389,
"city_code": "101210610",
"city_name": "临海市"
},
{
"_id": 2514,
"id": 3300,
"pid": 389,
"city_code": "101210603",
"city_name": "玉环县"
},
{
"_id": 2515,
"id": 3301,
"pid": 389,
"city_code": "101210604",
"city_name": "三门县"
},
{
"_id": 2516,
"id": 3302,
"pid": 389,
"city_code": "101210605",
"city_name": "天台县"
},
{
"_id": 2517,
"id": 3303,
"pid": 389,
"city_code": "101210606",
"city_name": "仙居县"
},
{
"_id": 2518,
"id": 3307,
"pid": 390,
"city_code": "101210705",
"city_name": "瑞安市"
},
{
"_id": 2519,
"id": 3308,
"pid": 390,
"city_code": "101210707",
"city_name": "乐清市"
},
{
"_id": 2520,
"id": 3309,
"pid": 390,
"city_code": "101210706",
"city_name": "洞头区"
},
{
"_id": 2521,
"id": 3310,
"pid": 390,
"city_code": "101210708",
"city_name": "永嘉县"
},
{
"_id": 2522,
"id": 3311,
"pid": 390,
"city_code": "101210704",
"city_name": "平阳县"
},
{
"_id": 2523,
"id": 3312,
"pid": 390,
"city_code": "101210709",
"city_name": "苍南县"
},
{
"_id": 2524,
"id": 3313,
"pid": 390,
"city_code": "101210703",
"city_name": "文成县"
},
{
"_id": 2525,
"id": 3314,
"pid": 390,
"city_code": "101210702",
"city_name": "泰顺县"
},
{
"_id": 2526,
"id": 3315,
"pid": 391,
"city_code": "101211106",
"city_name": "定海区"
},
{
"_id": 2527,
"id": 3316,
"pid": 391,
"city_code": "101211105",
"city_name": "普陀区"
},
{
"_id": 2528,
"id": 3317,
"pid": 391,
"city_code": "101211104",
"city_name": "岱山县"
},
{
"_id": 2529,
"id": 3318,
"pid": 391,
"city_code": "101211102",
"city_name": "嵊泗县"
},
{
"_id": 2530,
"id": 3319,
"pid": 392,
"city_code": "101211006",
"city_name": "衢江区"
},
{
"_id": 2531,
"id": 3320,
"pid": 392,
"city_code": "101211005",
"city_name": "江山市"
},
{
"_id": 2532,
"id": 3321,
"pid": 392,
"city_code": "101211002",
"city_name": "常山县"
},
{
"_id": 2533,
"id": 3322,
"pid": 392,
"city_code": "101211003",
"city_name": "开化县"
},
{
"_id": 2534,
"id": 3323,
"pid": 392,
"city_code": "101211004",
"city_name": "龙游县"
},
{
"_id": 2535,
"id": 3324,
"pid": 31,
"city_code": "101040300",
"city_name": "合川区"
},
{
"_id": 2536,
"id": 3325,
"pid": 31,
"city_code": "101040500",
"city_name": "江津区"
},
{
"_id": 2537,
"id": 3326,
"pid": 31,
"city_code": "101040400",
"city_name": "南川区"
},
{
"_id": 2538,
"id": 3327,
"pid": 31,
"city_code": "101040200",
"city_name": "永川区"
},
{
"_id": 2539,
"id": 3329,
"pid": 31,
"city_code": "101040700",
"city_name": "渝北区"
},
{
"_id": 2540,
"id": 3330,
"pid": 31,
"city_code": "101040600",
"city_name": "万盛区"
},
{
"_id": 2541,
"id": 3332,
"pid": 31,
"city_code": "101041300",
"city_name": "万州区"
},
{
"_id": 2542,
"id": 3333,
"pid": 31,
"city_code": "101040800",
"city_name": "北碚区"
},
{
"_id": 2543,
"id": 3334,
"pid": 31,
"city_code": "101043700",
"city_name": "沙坪坝区"
},
{
"_id": 2544,
"id": 3335,
"pid": 31,
"city_code": "101040900",
"city_name": "巴南区"
},
{
"_id": 2545,
"id": 3336,
"pid": 31,
"city_code": "101041400",
"city_name": "涪陵区"
},
{
"_id": 2546,
"id": 3340,
"pid": 31,
"city_code": "101041100",
"city_name": "黔江区"
},
{
"_id": 2547,
"id": 3341,
"pid": 31,
"city_code": "101041000",
"city_name": "长寿区"
},
{
"_id": 2548,
"id": 3343,
"pid": 31,
"city_code": "101043300",
"city_name": "綦江区"
},
{
"_id": 2549,
"id": 3344,
"pid": 31,
"city_code": "101042100",
"city_name": "潼南区"
},
{
"_id": 2550,
"id": 3345,
"pid": 31,
"city_code": "101042800",
"city_name": "铜梁区"
},
{
"_id": 2551,
"id": 3346,
"pid": 31,
"city_code": "101042600",
"city_name": "大足县"
},
{
"_id": 2552,
"id": 3347,
"pid": 31,
"city_code": "101042700",
"city_name": "荣昌区"
},
{
"_id": 2553,
"id": 3348,
"pid": 31,
"city_code": "101042900",
"city_name": "璧山区"
},
{
"_id": 2554,
"id": 3349,
"pid": 31,
"city_code": "101042200",
"city_name": "垫江县"
},
{
"_id": 2555,
"id": 3350,
"pid": 31,
"city_code": "101043100",
"city_name": "武隆县"
},
{
"_id": 2556,
"id": 3351,
"pid": 31,
"city_code": "101043000",
"city_name": "丰都县"
},
{
"_id": 2557,
"id": 3352,
"pid": 31,
"city_code": "101041600",
"city_name": "城口县"
},
{
"_id": 2558,
"id": 3353,
"pid": 31,
"city_code": "101042300",
"city_name": "梁平县"
},
{
"_id": 2559,
"id": 3354,
"pid": 31,
"city_code": "101041500",
"city_name": "开县"
},
{
"_id": 2560,
"id": 3355,
"pid": 31,
"city_code": "101041800",
"city_name": "巫溪县"
},
{
"_id": 2561,
"id": 3356,
"pid": 31,
"city_code": "101042000",
"city_name": "巫山县"
},
{
"_id": 2562,
"id": 3357,
"pid": 31,
"city_code": "101041900",
"city_name": "奉节县"
},
{
"_id": 2563,
"id": 3358,
"pid": 31,
"city_code": "101041700",
"city_name": "云阳县"
},
{
"_id": 2564,
"id": 3359,
"pid": 31,
"city_code": "101042400",
"city_name": "忠县"
},
{
"_id": 2565,
"id": 3360,
"pid": 31,
"city_code": "101042500",
"city_name": "石柱县"
},
{
"_id": 2566,
"id": 3361,
"pid": 31,
"city_code": "101043200",
"city_name": "彭水县"
},
{
"_id": 2567,
"id": 3362,
"pid": 31,
"city_code": "101043400",
"city_name": "酉阳县"
},
{
"_id": 2568,
"id": 3363,
"pid": 31,
"city_code": "101043600",
"city_name": "秀山县"
},
{
"_id": 2569,
"id": 3368,
"pid": 32,
"city_code": "101320102",
"city_name": "九龙城区"
},
{
"_id": 2570,
"id": 3383,
"pid": 34,
"city_code": "101340101",
"city_name": "台北"
},
{
"_id": 2571,
"id": 3384,
"pid": 34,
"city_code": "101340201",
"city_name": "高雄"
},
{
"_id": 2572,
"id": 3385,
"pid": 34,
"city_code": "CHTW0006",
"city_name": "基隆"
},
{
"_id": 2573,
"id": 3386,
"pid": 34,
"city_code": "101340401",
"city_name": "台中"
},
{
"_id": 2574,
"id": 3387,
"pid": 34,
"city_code": "101340301",
"city_name": "台南"
},
{
"_id": 2575,
"id": 3388,
"pid": 34,
"city_code": "101340103",
"city_name": "新竹"
},
{
"_id": 2576,
"id": 3389,
"pid": 34,
"city_code": "101340901",
"city_name": "嘉义"
},
{
"_id": 2577,
"id": 3390,
"pid": 34,
"city_code": "101340701",
"city_name": "宜兰县"
},
{
"_id": 2578,
"id": 3391,
"pid": 34,
"city_code": "101340102",
"city_name": "桃园县"
},
{
"_id": 2579,
"id": 3392,
"pid": 34,
"city_code": "CHTW0016",
"city_name": "苗栗县"
},
{
"_id": 2580,
"id": 3393,
"pid": 34,
"city_code": "CHTW0017",
"city_name": "彰化县"
},
{
"_id": 2581,
"id": 3394,
"pid": 34,
"city_code": "101340404",
"city_name": "南投县"
},
{
"_id": 2582,
"id": 3395,
"pid": 34,
"city_code": "101340406",
"city_name": "云林县"
},
{
"_id": 2583,
"id": 3396,
"pid": 34,
"city_code": "101340205",
"city_name": "屏东县"
},
{
"_id": 2584,
"id": 3397,
"pid": 34,
"city_code": "101341101",
"city_name": "台东县"
},
{
"_id": 2585,
"id": 3398,
"pid": 34,
"city_code": "101340405",
"city_name": "花莲县"
},
{
"_id": 2586,
"id": 3400,
"pid": 2,
"city_code": "101220101",
"city_name": "合肥"
},
{
"_id": 2587,
"id": 3405,
"pid": 3400,
"city_code": "101220102",
"city_name": "长丰县"
},
{
"_id": 2588,
"id": 3406,
"pid": 3400,
"city_code": "101220103",
"city_name": "肥东县"
},
{
"_id": 2589,
"id": 3407,
"pid": 3400,
"city_code": "101220104",
"city_name": "肥西县"
},
{
"_id": 2590,
"id": 3259,
"pid": 168,
"city_code": "101050708",
"city_name": "加格达奇区"
},
{
"_id": 2591,
"id": 3261,
"pid": 168,
"city_code": "101050706",
"city_name": "新林区"
},
{
"_id": 2592,
"id": 3262,
"pid": 168,
"city_code": "101050705",
"city_name": "呼中区"
},
{
"_id": 2593,
"id": 1856,
"pid": 365,
"city_code": "101131101",
"city_name": "塔城市"
},
{
"_id": 2594,
"id": 3657,
"pid": 28,
"city_code": "",
"city_name": "北屯"
},
{
"_id": 2595,
"id": 3661,
"pid": 8,
"city_code": "",
"city_name": "三沙"
}
]
"""
def print_c():
    """Parse the module-level JSON string ``a`` (the city/region records
    above) and return it as a list of dicts."""
    return json.loads(a)
f71a872cde99c049e202b8d9270f4ff266420483 | 3,749 | gyp | Python | sync/sync.gyp | nagineni/chromium-crosswalk | 5725642f1c67d0f97e8613ec1c3e8107ab53fdf8 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 231 | 2015-01-08T09:04:44.000Z | 2021-12-30T03:03:10.000Z | sync/sync.gyp | j4ckfrost/android_external_chromium_org | a1a3dad8b08d1fcf6b6b36c267158ed63217c780 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1 | 2017-02-14T21:55:58.000Z | 2017-02-14T21:55:58.000Z | sync/sync.gyp | j4ckfrost/android_external_chromium_org | a1a3dad8b08d1fcf6b6b36c267158ed63217c780 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 268 | 2015-01-21T05:53:28.000Z | 2022-03-25T22:09:01.000Z | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'includes': [
'sync_android.gypi',
'sync_tests.gypi',
],
'conditions': [
# Notes:
# 1) In static mode, the public 'sync' target has a target type of 'none',
# and is composed of the static library targets 'sync_api', 'sync_core',
# 'sync_internal_api', 'sync_notifier', and 'sync_proto'.
# 2) In component mode, we build the public 'sync' target into a single DLL,
# which includes the contents of sync_api.gypi, sync_core.gypi,
# sync_internal_api.gypi, sync_notifier.gypi, and sync_proto.gypi.
# 3) All external targets that depend on anything in sync/ must simply
# declare a dependency on 'sync.gyp:sync'
['component=="static_library"', {
'targets': [
# The public sync static library target.
{
'target_name': 'sync',
'type': 'none',
'dependencies': [
'sync_api',
'sync_core',
'sync_internal_api',
'sync_notifier',
'sync_proto',
],
'export_dependent_settings': [
'sync_notifier',
'sync_proto',
],
},
# The sync external API library.
{
'target_name': 'sync_api',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_api.gypi',
],
'dependencies': [
'sync_internal_api',
'sync_proto',
],
},
# The core sync library.
{
'target_name': 'sync_core',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_core.gypi',
],
'dependencies': [
'sync_notifier',
'sync_proto',
],
'export_dependent_settings': [
'sync_notifier',
'sync_proto',
],
},
# The sync internal API library.
{
'target_name': 'sync_internal_api',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_internal_api.gypi',
],
'dependencies': [
'sync_core',
'sync_notifier',
'sync_proto',
],
'export_dependent_settings': [
'sync_core',
'sync_proto',
],
},
# The sync notifications library.
{
'target_name': 'sync_notifier',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_notifier.gypi',
],
},
# The sync protocol buffer library.
{
'target_name': 'sync_proto',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_proto.gypi',
],
},
],
},
{ # component != static_library
'targets': [
# The public sync shared library target.
{
'target_name': 'sync',
'type': 'shared_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_api.gypi',
'sync_core.gypi',
'sync_internal_api.gypi',
'sync_notifier.gypi',
'sync_proto.gypi',
],
},
],
}],
],
}
| 27.77037 | 80 | 0.497999 |
{
'variables': {
'chromium_code': 1,
},
'includes': [
'sync_android.gypi',
'sync_tests.gypi',
],
'conditions': [
['component=="static_library"', {
'targets': [
{
'target_name': 'sync',
'type': 'none',
'dependencies': [
'sync_api',
'sync_core',
'sync_internal_api',
'sync_notifier',
'sync_proto',
],
'export_dependent_settings': [
'sync_notifier',
'sync_proto',
],
},
{
'target_name': 'sync_api',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_api.gypi',
],
'dependencies': [
'sync_internal_api',
'sync_proto',
],
},
{
'target_name': 'sync_core',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_core.gypi',
],
'dependencies': [
'sync_notifier',
'sync_proto',
],
'export_dependent_settings': [
'sync_notifier',
'sync_proto',
],
},
{
'target_name': 'sync_internal_api',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_internal_api.gypi',
],
'dependencies': [
'sync_core',
'sync_notifier',
'sync_proto',
],
'export_dependent_settings': [
'sync_core',
'sync_proto',
],
},
{
'target_name': 'sync_notifier',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_notifier.gypi',
],
},
{
'target_name': 'sync_proto',
'type': 'static_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_proto.gypi',
],
},
],
},
{
'targets': [
{
'target_name': 'sync',
'type': 'shared_library',
'variables': { 'enable_wexit_time_destructors': 1, },
'includes': [
'sync_api.gypi',
'sync_core.gypi',
'sync_internal_api.gypi',
'sync_notifier.gypi',
'sync_proto.gypi',
],
},
],
}],
],
}
| true | true |
f71a87d374e64809b39fcfe0bfb79d5e0281482c | 641 | py | Python | dkey/__init__.py | NOhs/dkey | 5305e0028a858d7ec7cdf4889783650b026ad4f5 | [
"MIT"
] | 2 | 2019-01-18T19:53:07.000Z | 2019-02-01T12:30:45.000Z | dkey/__init__.py | NOhs/dkey | 5305e0028a858d7ec7cdf4889783650b026ad4f5 | [
"MIT"
] | 15 | 2019-01-14T18:11:20.000Z | 2019-03-30T14:22:35.000Z | dkey/__init__.py | NOhs/dkey | 5305e0028a858d7ec7cdf4889783650b026ad4f5 | [
"MIT"
] | 1 | 2019-02-01T21:51:15.000Z | 2019-02-01T21:51:15.000Z | """Module containing tools to deprecate the use of selected keys in a given dictionary.
This module provides:
deprecate_keys
==============
Class to wrap a dict to deprecate some keys in it.
dkey
====
Function to generate deprecated keys.
__version__
===========
A string indicating which version of dkey is currently used.
version_info
============
A tuple containing the currently used version.
"""
# Public API re-exports.
from ._dkey import deprecate_keys as deprecate_keys
from ._dkey import dkey as dkey
from pbr.version import VersionInfo
# Query this package's own installed distribution.  The original passed
# 'mgen' -- a copy/paste left-over from the author's mgen project -- which
# would report the wrong version (or fail if mgen is not installed).
_v = VersionInfo('dkey').semantic_version()
__version__ = _v.release_string()
version_info = _v.version_tuple()
| 21.366667 | 87 | 0.74415 | from ._dkey import deprecate_keys as deprecate_keys
from ._dkey import dkey as dkey
from pbr.version import VersionInfo
# Fixed: look up this package's own distribution ('dkey'); the original
# 'mgen' argument was a copy/paste left-over from another project.
_v = VersionInfo('dkey').semantic_version()
__version__ = _v.release_string()
version_info = _v.version_tuple()
| true | true |
f71a87fef420481a4397ce77f7a2bd37c708dd91 | 6,159 | py | Python | pydec/math/kd_tree.py | hirani/pydec | 0574d1148952510b0e59b1f5cb1d9a673193be7f | [
"BSD-3-Clause"
] | 49 | 2016-07-03T14:40:48.000Z | 2022-03-08T01:33:03.000Z | pydec/math/kd_tree.py | hirani/pydec | 0574d1148952510b0e59b1f5cb1d9a673193be7f | [
"BSD-3-Clause"
] | 4 | 2016-09-16T18:51:06.000Z | 2020-06-20T03:53:24.000Z | pydec/math/kd_tree.py | hirani/pydec | 0574d1148952510b0e59b1f5cb1d9a673193be7f | [
"BSD-3-Clause"
] | 15 | 2015-09-26T20:06:57.000Z | 2021-06-21T17:01:02.000Z | __all__ = ['kd_tree']
from heapq import heappop, heappush
from itertools import count
from math import sqrt
class kd_tree:
    """Static kD-tree over N points in K dimensions.

    Built once from a point/value sequence; supports nearest-neighbour,
    n-nearest and fixed-radius queries.  The tree is not mutable after
    construction.
    """

    class node:
        """One tree node: a point, its payload, the split axis and children."""

        def point_distance(self, point):
            """Euclidean distance from ``point`` to this node's point."""
            return sqrt(sum((a - b) ** 2 for (a, b) in zip(point, self.point)))

        def separator_distance(self, point):
            """Signed distance from ``point`` to this node's splitting plane."""
            return point[self.axis] - self.point[self.axis]

    def __repr__(self):
        return "kd_tree< %s points in %s-dimensions >" % (self.num_points, self.k)

    def __init__(self, points, values=None):
        """kD-Tree spatial data structure.

        Parameters
        ----------
        points : array-like
            An N-by-K array of N point coordinates in K dimensions (N >= 1).

        Optional Parameters
        -------------------
        values : array-like
            A sequence of N elements associated with the points.
            By default, the integers [0,1,...N-1] are used.

        Examples
        --------
        >>> points = [[0,0],[1,0],[0,1],[1,1]]
        >>> values = ['A','B','C','D']
        >>> kd = kd_tree(points, values)
        >>> kd
        kd_tree< 4 points in 2-dimensions >
        >>> kd.nearest([2,0])
        'B'
        >>> kd.nearest_n([2,0],2)
        ['B', 'D']
        >>> kd.in_sphere([0.1,0.2], 1.1)
        ['A', 'C', 'B']
        """
        if len(points) == 0:
            raise ValueError('at least one point is required')

        lengths = [len(p) for p in points]
        min_dim, max_dim = min(lengths), max(lengths)
        if min_dim != max_dim:
            raise ValueError('points must all have the same dimension')

        if values is None:
            values = range(len(points))
        if len(points) != len(values):
            raise ValueError('points and values must have the same lengths')

        self.k = min_dim
        self.num_points = len(points)
        # list(...) is required: a bare zip() iterator is always truthy, so
        # the emptiness check in __build could never fire, and iterators do
        # not support the slicing __build performs.
        self.root = self.__build(list(zip(points, values)), depth=0)

    def __build(self, pv_pairs, depth):
        # Recursive construction: sort on the cycling split axis and take
        # the median element as the subtree root so the tree stays balanced.
        if not pv_pairs:
            return None

        axis = depth % self.k  # cycle the splitting axis with tree depth
        pv_pairs = sorted(pv_pairs, key=lambda pair: pair[0][axis])
        mid = len(pv_pairs) // 2

        node = self.node()
        node.axis = axis
        node.point, node.value = pv_pairs[mid]
        node.left_child = self.__build(pv_pairs[:mid], depth + 1)
        node.right_child = self.__build(pv_pairs[mid + 1:], depth + 1)
        return node

    def nearest(self, point, max_dist=float('inf')):
        """Return the value associated with the point nearest ``point``.

        Parameters
        ----------
        point : array-like
            Location in space, e.g. [1.5, 2.0]

        Optional Parameters
        -------------------
        max_dist : float
            Ignore points farther than max_dist away from the query point.

        Returns
        -------
        value : single element, or None if no point lies within max_dist.
        """
        found = self.nearest_n(point, n=1, max_dist=max_dist)
        return found[0] if found else None

    def in_sphere(self, point, radius, max_points=None):
        """Return the values of all points within ``radius`` of ``point``.

        Parameters
        ----------
        point : array-like
            Center of the sphere, e.g. [1.5, 2.0]
        radius : float
            Radius of the sphere, e.g. 0.3

        Optional Parameters
        -------------------
        max_points : integer
            An upper bound on the number of points to return.

        Returns
        -------
        values : list of values, ordered by increasing distance.
        """
        if max_points is None:
            max_points = float('inf')
        return self.nearest_n(point, n=max_points, max_dist=radius)

    def nearest_n(self, point, n, max_dist=float('inf')):
        """Return the values of the (up to) ``n`` nearest points.

        Parameters
        ----------
        point : array-like
            Location in space, e.g. [1.5, 2.0]
        n : integer
            (Maximum) number of values to return.

        Optional Parameters
        -------------------
        max_dist : float
            Ignore points farther than max_dist away from the query point.

        Returns
        -------
        values : list of values, ordered by increasing distance.
        """
        heap = []
        # count() supplies a tie-breaker so equidistant entries never fall
        # through to comparing node objects (a TypeError on Python 3).
        self.__nearest_n(point, n, max_dist, self.root, heap, count())
        heap.sort()
        # heap holds (-distance, seq, node); reversed => ascending distance.
        return [entry[2].value for entry in reversed(heap)]

    def __nearest_n(self, point, n, max_dist, current, heap, seq):
        # Branch-and-bound search; max_dist shrinks as candidates are found.
        if current is None:
            return max_dist

        pt_dist = current.point_distance(point)
        sep_dist = current.separator_distance(point)  # signed distance to split plane

        if pt_dist < max_dist:
            heappush(heap, (-pt_dist, next(seq), current))
            if len(heap) > n:
                heappop(heap)  # drop the currently farthest candidate
            if len(heap) == n:
                # heap[0] is the farthest of the n best: tighten the bound.
                max_dist = min(-heap[0][0], max_dist)

        # Descend into the half-space containing the query point first.
        if sep_dist < 0:
            near, far = current.left_child, current.right_child
        else:
            near, far = current.right_child, current.left_child
        max_dist = self.__nearest_n(point, n, max_dist, near, heap, seq)

        # Cross the splitting plane only if the bound still reaches it.
        if abs(sep_dist) < max_dist:
            return self.__nearest_n(point, n, max_dist, far, heap, seq)
        return max_dist
##def inorder(x):
## if x is not None:
## return inorder(x.left_child) + [x.value] + inorder(x.right_child)
## else:
## return []
| 30.339901 | 102 | 0.539211 | __all__ = ['kd_tree']
from math import sqrt
from heapq import heappush,heappop
class kd_tree:
    """Static kD-tree over N points in K dimensions.

    Supports nearest-neighbour (``nearest``), n-nearest (``nearest_n``)
    and fixed-radius (``in_sphere``) queries; results are ordered by
    increasing distance.
    """
    class node:
        # One tree node: holds ``point``, ``value``, split ``axis``,
        # ``left_child`` and ``right_child`` (attributes set by __build).
        def point_distance(self,point):
            # Euclidean distance from ``point`` to this node's stored point.
            return sqrt(sum([ (a - b)**2 for (a,b) in zip(point,self.point)]))
        def separator_distance(self,point):
            # Signed distance from ``point`` to this node's splitting plane.
            return point[self.axis] - self.point[self.axis]
    def __repr__(self):
        output = ""  # NOTE(review): unused local, kept as-is
        return "kd_tree< %s points in %s-dimensions >"% (self.num_points,self.k)
    def __init__(self, points, values=None):
        """Build the tree from N points (all of the same dimension K).

        ``values`` are the payloads returned by queries; defaults to
        range(N) when omitted.
        """
        lengths = [len(p) for p in points]
        min_dim,max_dim = min(lengths),max(lengths)
        if min_dim != max_dim:
            raise ValueError('points must all have the same dimension')
        if values is None:
            values = range(len(points))
        if len(points) != len(values):
            raise ValueError('points and values must have the same lengths')
        self.k = min_dim
        self.num_points = len(points)
        # NOTE(review): on Python 3, zip() is a lazy iterator -- always
        # truthy and not sliceable -- so __build's emptiness check and
        # slicing look broken here; confirm a list(...) wrap is needed.
        self.root = self.__build(zip(points,values),depth=0)
    def __build(self, pv_pairs, depth):
        # Recursive construction: sort on the cycling split axis and take
        # the median element as the subtree root (keeps the tree balanced).
        if not pv_pairs:
            return None
        axis = depth % self.k  # cycle the splitting axis with tree depth
        pv_pairs = sorted(pv_pairs, key=lambda x: x[0][axis])
        mid = len(pv_pairs) // 2
        node = self.node()
        node.axis = axis
        node.point = pv_pairs[mid][0]
        node.value = pv_pairs[mid][1]
        node.left_child = self.__build(pv_pairs[:mid], depth+1)
        node.right_child = self.__build(pv_pairs[mid+1:], depth+1)
        return node
    def nearest(self, point, max_dist=float('inf')):
        """Return the value of the nearest point, or None if nothing lies
        within ``max_dist`` of ``point``."""
        x = self.nearest_n(point,n=1,max_dist=max_dist)
        if len(x) == 0:
            return None
        else:
            return x[0]
    def in_sphere(self, point, radius, max_points=None):
        """Return the values of all points within ``radius`` of ``point``
        (at most ``max_points`` of them), nearest first."""
        if max_points is None:
            max_points = float('inf')
        return self.nearest_n(point, n=max_points, max_dist=radius)
    def nearest_n(self, point, n, max_dist=float('inf')):
        """Return the values of the (up to) ``n`` points nearest ``point``
        that lie within ``max_dist``, ordered by increasing distance."""
        heap = []
        # NOTE(review): heap entries are (-distance, node); on a distance
        # tie heapq compares the node objects, which raises TypeError on
        # Python 3 -- a sequence-number tie-breaker would avoid this.
        self.__nearest_n(point, n, max_dist, self.root, heap)
        heap.sort()
        # Entries store negated distance, so reversed() yields nearest first.
        return [ node.value for (neg_dist,node) in reversed(heap) ]
    def __nearest_n(self,point,n,max_dist,current,heap):
        # Branch-and-bound search; max_dist shrinks as candidates are found.
        if current is None:
            return max_dist
        pt_dist = current.point_distance(point)
        sep_dist = current.separator_distance(point) #signed distance to this node's separating plane
        if pt_dist < max_dist:
            heappush(heap,(-pt_dist,current))
            if len(heap) > n:
                heappop(heap)  # drop the currently farthest candidate
            if len(heap) == n:
                # heap[0] is the farthest of the n best: tighten the bound.
                max_dist = min(-heap[0][0],max_dist)
        # Descend into the half-space containing the query point first.
        if sep_dist < 0:
            max_dist = self.__nearest_n(point,n,max_dist,current.left_child,heap)
        else:
            max_dist = self.__nearest_n(point,n,max_dist,current.right_child,heap)
        # Cross the splitting plane only if the bound still reaches it.
        if abs(sep_dist) < max_dist:
            if sep_dist < 0:
                return self.__nearest_n(point,n,max_dist,current.right_child,heap)
            else:
                return self.__nearest_n(point,n,max_dist,current.left_child,heap)
        else:
            return max_dist
| true | true |
f71a88666c1fd19fd83b8b1279071950abcd31d2 | 3,505 | py | Python | model/config.py | yhl111/PCNN | 2e0967aec962d55df1eb7d149a44b91c6c751a1a | [
"Apache-2.0"
] | 99 | 2018-05-19T03:59:47.000Z | 2022-03-17T07:25:10.000Z | model/config.py | yhl111/PCNN | 2e0967aec962d55df1eb7d149a44b91c6c751a1a | [
"Apache-2.0"
] | 10 | 2018-05-21T13:16:42.000Z | 2022-03-26T06:06:51.000Z | model/config.py | yhl111/PCNN | 2e0967aec962d55df1eb7d149a44b91c6c751a1a | [
"Apache-2.0"
] | 38 | 2018-05-19T10:20:57.000Z | 2022-01-25T12:37:08.000Z | import os
import numpy as np
from .general_utils import get_logger
from .data_utils import load_vocab, get_processing_word
class Config():
    """Hyper-parameter container for the PCNN relation-extraction model.

    All hyper-parameters are plain class attributes, so they can be read
    without instantiating.  Instantiating additionally creates the output
    directory, a logger, and (optionally) loads the vocabularies and
    pre-trained embeddings from disk.
    """

    def __init__(self, load=True):
        """Create the output directory and logger; optionally load vocabs.

        Args:
            load: if True (default), immediately call :meth:`load` to read
                the vocabularies and pre-trained embeddings into memory.
        """
        # directory for training outputs
        if not os.path.exists(self.dir_output):
            os.makedirs(self.dir_output)

        # create instance of logger
        self.logger = get_logger(self.path_log)

        # load if requested (default)
        if load:
            self.load()

    def load(self):
        """Load vocabularies, str->id mapping functions and embeddings.

        Supposes that build_data.py has been run successfully and that the
        corresponding files (vocab and trimmed vectors) have been created.
        """
        # 1. vocabulary
        self.vocab_words = load_vocab(self.filename_words)
        self.vocab_relations = load_vocab(self.filename_relation)
        self.nwords = len(self.vocab_words)
        self.nrelations = len(self.vocab_relations)

        # 2. get processing functions that map str -> id
        self.processing_word = get_processing_word(self.vocab_words, UNK="<UNK>")
        self.processing_relation = get_processing_word(self.vocab_relations, UNK='NA')

        # 3. get pre-trained embeddings
        self.embeddings = (np.load(self.filename_embeddings)['vec']
                           if self.use_pretrained else None)

    # general config
    dir_output = "./results/test/"
    graph_output = "./graph"
    dir_model = dir_output + "model.weights/"  # directory to save models
    path_log = dir_output + "log.txt"
    restore_model = "./results/test/model.weights/early_best.ckpt"

    # embeddings: per-word vector plus two position embeddings
    dim_word = 50
    dim_pos = 5
    dim = dim_word + 2 * dim_pos

    # position range in sentence
    nposition = 500

    # convolution
    window_size = 3
    feature_maps = 230

    # dataset files
    filename_train_origin = "./data/origin_data/train.txt"
    filename_train = "./data/processed_data/train.txt"
    filename_train_wrong = "./data/processed_data/wrong_parse_train.txt"
    filename_dev = "./data/processed_data/test.txt"
    filename_test_origin = "./data/origin_data/test.txt"
    filename_test = "./data/processed_data/test.txt"
    filename_test_wrong = "./data/processed_data/wrong_parse_test.txt"

    # vocab (created from dataset with build_data.py)
    filename_words = "./data/processed_data/words.txt"
    filename_embeddings = "./data/processed_data/vectors.npz"
    filename_relation_origin = "./data/origin_data/relation2id.txt"
    filename_relation = "./data/processed_data/relation.txt"

    # word vectors file
    filename_wordvectors = "./data/origin_data/vec.txt"
    use_pretrained = True

    MIL = False      # if True, use multi-instance learning
    shuffle = False  # if True, shuffle the training dataset
    # if not None, max number of examples in Dataset
    # (the original assigned this twice; deduplicated here)
    max_iter = None

    # training
    train_word_embeddings = False
    train_pos_embeddings = True
    nepochs = 15
    dropout = 0.5
    batch_size = 50
    lr_method = "adadelta"
    lr = 0.001
    lr_decay = 0.9
    clip = -1  # if negative, no clipping
    nepoch_no_imprv = 3
    early_stop = True
    max_train_step = 100000
| 31.294643 | 87 | 0.653067 | import os
import numpy as np
from .general_utils import get_logger
from .data_utils import load_vocab, get_processing_word
class Config():
    """Hyper-parameter container for the relation-extraction model.

    Hyper-parameters are plain class attributes; instantiating additionally
    creates the output directory, a logger, and (optionally) loads the
    vocabularies and pre-trained embeddings from disk.
    """
    def __init__(self, load=True):
        # Create the output directory for training artefacts if missing.
        if not os.path.exists(self.dir_output):
            os.makedirs(self.dir_output)
        # Instance logger writing to ``path_log``.
        self.logger = get_logger(self.path_log)
        # Optionally load vocabularies and embeddings immediately.
        if load:
            self.load()
    def load(self):
        """Load vocabularies, str->id mapping functions and embeddings.

        Assumes the preprocessing step (build_data.py) has already produced
        the vocabulary and trimmed-vector files referenced below.
        """
        self.vocab_words = load_vocab(self.filename_words)
        self.vocab_relations = load_vocab(self.filename_relation)
        self.nwords = len(self.vocab_words)
        self.nrelations = len(self.vocab_relations)
        # str -> id mapping functions for words and relation labels.
        self.processing_word = get_processing_word(self.vocab_words, UNK = "<UNK>")
        self.processing_relation = get_processing_word(self.vocab_relations, UNK='NA')
        # Pre-trained word embeddings, or None when training from scratch.
        self.embeddings = (np.load(self.filename_embeddings)['vec']
                if self.use_pretrained else None)
    # --- output locations ---
    dir_output = "./results/test/"
    graph_output = "./graph"
    dir_model = dir_output + "model.weights/"
    path_log = dir_output + "log.txt"
    restore_model = "./results/test/model.weights/early_best.ckpt"
    # --- embedding dimensions (word vector + two position embeddings) ---
    dim_word = 50
    dim_pos = 5
    dim = dim_word + 2*dim_pos
    # position range in sentence
    nposition = 500
    # --- convolution ---
    window_size = 3
    feature_maps = 230
    # --- dataset files ---
    filename_train_origin = "./data/origin_data/train.txt"
    filename_train = "./data/processed_data/train.txt"
    filename_train_wrong = "./data/processed_data/wrong_parse_train.txt"
    filename_dev = "./data/processed_data/test.txt"
    filename_test_origin = "./data/origin_data/test.txt"
    filename_test = "./data/processed_data/test.txt"
    filename_test_wrong = "./data/processed_data/wrong_parse_test.txt"
    max_iter = None  # if not None, cap on the number of examples used
    # --- vocabulary / embedding files (created by build_data.py) ---
    filename_words = "./data/processed_data/words.txt"
    filename_embeddings = "./data/processed_data/vectors.npz"
    filename_relation_origin = "./data/origin_data/relation2id.txt"
    filename_relation = "./data/processed_data/relation.txt"
    filename_wordvectors = "./data/origin_data/vec.txt"
    use_pretrained = True
    MIL = False      # if True, use multi-instance learning
    shuffle = False  # if True, shuffle the training dataset
    max_iter = None  # NOTE(review): duplicate of the assignment above
    # --- training ---
    train_word_embeddings = False
    train_pos_embeddings = True
    nepochs = 15
    dropout = 0.5
    batch_size = 50
    lr_method = "adadelta"
    lr = 0.001
    lr_decay = 0.9
    clip = -1  # negative value disables gradient clipping
    nepoch_no_imprv = 3
    early_stop = True
    max_train_step = 100000
| true | true |
f71a89aa8b0e3fea02389bae72a1e0206e098bc4 | 4,846 | py | Python | tests/algorithms/test_tracking_smoothing.py | thompson318/scikit-surgerycore | 22867073a5a3e87def68b4a76e70fe54d085be32 | [
"BSD-3-Clause"
] | 3 | 2020-09-26T18:19:49.000Z | 2021-09-19T08:43:00.000Z | tests/algorithms/test_tracking_smoothing.py | thompson318/scikit-surgerycore | 22867073a5a3e87def68b4a76e70fe54d085be32 | [
"BSD-3-Clause"
] | 45 | 2020-04-27T09:12:28.000Z | 2020-04-27T09:50:49.000Z | tests/algorithms/test_tracking_smoothing.py | SciKit-Surgery/scikit-surgerycore | 22867073a5a3e87def68b4a76e70fe54d085be32 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""Tests for BARD pointer module"""
import math
import numpy as np
import pytest
import sksurgerycore.algorithms.tracking_smoothing as reg
def test_rvec_to_quaterion():
    """A rotation vector of pi/2 about x should map to the quaternion
    (cos(pi/4), sin(pi/4), 0, 0)."""
    #a 90 degree rotation about the x axis
    rvec = np.array([math.pi/2.0, 0.0, 0.0])

    quaternion = reg._rvec_to_quaternion(rvec) # pylint: disable=protected-access

    # w component, then the x/y/z vector part.
    assert quaternion[0] == math.cos(math.pi/4.0)
    assert quaternion[1] == 1.0 * math.sin(math.pi/4.0)
    assert quaternion[2] == 0.0
    assert quaternion[3] == 0.0


def test_quaterion_to_matrix():
    """
    Test conversion on a 90 degree rotation about y axis.
    """
    quaternion = np.array([math.cos(math.pi/4.0), 0.0,
                           1.0 * math.sin(math.pi/4.0), 0.0])

    rot_mat = reg.quaternion_to_matrix(quaternion)

    # Hand-built expected matrix for a +90 degree rotation about y.
    rot_mat1 = np.eye(3, dtype=np.float64)
    rot_mat1[0, 0] = 0.0
    rot_mat1[0, 2] = 1.0
    rot_mat1[2, 0] = -1.0
    rot_mat1[2, 2] = 0.0

    assert np.allclose(rot_mat, rot_mat1, rtol=1e-05, atol=1e-10)
def test_rolling_mean_no_buffer():
    """A buffer size of zero is invalid and must raise ValueError."""
    with pytest.raises(ValueError):
        _ = reg.RollingMean(vector_size=3, buffer_size=0)
def test_rolling_mean_returns_nan():
    """An empty RollingMean buffer should report NaN for every component."""
    mean_buffer = reg.RollingMean(vector_size=3, buffer_size=5)
    # The original asserted ``np.isnan(...).all`` -- a bound method, which
    # is always truthy, so the test could never fail.  It must be called.
    assert np.isnan(mean_buffer.getmean()).all()
def test_rolling_mean_single_value():
    """
    Test rolling mean returns vector value for single entry
    """
    vector = [5.4, 1.2, 3.4]
    mean_buffer = reg.RollingMean(vector_size=3, buffer_size=5)
    mean_buffer.pop(vector)
    # With one entry the mean must equal that entry.
    assert np.allclose(vector, mean_buffer.getmean(), rtol=1e-05, atol=1e-10)


def test_rolling_mean_four_values():
    """Push four vectors through a 3-slot buffer: the mean must cover the
    last three entries only (the oldest is evicted)."""
    vector0 = [5.4, 1.2, 3.4]
    vector1 = [7.4, -1.2, -1.4]
    vector2 = [-2.6, 4.2, 2.6]
    vector3 = [9.0, 3.3, 3.6]
    # Mean of vectors 0-2, then mean of vectors 1-3 after eviction.
    expected_answer0 = [3.4, 1.4, 1.533333]
    expected_answer1 = [4.6, 2.1, 1.6]
    mean_buffer = reg.RollingMean(vector_size=3, buffer_size=3)
    mean_buffer.pop(vector0)
    mean_buffer.pop(vector1)
    mean_buffer.pop(vector2)

    assert np.allclose(expected_answer0, mean_buffer.getmean(), rtol=1e-05,
                       atol=1e-6)

    mean_buffer.pop(vector3)
    assert np.allclose(expected_answer1, mean_buffer.getmean(), rtol=1e-05,
                       atol=1e-10)
def test_rolling_rotation_no_buffer():
    """A buffer size of zero is invalid and must raise ValueError."""
    with pytest.raises(ValueError):
        _ = reg.RollingMeanRotation(buffer_size=0)
def test_rolling_rot_returns_nan():
    """An empty RollingMeanRotation buffer should report NaN throughout."""
    mean_buffer = reg.RollingMeanRotation(buffer_size=5)
    # The original asserted ``np.isnan(...).all`` -- a bound method, which
    # is always truthy, so the test could never fail.  It must be called.
    assert np.isnan(mean_buffer.getmean()).all()
def test_rolling_rot_single_value():
    """
    Test rolling mean rotation returns vector value for single entry
    """
    # -90 degrees about y, expressed as a rotation vector.
    rvec = np.array([0.0, -math.pi/2.0, 0.0])
    expected_quaternion = np.array([math.cos(math.pi/4.0), 0.0,
                                    -1.0 * math.sin(math.pi/4.0), 0.0])

    mean_buffer = reg.RollingMeanRotation(buffer_size=5)
    mean_buffer.pop(rvec)
    assert np.allclose(expected_quaternion, mean_buffer.getmean(),
                       rtol=1e-05, atol=1e-10)


def test_r_rot_sgl_value_sgl_buff():
    """Single-entry rotation mean with a buffer of size one."""
    # -90 degrees about z, expressed as a rotation vector.
    rvec = np.array([0.0, 0.0, -math.pi/2.0])
    expected_quaternion = np.array([math.cos(math.pi/4.0), 0.0, 0.0,
                                    -1.0 * math.sin(math.pi/4.0)])

    mean_buffer = reg.RollingMeanRotation(buffer_size=1)
    mean_buffer.pop(rvec)
    assert np.allclose(expected_quaternion, mean_buffer.getmean(),
                       rtol=1e-05, atol=1e-10)


def test_rolling_rot_four_values():
    """Push four rotation vectors (one of them NaN) through a 3-slot
    buffer and check the mean rotation after three and four pushes."""
    rvec0 = [0.0, 0.0, 0.0]
    rvec1 = [np.NaN, np.NaN, np.NaN]
    rvec2 = [0.0, 0.0, -math.pi/2.0]
    rvec3 = [0.0, math.pi/3.0, 0.0]

    expected_answer0 = reg._rvec_to_quaternion([0.0, 0.0, -math.pi/4.0]) # pylint: disable=protected-access

    #the next ones more of a regression test, I haven't independently
    #calculated this answer.
    expected_answer1 = [-0.87602709, 0.0, -0.27843404, 0.39376519]

    mean_buffer = reg.RollingMeanRotation(buffer_size=3)
    mean_buffer.pop(rvec0)
    mean_buffer.pop(rvec1)
    mean_buffer.pop(rvec2)

    assert np.allclose(expected_answer0, mean_buffer.getmean(), rtol=1e-05,
                       atol=1e-6)

    mean_buffer.pop(rvec3)
    assert np.allclose(expected_answer1, mean_buffer.getmean(), rtol=1e-05,
                       atol=1e-10)
| 27.691429 | 107 | 0.636401 |
import math
import numpy as np
import pytest
import sksurgerycore.algorithms.tracking_smoothing as reg
def test_rvec_to_quaterion():
    """A rotation vector of pi/2 about x should map to the quaternion
    (cos(pi/4), sin(pi/4), 0, 0)."""
    rvec = np.array([math.pi/2.0, 0.0, 0.0])
    quaternion = reg._rvec_to_quaternion(rvec)
    # w component, then the x/y/z vector part.
    assert quaternion[0] == math.cos(math.pi/4.0)
    assert quaternion[1] == 1.0 * math.sin(math.pi/4.0)
    assert quaternion[2] == 0.0
    assert quaternion[3] == 0.0
def test_quaterion_to_matrix():
    """A quaternion for +90 degrees about y should convert to the matching
    rotation matrix."""
    quaternion = np.array([math.cos(math.pi/4.0), 0.0,
                           1.0 * math.sin(math.pi/4.0), 0.0])
    rot_mat = reg.quaternion_to_matrix(quaternion)
    # Hand-built expected matrix for a +90 degree rotation about y.
    rot_mat1 = np.eye(3, dtype=np.float64)
    rot_mat1[0, 0] = 0.0
    rot_mat1[0, 2] = 1.0
    rot_mat1[2, 0] = -1.0
    rot_mat1[2, 2] = 0.0
    assert np.allclose(rot_mat, rot_mat1, rtol=1e-05, atol=1e-10)
def test_rolling_mean_no_buffer():
    """A buffer size of zero is invalid and must raise ValueError."""
    with pytest.raises(ValueError):
        _ = reg.RollingMean(vector_size=3, buffer_size=0)
def test_rolling_mean_returns_nan():
    """An empty RollingMean buffer should report NaN for every component."""
    mean_buffer = reg.RollingMean(vector_size=3, buffer_size=5)
    # The original asserted the bound method ``.all`` (always truthy),
    # so the test could never fail; it must be called.
    assert np.isnan(mean_buffer.getmean()).all()
def test_rolling_mean_single_value():
    """With a single entry the rolling mean must equal that entry."""
    vector = [5.4, 1.2, 3.4]
    mean_buffer = reg.RollingMean(vector_size=3, buffer_size=5)
    mean_buffer.pop(vector)
    assert np.allclose(vector, mean_buffer.getmean(), rtol=1e-05, atol=1e-10)
def test_rolling_mean_four_values():
    """Push four vectors through a 3-slot buffer: the mean must cover the
    last three entries only (the oldest is evicted)."""
    vector0 = [5.4, 1.2, 3.4]
    vector1 = [7.4, -1.2, -1.4]
    vector2 = [-2.6, 4.2, 2.6]
    vector3 = [9.0, 3.3, 3.6]
    # Mean of vectors 0-2, then mean of vectors 1-3 after eviction.
    expected_answer0 = [3.4, 1.4, 1.533333]
    expected_answer1 = [4.6, 2.1, 1.6]
    mean_buffer = reg.RollingMean(vector_size=3, buffer_size=3)
    mean_buffer.pop(vector0)
    mean_buffer.pop(vector1)
    mean_buffer.pop(vector2)
    assert np.allclose(expected_answer0, mean_buffer.getmean(), rtol=1e-05,
                       atol=1e-6)
    mean_buffer.pop(vector3)
    assert np.allclose(expected_answer1, mean_buffer.getmean(), rtol=1e-05,
                       atol=1e-10)
def test_rolling_rotation_no_buffer():
    """A buffer size of zero is invalid and must raise ValueError."""
    with pytest.raises(ValueError):
        _ = reg.RollingMeanRotation(buffer_size=0)
def test_rolling_rot_returns_nan():
    """An empty RollingMeanRotation buffer should report NaN throughout."""
    mean_buffer = reg.RollingMeanRotation(buffer_size=5)
    # The original asserted the bound method ``.all`` (always truthy),
    # so the test could never fail; it must be called.
    assert np.isnan(mean_buffer.getmean()).all()
def test_rolling_rot_single_value():
    """A single -90-degree-about-y entry must come back as the matching
    quaternion."""
    rvec = np.array([0.0, -math.pi/2.0, 0.0])
    expected_quaternion = np.array([math.cos(math.pi/4.0), 0.0,
                                    -1.0 * math.sin(math.pi/4.0), 0.0])
    mean_buffer = reg.RollingMeanRotation(buffer_size=5)
    mean_buffer.pop(rvec)
    assert np.allclose(expected_quaternion, mean_buffer.getmean(),
                       rtol=1e-05, atol=1e-10)
def test_r_rot_sgl_value_sgl_buff():
    """Single-entry rotation mean with a buffer of size one."""
    rvec = np.array([0.0, 0.0, -math.pi/2.0])
    expected_quaternion = np.array([math.cos(math.pi/4.0), 0.0, 0.0,
                                    -1.0 * math.sin(math.pi/4.0)])
    mean_buffer = reg.RollingMeanRotation(buffer_size=1)
    mean_buffer.pop(rvec)
    assert np.allclose(expected_quaternion, mean_buffer.getmean(),
                       rtol=1e-05, atol=1e-10)
def test_rolling_rot_four_values():
    """Push four rotation vectors (one of them NaN) through a 3-slot
    buffer and check the mean rotation after three and four pushes."""
    rvec0 = [0.0, 0.0, 0.0]
    rvec1 = [np.NaN, np.NaN, np.NaN]
    rvec2 = [0.0, 0.0, -math.pi/2.0]
    rvec3 = [0.0, math.pi/3.0, 0.0]
    expected_answer0 = reg._rvec_to_quaternion([0.0, 0.0, -math.pi/4.0])
    # regression value below -- the author had not independently
    #calculated this answer.
    expected_answer1 = [-0.87602709, 0.0, -0.27843404, 0.39376519]
    mean_buffer = reg.RollingMeanRotation(buffer_size=3)
    mean_buffer.pop(rvec0)
    mean_buffer.pop(rvec1)
    mean_buffer.pop(rvec2)
    assert np.allclose(expected_answer0, mean_buffer.getmean(), rtol=1e-05,
                       atol=1e-6)
    mean_buffer.pop(rvec3)
    assert np.allclose(expected_answer1, mean_buffer.getmean(), rtol=1e-05,
                       atol=1e-10)
| true | true |
f71a8a1a3d48005a0ee4af6cf7d83fd52dcee595 | 1,223 | py | Python | web/playlists/migrations/0001__initial.py | vtalks/vtalks.net | 80fb19ff9684e0854c6abe5f0eef73e80ec326a6 | [
"Apache-2.0"
] | 1 | 2017-11-28T03:17:23.000Z | 2017-11-28T03:17:23.000Z | web/playlists/migrations/0001__initial.py | vtalks/vtalks.net | 80fb19ff9684e0854c6abe5f0eef73e80ec326a6 | [
"Apache-2.0"
] | 56 | 2018-01-14T18:03:03.000Z | 2018-06-25T17:59:02.000Z | web/playlists/migrations/0001__initial.py | vtalks/vtalks.net | 80fb19ff9684e0854c6abe5f0eef73e80ec326a6 | [
"Apache-2.0"
] | null | null | null | # Generated by Django 2.0.2 on 2018-02-23 08:56
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Playlists',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('code', models.CharField(default=None, max_length=100, unique=True)),
('title', models.CharField(default=None, max_length=200)),
('slug', models.SlugField(default=None, max_length=200, unique=True)),
('description', models.TextField(blank=True)),
('created', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date created')),
('updated', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date updated')),
],
options={
'verbose_name': 'Playlists',
'verbose_name_plural': 'Playlists',
'ordering': ['-created'],
'get_latest_by': ['-created'],
},
),
]
| 35.970588 | 114 | 0.578087 |
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Playlists',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('code', models.CharField(default=None, max_length=100, unique=True)),
('title', models.CharField(default=None, max_length=200)),
('slug', models.SlugField(default=None, max_length=200, unique=True)),
('description', models.TextField(blank=True)),
('created', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date created')),
('updated', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date updated')),
],
options={
'verbose_name': 'Playlists',
'verbose_name_plural': 'Playlists',
'ordering': ['-created'],
'get_latest_by': ['-created'],
},
),
]
| true | true |
f71a8adea750a9a9bae32bcd140b26a2c336c2a6 | 3,692 | py | Python | trees-and-graphs/minimal-tree.py | georgeRenard/CrackingTheCodingInterview | ba9866e8e7a8c9942464d76b13af08ea6b15f3f9 | [
"MIT"
] | null | null | null | trees-and-graphs/minimal-tree.py | georgeRenard/CrackingTheCodingInterview | ba9866e8e7a8c9942464d76b13af08ea6b15f3f9 | [
"MIT"
] | null | null | null | trees-and-graphs/minimal-tree.py | georgeRenard/CrackingTheCodingInterview | ba9866e8e7a8c9942464d76b13af08ea6b15f3f9 | [
"MIT"
] | null | null | null | import sys
def problem():
    """Problem statement holder (Cracking the Coding Interview 4.2).

    Minimal Tree: Given a sorted (increasing order) array with unique
    integer elements, write an algorithm to create a binary search tree
    with minimal height.
    """
    pass
class BST:
    """Binary search tree, typically built minimal-height from a sorted array."""

    def __init__(self):
        self.count = 0   # kept for API compatibility; not maintained by build
        self.root = None # BSTNode, or None for an empty tree

    @staticmethod
    def build_from_sorted_array(arr):
        """Build a minimal-height BST from ``arr`` (sorted ascending, unique).

        An empty array yields a tree whose root is None.
        """
        bst = BST()
        bst.root = BST.__build_from_sorted_array(arr)
        return bst

    @staticmethod
    def __build_from_sorted_array(arr):
        # Recursively take the median as the subtree root so both halves
        # have (near-)equal size, which minimises the height.
        if not arr:
            return None
        median_index = len(arr) // 2
        root = BST.BSTNode(arr[median_index])
        root.left = BST.__build_from_sorted_array(arr[:median_index])
        root.right = BST.__build_from_sorted_array(arr[median_index + 1:])
        return root

    def is_BST(self):
        """Return True iff the whole tree satisfies the BST ordering property.

        Raises:
            Exception: if the tree is empty.
        """
        if self.root is None:
            raise Exception("You have got yourself an empty tree")
        return self.__is_BST(self.root, float('-inf'), float('inf'))

    def __is_BST(self, current, low, high):
        # Range check: every value must lie within the bounds imposed by
        # ALL its ancestors.  (Comparing only with direct children, as the
        # original did, accepts some non-BSTs.)
        if current is None:
            return True
        if not low <= current.value <= high:
            return False
        return (self.__is_BST(current.left, low, current.value)
                and self.__is_BST(current.right, current.value, high))

    def __update_height(self, current):
        # Height = number of nodes on the longest root-to-leaf path
        # (0 for an empty subtree).  The original summed both children,
        # which computes subtree *size*, not height.
        if current is None:
            return 0
        current.height = 1 + max(self.__update_height(current.left),
                                 self.__update_height(current.right))
        return current.height

    def is_balanced(self):
        """Return True iff every node's subtree heights differ by at most 1.

        Unlike the original (which raised from inside the height pass and
        then only inspected the root's children, crashing when a child was
        missing), this reports the result for the whole tree.

        Raises:
            Exception: if the tree is empty.
        """
        if self.root is None:
            raise Exception("An empty tree can never be balanced")
        self.__update_height(self.root)
        return self.__is_balanced(self.root)

    def __is_balanced(self, current):
        # Null-safe recursive balance check over every node.
        if current is None:
            return True
        left = current.left.height if current.left is not None else 0
        right = current.right.height if current.right is not None else 0
        return (abs(left - right) <= 1
                and self.__is_balanced(current.left)
                and self.__is_balanced(current.right))

    def dump(self):
        """Pretty-print the tree, two spaces of indent per level.

        Raises:
            Exception: if the tree is empty.
        """
        if self.root is None:
            raise Exception("Cannot dump an empty tree")
        self.__dump(self.root)

    def __dump(self, current, indent=0):
        # Pre-order print: node, then left subtree, then right subtree.
        if current is None:
            return
        print("{0}{1}".format(" " * indent, current.value))
        self.__dump(current.left, indent + 2)
        self.__dump(current.right, indent + 2)

    def __repr__(self):
        return self.root.__repr__()

    class BSTNode:
        """A single tree node: value, children, and cached height."""

        def __init__(self, value):
            self.value = value
            self.left = None
            self.right = None
            self.height = 0  # filled in lazily by is_balanced()
if __name__ == '__main__':
    # Demo: build a minimal-height BST from a sorted array and verify it.
    # (The previous `args = sys.argv[1:]` was never used and is removed.)
    arr = [1, 2, 4, 12, 35, 41, 72, 102, 562]
    tree = BST.build_from_sorted_array(arr)
    tree.dump()
    print("The tree {0} BST".format("is" if tree.is_BST() else "is not"))
    print("The tree {0} balanced".format("is" if tree.is_balanced() else "is not"))
| 22.512195 | 170 | 0.581798 | import sys
def problem():
    # Placeholder for the exercise statement: build a minimal-height
    # binary search tree from a sorted array of unique integers.
    pass
class BST:
    """Binary search tree built to minimal height from a sorted array.

    Corrected behavior:
    * ``__update_height`` computes 1 + max(child heights) (the previous
      1 + left + right was the subtree *size*, and it also compared stale
      child heights before recomputing them).
    * ``is_balanced`` inspects every node, returns False instead of
      raising for unbalanced trees, and tolerates missing children.
    * ``is_BST`` enforces ancestor bounds, not just parent/child order.
    """

    def __init__(self):
        # `count` is unused by the factory below; kept for compatibility.
        self.count = 0
        self.root = None

    @staticmethod
    def build_from_sorted_array(arr):
        """Build a minimal-height BST from a sorted sequence of unique values."""
        root = BST.__build_from_sorted_array(arr)
        bst = BST()
        bst.root = root
        return bst

    @staticmethod
    def __build_from_sorted_array(arr):
        # Median element becomes the root so both halves are as equal as
        # possible, giving minimal height.
        if not arr:
            return None
        median_index = len(arr) // 2
        root = BST.BSTNode(arr[median_index])
        root.left = BST.__build_from_sorted_array(arr[:median_index])
        root.right = BST.__build_from_sorted_array(arr[median_index + 1:])
        return root

    def is_BST(self):
        """Return True iff the tree satisfies the BST ordering invariant."""
        if self.root is None:
            raise Exception("You have got yourself an empty tree")
        return self.__is_BST(self.root, None, None)

    def __is_BST(self, current, low, high):
        # Bounds are inherited from all ancestors, catching deep
        # violations a parent/child-only check would miss.
        if current is None:
            return True
        if low is not None and current.value < low:
            return False
        if high is not None and current.value > high:
            return False
        return (self.__is_BST(current.left, low, current.value)
                and self.__is_BST(current.right, current.value, high))

    def __update_height(self, current):
        # Node height = 1 + taller child's height; empty subtree is 0.
        if current is None:
            return 0
        left = self.__update_height(current.left)
        right = self.__update_height(current.right)
        current.height = 1 + max(left, right)
        return current.height

    def is_balanced(self):
        """Return True iff no node's subtrees differ in height by more than 1."""
        if self.root is None:
            raise Exception("An empty tree can never be balanced")
        self.__update_height(self.root)
        return self.__is_balanced(self.root)

    def __is_balanced(self, current):
        if current is None:
            return True
        left = 0 if current.left is None else current.left.height
        right = 0 if current.right is None else current.right.height
        return (abs(left - right) <= 1
                and self.__is_balanced(current.left)
                and self.__is_balanced(current.right))

    def dump(self):
        """Pretty-print the tree, two spaces of indent per level."""
        if self.root is None:
            raise Exception("Cannot dump an empty tree")
        self.__dump(self.root)

    def __dump(self, current, indent=0):
        if current is None:
            return
        print("{0}{1}".format(" " * indent, current.value))
        self.__dump(current.left, indent + 2)
        self.__dump(current.right, indent + 2)

    def __repr__(self):
        return self.root.__repr__()

    class BSTNode:
        """Tree node; `height` is populated by is_balanced()."""

        def __init__(self, value):
            self.value = value
            self.left = None
            self.right = None
            self.height = 0
if __name__ == '__main__':
    # Demo: build a minimal-height BST from a sorted array and verify it.
    # The unused `args = sys.argv[1:]` assignment was removed.
    arr = [1, 2, 4, 12, 35, 41, 72, 102, 562]
    tree = BST.build_from_sorted_array(arr)
    tree.dump()
    print("The tree {0} BST".format("is" if tree.is_BST() else "is not"))
    print("The tree {0} balanced".format("is" if tree.is_balanced() else "is not"))
| true | true |
f71a8bd251c72ed72e4f3105b3a56a3e0dee9bf4 | 14,903 | py | Python | xero_python/accounting/models/repeating_invoice.py | sidtrengove/xero-python | 52f1ec2232def4c8e773e8e5fd6f766c059517b2 | [
"MIT"
] | 1 | 2020-06-05T15:03:15.000Z | 2020-06-05T15:03:15.000Z | xero_python/accounting/models/repeating_invoice.py | sidtrengove/xero-python | 52f1ec2232def4c8e773e8e5fd6f766c059517b2 | [
"MIT"
] | null | null | null | xero_python/accounting/models/repeating_invoice.py | sidtrengove/xero-python | 52f1ec2232def4c8e773e8e5fd6f766c059517b2 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
Accounting API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: 2.1.6
Contact: api@xero.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
from xero_python.models import BaseModel
def _make_repeating_invoice_property(name, allowed_values=None):
    """Build the property backing `name` on RepeatingInvoice.

    The getter returns the `_name` slot; the setter stores into it, first
    validating against `allowed_values` when the field is an enum. The
    error message matches the OpenAPI-generated wording exactly.
    """
    private = "_" + name

    def _get(self):
        return getattr(self, private)

    def _set(self, value):
        if allowed_values is not None and value not in allowed_values:
            raise ValueError(
                "Invalid value for `{0}` ({1}), must be one of {2}".format(
                    name, value, allowed_values
                )
            )
        setattr(self, private, value)

    return property(_get, _set)


# Enum-constrained fields and their permitted values.
_REPEATING_INVOICE_ENUM_VALUES = {
    "type": ["ACCPAY", "ACCREC"],
    "status": ["DRAFT", "AUTHORISED", "DELETED"],
}


class RepeatingInvoice(BaseModel):
    """Model for a Xero repeating invoice template (Accounting API).

    All public attributes listed in `openapi_types` are exposed as
    properties generated after the class body; `type` and `status`
    validate against their enum values on assignment.
    """

    # Attribute name -> declared OpenAPI type. Insertion order matches the
    # __init__ parameter order, which the constructor relies on below.
    openapi_types = {
        "type": "str",
        "contact": "Contact",
        "schedule": "Schedule",
        "line_items": "list[LineItem]",
        "line_amount_types": "LineAmountTypes",
        "reference": "str",
        "branding_theme_id": "str",
        "currency_code": "CurrencyCode",
        "status": "str",
        "sub_total": "float",
        "total_tax": "float",
        "total": "float",
        "repeating_invoice_id": "str",
        "id": "str",
        "has_attachments": "bool",
        "attachments": "list[Attachment]",
    }

    # Attribute name -> JSON key in the API payload.
    attribute_map = {
        "type": "Type",
        "contact": "Contact",
        "schedule": "Schedule",
        "line_items": "LineItems",
        "line_amount_types": "LineAmountTypes",
        "reference": "Reference",
        "branding_theme_id": "BrandingThemeID",
        "currency_code": "CurrencyCode",
        "status": "Status",
        "sub_total": "SubTotal",
        "total_tax": "TotalTax",
        "total": "Total",
        "repeating_invoice_id": "RepeatingInvoiceID",
        "id": "ID",
        "has_attachments": "HasAttachments",
        "attachments": "Attachments",
    }

    def __init__(
        self,
        type=None,
        contact=None,
        schedule=None,
        line_items=None,
        line_amount_types=None,
        reference=None,
        branding_theme_id=None,
        currency_code=None,
        status=None,
        sub_total=None,
        total_tax=None,
        total=None,
        repeating_invoice_id=None,
        id=None,
        has_attachments=False,
        attachments=None,
    ):
        """RepeatingInvoice - a model defined in OpenAPI."""
        supplied = (
            type, contact, schedule, line_items, line_amount_types,
            reference, branding_theme_id, currency_code, status, sub_total,
            total_tax, total, repeating_invoice_id, id, has_attachments,
            attachments,
        )
        # Initialise every backing slot to None, then route each non-None
        # argument through its property setter (in declaration order) so
        # enum validation applies exactly as in the generated code.
        for field in self.openapi_types:
            setattr(self, "_" + field, None)
        self.discriminator = None
        for field, value in zip(self.openapi_types, supplied):
            if value is not None:
                setattr(self, field, value)


# Attach one property per declared attribute to RepeatingInvoice.
for _field in RepeatingInvoice.openapi_types:
    setattr(
        RepeatingInvoice,
        _field,
        _make_repeating_invoice_property(
            _field, _REPEATING_INVOICE_ENUM_VALUES.get(_field)
        ),
    )
del _field
| 28.881783 | 124 | 0.609072 |
import re
from xero_python.models import BaseModel
def _make_repeating_invoice_property(name, allowed_values=None):
    """Return a property reading/writing the `_name` slot for `name`.

    Enum fields pass `allowed_values`; assignment of a value outside the
    list raises ValueError with the OpenAPI-generated message text.
    """
    private = "_" + name

    def _get(self):
        return getattr(self, private)

    def _set(self, value):
        if allowed_values is not None and value not in allowed_values:
            raise ValueError(
                "Invalid value for `{0}` ({1}), must be one of {2}".format(
                    name, value, allowed_values
                )
            )
        setattr(self, private, value)

    return property(_get, _set)


# Fields constrained to an enum, with their permitted values.
_REPEATING_INVOICE_ENUM_VALUES = {
    "type": ["ACCPAY", "ACCREC"],
    "status": ["DRAFT", "AUTHORISED", "DELETED"],
}


class RepeatingInvoice(BaseModel):
    """Xero repeating-invoice template model.

    Attributes are exposed as properties generated after the class body;
    `type` and `status` validate against their enums when set.
    """

    # Attribute name -> declared OpenAPI type; insertion order mirrors the
    # __init__ parameter order, which the constructor depends on.
    openapi_types = {
        "type": "str",
        "contact": "Contact",
        "schedule": "Schedule",
        "line_items": "list[LineItem]",
        "line_amount_types": "LineAmountTypes",
        "reference": "str",
        "branding_theme_id": "str",
        "currency_code": "CurrencyCode",
        "status": "str",
        "sub_total": "float",
        "total_tax": "float",
        "total": "float",
        "repeating_invoice_id": "str",
        "id": "str",
        "has_attachments": "bool",
        "attachments": "list[Attachment]",
    }

    # Attribute name -> JSON key used by the Xero API.
    attribute_map = {
        "type": "Type",
        "contact": "Contact",
        "schedule": "Schedule",
        "line_items": "LineItems",
        "line_amount_types": "LineAmountTypes",
        "reference": "Reference",
        "branding_theme_id": "BrandingThemeID",
        "currency_code": "CurrencyCode",
        "status": "Status",
        "sub_total": "SubTotal",
        "total_tax": "TotalTax",
        "total": "Total",
        "repeating_invoice_id": "RepeatingInvoiceID",
        "id": "ID",
        "has_attachments": "HasAttachments",
        "attachments": "Attachments",
    }

    def __init__(
        self,
        type=None,
        contact=None,
        schedule=None,
        line_items=None,
        line_amount_types=None,
        reference=None,
        branding_theme_id=None,
        currency_code=None,
        status=None,
        sub_total=None,
        total_tax=None,
        total=None,
        repeating_invoice_id=None,
        id=None,
        has_attachments=False,
        attachments=None,
    ):
        """RepeatingInvoice - a model defined in OpenAPI."""
        supplied = (
            type, contact, schedule, line_items, line_amount_types,
            reference, branding_theme_id, currency_code, status, sub_total,
            total_tax, total, repeating_invoice_id, id, has_attachments,
            attachments,
        )
        # Zero every backing slot first, then push non-None arguments
        # through the property setters in declaration order so enum
        # validation runs exactly as in the generated original.
        for field in self.openapi_types:
            setattr(self, "_" + field, None)
        self.discriminator = None
        for field, value in zip(self.openapi_types, supplied):
            if value is not None:
                setattr(self, field, value)


# Generate one property per declared attribute.
for _field in RepeatingInvoice.openapi_types:
    setattr(
        RepeatingInvoice,
        _field,
        _make_repeating_invoice_property(
            _field, _REPEATING_INVOICE_ENUM_VALUES.get(_field)
        ),
    )
del _field
| true | true |
f71a8c220df84bc1e3780600d32aac91ad2146a3 | 2,355 | py | Python | scripts/dca.py | Fu-Om/bitbank-dca | 17b24bc09bc1980b90f63113909bb8d62c8ff885 | [
"MIT"
] | null | null | null | scripts/dca.py | Fu-Om/bitbank-dca | 17b24bc09bc1980b90f63113909bb8d62c8ff885 | [
"MIT"
] | null | null | null | scripts/dca.py | Fu-Om/bitbank-dca | 17b24bc09bc1980b90f63113909bb8d62c8ff885 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import python_bitbankcc
from math import floor
from datetime import datetime
import pathlib
import csv
from settings import BITBANK_API_KEY, BITBANK_API_SECRET
class BitBankPubAPI:
    """Thin wrapper around the bitbank public (unauthenticated) client."""

    def __init__(self):
        self.pub = python_bitbankcc.public()

    def get_ticker(self, pair):
        """Return ticker data for `pair`, or None if the request fails.

        Failures are reported to stdout and swallowed so the caller can
        decide how to proceed.
        """
        try:
            return self.pub.get_ticker(pair)
        except Exception as e:
            print(e)
            return None
class BitBankPrvAPI:
    """Thin wrapper around the bitbank private (authenticated) client."""

    def __init__(self):
        # Credentials come from settings; the client signs every request.
        self.prv = python_bitbankcc.private(BITBANK_API_KEY, BITBANK_API_SECRET)

    def get_asset(self):
        """Return account asset balances, or None if the request fails."""
        try:
            return self.prv.get_asset()
        except Exception as e:
            print(e)
            return None

    def buy_order(self, order_price, amount):
        """Place a btc_jpy limit buy order; return the API response or None."""
        try:
            return self.prv.order('btc_jpy', order_price, amount, 'buy', 'limit')
        except Exception as e:
            print(e)
            return None
def main():
    """Place one dollar-cost-averaging limit buy of BTC on bitbank.

    Fetches the last btc_jpy price, rounds it down onto a 5000 JPY grid,
    derives the BTC amount closest to the fixed JPY budget, appends the
    order to a CSV log (writing the header on first use) and submits a
    limit buy order.
    """
    unit = 5000        # price grid (JPY) the limit price is rounded onto
    dca_amount = 3000  # JPY to spend on each run
    log_file_path = pathlib.Path.home() / 'Devel/bitbank-dca/log.csv'

    pub_set = BitBankPubAPI()
    prv_set = BitBankPrvAPI()

    ticker = pub_set.get_ticker('btc_jpy')
    last_price = int(ticker['last'])
    if last_price % unit == 0:
        # Price already on the grid: undercut by a fixed 2000 JPY offset
        # so the limit order sits below the last trade.
        order_price = last_price - 2000
    else:
        order_price = unit * (last_price // unit)

    # BTC amount closest to dca_amount, rounded half-up to 4 decimals.
    amount = floor(dca_amount / order_price * 10 ** 4 + 0.5) / 10 ** 4

    t = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    # Appending to a missing file creates it, so the previous
    # touch()/'w+' branch collapses into a single write path; the header
    # is still only written when the file did not exist.
    write_header = not log_file_path.exists()
    with open(log_file_path, 'a', newline='') as f:
        writer = csv.writer(f)
        if write_header:
            writer.writerow(['time', 'order_price', 'amount', 'current_price'])
        writer.writerow([t, str(order_price), str(amount), str(last_price)])

    prv_set.buy_order(order_price=str(order_price), amount=str(amount))
if __name__ == '__main__':
    # Entry point: perform a single DCA purchase and log it.
    main()
| 28.719512 | 86 | 0.61104 |
import python_bitbankcc
from math import floor
from datetime import datetime
import pathlib
import csv
from settings import BITBANK_API_KEY, BITBANK_API_SECRET
class BitBankPubAPI:
    """Wrapper around the bitbank public (unauthenticated) REST client."""
    def __init__(self):
        self.pub = python_bitbankcc.public()
    def get_ticker(self, pair):
        """Return ticker data for `pair`, or None on any API error."""
        try:
            value = self.pub.get_ticker(pair)
            return value
        except Exception as e:
            # Best-effort: report and return None rather than crash.
            print(e)
            return None
class BitBankPrvAPI:
    """Wrapper around the bitbank private (authenticated) REST client."""
    def __init__(self):
        # Credentials are read from the settings module.
        api_key = BITBANK_API_KEY
        api_secret = BITBANK_API_SECRET
        self.prv = python_bitbankcc.private(api_key, api_secret)
    def get_asset(self):
        """Return account asset balances, or None on any API error."""
        try:
            value = self.prv.get_asset()
            return value
        except Exception as e:
            # Best-effort: report and return None rather than crash.
            print(e)
            return None
    def buy_order(self, order_price, amount):
        """Place a btc_jpy limit buy; return the API response or None."""
        try:
            value = self.prv.order('btc_jpy', order_price, amount, 'buy', 'limit')
            return value
        except Exception as e:
            # Best-effort: report and return None rather than crash.
            print(e)
            return None
def main():
    """Place one dollar-cost-averaging limit buy of BTC on bitbank.

    Fetches the last btc_jpy price, rounds it down onto a 5000 JPY grid,
    derives the BTC amount closest to the fixed JPY budget, appends the
    order to a CSV log (header written on first use) and submits a limit
    buy order. The duplicated header/append branches of the original were
    merged into a single write path.
    """
    unit = 5000        # price grid (JPY) the limit price is rounded onto
    dca_amount = 3000  # JPY to spend on each run
    log_file_path = pathlib.Path.home() / 'Devel/bitbank-dca/log.csv'

    pub_set = BitBankPubAPI()
    prv_set = BitBankPrvAPI()

    ticker = pub_set.get_ticker('btc_jpy')
    last_price = int(ticker['last'])
    if last_price % unit == 0:
        # Price already on the grid: undercut by a fixed 2000 JPY offset.
        order_price = last_price - 2000
    else:
        order_price = unit * (last_price // unit)

    # BTC amount closest to dca_amount, rounded half-up to 4 decimals.
    amount = floor(dca_amount / order_price * 10 ** 4 + 0.5) / 10 ** 4

    t = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    # Append-mode open creates the file if missing, so no touch() needed;
    # the header is still only written when the file did not exist.
    write_header = not log_file_path.exists()
    with open(log_file_path, 'a', newline='') as f:
        writer = csv.writer(f)
        if write_header:
            writer.writerow(['time', 'order_price', 'amount', 'current_price'])
        writer.writerow([t, str(order_price), str(amount), str(last_price)])

    prv_set.buy_order(order_price=str(order_price), amount=str(amount))
if __name__ == '__main__':
    # Entry point: perform a single DCA purchase and log it.
    main()
| true | true |
f71a8d0e28e916e21d6205427d7bc48512999cec | 15,536 | py | Python | tensorflow_probability/python/distributions/deterministic.py | bourov/probability | 1e4053a0938b4773c3425bcbb07b3f1e5d50c7e2 | [
"Apache-2.0"
] | 2 | 2020-12-17T20:43:24.000Z | 2021-06-11T22:09:16.000Z | tensorflow_probability/python/distributions/deterministic.py | bourov/probability | 1e4053a0938b4773c3425bcbb07b3f1e5d50c7e2 | [
"Apache-2.0"
] | 2 | 2021-08-25T16:14:51.000Z | 2022-02-10T04:47:11.000Z | tensorflow_probability/python/distributions/deterministic.py | bourov/probability | 1e4053a0938b4773c3425bcbb07b3f1e5d50c7e2 | [
"Apache-2.0"
] | 1 | 2020-12-19T13:05:15.000Z | 2020-12-19T13:05:15.000Z | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The Deterministic distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
# Dependency imports
import six
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.distributions import distribution
from tensorflow_probability.python.distributions import kullback_leibler
from tensorflow_probability.python.internal import assert_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import reparameterization
from tensorflow_probability.python.internal import tensor_util
from tensorflow_probability.python.internal import tensorshape_util
__all__ = [
'Deterministic',
'VectorDeterministic',
]
@six.add_metaclass(abc.ABCMeta)
class _BaseDeterministic(distribution.Distribution):
"""Base class for Deterministic distributions."""
def __init__(self,
loc,
atol=None,
rtol=None,
is_vector=False,
validate_args=False,
allow_nan_stats=True,
parameters=None,
name='_BaseDeterministic'):
"""Initialize a batch of `_BaseDeterministic` distributions.
The `atol` and `rtol` parameters allow for some slack in `pmf`, `cdf`
computations, e.g. due to floating-point error.
```
pmf(x; loc)
= 1, if Abs(x - loc) <= atol + rtol * Abs(loc),
= 0, otherwise.
```
Args:
loc: Numeric `Tensor`. The point (or batch of points) on which this
distribution is supported.
atol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
shape. The absolute tolerance for comparing closeness to `loc`.
Default is `0`.
rtol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
shape. The relative tolerance for comparing closeness to `loc`.
Default is `0`.
is_vector: Python `bool`. If `True`, this is for `VectorDeterministic`,
else `Deterministic`.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value '`NaN`' to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
parameters: Dict of locals to facilitate copy construction.
name: Python `str` name prefixed to Ops created by this class.
Raises:
ValueError: If `loc` is a scalar.
"""
with tf.name_scope(name) as name:
dtype = dtype_util.common_dtype([loc, atol, rtol], dtype_hint=tf.float32)
self._loc = tensor_util.convert_nonref_to_tensor(
loc, dtype_hint=dtype, name='loc')
self._atol = tensor_util.convert_nonref_to_tensor(
0 if atol is None else atol, dtype=dtype, name='atol')
self._rtol = tensor_util.convert_nonref_to_tensor(
0 if rtol is None else rtol, dtype=dtype, name='rtol')
self._is_vector = is_vector
super(_BaseDeterministic, self).__init__(
dtype=self._loc.dtype,
reparameterization_type=(
reparameterization.FULLY_REPARAMETERIZED
if dtype_util.is_floating(self._loc.dtype)
else reparameterization.NOT_REPARAMETERIZED),
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
name=name)
def _slack(self, loc):
# Avoid using the large broadcast with self.loc if possible.
if self.parameters['rtol'] is None:
return self.atol
else:
return self.atol + self.rtol * tf.abs(loc)
@property
def loc(self):
"""Point (or batch of points) at which this distribution is supported."""
return self._loc
@property
def atol(self):
"""Absolute tolerance for comparing points to `self.loc`."""
return self._atol
@property
def rtol(self):
"""Relative tolerance for comparing points to `self.loc`."""
return self._rtol
def _entropy(self):
return tf.zeros(self.batch_shape_tensor(), dtype=self.dtype)
def _mean(self):
return tf.identity(self.loc)
def _variance(self):
return tf.zeros_like(self.loc)
def _mode(self):
return self.mean()
def _sample_n(self, n, seed=None):
del seed # unused
loc = tf.convert_to_tensor(self.loc)
return tf.broadcast_to(
loc,
tf.concat([[n], self._batch_shape_tensor(loc=loc),
self._event_shape_tensor(loc=loc)],
axis=0))
def _default_event_space_bijector(self):
return
  def _parameter_control_dependencies(self, is_init):
    """Return assertions validating `loc` rank and tolerance signs.

    Args:
      is_init: Python `bool`; `True` when called at construction time,
        `False` when called before each use of the parameters.

    Returns:
      List of assertion ops (empty when `validate_args` is `False`).

    Raises:
      ValueError: If this is a vector distribution and `loc` is statically
        known to have rank < 1.
    """
    assertions = []
    # In init, we can always build shape and dtype checks because
    # we assume shape doesn't change for Variable backed args.
    if is_init and self._is_vector:
      msg = 'Argument `loc` must be at least rank 1.'
      # Prefer a static rank check; fall back to a runtime assertion only
      # when the rank is statically unknown.
      if tensorshape_util.rank(self.loc.shape) is not None:
        if tensorshape_util.rank(self.loc.shape) < 1:
          raise ValueError(msg)
      elif self.validate_args:
        assertions.append(
            assert_util.assert_rank_at_least(self.loc, 1, message=msg))
    if not self.validate_args:
      assert not assertions  # Should never happen
      return []
    # Check non-ref (constant) values once at init; deferred (ref) values
    # such as Variables are re-checked on each use instead.
    if is_init != tensor_util.is_ref(self.atol):
      assertions.append(
          assert_util.assert_non_negative(
              self.atol, message='Argument "atol" must be non-negative'))
    if is_init != tensor_util.is_ref(self.rtol):
      assertions.append(
          assert_util.assert_non_negative(
              self.rtol, message='Argument "rtol" must be non-negative'))
    return assertions
class Deterministic(_BaseDeterministic):
  """Scalar `Deterministic` distribution on the real line.

  A `Deterministic` distribution places all of its mass on a single [batch]
  point `loc`, modeling a random variable that is constant and equal to
  `loc`. See
  [Degenerate rv](https://en.wikipedia.org/wiki/Degenerate_distribution).

  #### Mathematical Details

  The probability mass function (pmf) and cumulative distribution function
  (cdf) are

  ```none
  pmf(x; loc) = 1, if x == loc, else 0
  cdf(x; loc) = 1, if x >= loc, else 0
  ```

  #### Examples

  ```python
  # A single Deterministic supported at zero.
  constant = tfp.distributions.Deterministic(0.)
  constant.prob(0.)
  ==> 1.
  constant.prob(2.)
  ==> 0.

  # A [2, 2] batch of scalar constants.
  loc = [[0., 1.], [2., 3.]]
  x = [[0., 1.1], [1.99, 3.]]
  constant = tfp.distributions.Deterministic(loc)
  constant.prob(x)
  ==> [[1., 0.], [0., 1.]]
  ```
  """

  def __init__(self,
               loc,
               atol=None,
               rtol=None,
               validate_args=False,
               allow_nan_stats=True,
               name='Deterministic'):
    """Initialize a scalar `Deterministic` distribution.

    The `atol` and `rtol` arguments introduce slack into `pmf`/`cdf`
    comparisons (e.g. to absorb floating-point error):

    ```
    pmf(x; loc)
      = 1, if Abs(x - loc) <= atol + rtol * Abs(loc),
      = 0, otherwise.
    ```

    Args:
      loc: Numeric `Tensor` of shape `[B1, ..., Bb]`, with `b >= 0`. The
        point (or batch of points) at which the distribution is supported.
      atol: Non-negative `Tensor` of same `dtype` as `loc` and
        broadcastable shape; absolute tolerance for closeness to `loc`.
        Default is `0`.
      rtol: Non-negative `Tensor` of same `dtype` as `loc` and
        broadcastable shape; relative tolerance for closeness to `loc`.
        Default is `0`.
      validate_args: Python `bool`, default `False`. When `True`,
        distribution parameters are checked for validity at a possible
        runtime cost; when `False`, invalid inputs may silently render
        incorrect outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`,
        undefined statistics evaluate to `NaN`; when `False`, an exception
        is raised if any statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    """
    parameters = dict(locals())
    super(Deterministic, self).__init__(
        loc,
        atol=atol,
        rtol=rtol,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        name=name)

  @classmethod
  def _params_event_ndims(cls):
    # Every parameter is scalar per event.
    return {'loc': 0, 'atol': 0, 'rtol': 0}

  def _batch_shape_tensor(self, loc=None):
    if loc is None:
      loc = self.loc
    tolerance_shape = tf.broadcast_dynamic_shape(
        tf.shape(self.atol), tf.shape(self.rtol))
    return tf.broadcast_dynamic_shape(tf.shape(loc), tolerance_shape)

  def _batch_shape(self):
    tolerance_shape = tf.broadcast_static_shape(
        self.atol.shape, self.rtol.shape)
    return tf.broadcast_static_shape(self.loc.shape, tolerance_shape)

  def _event_shape_tensor(self, loc=None):
    del loc  # A scalar event shape is independent of `loc`.
    return tf.constant([], dtype=tf.int32)

  def _event_shape(self):
    return tf.TensorShape([])

  def _prob(self, x):
    loc = tf.convert_to_tensor(self.loc)
    # Probabilities must be floating point even when `self.dtype` is not.
    if dtype_util.is_floating(self.dtype):
      prob_dtype = self.dtype
    else:
      prob_dtype = tf.float32
    within_slack = tf.abs(x - loc) <= self._slack(loc)
    return tf.cast(within_slack, dtype=prob_dtype)

  def _cdf(self, x):
    loc = tf.identity(self.loc)
    return tf.cast(x >= loc - self._slack(loc), dtype=self.dtype)
class VectorDeterministic(_BaseDeterministic):
  """Vector `Deterministic` distribution on `R^k`.

  A `VectorDeterministic` distribution places all of its mass on a single
  [batch] point `loc in R^k`, modeling a random variable that is constant
  and equal to `loc`. See
  [Degenerate rv](https://en.wikipedia.org/wiki/Degenerate_distribution).

  #### Mathematical Details

  The probability mass function (pmf) is

  ```none
  pmf(x; loc)
    = 1, if All[Abs(x - loc) <= atol + rtol * Abs(loc)],
    = 0, otherwise.
  ```

  #### Examples

  ```python
  tfd = tfp.distributions

  # A single VectorDeterministic supported at [0., 2.] in R^2.
  constant = tfd.Deterministic([0., 2.])
  constant.prob([0., 2.])
  ==> 1.
  constant.prob([0., 3.])
  ==> 0.

  # A [3] batch of constants on R^2.
  loc = [[0., 1.], [2., 3.], [4., 5.]]
  constant = tfd.VectorDeterministic(loc)
  constant.prob([[0., 1.], [1.9, 3.], [3.99, 5.]])
  ==> [1., 0., 0.]
  ```
  """

  def __init__(self,
               loc,
               atol=None,
               rtol=None,
               validate_args=False,
               allow_nan_stats=True,
               name='VectorDeterministic'):
    """Initialize a `VectorDeterministic` distribution on `R^k`, `k >= 0`.

    There is a single point in `R^0`, namely the 'point' `[]`; so when
    `k = 0`, `self.prob([]) == 1`.

    The `atol` and `rtol` arguments introduce slack into `pmf` comparisons
    (e.g. to absorb floating-point error):

    ```
    pmf(x; loc)
      = 1, if All[Abs(x - loc) <= atol + rtol * Abs(loc)],
      = 0, otherwise
    ```

    Args:
      loc: Numeric `Tensor` of shape `[B1, ..., Bb, k]`, with `b >= 0`,
        `k >= 0`. The point (or batch of points) at which the distribution
        is supported.
      atol: Non-negative `Tensor` of same `dtype` as `loc` and
        broadcastable shape; absolute tolerance for closeness to `loc`.
        Default is `0`.
      rtol: Non-negative `Tensor` of same `dtype` as `loc` and
        broadcastable shape; relative tolerance for closeness to `loc`.
        Default is `0`.
      validate_args: Python `bool`, default `False`. When `True`,
        distribution parameters are checked for validity at a possible
        runtime cost; when `False`, invalid inputs may silently render
        incorrect outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`,
        undefined statistics evaluate to `NaN`; when `False`, an exception
        is raised if any statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    """
    parameters = dict(locals())
    super(VectorDeterministic, self).__init__(
        loc,
        atol=atol,
        rtol=rtol,
        is_vector=True,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        name=name)

  @classmethod
  def _params_event_ndims(cls):
    # Every parameter carries one event dimension (the trailing `k` axis).
    return {'loc': 1, 'atol': 1, 'rtol': 1}

  def _batch_shape_tensor(self, loc=None):
    if loc is None:
      loc = self.loc
    tolerance_shape = tf.broadcast_dynamic_shape(
        tf.shape(self.atol), tf.shape(self.rtol))
    # Drop the trailing event dimension `k`.
    return tf.broadcast_dynamic_shape(tf.shape(loc), tolerance_shape)[:-1]

  def _batch_shape(self):
    tolerance_shape = tf.broadcast_static_shape(
        self.atol.shape, self.rtol.shape)
    return tf.broadcast_static_shape(self.loc.shape, tolerance_shape)[:-1]

  def _event_shape_tensor(self, loc=None):
    if loc is None:
      loc = self.loc
    return tf.shape(loc)[-1:]

  def _event_shape(self):
    return self.loc.shape[-1:]

  def _prob(self, x):
    loc = tf.convert_to_tensor(self.loc)
    # All `k` coordinates must lie within the tolerance band.
    within_slack = tf.reduce_all(
        tf.abs(x - loc) <= self._slack(loc), axis=-1)
    return tf.cast(within_slack, dtype=self.dtype)

  def _sample_control_dependencies(self, x):
    """Return runtime checks that `x` lives in the event space `R**k`."""
    if not self.validate_args:
      return []
    assertions = [assert_util.assert_rank_at_least(x, 1)]
    assertions.append(assert_util.assert_equal(
        self.event_shape_tensor(), tf.gather(tf.shape(x), tf.rank(x) - 1),
        message=('Argument `x` not defined in the same space '
                 'R**k as this distribution')))
    return assertions
@kullback_leibler.RegisterKL(_BaseDeterministic, distribution.Distribution)
def _kl_deterministic_distribution(a, b, name=None):
  """Calculate the batched KL divergence `KL(a || b)` with `a` Deterministic.

  Since `a` places all of its mass at `a.loc`, the expectation
  `E_a[log a(x) - log b(x)]` is evaluated at the single point `a.loc`,
  yielding `-log b(a.loc)`.

  Args:
    a: instance of a Deterministic distribution object.
    b: instance of a Distribution distribution object.
    name: (optional) Name to use for created operations. Default is
      'kl_deterministic_distribution'.

  Returns:
    Batchwise `KL(a || b)`.
  """
  with tf.name_scope(name or 'kl_deterministic_distribution'):
    log_prob_at_loc = b.log_prob(a.loc)
    return -log_prob_at_loc
| 34.524444 | 80 | 0.655381 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.distributions import distribution
from tensorflow_probability.python.distributions import kullback_leibler
from tensorflow_probability.python.internal import assert_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import reparameterization
from tensorflow_probability.python.internal import tensor_util
from tensorflow_probability.python.internal import tensorshape_util
__all__ = [
'Deterministic',
'VectorDeterministic',
]
@six.add_metaclass(abc.ABCMeta)
class _BaseDeterministic(distribution.Distribution):
def __init__(self,
loc,
atol=None,
rtol=None,
is_vector=False,
validate_args=False,
allow_nan_stats=True,
parameters=None,
name='_BaseDeterministic'):
with tf.name_scope(name) as name:
dtype = dtype_util.common_dtype([loc, atol, rtol], dtype_hint=tf.float32)
self._loc = tensor_util.convert_nonref_to_tensor(
loc, dtype_hint=dtype, name='loc')
self._atol = tensor_util.convert_nonref_to_tensor(
0 if atol is None else atol, dtype=dtype, name='atol')
self._rtol = tensor_util.convert_nonref_to_tensor(
0 if rtol is None else rtol, dtype=dtype, name='rtol')
self._is_vector = is_vector
super(_BaseDeterministic, self).__init__(
dtype=self._loc.dtype,
reparameterization_type=(
reparameterization.FULLY_REPARAMETERIZED
if dtype_util.is_floating(self._loc.dtype)
else reparameterization.NOT_REPARAMETERIZED),
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
name=name)
def _slack(self, loc):
if self.parameters['rtol'] is None:
return self.atol
else:
return self.atol + self.rtol * tf.abs(loc)
@property
def loc(self):
return self._loc
@property
def atol(self):
return self._atol
@property
def rtol(self):
return self._rtol
def _entropy(self):
return tf.zeros(self.batch_shape_tensor(), dtype=self.dtype)
def _mean(self):
return tf.identity(self.loc)
def _variance(self):
return tf.zeros_like(self.loc)
def _mode(self):
return self.mean()
def _sample_n(self, n, seed=None):
del seed
loc = tf.convert_to_tensor(self.loc)
return tf.broadcast_to(
loc,
tf.concat([[n], self._batch_shape_tensor(loc=loc),
self._event_shape_tensor(loc=loc)],
axis=0))
def _default_event_space_bijector(self):
return
def _parameter_control_dependencies(self, is_init):
assertions = []
if is_init and self._is_vector:
msg = 'Argument `loc` must be at least rank 1.'
if tensorshape_util.rank(self.loc.shape) is not None:
if tensorshape_util.rank(self.loc.shape) < 1:
raise ValueError(msg)
elif self.validate_args:
assertions.append(
assert_util.assert_rank_at_least(self.loc, 1, message=msg))
if not self.validate_args:
assert not assertions # Should never happen
return []
if is_init != tensor_util.is_ref(self.atol):
assertions.append(
assert_util.assert_non_negative(
self.atol, message='Argument "atol" must be non-negative'))
if is_init != tensor_util.is_ref(self.rtol):
assertions.append(
assert_util.assert_non_negative(
self.rtol, message='Argument "rtol" must be non-negative'))
return assertions
class Deterministic(_BaseDeterministic):
def __init__(self,
loc,
atol=None,
rtol=None,
validate_args=False,
allow_nan_stats=True,
name='Deterministic'):
parameters = dict(locals())
super(Deterministic, self).__init__(
loc,
atol=atol,
rtol=rtol,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
name=name)
@classmethod
def _params_event_ndims(cls):
return dict(loc=0, atol=0, rtol=0)
def _batch_shape_tensor(self, loc=None):
return tf.broadcast_dynamic_shape(
tf.shape(self.loc if loc is None else loc),
tf.broadcast_dynamic_shape(tf.shape(self.atol), tf.shape(self.rtol)))
def _batch_shape(self):
return tf.broadcast_static_shape(
self.loc.shape,
tf.broadcast_static_shape(self.atol.shape, self.rtol.shape))
def _event_shape_tensor(self, loc=None):
del loc
return tf.constant([], dtype=tf.int32)
def _event_shape(self):
return tf.TensorShape([])
def _prob(self, x):
loc = tf.convert_to_tensor(self.loc)
# Enforces dtype of probability to be float, when self.dtype is not.
prob_dtype = self.dtype if dtype_util.is_floating(
self.dtype) else tf.float32
return tf.cast(tf.abs(x - loc) <= self._slack(loc), dtype=prob_dtype)
def _cdf(self, x):
loc = tf.identity(self.loc)
return tf.cast(x >= loc - self._slack(loc), dtype=self.dtype)
class VectorDeterministic(_BaseDeterministic):
def __init__(self,
loc,
atol=None,
rtol=None,
validate_args=False,
allow_nan_stats=True,
name='VectorDeterministic'):
parameters = dict(locals())
super(VectorDeterministic, self).__init__(
loc,
atol=atol,
rtol=rtol,
is_vector=True,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
name=name)
@classmethod
def _params_event_ndims(cls):
return dict(loc=1, atol=1, rtol=1)
def _batch_shape_tensor(self, loc=None):
return tf.broadcast_dynamic_shape(
tf.shape(self.loc if loc is None else loc),
tf.broadcast_dynamic_shape(tf.shape(self.atol),
tf.shape(self.rtol)))[:-1]
def _batch_shape(self):
return tf.broadcast_static_shape(
self.loc.shape,
tf.broadcast_static_shape(self.atol.shape, self.rtol.shape))[:-1]
def _event_shape_tensor(self, loc=None):
return tf.shape(self.loc if loc is None else loc)[-1:]
def _event_shape(self):
return self.loc.shape[-1:]
def _prob(self, x):
loc = tf.convert_to_tensor(self.loc)
return tf.cast(
tf.reduce_all(tf.abs(x - loc) <= self._slack(loc), axis=-1),
dtype=self.dtype)
def _sample_control_dependencies(self, x):
assertions = []
if not self.validate_args:
return assertions
assertions.append(assert_util.assert_rank_at_least(x, 1))
assertions.append(assert_util.assert_equal(
self.event_shape_tensor(), tf.gather(tf.shape(x), tf.rank(x) - 1),
message=('Argument `x` not defined in the same space '
'R**k as this distribution')))
return assertions
@kullback_leibler.RegisterKL(_BaseDeterministic, distribution.Distribution)
def _kl_deterministic_distribution(a, b, name=None):
with tf.name_scope(name or 'kl_deterministic_distribution'):
return -b.log_prob(a.loc)
| true | true |
f71a8d25f1b81ca9d952d8f9624d010c487bd0bf | 185 | py | Python | tests/test_app/apps.py | JiriKr/django-migrate-sql | b848acb14679ce8bf472d91e52c85afcce2c5db2 | [
"ISC"
] | 13 | 2016-01-05T12:21:11.000Z | 2021-08-30T05:41:39.000Z | tests/test_app/apps.py | JiriKr/django-migrate-sql | b848acb14679ce8bf472d91e52c85afcce2c5db2 | [
"ISC"
] | 10 | 2015-12-27T14:40:31.000Z | 2020-04-01T11:40:36.000Z | tests/test_app/apps.py | JiriKr/django-migrate-sql | b848acb14679ce8bf472d91e52c85afcce2c5db2 | [
"ISC"
] | 3 | 2017-10-29T11:26:27.000Z | 2019-01-03T17:16:54.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class TestAppConfig(AppConfig):
    """Django application configuration for the `test_app` test package."""
    # Dotted module path Django uses to locate the app.
    name = 'test_app'
    # Human-readable name shown e.g. in the Django admin.
    verbose_name = 'Test App'
| 18.5 | 39 | 0.718919 |
from __future__ import unicode_literals
from django.apps import AppConfig
class TestAppConfig(AppConfig):
name = 'test_app'
verbose_name = 'Test App'
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.