code
stringlengths
22
1.05M
apis
listlengths
1
3.31k
extract_api
stringlengths
75
3.25M
""" The MIT License (MIT) Copyright (c) 2021-present Village Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ from __future__ import annotations import asyncio import datetime import sys from typing import TYPE_CHECKING import aiohttp from . 
import __version__ from .errors import ( BadRequest, Forbidden, HTTPException, MethodNotAllowed, NotFound, ServerError, Unauthorized, ) from .missing import MISSING __all__ = ("HTTP",) if TYPE_CHECKING: from typing import ( Any, ClassVar, Coroutine, Dict, Mapping, Optional, Sequence, Type, TypeVar, Union, ) from .file import File from .missing import Missing from .types.interactions import ( ApplicationCommand, InteractionResponse, PartialApplicationCommand, ) from .types.message import Message T = TypeVar("T") Response = Coroutine[Any, Any, T] class Bucket: __slots__ = ( "http", "route_key", "key", "release_immediately", "lock", "limit", "remaining", "reset", "reset_after", "bucket", "global_", ) def __init__(self, http: HTTP, route_key: str, key: str, global_: bool, /) -> None: self.http: HTTP = http self.route_key: str = route_key self.key: str = key self.release_immediately: bool = True self.lock: asyncio.Lock = asyncio.Lock() self.limit: Optional[int] = None self.remaining: Optional[int] = None self.reset: Optional[float] = None self.reset_after: Optional[float] = None self.bucket: Optional[str] = None self.global_: bool = global_ async def __aenter__(self) -> Bucket: await self.lock.acquire() if self.http.global_ratelimit.is_set() and self.global_: await self.http.global_ratelimit.wait() return self async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[Any], ) -> None: if self.release_immediately: self.release() def delay_amount(self) -> float: utc = datetime.timezone.utc now = datetime.datetime.now(utc) if self.reset_after is None and self.reset is None: return 0 return ( self.reset_after or ( datetime.datetime.fromtimestamp( # self.reset will never be None here float(self.reset), # type: ignore utc, ) - now ).total_seconds() ) def delay_release(self) -> None: self.release_immediately = False self.http.loop.call_later(self.delay_amount(), self.release) def release(self) -> None: 
self.lock.release() if not self.lock._waiters and not self.lock.locked(): # type: ignore if self.delay_amount() > 0: self.http.loop.call_later(self.delay_amount(), self.expire) else: self.expire() def expire(self) -> None: if not self.lock._waiters and not self.lock.locked(): # type: ignore del self.http.buckets[self.key] @staticmethod def bucket_key( bucket: Optional[str], channel_id: Optional[int] = None, guild_id: Optional[int] = None, webhook_id: Optional[int] = None, webhook_token: Optional[str] = None, ) -> str: return f"{bucket}___{channel_id}/{guild_id}/{webhook_id}/{webhook_token}" @classmethod def from_major_parameters( cls, http: HTTP, route_key: str, global_: bool = True, /, channel_id: Optional[int] = None, guild_id: Optional[int] = None, webhook_id: Optional[int] = None, webhook_token: Optional[str] = None, ) -> Bucket: key = cls.bucket_key( http.route_buckets.get(route_key, route_key), channel_id=channel_id, guild_id=guild_id, webhook_id=webhook_id, webhook_token=webhook_token, ) bucket_ = http.buckets.get(key) if bucket_ is None: bucket_ = cls(http, route_key, key, global_) http.buckets[key] = bucket_ return bucket_ async def handle_ratelimit( self, response: aiohttp.ClientResponse, data: Union[str, Dict[str, Any]], / ) -> None: remaining = response.headers.get("X-RateLimit-Remaining") if remaining is not None: self.remaining = int(remaining) limit = response.headers.get("X-RateLimit-Limit") if limit is not None: self.limit = int(limit) reset = response.headers.get("X-RateLimit-Reset") if reset is not None: self.reset = float(reset) reset_after = response.headers.get("X-RateLimit-Reset-After") if reset_after is not None: self.reset_after = float(reset_after) bucket = response.headers.get("X-RateLimit-Bucket") if bucket is not None and self.bucket is None: self.bucket = bucket self.http.route_buckets[self.route_key] = bucket self.http.buckets.pop(self.key) self.key = bucket + self.key.split("___")[1] self.http.buckets[self.key] = self if 
self.remaining == 0: self.delay_release() if response.status == 429: if isinstance(data, str): raise HTTPException(response, data) global_ = response.headers.get( "X-RateLimit-Global" ) is not None and data.get("global") if global_: self.http.global_ratelimit.set() await asyncio.sleep(data["retry_after"]) if global_: self.http.global_ratelimit.clear() class HTTP: BASE_URL: ClassVar[str] = "https://discord.com/api/v10" USER_AGENT: ClassVar[str] = f"DiscordBot (https://github.com/mrvillage/quarrel {__version__}) Python/{sys.version_info[0]}.{sys.version_info[1]} aiohttp/{aiohttp.__version__}" # type: ignore def __init__( self, session: aiohttp.ClientSession, token: str, application_id: int, loop: asyncio.AbstractEventLoop, /, ) -> None: self.session: aiohttp.ClientSession = session self.token: str = token self.application_id: int = application_id self.loop: asyncio.AbstractEventLoop = loop self.buckets: Dict[str, Bucket] = {} self.global_ratelimit: asyncio.Event = asyncio.Event() self.headers = { "User-Agent": self.USER_AGENT, "Authorization": f"Bot {self.token}", } self.route_buckets: Dict[str, str] = {} async def request( self, method: str, path: str, route_parameters: Missing[Mapping[str, Any]] = MISSING, files: Missing[Sequence[File]] = MISSING, *, global_: bool = True, **kwargs: Any, ) -> Any: route_parameters = route_parameters or {} if files is not MISSING: form_data = aiohttp.FormData() form_data.add_field(name="payload_json", value=kwargs.pop("json")) for index, file in enumerate(files): form_data.add_field( name=f"file{index}", value=file.buffer, filename=file.name, content_type="application/octet-stream", ) kwargs["form"] = form_data url = f"{self.BASE_URL}{path.format_map(route_parameters)}" async with Bucket.from_major_parameters( self, method + path, global_, channel_id=route_parameters.get("channel_id"), guild_id=route_parameters.get("guild_id"), webhook_id=route_parameters.get("webhook_id"), webhook_token=route_parameters.get("webhook_token"), ) as 
bucket: response = None data = None for try_ in range(3): async with self.session.request( method, url, headers=self.headers, **kwargs ) as response: if response.headers["content-type"] == "application/json": data = await response.json() else: data = await response.text() await bucket.handle_ratelimit(response, data) if 300 > response.status >= 200: return data if response.status == 400: raise BadRequest(response, data) if response.status == 401: raise Unauthorized(response, data) if response.status == 403: raise Forbidden(response, data) if response.status == 404: raise NotFound(response, data) if response.status == 405: raise MethodNotAllowed(response, data) if response.status in {500, 502, 504}: await asyncio.sleep(1 + try_) continue if response.status >= 500: raise ServerError(response, data) if response.status != 429: raise HTTPException(response, data) if response is not None: if response.status >= 500: raise ServerError(response, data) raise HTTPException(response, data) async def get_gateway_bot( self, encoding: str = "json", compress: bool = True, v: int = 10 ) -> str: data = await self.request("GET", "/gateway/bot") if compress: return f"{data['url']}?encoding={encoding}&v={v}&compress=zlib-stream" else: return f"{data['url']}?encoding={encoding}&v={v}" def bulk_upsert_global_application_commands( self, commands: Sequence[PartialApplicationCommand] ) -> Response[Sequence[ApplicationCommand]]: return self.request( "PUT", "/applications/{application_id}/commands", {"application_id": self.application_id}, json=commands, ) def bulk_upsert_guild_application_commands( self, guild_id: int, commands: Sequence[PartialApplicationCommand] ) -> Response[Sequence[ApplicationCommand]]: return self.request( "PUT", "/applications/{application_id}/guilds/{guild_id}/commands", {"application_id": self.application_id, "guild_id": guild_id}, json=commands, ) def create_interaction_response( self, interaction_id: int, token: str, data: InteractionResponse ) -> Response[None]: 
return self.request( "POST", "/interactions/{interaction_id}/{webhook_token}/callback", {"interaction_id": interaction_id, "webhook_token": token}, json=data, ) def get_original_interaction_response(self, token: str) -> Response[Message]: return self.request( "GET", "/interactions/{application_id}/{webhook_token}/messages/@original", { "application_id": self.application_id, "webhook_token": token, }, ) # TODO proper typing for editing def edit_original_interaction_response( self, token: str, data: Any ) -> Response[Message]: return self.request( "PATCH", "/interactions/{application_id}/{webhook_token}/messages/@original", { "application_id": self.application_id, "webhook_token": token, }, json=data, ) def delete_original_interaction_response(self, token: str) -> Response[None]: return self.request( "DELETE", "/interactions/{application_id}/{webhook_token}/messages/@original", { "application_id": self.application_id, "webhook_token": token, }, ) # TODO proper typing for creating def create_followup_message(self, token: str, data: Any) -> Response[Message]: return self.request( "POST", "/interactions/{application_id}/{webhook_token}", { "application_id": self.application_id, "webhook_token": token, }, json=data, ) def get_followup_message(self, token: str, message_id: int) -> Response[Message]: return self.request( "GET", "/interactions/{application_id}/{webhook_token}/messages/{message_id}", { "application_id": self.application_id, "webhook_token": token, "message_id": message_id, }, ) # TODO proper typing for editing def edit_followup_message( self, token: str, message_id: int, data: Any ) -> Response[Message]: return self.request( "PATCH", "/interactions/{application_id}/{webhook_token}/messages/{message_id}", { "application_id": self.application_id, "webhook_token": token, "message_id": message_id, }, json=data, ) def delete_followup_message(self, token: str, message_id: int) -> Response[None]: return self.request( "DELETE", 
"/interactions/{application_id}/{webhook_token}/messages/{message_id}", { "application_id": self.application_id, "webhook_token": token, "message_id": message_id, }, )
[ "asyncio.sleep", "aiohttp.FormData", "asyncio.Event", "asyncio.Lock", "typing.TypeVar", "datetime.datetime.now" ]
[((1898, 1910), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (1905, 1910), False, 'from typing import Any, ClassVar, Coroutine, Dict, Mapping, Optional, Sequence, Type, TypeVar, Union\n'), ((2471, 2485), 'asyncio.Lock', 'asyncio.Lock', ([], {}), '()\n', (2483, 2485), False, 'import asyncio\n'), ((3291, 3317), 'datetime.datetime.now', 'datetime.datetime.now', (['utc'], {}), '(utc)\n', (3312, 3317), False, 'import datetime\n'), ((7727, 7742), 'asyncio.Event', 'asyncio.Event', ([], {}), '()\n', (7740, 7742), False, 'import asyncio\n'), ((8296, 8314), 'aiohttp.FormData', 'aiohttp.FormData', ([], {}), '()\n', (8312, 8314), False, 'import aiohttp\n'), ((6877, 6911), 'asyncio.sleep', 'asyncio.sleep', (["data['retry_after']"], {}), "(data['retry_after'])\n", (6890, 6911), False, 'import asyncio\n'), ((10370, 10393), 'asyncio.sleep', 'asyncio.sleep', (['(1 + try_)'], {}), '(1 + try_)\n', (10383, 10393), False, 'import asyncio\n')]
from collections import defaultdict from dss_vae.structs import GlobalNames from dss_vae.structs import FScore from dss_vae.structs import PhraseTree from dss_vae.utils.utility import write_docs from dss_vae.preprocess import s2b_to_s2t from dss_vae.preprocess import s2t_check from dss_vae.preprocess import s2t_fix from dss_vae.preprocess import s2t_to_tree def eval_s2t(preds, golds): error_count = 0 eval_gold = [] eval_pred = [] for pred, gold in zip(preds, golds): if s2t_check(pred): eval_gold.append(s2t_to_tree(s2t_str=gold)) eval_pred.append(s2t_to_tree(s2t_str=pred)) else: eval_gold.append(s2t_to_tree(s2t_str=gold)) eval_pred.append("(TOP XX)") error_count += 1 return FScore.eval_seq_list(gold_seqs=eval_gold, test_seqs=eval_pred), error_count def eval_s2t_robust(preds, golds): error_count = 0 error_sum_fix = 0.0 error_fix_sents = 0.0 eval_gold = [] eval_pred = [] for pred, gold in zip(preds, golds): pred, error_fix = s2t_fix(pred, fm=GlobalNames.get_fm()) error_sum_fix += error_fix if error_fix > 0: error_fix_sents += 1 gold, _ = s2t_fix(gold, fm=GlobalNames.get_fm()) if s2t_check(pred): eval_gold.append(s2t_to_tree(s2t_str=gold)) eval_pred.append(s2t_to_tree(s2t_str=pred)) else: eval_gold.append(s2t_to_tree(s2t_str=gold)) eval_pred.append("(TOP XX)") error_count += 1 avg_error = error_sum_fix / error_fix_sents if error_fix_sents > 0 else 0.0 return FScore.eval_seq_list(gold_seqs=eval_gold, test_seqs=eval_pred), "{},avg_fix:{}".format(error_count, avg_error) def eval_s2b(preds, golds): error_count = 0 error_sum_fix = 0.0 error_fix_sents = 0.0 eval_gold = [] eval_pred = [] for pred, gold in zip(preds, golds): pred, error_fix = s2b_to_s2t(pred, fm=GlobalNames.get_fm()) error_sum_fix += error_fix if error_fix > 0: error_fix_sents += 1 gold, _ = s2b_to_s2t(gold, fm=GlobalNames.get_fm()) if s2t_check(pred): eval_gold.append(s2t_to_tree(s2t_str=gold)) eval_pred.append(s2t_to_tree(s2t_str=pred)) else: eval_gold.append(s2t_to_tree(s2t_str=gold)) eval_pred.append("(TOP 
XX)") error_count += 1 avg_error = error_sum_fix / error_fix_sents if error_fix_sents > 0 else 0.0 return FScore.eval_seq_list(gold_seqs=eval_gold, test_seqs=eval_pred), "{},avg_fix:{}".format(error_count, avg_error) def eval_file(pred_file, gold_file): pred = [] gold = [] with open(pred_file, 'r') as f: for line in f: pred.append(line) with open(gold_file, 'r') as f: for line in f: gold.append(line) return eval_s2t(pred, gold) def extract_origin_grammar(tree_file, out_file="grammar.out"): grammar_dict = defaultdict(int) trees = PhraseTree.load_treefile(tree_file) for tree in trees: tree.grammar(grammar_dict) grammar_list = [grammar for grammar, val in grammar_dict.items()] write_docs(fname=out_file, docs=grammar_list) return grammar_dict def extract_binary_grammar(tree_file, out_file="grammar.out"): grammar_dict = defaultdict(int) trees = PhraseTree.load_treefile(tree_file) for tree in trees: tree.binarize() tree.grammar(grammar_dict) grammar_list = [grammar for grammar, val in grammar_dict.items()] write_docs(fname=out_file, docs=grammar_list) return grammar_dict def evaluate_coverage(dict_a, dict_b): sum_val = 0.0 count = 0.0 for item, _ in dict_b.items(): sum_val += 1.0 if item in dict_a: count += 1.0 return count * 100.0 / sum_val def evaluate_using_ratio(dict_a, dict_b): sum_val = 0.0 count = 0.0 for item, val in dict_b.items(): sum_val += val if item in dict_a: count += val return count * 100.0 / sum_val def evaluate_grammar_coverage(train_file, dev_file, test_file, grammar_type='.binary'): if grammar_type == '.binary': extract_grammar = extract_binary_grammar else: extract_grammar = extract_origin_grammar train_dict = extract_grammar(train_file, train_file + grammar_type) dev_dict = extract_grammar(dev_file, dev_file + grammar_type) test_dict = extract_grammar(test_file, test_file + grammar_type) print("cover dev:{}".format(evaluate_coverage(train_dict, dev_dict))) print("cover test:{}".format(evaluate_coverage(train_dict, test_dict))) print("ratio 
dev:{}".format(evaluate_using_ratio(train_dict, dev_dict))) print("ratio test:{}".format(evaluate_using_ratio(train_dict, test_dict)))
[ "dss_vae.structs.FScore.eval_seq_list", "dss_vae.structs.GlobalNames.get_fm", "dss_vae.preprocess.s2t_to_tree", "dss_vae.utils.utility.write_docs", "dss_vae.structs.PhraseTree.load_treefile", "collections.defaultdict", "dss_vae.preprocess.s2t_check" ]
[((2990, 3006), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (3001, 3006), False, 'from collections import defaultdict\n'), ((3019, 3054), 'dss_vae.structs.PhraseTree.load_treefile', 'PhraseTree.load_treefile', (['tree_file'], {}), '(tree_file)\n', (3043, 3054), False, 'from dss_vae.structs import PhraseTree\n'), ((3187, 3232), 'dss_vae.utils.utility.write_docs', 'write_docs', ([], {'fname': 'out_file', 'docs': 'grammar_list'}), '(fname=out_file, docs=grammar_list)\n', (3197, 3232), False, 'from dss_vae.utils.utility import write_docs\n'), ((3341, 3357), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (3352, 3357), False, 'from collections import defaultdict\n'), ((3370, 3405), 'dss_vae.structs.PhraseTree.load_treefile', 'PhraseTree.load_treefile', (['tree_file'], {}), '(tree_file)\n', (3394, 3405), False, 'from dss_vae.structs import PhraseTree\n'), ((3562, 3607), 'dss_vae.utils.utility.write_docs', 'write_docs', ([], {'fname': 'out_file', 'docs': 'grammar_list'}), '(fname=out_file, docs=grammar_list)\n', (3572, 3607), False, 'from dss_vae.utils.utility import write_docs\n'), ((501, 516), 'dss_vae.preprocess.s2t_check', 's2t_check', (['pred'], {}), '(pred)\n', (510, 516), False, 'from dss_vae.preprocess import s2t_check\n'), ((782, 844), 'dss_vae.structs.FScore.eval_seq_list', 'FScore.eval_seq_list', ([], {'gold_seqs': 'eval_gold', 'test_seqs': 'eval_pred'}), '(gold_seqs=eval_gold, test_seqs=eval_pred)\n', (802, 844), False, 'from dss_vae.structs import FScore\n'), ((1271, 1286), 'dss_vae.preprocess.s2t_check', 's2t_check', (['pred'], {}), '(pred)\n', (1280, 1286), False, 'from dss_vae.preprocess import s2t_check\n'), ((1633, 1695), 'dss_vae.structs.FScore.eval_seq_list', 'FScore.eval_seq_list', ([], {'gold_seqs': 'eval_gold', 'test_seqs': 'eval_pred'}), '(gold_seqs=eval_gold, test_seqs=eval_pred)\n', (1653, 1695), False, 'from dss_vae.structs import FScore\n'), ((2156, 2171), 'dss_vae.preprocess.s2t_check', 's2t_check', 
(['pred'], {}), '(pred)\n', (2165, 2171), False, 'from dss_vae.preprocess import s2t_check\n'), ((2518, 2580), 'dss_vae.structs.FScore.eval_seq_list', 'FScore.eval_seq_list', ([], {'gold_seqs': 'eval_gold', 'test_seqs': 'eval_pred'}), '(gold_seqs=eval_gold, test_seqs=eval_pred)\n', (2538, 2580), False, 'from dss_vae.structs import FScore\n'), ((547, 572), 'dss_vae.preprocess.s2t_to_tree', 's2t_to_tree', ([], {'s2t_str': 'gold'}), '(s2t_str=gold)\n', (558, 572), False, 'from dss_vae.preprocess import s2t_to_tree\n'), ((603, 628), 'dss_vae.preprocess.s2t_to_tree', 's2t_to_tree', ([], {'s2t_str': 'pred'}), '(s2t_str=pred)\n', (614, 628), False, 'from dss_vae.preprocess import s2t_to_tree\n'), ((673, 698), 'dss_vae.preprocess.s2t_to_tree', 's2t_to_tree', ([], {'s2t_str': 'gold'}), '(s2t_str=gold)\n', (684, 698), False, 'from dss_vae.preprocess import s2t_to_tree\n'), ((1087, 1107), 'dss_vae.structs.GlobalNames.get_fm', 'GlobalNames.get_fm', ([], {}), '()\n', (1105, 1107), False, 'from dss_vae.structs import GlobalNames\n'), ((1238, 1258), 'dss_vae.structs.GlobalNames.get_fm', 'GlobalNames.get_fm', ([], {}), '()\n', (1256, 1258), False, 'from dss_vae.structs import GlobalNames\n'), ((1317, 1342), 'dss_vae.preprocess.s2t_to_tree', 's2t_to_tree', ([], {'s2t_str': 'gold'}), '(s2t_str=gold)\n', (1328, 1342), False, 'from dss_vae.preprocess import s2t_to_tree\n'), ((1373, 1398), 'dss_vae.preprocess.s2t_to_tree', 's2t_to_tree', ([], {'s2t_str': 'pred'}), '(s2t_str=pred)\n', (1384, 1398), False, 'from dss_vae.preprocess import s2t_to_tree\n'), ((1443, 1468), 'dss_vae.preprocess.s2t_to_tree', 's2t_to_tree', ([], {'s2t_str': 'gold'}), '(s2t_str=gold)\n', (1454, 1468), False, 'from dss_vae.preprocess import s2t_to_tree\n'), ((1969, 1989), 'dss_vae.structs.GlobalNames.get_fm', 'GlobalNames.get_fm', ([], {}), '()\n', (1987, 1989), False, 'from dss_vae.structs import GlobalNames\n'), ((2123, 2143), 'dss_vae.structs.GlobalNames.get_fm', 'GlobalNames.get_fm', ([], {}), '()\n', (2141, 
2143), False, 'from dss_vae.structs import GlobalNames\n'), ((2202, 2227), 'dss_vae.preprocess.s2t_to_tree', 's2t_to_tree', ([], {'s2t_str': 'gold'}), '(s2t_str=gold)\n', (2213, 2227), False, 'from dss_vae.preprocess import s2t_to_tree\n'), ((2258, 2283), 'dss_vae.preprocess.s2t_to_tree', 's2t_to_tree', ([], {'s2t_str': 'pred'}), '(s2t_str=pred)\n', (2269, 2283), False, 'from dss_vae.preprocess import s2t_to_tree\n'), ((2328, 2353), 'dss_vae.preprocess.s2t_to_tree', 's2t_to_tree', ([], {'s2t_str': 'gold'}), '(s2t_str=gold)\n', (2339, 2353), False, 'from dss_vae.preprocess import s2t_to_tree\n')]
from .resnet import resnet50 import torch.nn as nn import torch.nn.functional as F import torch import numpy as np class fpn_module_global(nn.Module): def __init__(self, numClass): super(fpn_module_global, self).__init__() self._up_kwargs = {'mode': 'bilinear'} # Top layer self.toplayer = nn.Conv2d(2048, 256, kernel_size=1, stride=1, padding=0) # Reduce channels # Lateral layers self.latlayer1 = nn.Conv2d(1024, 256, kernel_size=1, stride=1, padding=0) self.latlayer2 = nn.Conv2d(512, 256, kernel_size=1, stride=1, padding=0) self.latlayer3 = nn.Conv2d(256, 256, kernel_size=1, stride=1, padding=0) # Smooth layers self.smooth1_1 = nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1) self.smooth2_1 = nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1) self.smooth3_1 = nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1) self.smooth4_1 = nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1) self.smooth1_2 = nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1) self.smooth2_2 = nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1) self.smooth3_2 = nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1) self.smooth4_2 = nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1) # Classify layers self.classify = nn.Conv2d(128*4, numClass, kernel_size=3, stride=1, padding=1) # Local2Global: double #channels #################################### # Top layer self.toplayer_ext = nn.Conv2d(2048*2, 256, kernel_size=1, stride=1, padding=0) # Reduce channels # Lateral layers self.latlayer1_ext = nn.Conv2d(1024*2, 256, kernel_size=1, stride=1, padding=0) self.latlayer2_ext = nn.Conv2d(512*2, 256, kernel_size=1, stride=1, padding=0) self.latlayer3_ext = nn.Conv2d(256*2, 256, kernel_size=1, stride=1, padding=0) # Smooth layers self.smooth1_1_ext = nn.Conv2d(256*2, 256, kernel_size=3, stride=1, padding=1) self.smooth2_1_ext = nn.Conv2d(256*2, 256, kernel_size=3, stride=1, padding=1) self.smooth3_1_ext = nn.Conv2d(256*2, 256, kernel_size=3, stride=1, padding=1) 
self.smooth4_1_ext = nn.Conv2d(256*2, 256, kernel_size=3, stride=1, padding=1) self.smooth1_2_ext = nn.Conv2d(256*2, 128, kernel_size=3, stride=1, padding=1) self.smooth2_2_ext = nn.Conv2d(256*2, 128, kernel_size=3, stride=1, padding=1) self.smooth3_2_ext = nn.Conv2d(256*2, 128, kernel_size=3, stride=1, padding=1) self.smooth4_2_ext = nn.Conv2d(256*2, 128, kernel_size=3, stride=1, padding=1) self.smooth = nn.Conv2d(128*4*2, 128*4, kernel_size=3, stride=1, padding=1) def _concatenate(self, p5, p4, p3, p2): _, _, H, W = p2.size() p5 = F.interpolate(p5, size=(H, W), **self._up_kwargs) p4 = F.interpolate(p4, size=(H, W), **self._up_kwargs) p3 = F.interpolate(p3, size=(H, W), **self._up_kwargs) return torch.cat([p5, p4, p3, p2], dim=1) def _upsample_add(self, x, y): '''Upsample and add two feature maps. Args: x: (Variable) top feature map to be upsampled. y: (Variable) lateral feature map. Returns: (Variable) added feature map. Note in PyTorch, when input size is odd, the upsampled feature map with `F.interpolate(..., scale_factor=2, mode='nearest')` maybe not equal to the lateral feature map size. e.g. original input size: [N,_,15,15] -> conv2d feature map size: [N,_,8,8] -> upsampled feature map size: [N,_,16,16] So we choose bilinear upsample which supports arbitrary output sizes. 
''' _, _, H, W = y.size() return F.interpolate(x, size=(H, W), **self._up_kwargs) + y def forward(self, c2, c3, c4, c5, c2_ext=None, c3_ext=None, c4_ext=None, c5_ext=None, ps0_ext=None, ps1_ext=None, ps2_ext=None): # Top-down if c5_ext is None: p5 = self.toplayer(c5) p4 = self._upsample_add(p5, self.latlayer1(c4)) p3 = self._upsample_add(p4, self.latlayer2(c3)) p2 = self._upsample_add(p3, self.latlayer3(c2)) else: p5 = self.toplayer_ext(torch.cat((c5, c5_ext), dim=1)) p4 = self._upsample_add(p5, self.latlayer1_ext(torch.cat((c4, c4_ext), dim=1))) p3 = self._upsample_add(p4, self.latlayer2_ext(torch.cat((c3, c3_ext), dim=1))) p2 = self._upsample_add(p3, self.latlayer3_ext(torch.cat((c2, c2_ext), dim=1))) ps0 = [p5, p4, p3, p2] # Smooth if ps0_ext is None: p5 = self.smooth1_1(p5) p4 = self.smooth2_1(p4) p3 = self.smooth3_1(p3) p2 = self.smooth4_1(p2) else: p5 = self.smooth1_1_ext(torch.cat((p5, ps0_ext[0]), dim=1)) p4 = self.smooth2_1_ext(torch.cat((p4, ps0_ext[1]), dim=1)) p3 = self.smooth3_1_ext(torch.cat((p3, ps0_ext[2]), dim=1)) p2 = self.smooth4_1_ext(torch.cat((p2, ps0_ext[3]), dim=1)) ps1 = [p5, p4, p3, p2] if ps1_ext is None: p5 = self.smooth1_2(p5) p4 = self.smooth2_2(p4) p3 = self.smooth3_2(p3) p2 = self.smooth4_2(p2) else: p5 = self.smooth1_2_ext(torch.cat((p5, ps1_ext[0]), dim=1)) p4 = self.smooth2_2_ext(torch.cat((p4, ps1_ext[1]), dim=1)) p3 = self.smooth3_2_ext(torch.cat((p3, ps1_ext[2]), dim=1)) p2 = self.smooth4_2_ext(torch.cat((p2, ps1_ext[3]), dim=1)) ps2 = [p5, p4, p3, p2] # Classify if ps2_ext is None: ps3 = self._concatenate(p5, p4, p3, p2) output = self.classify(ps3) else: p = self._concatenate( torch.cat((p5, ps2_ext[0]), dim=1), torch.cat((p4, ps2_ext[1]), dim=1), torch.cat((p3, ps2_ext[2]), dim=1), torch.cat((p2, ps2_ext[3]), dim=1) ) ps3 = self.smooth(p) output = self.classify(ps3) return output, ps0, ps1, ps2, ps3 class fpn_module_local(nn.Module): def __init__(self, numClass): super(fpn_module_local, self).__init__() self._up_kwargs = 
{'mode': 'bilinear'} # Top layer fold = 2 self.toplayer = nn.Conv2d(2048 * fold, 256, kernel_size=1, stride=1, padding=0) # Reduce channels # Lateral layers [C] self.latlayer1 = nn.Conv2d(1024 * fold, 256, kernel_size=1, stride=1, padding=0) self.latlayer2 = nn.Conv2d(512 * fold, 256, kernel_size=1, stride=1, padding=0) self.latlayer3 = nn.Conv2d(256 * fold, 256, kernel_size=1, stride=1, padding=0) # Smooth layers # ps0 self.smooth1_1 = nn.Conv2d(256 * fold, 256, kernel_size=3, stride=1, padding=1) self.smooth2_1 = nn.Conv2d(256 * fold, 256, kernel_size=3, stride=1, padding=1) self.smooth3_1 = nn.Conv2d(256 * fold, 256, kernel_size=3, stride=1, padding=1) self.smooth4_1 = nn.Conv2d(256 * fold, 256, kernel_size=3, stride=1, padding=1) # ps1 self.smooth1_2 = nn.Conv2d(256 * fold, 128, kernel_size=3, stride=1, padding=1) self.smooth2_2 = nn.Conv2d(256 * fold, 128, kernel_size=3, stride=1, padding=1) self.smooth3_2 = nn.Conv2d(256 * fold, 128, kernel_size=3, stride=1, padding=1) self.smooth4_2 = nn.Conv2d(256 * fold, 128, kernel_size=3, stride=1, padding=1) # ps2 is concatenation # Classify layers self.smooth = nn.Conv2d(128*4*fold, 128*4, kernel_size=3, stride=1, padding=1) self.classify = nn.Conv2d(128*4, numClass, kernel_size=3, stride=1, padding=1) def _concatenate(self, p5, p4, p3, p2): _, _, H, W = p2.size() p5 = F.interpolate(p5, size=(H, W), **self._up_kwargs) p4 = F.interpolate(p4, size=(H, W), **self._up_kwargs) p3 = F.interpolate(p3, size=(H, W), **self._up_kwargs) return torch.cat([p5, p4, p3, p2], dim=1) def _upsample_add(self, x, y): '''Upsample and add two feature maps. Args: x: (Variable) top feature map to be upsampled. y: (Variable) lateral feature map. Returns: (Variable) added feature map. Note in PyTorch, when input size is odd, the upsampled feature map with `F.interpolate(..., scale_factor=2, mode='nearest')` maybe not equal to the lateral feature map size. e.g. 
original input size: [N,_,15,15] -> conv2d feature map size: [N,_,8,8] -> upsampled feature map size: [N,_,16,16] So we choose bilinear upsample which supports arbitrary output sizes. ''' _, _, H, W = y.size() return F.interpolate(x, size=(H, W), **self._up_kwargs) + y def forward(self, c2, c3, c4, c5, c2_ext, c3_ext, c4_ext, c5_ext, ps0_ext, ps1_ext, ps2_ext): # Top-down p5 = self.toplayer(torch.cat([c5] + [F.interpolate(c5_ext[0], size=c5.size()[2:], **self._up_kwargs)], dim=1)) p4 = self._upsample_add(p5, self.latlayer1(torch.cat([c4] + [F.interpolate(c4_ext[0], size=c4.size()[2:], **self._up_kwargs)], dim=1))) p3 = self._upsample_add(p4, self.latlayer2(torch.cat([c3] + [F.interpolate(c3_ext[0], size=c3.size()[2:], **self._up_kwargs)], dim=1))) p2 = self._upsample_add(p3, self.latlayer3(torch.cat([c2] + [F.interpolate(c2_ext[0], size=c2.size()[2:], **self._up_kwargs)], dim=1))) ps0 = [p5, p4, p3, p2] # Smooth p5 = self.smooth1_1(torch.cat([p5] + [F.interpolate(ps0_ext[0][0], size=p5.size()[2:], **self._up_kwargs)], dim=1)) p4 = self.smooth2_1(torch.cat([p4] + [F.interpolate(ps0_ext[1][0], size=p4.size()[2:], **self._up_kwargs)], dim=1)) p3 = self.smooth3_1(torch.cat([p3] + [F.interpolate(ps0_ext[2][0], size=p3.size()[2:], **self._up_kwargs)], dim=1)) p2 = self.smooth4_1(torch.cat([p2] + [F.interpolate(ps0_ext[3][0], size=p2.size()[2:], **self._up_kwargs)], dim=1)) ps1 = [p5, p4, p3, p2] p5 = self.smooth1_2(torch.cat([p5] + [F.interpolate(ps1_ext[0][0], size=p5.size()[2:], **self._up_kwargs)], dim=1)) p4 = self.smooth2_2(torch.cat([p4] + [F.interpolate(ps1_ext[1][0], size=p4.size()[2:], **self._up_kwargs)], dim=1)) p3 = self.smooth3_2(torch.cat([p3] + [F.interpolate(ps1_ext[2][0], size=p3.size()[2:], **self._up_kwargs)], dim=1)) p2 = self.smooth4_2(torch.cat([p2] + [F.interpolate(ps1_ext[3][0], size=p2.size()[2:], **self._up_kwargs)], dim=1)) ps2 = [p5, p4, p3, p2] # Classify # use ps2_ext ps3 = self._concatenate( torch.cat([p5] + [F.interpolate(ps2_ext[0][0], 
size=p5.size()[2:], **self._up_kwargs)], dim=1), torch.cat([p4] + [F.interpolate(ps2_ext[1][0], size=p4.size()[2:], **self._up_kwargs)], dim=1), torch.cat([p3] + [F.interpolate(ps2_ext[2][0], size=p3.size()[2:], **self._up_kwargs)], dim=1), torch.cat([p2] + [F.interpolate(ps2_ext[3][0], size=p2.size()[2:], **self._up_kwargs)], dim=1) ) ps3 = self.smooth(ps3) output = self.classify(ps3) return output, ps0, ps1, ps2, ps3 class fpn(nn.Module): def __init__(self, numClass): super(fpn, self).__init__() self._up_kwargs = {'mode': 'bilinear'} # Res net self.resnet_global = resnet50(True) self.resnet_local = resnet50(True) # fpn module self.fpn_global = fpn_module_global(numClass) self.fpn_local = fpn_module_local(numClass) self.c2_g = None; self.c3_g = None; self.c4_g = None; self.c5_g = None; self.output_g = None self.ps0_g = None; self.ps1_g = None; self.ps2_g = None; self.ps3_g = None self.c2_l = []; self.c3_l = []; self.c4_l = []; self.c5_l = []; self.ps00_l = []; self.ps01_l = []; self.ps02_l = []; self.ps03_l = []; self.ps10_l = []; self.ps11_l = []; self.ps12_l = []; self.ps13_l = []; self.ps20_l = []; self.ps21_l = []; self.ps22_l = []; self.ps23_l = []; self.ps0_l = None; self.ps1_l = None; self.ps2_l = None self.ps3_l = []#; self.output_l = [] self.c2_b = None; self.c3_b = None; self.c4_b = None; self.c5_b = None; self.ps00_b = None; self.ps01_b = None; self.ps02_b = None; self.ps03_b = None; self.ps10_b = None; self.ps11_b = None; self.ps12_b = None; self.ps13_b = None; self.ps20_b = None; self.ps21_b = None; self.ps22_b = None; self.ps23_b = None; self.ps3_b = []#; self.output_b = [] self.patch_n = 0 self.mse = nn.MSELoss() self.ensemble_conv = nn.Conv2d(128*4 * 2, numClass, kernel_size=3, stride=1, padding=1) nn.init.normal_(self.ensemble_conv.weight, mean=0, std=0.01) # init fpn for m in self.fpn_global.children(): if hasattr(m, 'weight'): nn.init.normal_(m.weight, mean=0, std=0.01) if hasattr(m, 'bias'): nn.init.constant_(m.bias, 0) for m in 
self.fpn_local.children(): if hasattr(m, 'weight'): nn.init.normal_(m.weight, mean=0, std=0.01) if hasattr(m, 'bias'): nn.init.constant_(m.bias, 0) def clear_cache(self): self.c2_g = None; self.c3_g = None; self.c4_g = None; self.c5_g = None; self.output_g = None self.ps0_g = None; self.ps1_g = None; self.ps2_g = None; self.ps3_g = None self.c2_l = []; self.c3_l = []; self.c4_l = []; self.c5_l = []; self.ps00_l = []; self.ps01_l = []; self.ps02_l = []; self.ps03_l = []; self.ps10_l = []; self.ps11_l = []; self.ps12_l = []; self.ps13_l = []; self.ps20_l = []; self.ps21_l = []; self.ps22_l = []; self.ps23_l = []; self.ps0_l = None; self.ps1_l = None; self.ps2_l = None self.ps3_l = []; self.output_l = [] self.c2_b = None; self.c3_b = None; self.c4_b = None; self.c5_b = None; self.ps00_b = None; self.ps01_b = None; self.ps02_b = None; self.ps03_b = None; self.ps10_b = None; self.ps11_b = None; self.ps12_b = None; self.ps13_b = None; self.ps20_b = None; self.ps21_b = None; self.ps22_b = None; self.ps23_b = None; self.ps3_b = []; self.output_b = [] self.patch_n = 0 def _sample_grid(self, fm, bbox, sampleSize): """ :param fm: tensor(b,c,h,w) the global feature map :param bbox: list [b* nparray(x1, y1, x2, y2)] the (x1,y1) is the left_top of bbox, (x2, y2) is the right_bottom of bbox there are in range [0, 1]. 
x is corresponding to width dimension and y is corresponding to height dimension :param sampleSize: (oH, oW) the point to sample in height dimension and width dimension :return: tensor(b, c, oH, oW) sampled tensor """ b, c, h, w = fm.shape b_bbox = len(bbox) bbox = [x*2 - 1 for x in bbox] # range transform if b != b_bbox and b == 1: fm = torch.cat([fm,]*b_bbox, dim=0) grid = np.zeros((b_bbox,) + sampleSize + (2,), dtype=np.float32) gridMap = np.array([[(cnt_w/(sampleSize[1]-1), cnt_h/(sampleSize[0]-1)) for cnt_w in range(sampleSize[1])] for cnt_h in range(sampleSize[0])]) for cnt_b in range(b_bbox): grid[cnt_b, :, :, 0] = bbox[cnt_b][0] + (bbox[cnt_b][2] - bbox[cnt_b][0])*gridMap[:, :, 0] grid[cnt_b, :, :, 1] = bbox[cnt_b][1] + (bbox[cnt_b][3] - bbox[cnt_b][1])*gridMap[:, :, 1] grid = torch.from_numpy(grid).cuda() return F.grid_sample(fm, grid) def _crop_global(self, f_global, top_lefts, ratio): ''' top_lefts: [(top, left)] * b ''' _, c, H, W = f_global.size() b = len(top_lefts) h, w = int(np.round(H * ratio[0])), int(np.round(W * ratio[1])) # bbox = [ np.array([left, top, left + ratio, top + ratio]) for (top, left) in top_lefts ] # crop = self._sample_grid(f_global, bbox, (H, W)) crop = [] for i in range(b): top, left = int(np.round(top_lefts[i][0] * H)), int(np.round(top_lefts[i][1] * W)) # # global's sub-region & upsample # f_global_patch = F.interpolate(f_global[0:1, :, top:top+h, left:left+w], size=(h, w), mode='bilinear') f_global_patch = f_global[0:1, :, top:top+h, left:left+w] crop.append(f_global_patch[0]) crop = torch.stack(crop, dim=0) # stack into mini-batch return [crop] # return as a list for easy to torch.cat def _merge_local(self, f_local, merge, f_global, top_lefts, oped, ratio, template): ''' merge feature maps from local patches, and finally to a whole image's feature map (on cuda) f_local: a sub_batch_size of patch's feature map oped: [start, end) ''' b, _, _, _ = f_local.size() _, c, H, W = f_global.size() # match global feature size if merge 
is None: merge = torch.zeros((1, c, H, W)).cuda() h, w = int(np.round(H * ratio[0])), int(np.round(W * ratio[1])) for i in range(b): index = oped[0] + i top, left = int(np.round(H * top_lefts[index][0])), int(np.round(W * top_lefts[index][1])) merge[:, :, top:top+h, left:left+w] += F.interpolate(f_local[i:i+1], size=(h, w), **self._up_kwargs) if oped[1] >= len(top_lefts): template = F.interpolate(template, size=(H, W), **self._up_kwargs) template = template.expand_as(merge) # template = Variable(template).cuda() merge /= template return merge def ensemble(self, f_local, f_global): return self.ensemble_conv(torch.cat((f_local, f_global), dim=1)) def collect_local_fm(self, image_global, patches, ratio, top_lefts, oped, batch_size, global_model=None, template=None, n_patch_all=None): ''' patches: 1 patch top_lefts: all top-left oped: [start, end) ''' with torch.no_grad(): if self.patch_n == 0: self.c2_g, self.c3_g, self.c4_g, self.c5_g = global_model.module.resnet_global.forward(image_global) self.output_g, self.ps0_g, self.ps1_g, self.ps2_g, self.ps3_g = global_model.module.fpn_global.forward(self.c2_g, self.c3_g, self.c4_g, self.c5_g) # self.output_g = F.interpolate(self.output_g, image_global.size()[2:], mode='nearest') self.patch_n += patches.size()[0] self.patch_n %= n_patch_all self.resnet_local.eval() self.fpn_local.eval() c2, c3, c4, c5 = self.resnet_local.forward(patches) # global's 1x patch cat output, ps0, ps1, ps2, ps3 = self.fpn_local.forward( c2, c3, c4, c5, self._crop_global(self.c2_g, top_lefts[oped[0]:oped[1]], ratio), c3_ext=self._crop_global(self.c3_g, top_lefts[oped[0]:oped[1]], ratio), c4_ext=self._crop_global(self.c4_g, top_lefts[oped[0]:oped[1]], ratio), c5_ext=self._crop_global(self.c5_g, top_lefts[oped[0]:oped[1]], ratio), ps0_ext=[ self._crop_global(f, top_lefts[oped[0]:oped[1]], ratio) for f in self.ps0_g ], ps1_ext=[ self._crop_global(f, top_lefts[oped[0]:oped[1]], ratio) for f in self.ps1_g ], ps2_ext=[ self._crop_global(f, 
top_lefts[oped[0]:oped[1]], ratio) for f in self.ps2_g ] ) # output = F.interpolate(output, patches.size()[2:], mode='nearest') self.c2_b = self._merge_local(c2, self.c2_b, self.c2_g, top_lefts, oped, ratio, template) self.c3_b = self._merge_local(c3, self.c3_b, self.c3_g, top_lefts, oped, ratio, template) self.c4_b = self._merge_local(c4, self.c4_b, self.c4_g, top_lefts, oped, ratio, template) self.c5_b = self._merge_local(c5, self.c5_b, self.c5_g, top_lefts, oped, ratio, template) self.ps00_b = self._merge_local(ps0[0], self.ps00_b, self.ps0_g[0], top_lefts, oped, ratio, template) self.ps01_b = self._merge_local(ps0[1], self.ps01_b, self.ps0_g[1], top_lefts, oped, ratio, template) self.ps02_b = self._merge_local(ps0[2], self.ps02_b, self.ps0_g[2], top_lefts, oped, ratio, template) self.ps03_b = self._merge_local(ps0[3], self.ps03_b, self.ps0_g[3], top_lefts, oped, ratio, template) self.ps10_b = self._merge_local(ps1[0], self.ps10_b, self.ps1_g[0], top_lefts, oped, ratio, template) self.ps11_b = self._merge_local(ps1[1], self.ps11_b, self.ps1_g[1], top_lefts, oped, ratio, template) self.ps12_b = self._merge_local(ps1[2], self.ps12_b, self.ps1_g[2], top_lefts, oped, ratio, template) self.ps13_b = self._merge_local(ps1[3], self.ps13_b, self.ps1_g[3], top_lefts, oped, ratio, template) self.ps20_b = self._merge_local(ps2[0], self.ps20_b, self.ps2_g[0], top_lefts, oped, ratio, template) self.ps21_b = self._merge_local(ps2[1], self.ps21_b, self.ps2_g[1], top_lefts, oped, ratio, template) self.ps22_b = self._merge_local(ps2[2], self.ps22_b, self.ps2_g[2], top_lefts, oped, ratio, template) self.ps23_b = self._merge_local(ps2[3], self.ps23_b, self.ps2_g[3], top_lefts, oped, ratio, template) self.ps3_b.append(ps3.cpu()) # self.output_b.append(output.cpu()) # each output is 1, 7, h, w if self.patch_n == 0: # merged all patches into an image self.c2_l.append(self.c2_b); self.c3_l.append(self.c3_b); self.c4_l.append(self.c4_b); self.c5_l.append(self.c5_b); 
self.ps00_l.append(self.ps00_b); self.ps01_l.append(self.ps01_b); self.ps02_l.append(self.ps02_b); self.ps03_l.append(self.ps03_b) self.ps10_l.append(self.ps10_b); self.ps11_l.append(self.ps11_b); self.ps12_l.append(self.ps12_b); self.ps13_l.append(self.ps13_b) self.ps20_l.append(self.ps20_b); self.ps21_l.append(self.ps21_b); self.ps22_l.append(self.ps22_b); self.ps23_l.append(self.ps23_b) # collected all ps3 and output of patches as a (b) tensor, append into list self.ps3_l.append(torch.cat(self.ps3_b, dim=0)); # a list of tensors # self.output_l.append(torch.cat(self.output_b, dim=0)) # a list of 36, 7, h, w tensors self.c2_b = None; self.c3_b = None; self.c4_b = None; self.c5_b = None; self.ps00_b = None; self.ps01_b = None; self.ps02_b = None; self.ps03_b = None; self.ps10_b = None; self.ps11_b = None; self.ps12_b = None; self.ps13_b = None; self.ps20_b = None; self.ps21_b = None; self.ps22_b = None; self.ps23_b = None; self.ps3_b = []# ; self.output_b = [] if len(self.c2_l) == batch_size: self.c2_l = torch.cat(self.c2_l, dim=0)# .cuda() self.c3_l = torch.cat(self.c3_l, dim=0)# .cuda() self.c4_l = torch.cat(self.c4_l, dim=0)# .cuda() self.c5_l = torch.cat(self.c5_l, dim=0)# .cuda() self.ps00_l = torch.cat(self.ps00_l, dim=0)# .cuda() self.ps01_l = torch.cat(self.ps01_l, dim=0)# .cuda() self.ps02_l = torch.cat(self.ps02_l, dim=0)# .cuda() self.ps03_l = torch.cat(self.ps03_l, dim=0)# .cuda() self.ps10_l = torch.cat(self.ps10_l, dim=0)# .cuda() self.ps11_l = torch.cat(self.ps11_l, dim=0)# .cuda() self.ps12_l = torch.cat(self.ps12_l, dim=0)# .cuda() self.ps13_l = torch.cat(self.ps13_l, dim=0)# .cuda() self.ps20_l = torch.cat(self.ps20_l, dim=0)# .cuda() self.ps21_l = torch.cat(self.ps21_l, dim=0)# .cuda() self.ps22_l = torch.cat(self.ps22_l, dim=0)# .cuda() self.ps23_l = torch.cat(self.ps23_l, dim=0)# .cuda() self.ps0_l = [self.ps00_l, self.ps01_l, self.ps02_l, self.ps03_l] self.ps1_l = [self.ps10_l, self.ps11_l, self.ps12_l, self.ps13_l] self.ps2_l = [self.ps20_l, 
self.ps21_l, self.ps22_l, self.ps23_l] # self.ps3_l = torch.cat(self.ps3_l, dim=0)# .cuda() return self.ps3_l, output# self.output_l def forward(self, image_global, patches, top_lefts, ratio, mode=1, global_model=None, n_patch=None): if mode == 1: # train global model c2_g, c3_g, c4_g, c5_g = self.resnet_global.forward(image_global) output_g, ps0_g, ps1_g, ps2_g, ps3_g = self.fpn_global.forward(c2_g, c3_g, c4_g, c5_g) # imsize = image_global.size()[2:] # output_g = F.interpolate(output_g, imsize, mode='nearest') return output_g, None elif mode == 2: # train global2local model with torch.no_grad(): if self.patch_n == 0: # calculate global images only if patches belong to a new set of global images (when self.patch_n % n_patch == 0) self.c2_g, self.c3_g, self.c4_g, self.c5_g = self.resnet_global.forward(image_global) self.output_g, self.ps0_g, self.ps1_g, self.ps2_g, self.ps3_g = self.fpn_global.forward(self.c2_g, self.c3_g, self.c4_g, self.c5_g) # imsize_glb = image_global.size()[2:] # self.output_g = F.interpolate(self.output_g, imsize_glb, mode='nearest') self.patch_n += patches.size()[0] self.patch_n %= n_patch # train local model ####################################### c2_l, c3_l, c4_l, c5_l = self.resnet_local.forward(patches) # global's 1x patch cat output_l, ps0_l, ps1_l, ps2_l, ps3_l = self.fpn_local.forward(c2_l, c3_l, c4_l, c5_l, self._crop_global(self.c2_g, top_lefts, ratio), self._crop_global(self.c3_g, top_lefts, ratio), self._crop_global(self.c4_g, top_lefts, ratio), self._crop_global(self.c5_g, top_lefts, ratio), [ self._crop_global(f, top_lefts, ratio) for f in self.ps0_g ], [ self._crop_global(f, top_lefts, ratio) for f in self.ps1_g ], [ self._crop_global(f, top_lefts, ratio) for f in self.ps2_g ] ) # imsize = patches.size()[2:] # output_l = F.interpolate(output_l, imsize, mode='nearest') ps3_g2l = self._crop_global(self.ps3_g, top_lefts, ratio)[0] # only calculate loss on 1x ps3_g2l = F.interpolate(ps3_g2l, size=ps3_l.size()[2:], 
**self._up_kwargs) output = self.ensemble(ps3_l, ps3_g2l) # output = F.interpolate(output, imsize, mode='nearest') return output, self.output_g, output_l, self.mse(ps3_l, ps3_g2l) else: # train local2global model c2_g, c3_g, c4_g, c5_g = self.resnet_global.forward(image_global) # local patch cat into global output_g, ps0_g, ps1_g, ps2_g, ps3_g = self.fpn_global.forward(c2_g, c3_g, c4_g, c5_g, c2_ext=self.c2_l, c3_ext=self.c3_l, c4_ext=self.c4_l, c5_ext=self.c5_l, ps0_ext=self.ps0_l, ps1_ext=self.ps1_l, ps2_ext=self.ps2_l) # imsize = image_global.size()[2:] # output_g = F.interpolate(output_g, imsize, mode='nearest') self.clear_cache() return output_g, ps3_g
[ "torch.nn.MSELoss", "torch.stack", "torch.nn.functional.grid_sample", "torch.nn.Conv2d", "numpy.zeros", "torch.cat", "torch.nn.init.normal_", "torch.nn.init.constant_", "torch.zeros", "torch.nn.functional.interpolate", "torch.no_grad", "numpy.round", "torch.from_numpy" ]
[((328, 384), 'torch.nn.Conv2d', 'nn.Conv2d', (['(2048)', '(256)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(2048, 256, kernel_size=1, stride=1, padding=0)\n', (337, 384), True, 'import torch.nn as nn\n'), ((453, 509), 'torch.nn.Conv2d', 'nn.Conv2d', (['(1024)', '(256)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(1024, 256, kernel_size=1, stride=1, padding=0)\n', (462, 509), True, 'import torch.nn as nn\n'), ((535, 590), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512)', '(256)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(512, 256, kernel_size=1, stride=1, padding=0)\n', (544, 590), True, 'import torch.nn as nn\n'), ((616, 671), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(256)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(256, 256, kernel_size=1, stride=1, padding=0)\n', (625, 671), True, 'import torch.nn as nn\n'), ((721, 776), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256, 256, kernel_size=3, stride=1, padding=1)\n', (730, 776), True, 'import torch.nn as nn\n'), ((802, 857), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256, 256, kernel_size=3, stride=1, padding=1)\n', (811, 857), True, 'import torch.nn as nn\n'), ((883, 938), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256, 256, kernel_size=3, stride=1, padding=1)\n', (892, 938), True, 'import torch.nn as nn\n'), ((964, 1019), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256, 256, kernel_size=3, stride=1, padding=1)\n', (973, 1019), True, 'import torch.nn as nn\n'), ((1045, 1100), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(128)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256, 128, kernel_size=3, stride=1, padding=1)\n', (1054, 1100), True, 
'import torch.nn as nn\n'), ((1126, 1181), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(128)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256, 128, kernel_size=3, stride=1, padding=1)\n', (1135, 1181), True, 'import torch.nn as nn\n'), ((1207, 1262), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(128)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256, 128, kernel_size=3, stride=1, padding=1)\n', (1216, 1262), True, 'import torch.nn as nn\n'), ((1288, 1343), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(128)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256, 128, kernel_size=3, stride=1, padding=1)\n', (1297, 1343), True, 'import torch.nn as nn\n'), ((1394, 1458), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128 * 4)', 'numClass'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(128 * 4, numClass, kernel_size=3, stride=1, padding=1)\n', (1403, 1458), True, 'import torch.nn as nn\n'), ((1584, 1644), 'torch.nn.Conv2d', 'nn.Conv2d', (['(2048 * 2)', '(256)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(2048 * 2, 256, kernel_size=1, stride=1, padding=0)\n', (1593, 1644), True, 'import torch.nn as nn\n'), ((1715, 1775), 'torch.nn.Conv2d', 'nn.Conv2d', (['(1024 * 2)', '(256)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(1024 * 2, 256, kernel_size=1, stride=1, padding=0)\n', (1724, 1775), True, 'import torch.nn as nn\n'), ((1803, 1862), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512 * 2)', '(256)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(512 * 2, 256, kernel_size=1, stride=1, padding=0)\n', (1812, 1862), True, 'import torch.nn as nn\n'), ((1890, 1949), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * 2)', '(256)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(256 * 2, 256, kernel_size=1, stride=1, padding=0)\n', (1899, 1949), True, 'import torch.nn as nn\n'), ((2001, 2060), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * 2)', '(256)'], {'kernel_size': '(3)', 
'stride': '(1)', 'padding': '(1)'}), '(256 * 2, 256, kernel_size=3, stride=1, padding=1)\n', (2010, 2060), True, 'import torch.nn as nn\n'), ((2088, 2147), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * 2)', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256 * 2, 256, kernel_size=3, stride=1, padding=1)\n', (2097, 2147), True, 'import torch.nn as nn\n'), ((2175, 2234), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * 2)', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256 * 2, 256, kernel_size=3, stride=1, padding=1)\n', (2184, 2234), True, 'import torch.nn as nn\n'), ((2262, 2321), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * 2)', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256 * 2, 256, kernel_size=3, stride=1, padding=1)\n', (2271, 2321), True, 'import torch.nn as nn\n'), ((2349, 2408), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * 2)', '(128)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256 * 2, 128, kernel_size=3, stride=1, padding=1)\n', (2358, 2408), True, 'import torch.nn as nn\n'), ((2436, 2495), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * 2)', '(128)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256 * 2, 128, kernel_size=3, stride=1, padding=1)\n', (2445, 2495), True, 'import torch.nn as nn\n'), ((2523, 2582), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * 2)', '(128)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256 * 2, 128, kernel_size=3, stride=1, padding=1)\n', (2532, 2582), True, 'import torch.nn as nn\n'), ((2610, 2669), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * 2)', '(128)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256 * 2, 128, kernel_size=3, stride=1, padding=1)\n', (2619, 2669), True, 'import torch.nn as nn\n'), ((2690, 2757), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128 * 4 * 2)', '(128 * 4)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(128 * 4 * 2, 128 * 4, kernel_size=3, stride=1, padding=1)\n', 
(2699, 2757), True, 'import torch.nn as nn\n'), ((2841, 2890), 'torch.nn.functional.interpolate', 'F.interpolate', (['p5'], {'size': '(H, W)'}), '(p5, size=(H, W), **self._up_kwargs)\n', (2854, 2890), True, 'import torch.nn.functional as F\n'), ((2904, 2953), 'torch.nn.functional.interpolate', 'F.interpolate', (['p4'], {'size': '(H, W)'}), '(p4, size=(H, W), **self._up_kwargs)\n', (2917, 2953), True, 'import torch.nn.functional as F\n'), ((2967, 3016), 'torch.nn.functional.interpolate', 'F.interpolate', (['p3'], {'size': '(H, W)'}), '(p3, size=(H, W), **self._up_kwargs)\n', (2980, 3016), True, 'import torch.nn.functional as F\n'), ((3032, 3066), 'torch.cat', 'torch.cat', (['[p5, p4, p3, p2]'], {'dim': '(1)'}), '([p5, p4, p3, p2], dim=1)\n', (3041, 3066), False, 'import torch\n'), ((6465, 6528), 'torch.nn.Conv2d', 'nn.Conv2d', (['(2048 * fold)', '(256)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(2048 * fold, 256, kernel_size=1, stride=1, padding=0)\n', (6474, 6528), True, 'import torch.nn as nn\n'), ((6601, 6664), 'torch.nn.Conv2d', 'nn.Conv2d', (['(1024 * fold)', '(256)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(1024 * fold, 256, kernel_size=1, stride=1, padding=0)\n', (6610, 6664), True, 'import torch.nn as nn\n'), ((6690, 6752), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512 * fold)', '(256)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(512 * fold, 256, kernel_size=1, stride=1, padding=0)\n', (6699, 6752), True, 'import torch.nn as nn\n'), ((6778, 6840), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * fold)', '(256)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(256 * fold, 256, kernel_size=1, stride=1, padding=0)\n', (6787, 6840), True, 'import torch.nn as nn\n'), ((6904, 6966), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * fold)', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256 * fold, 256, kernel_size=3, stride=1, padding=1)\n', (6913, 6966), True, 'import torch.nn as 
nn\n'), ((6992, 7054), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * fold)', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256 * fold, 256, kernel_size=3, stride=1, padding=1)\n', (7001, 7054), True, 'import torch.nn as nn\n'), ((7080, 7142), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * fold)', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256 * fold, 256, kernel_size=3, stride=1, padding=1)\n', (7089, 7142), True, 'import torch.nn as nn\n'), ((7168, 7230), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * fold)', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256 * fold, 256, kernel_size=3, stride=1, padding=1)\n', (7177, 7230), True, 'import torch.nn as nn\n'), ((7270, 7332), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * fold)', '(128)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256 * fold, 128, kernel_size=3, stride=1, padding=1)\n', (7279, 7332), True, 'import torch.nn as nn\n'), ((7358, 7420), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * fold)', '(128)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256 * fold, 128, kernel_size=3, stride=1, padding=1)\n', (7367, 7420), True, 'import torch.nn as nn\n'), ((7446, 7508), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * fold)', '(128)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256 * fold, 128, kernel_size=3, stride=1, padding=1)\n', (7455, 7508), True, 'import torch.nn as nn\n'), ((7534, 7596), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256 * fold)', '(128)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256 * fold, 128, kernel_size=3, stride=1, padding=1)\n', (7543, 7596), True, 'import torch.nn as nn\n'), ((7676, 7746), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128 * 4 * fold)', '(128 * 4)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(128 * 4 * fold, 128 * 4, kernel_size=3, stride=1, padding=1)\n', (7685, 7746), True, 'import torch.nn as nn\n'), ((7765, 7829), 'torch.nn.Conv2d', 
'nn.Conv2d', (['(128 * 4)', 'numClass'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(128 * 4, numClass, kernel_size=3, stride=1, padding=1)\n', (7774, 7829), True, 'import torch.nn as nn\n'), ((7917, 7966), 'torch.nn.functional.interpolate', 'F.interpolate', (['p5'], {'size': '(H, W)'}), '(p5, size=(H, W), **self._up_kwargs)\n', (7930, 7966), True, 'import torch.nn.functional as F\n'), ((7980, 8029), 'torch.nn.functional.interpolate', 'F.interpolate', (['p4'], {'size': '(H, W)'}), '(p4, size=(H, W), **self._up_kwargs)\n', (7993, 8029), True, 'import torch.nn.functional as F\n'), ((8043, 8092), 'torch.nn.functional.interpolate', 'F.interpolate', (['p3'], {'size': '(H, W)'}), '(p3, size=(H, W), **self._up_kwargs)\n', (8056, 8092), True, 'import torch.nn.functional as F\n'), ((8108, 8142), 'torch.cat', 'torch.cat', (['[p5, p4, p3, p2]'], {'dim': '(1)'}), '([p5, p4, p3, p2], dim=1)\n', (8117, 8142), False, 'import torch\n'), ((12791, 12803), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (12801, 12803), True, 'import torch.nn as nn\n'), ((12834, 12902), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128 * 4 * 2)', 'numClass'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(128 * 4 * 2, numClass, kernel_size=3, stride=1, padding=1)\n', (12843, 12902), True, 'import torch.nn as nn\n'), ((12909, 12969), 'torch.nn.init.normal_', 'nn.init.normal_', (['self.ensemble_conv.weight'], {'mean': '(0)', 'std': '(0.01)'}), '(self.ensemble_conv.weight, mean=0, std=0.01)\n', (12924, 12969), True, 'import torch.nn as nn\n'), ((15157, 15214), 'numpy.zeros', 'np.zeros', (['((b_bbox,) + sampleSize + (2,))'], {'dtype': 'np.float32'}), '((b_bbox,) + sampleSize + (2,), dtype=np.float32)\n', (15165, 15214), True, 'import numpy as np\n'), ((15668, 15691), 'torch.nn.functional.grid_sample', 'F.grid_sample', (['fm', 'grid'], {}), '(fm, grid)\n', (15681, 15691), True, 'import torch.nn.functional as F\n'), ((16538, 16562), 'torch.stack', 'torch.stack', (['crop'], {'dim': 
'(0)'}), '(crop, dim=0)\n', (16549, 16562), False, 'import torch\n'), ((3806, 3854), 'torch.nn.functional.interpolate', 'F.interpolate', (['x'], {'size': '(H, W)'}), '(x, size=(H, W), **self._up_kwargs)\n', (3819, 3854), True, 'import torch.nn.functional as F\n'), ((8882, 8930), 'torch.nn.functional.interpolate', 'F.interpolate', (['x'], {'size': '(H, W)'}), '(x, size=(H, W), **self._up_kwargs)\n', (8895, 8930), True, 'import torch.nn.functional as F\n'), ((15111, 15142), 'torch.cat', 'torch.cat', (['([fm] * b_bbox)'], {'dim': '(0)'}), '([fm] * b_bbox, dim=0)\n', (15120, 15142), False, 'import torch\n'), ((17412, 17475), 'torch.nn.functional.interpolate', 'F.interpolate', (['f_local[i:i + 1]'], {'size': '(h, w)'}), '(f_local[i:i + 1], size=(h, w), **self._up_kwargs)\n', (17425, 17475), True, 'import torch.nn.functional as F\n'), ((17535, 17590), 'torch.nn.functional.interpolate', 'F.interpolate', (['template'], {'size': '(H, W)'}), '(template, size=(H, W), **self._up_kwargs)\n', (17548, 17590), True, 'import torch.nn.functional as F\n'), ((17820, 17857), 'torch.cat', 'torch.cat', (['(f_local, f_global)'], {'dim': '(1)'}), '((f_local, f_global), dim=1)\n', (17829, 17857), False, 'import torch\n'), ((18124, 18139), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (18137, 18139), False, 'import torch\n'), ((4304, 4334), 'torch.cat', 'torch.cat', (['(c5, c5_ext)'], {'dim': '(1)'}), '((c5, c5_ext), dim=1)\n', (4313, 4334), False, 'import torch\n'), ((4891, 4925), 'torch.cat', 'torch.cat', (['(p5, ps0_ext[0])'], {'dim': '(1)'}), '((p5, ps0_ext[0]), dim=1)\n', (4900, 4925), False, 'import torch\n'), ((4963, 4997), 'torch.cat', 'torch.cat', (['(p4, ps0_ext[1])'], {'dim': '(1)'}), '((p4, ps0_ext[1]), dim=1)\n', (4972, 4997), False, 'import torch\n'), ((5035, 5069), 'torch.cat', 'torch.cat', (['(p3, ps0_ext[2])'], {'dim': '(1)'}), '((p3, ps0_ext[2]), dim=1)\n', (5044, 5069), False, 'import torch\n'), ((5107, 5141), 'torch.cat', 'torch.cat', (['(p2, ps0_ext[3])'], {'dim': 
'(1)'}), '((p2, ps0_ext[3]), dim=1)\n', (5116, 5141), False, 'import torch\n'), ((5405, 5439), 'torch.cat', 'torch.cat', (['(p5, ps1_ext[0])'], {'dim': '(1)'}), '((p5, ps1_ext[0]), dim=1)\n', (5414, 5439), False, 'import torch\n'), ((5477, 5511), 'torch.cat', 'torch.cat', (['(p4, ps1_ext[1])'], {'dim': '(1)'}), '((p4, ps1_ext[1]), dim=1)\n', (5486, 5511), False, 'import torch\n'), ((5549, 5583), 'torch.cat', 'torch.cat', (['(p3, ps1_ext[2])'], {'dim': '(1)'}), '((p3, ps1_ext[2]), dim=1)\n', (5558, 5583), False, 'import torch\n'), ((5621, 5655), 'torch.cat', 'torch.cat', (['(p2, ps1_ext[3])'], {'dim': '(1)'}), '((p2, ps1_ext[3]), dim=1)\n', (5630, 5655), False, 'import torch\n'), ((5897, 5931), 'torch.cat', 'torch.cat', (['(p5, ps2_ext[0])'], {'dim': '(1)'}), '((p5, ps2_ext[0]), dim=1)\n', (5906, 5931), False, 'import torch\n'), ((5954, 5988), 'torch.cat', 'torch.cat', (['(p4, ps2_ext[1])'], {'dim': '(1)'}), '((p4, ps2_ext[1]), dim=1)\n', (5963, 5988), False, 'import torch\n'), ((6011, 6045), 'torch.cat', 'torch.cat', (['(p3, ps2_ext[2])'], {'dim': '(1)'}), '((p3, ps2_ext[2]), dim=1)\n', (6020, 6045), False, 'import torch\n'), ((6068, 6102), 'torch.cat', 'torch.cat', (['(p2, ps2_ext[3])'], {'dim': '(1)'}), '((p2, ps2_ext[3]), dim=1)\n', (6077, 6102), False, 'import torch\n'), ((13072, 13115), 'torch.nn.init.normal_', 'nn.init.normal_', (['m.weight'], {'mean': '(0)', 'std': '(0.01)'}), '(m.weight, mean=0, std=0.01)\n', (13087, 13115), True, 'import torch.nn as nn\n'), ((13151, 13179), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (13168, 13179), True, 'import torch.nn as nn\n'), ((13261, 13304), 'torch.nn.init.normal_', 'nn.init.normal_', (['m.weight'], {'mean': '(0)', 'std': '(0.01)'}), '(m.weight, mean=0, std=0.01)\n', (13276, 13304), True, 'import torch.nn as nn\n'), ((13340, 13368), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (13357, 13368), True, 'import torch.nn as nn\n'), 
((15623, 15645), 'torch.from_numpy', 'torch.from_numpy', (['grid'], {}), '(grid)\n', (15639, 15645), False, 'import torch\n'), ((15893, 15915), 'numpy.round', 'np.round', (['(H * ratio[0])'], {}), '(H * ratio[0])\n', (15901, 15915), True, 'import numpy as np\n'), ((15922, 15944), 'numpy.round', 'np.round', (['(W * ratio[1])'], {}), '(W * ratio[1])\n', (15930, 15944), True, 'import numpy as np\n'), ((17146, 17168), 'numpy.round', 'np.round', (['(H * ratio[0])'], {}), '(H * ratio[0])\n', (17154, 17168), True, 'import numpy as np\n'), ((17175, 17197), 'numpy.round', 'np.round', (['(W * ratio[1])'], {}), '(W * ratio[1])\n', (17183, 17197), True, 'import numpy as np\n'), ((23010, 23037), 'torch.cat', 'torch.cat', (['self.c2_l'], {'dim': '(0)'}), '(self.c2_l, dim=0)\n', (23019, 23037), False, 'import torch\n'), ((23075, 23102), 'torch.cat', 'torch.cat', (['self.c3_l'], {'dim': '(0)'}), '(self.c3_l, dim=0)\n', (23084, 23102), False, 'import torch\n'), ((23140, 23167), 'torch.cat', 'torch.cat', (['self.c4_l'], {'dim': '(0)'}), '(self.c4_l, dim=0)\n', (23149, 23167), False, 'import torch\n'), ((23205, 23232), 'torch.cat', 'torch.cat', (['self.c5_l'], {'dim': '(0)'}), '(self.c5_l, dim=0)\n', (23214, 23232), False, 'import torch\n'), ((23272, 23301), 'torch.cat', 'torch.cat', (['self.ps00_l'], {'dim': '(0)'}), '(self.ps00_l, dim=0)\n', (23281, 23301), False, 'import torch\n'), ((23341, 23370), 'torch.cat', 'torch.cat', (['self.ps01_l'], {'dim': '(0)'}), '(self.ps01_l, dim=0)\n', (23350, 23370), False, 'import torch\n'), ((23410, 23439), 'torch.cat', 'torch.cat', (['self.ps02_l'], {'dim': '(0)'}), '(self.ps02_l, dim=0)\n', (23419, 23439), False, 'import torch\n'), ((23479, 23508), 'torch.cat', 'torch.cat', (['self.ps03_l'], {'dim': '(0)'}), '(self.ps03_l, dim=0)\n', (23488, 23508), False, 'import torch\n'), ((23548, 23577), 'torch.cat', 'torch.cat', (['self.ps10_l'], {'dim': '(0)'}), '(self.ps10_l, dim=0)\n', (23557, 23577), False, 'import torch\n'), ((23617, 23646), 
'torch.cat', 'torch.cat', (['self.ps11_l'], {'dim': '(0)'}), '(self.ps11_l, dim=0)\n', (23626, 23646), False, 'import torch\n'), ((23686, 23715), 'torch.cat', 'torch.cat', (['self.ps12_l'], {'dim': '(0)'}), '(self.ps12_l, dim=0)\n', (23695, 23715), False, 'import torch\n'), ((23755, 23784), 'torch.cat', 'torch.cat', (['self.ps13_l'], {'dim': '(0)'}), '(self.ps13_l, dim=0)\n', (23764, 23784), False, 'import torch\n'), ((23824, 23853), 'torch.cat', 'torch.cat', (['self.ps20_l'], {'dim': '(0)'}), '(self.ps20_l, dim=0)\n', (23833, 23853), False, 'import torch\n'), ((23893, 23922), 'torch.cat', 'torch.cat', (['self.ps21_l'], {'dim': '(0)'}), '(self.ps21_l, dim=0)\n', (23902, 23922), False, 'import torch\n'), ((23962, 23991), 'torch.cat', 'torch.cat', (['self.ps22_l'], {'dim': '(0)'}), '(self.ps22_l, dim=0)\n', (23971, 23991), False, 'import torch\n'), ((24031, 24060), 'torch.cat', 'torch.cat', (['self.ps23_l'], {'dim': '(0)'}), '(self.ps23_l, dim=0)\n', (24040, 24060), False, 'import torch\n'), ((4395, 4425), 'torch.cat', 'torch.cat', (['(c4, c4_ext)'], {'dim': '(1)'}), '((c4, c4_ext), dim=1)\n', (4404, 4425), False, 'import torch\n'), ((4487, 4517), 'torch.cat', 'torch.cat', (['(c3, c3_ext)'], {'dim': '(1)'}), '((c3, c3_ext), dim=1)\n', (4496, 4517), False, 'import torch\n'), ((4579, 4609), 'torch.cat', 'torch.cat', (['(c2, c2_ext)'], {'dim': '(1)'}), '((c2, c2_ext), dim=1)\n', (4588, 4609), False, 'import torch\n'), ((16179, 16208), 'numpy.round', 'np.round', (['(top_lefts[i][0] * H)'], {}), '(top_lefts[i][0] * H)\n', (16187, 16208), True, 'import numpy as np\n'), ((16215, 16244), 'numpy.round', 'np.round', (['(top_lefts[i][1] * W)'], {}), '(top_lefts[i][1] * W)\n', (16223, 16244), True, 'import numpy as np\n'), ((17094, 17119), 'torch.zeros', 'torch.zeros', (['(1, c, H, W)'], {}), '((1, c, H, W))\n', (17105, 17119), False, 'import torch\n'), ((17286, 17319), 'numpy.round', 'np.round', (['(H * top_lefts[index][0])'], {}), '(H * top_lefts[index][0])\n', (17294, 17319), 
True, 'import numpy as np\n'), ((17326, 17359), 'numpy.round', 'np.round', (['(W * top_lefts[index][1])'], {}), '(W * top_lefts[index][1])\n', (17334, 17359), True, 'import numpy as np\n'), ((22351, 22379), 'torch.cat', 'torch.cat', (['self.ps3_b'], {'dim': '(0)'}), '(self.ps3_b, dim=0)\n', (22360, 22379), False, 'import torch\n'), ((25011, 25026), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (25024, 25026), False, 'import torch\n')]
import os.path import sys from functools import reduce from os.path import exists from werkzeug.utils import ImportStringError def import_from_string(import_name: str, silent: bool = False): import_name = import_name.replace(":", ".") try: try: __import__(import_name) except ImportError: if "." not in import_name: raise else: return sys.modules[import_name] module_name, obj_name = import_name.rsplit(".", 1) module = __import__(module_name, globals(), locals(), [obj_name]) try: return getattr(module, obj_name) except AttributeError as e: raise ImportError(e) except ImportError as e: if not silent: raise ImportStringError(import_name, e).with_traceback(sys.exc_info()[2]) return None def is_url_register(flask_app, url): for url_rule in flask_app.url_map.iter_rules(): if url_rule.rule == url: return True return False def concat_path(first, last, *more_path): path = os.path.join(first, last) if len(more_path) > 0: path = os.path.join(path, reduce(os.path.join, more_path)) return path def is_exists_path(path): return exists(path)
[ "functools.reduce", "os.path.exists", "werkzeug.utils.ImportStringError", "sys.exc_info" ]
[((1258, 1270), 'os.path.exists', 'exists', (['path'], {}), '(path)\n', (1264, 1270), False, 'from os.path import exists\n'), ((1170, 1201), 'functools.reduce', 'reduce', (['os.path.join', 'more_path'], {}), '(os.path.join, more_path)\n', (1176, 1201), False, 'from functools import reduce\n'), ((778, 811), 'werkzeug.utils.ImportStringError', 'ImportStringError', (['import_name', 'e'], {}), '(import_name, e)\n', (795, 811), False, 'from werkzeug.utils import ImportStringError\n'), ((827, 841), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (839, 841), False, 'import sys\n')]
import os import shutil from sys import platform from .base import Mingw64BaseRecipe from hardhat.urls import Urls from hardhat.util import patch class Mingw64OpenBlasRecipe(Mingw64BaseRecipe): def __init__(self, *args, **kwargs): super(Mingw64OpenBlasRecipe, self).__init__(*args, **kwargs) self.sha256 = 'c4f71a60e3f23a7a25693390af3be230' \ '8d374749ae3cb0bcfd8aab33a3c9ac09' # depends on wine but self.depends = ['wine'] doesn't work self.name = 'mingw64-openblas' self.version = 'fd4e68128e56beb3b97f37178edf07bef7ade5f1' self.url = Urls.github_commit('xianyi', 'OpenBLAS', self.version) # self.filename = os.path.join(self.tarball_dir, # 'openblas-%s.tar.gz' % self.version) self.libname = 'libopenblas.a' del self.environment['CPPFLAGS'] del self.environment['CXXFLAGS'] self.environment['CFLAGS'] = '-fomit-frame-pointer' \ ' -funroll-loops' self.environment['FFLAGS'] = '-fomit-frame-pointer' \ ' -funroll-loops' os.environ['GFORTRAN'] = 'x86_64-w64-mingw32-gfortran' self.compile_args += ['USE_THREAD=1', 'FC=gfortran', # 'CC=%s' % os.environ['CC'], 'FC=%s' % os.environ['GFORTRAN'], 'BINARY=64', # 'HOSTCC=%s' % os.environ['CC'], # 'AS=%s' % os.environ['AS'], # 'CROSS=1', # 'LD=%s' % os.environ['LD'], # 'CROSS_SUFFIX=x86_64-w64-mingw32-' ] # If needed. 
See TargetList.txt in OpenBLAS directory for list of targets if self.is_atom(): self.compile_args += ['TARGET=ATOM'] else: # hardcoded because it failed to compile when detecting for itself self.compile_args += ['TARGET=CORE2'] # self.compile_args = ['make', # 'all', # 'BLASLIB=%s' % (self.libname), # 'OPTS="-O3 -fomit-frame-pointer -funroll-loops"' # ] # not parallel safe self.install_args += ['PREFIX=%s' % (self.prefix_dir) ] def is_atom(self): lines = [] if platform == "linux" or platform == "linux2": with open('/proc/cpuinfo', 'rt') as f: lines = f.readlines() for line in lines: if line.startswith('model name'): model = line[len('model_name'):].strip() model = model[2:] if 'Intel(R) Celeron(R) CPU N2940' in model: return True break return False def patch(self): self.log_dir('patch', self.directory, 'patching getarch') filename = os.path.join(self.directory, 'Makefile.prebuild') src = './getarch_2nd' dst = '%s/../bin/wine64 ./getarch_2nd' % self.prefix_dir patch(filename, src, dst) src = './getarch ' dst = '%s/../bin/wine64 ./getarch ' % self.prefix_dir patch(filename, src, dst) def configure(self): pass # def install(self): # super(OpenBlasRecipe, self).install() # # libs = ['libopenblas.a', # 'libopenblas.so.0', # 'libopenblas.so'] # # for lib in libs: # src = os.path.join(self.directory, 'lib', lib) # dest = os.path.join(self.prefix_dir, 'lib', lib) # shutil.copy2(src, dest)
[ "hardhat.urls.Urls.github_commit", "hardhat.util.patch", "os.path.join" ]
[((614, 668), 'hardhat.urls.Urls.github_commit', 'Urls.github_commit', (['"""xianyi"""', '"""OpenBLAS"""', 'self.version'], {}), "('xianyi', 'OpenBLAS', self.version)\n", (632, 668), False, 'from hardhat.urls import Urls\n'), ((3090, 3139), 'os.path.join', 'os.path.join', (['self.directory', '"""Makefile.prebuild"""'], {}), "(self.directory, 'Makefile.prebuild')\n", (3102, 3139), False, 'import os\n'), ((3243, 3268), 'hardhat.util.patch', 'patch', (['filename', 'src', 'dst'], {}), '(filename, src, dst)\n', (3248, 3268), False, 'from hardhat.util import patch\n'), ((3367, 3392), 'hardhat.util.patch', 'patch', (['filename', 'src', 'dst'], {}), '(filename, src, dst)\n', (3372, 3392), False, 'from hardhat.util import patch\n')]
#!/usr/bin/env python """ Pandoc filter to convert divs with class="theorem" to LaTeX theorem environments in LaTeX output, and to numbered theorems in HTML output. """ from pandocfilters import toJSONFilter, RawBlock, Div theoremcount = 0 def latex(x): return RawBlock('latex',x) def html(x): return RawBlock('html', x) def theorems(key, value, format, meta): if key == 'Div': [[ident,classes,kvs], contents] = value if "theorem" in classes: if format == "latex": if ident == "": label = "" else: label = '\\label{' + ident + '}' return([latex('\\begin{theorem}' + label)] + contents + [latex('\\end{theorem}')]) elif format == "html" or format == "html5": global theoremcount theoremcount = theoremcount + 1 newcontents = [html('<dt>Theorem ' + str(theoremcount) + '</dt>'), html('<dd>')] + contents + [html('</dd>\n</dl>')] return Div([ident,classes,kvs], newcontents) if __name__ == "__main__": toJSONFilter(theorems)
[ "pandocfilters.Div", "pandocfilters.RawBlock", "pandocfilters.toJSONFilter" ]
[((267, 287), 'pandocfilters.RawBlock', 'RawBlock', (['"""latex"""', 'x'], {}), "('latex', x)\n", (275, 287), False, 'from pandocfilters import toJSONFilter, RawBlock, Div\n'), ((310, 329), 'pandocfilters.RawBlock', 'RawBlock', (['"""html"""', 'x'], {}), "('html', x)\n", (318, 329), False, 'from pandocfilters import toJSONFilter, RawBlock, Div\n'), ((1048, 1070), 'pandocfilters.toJSONFilter', 'toJSONFilter', (['theorems'], {}), '(theorems)\n', (1060, 1070), False, 'from pandocfilters import toJSONFilter, RawBlock, Div\n'), ((980, 1019), 'pandocfilters.Div', 'Div', (['[ident, classes, kvs]', 'newcontents'], {}), '([ident, classes, kvs], newcontents)\n', (983, 1019), False, 'from pandocfilters import toJSONFilter, RawBlock, Div\n')]
# -*- coding: utf-8 -*- """ Created on Tue Apr 30 21:24:36 2019 @author: wmy """ import numpy as np import tensorflow as tf import matplotlib.pyplot as plt from PIL import Image from keras import backend as K from keras.losses import mean_absolute_error, mean_squared_error from keras.models import load_model from keras.optimizers import Adam import random import os from model import wdsr_a, wdsr_b from utils import DataLoader model = wdsr_b(scale=4, num_res_blocks=32) model.load_weights('./weights/wdsr-b-32-x4.h5') data_loader = DataLoader(scale=4) def evaluate_test(model, setpath='datasets/train', difficulty='easy', name='evaluate'): images = data_loader.search(setpath) image = random.choice(images) hr = data_loader.imread(image) resize = (hr.size[0]//data_loader.scale, hr.size[1]//data_loader.scale) hidden_scale = random.uniform(1, 3) radius = random.uniform(1, 3) if difficulty=='easy': hidden_scale = random.uniform(1, 1.5) radius = random.uniform(1, 1.5) pass elif difficulty=='normal': hidden_scale = random.uniform(1.5, 2) radius = random.uniform(1.5, 2) pass elif difficulty=='hard': hidden_scale = random.uniform(2, 2.5) radius = random.uniform(2, 2.5) pass elif difficulty=='lunatic': hidden_scale = random.uniform(2.5, 3) radius = random.uniform(2.5, 3) pass else: raise ValueError("unknown difficulty") hidden_resize = (int(resize[0]/hidden_scale), int(resize[1]/hidden_scale)) lr = data_loader.gaussianblur(hr, radius) lr = lr.resize(hidden_resize) lr = lr.resize(resize) lr_resize = lr.resize(hr.size) lr = np.asarray(lr) sr = model.predict(np.array([lr]))[0] sr = np.clip(sr, 0, 255) sr = sr.astype('uint8') lr = Image.fromarray(lr) sr = Image.fromarray(sr) lr_resize.save("images/" + name + "_lr.jpg") sr.save("images/" + name + "_sr.jpg") hr.save("images/" + name + "_hr.jpg") pass evaluate_test(model, difficulty='easy', name='easy') evaluate_test(model, difficulty='normal', name='normal') evaluate_test(model, difficulty='hard', name='hard') evaluate_test(model, 
difficulty='lunatic', name='lunatic')
[ "random.uniform", "utils.DataLoader", "numpy.asarray", "random.choice", "numpy.clip", "PIL.Image.fromarray", "numpy.array", "model.wdsr_b" ]
[((461, 495), 'model.wdsr_b', 'wdsr_b', ([], {'scale': '(4)', 'num_res_blocks': '(32)'}), '(scale=4, num_res_blocks=32)\n', (467, 495), False, 'from model import wdsr_a, wdsr_b\n'), ((562, 581), 'utils.DataLoader', 'DataLoader', ([], {'scale': '(4)'}), '(scale=4)\n', (572, 581), False, 'from utils import DataLoader\n'), ((728, 749), 'random.choice', 'random.choice', (['images'], {}), '(images)\n', (741, 749), False, 'import random\n'), ((883, 903), 'random.uniform', 'random.uniform', (['(1)', '(3)'], {}), '(1, 3)\n', (897, 903), False, 'import random\n'), ((918, 938), 'random.uniform', 'random.uniform', (['(1)', '(3)'], {}), '(1, 3)\n', (932, 938), False, 'import random\n'), ((1766, 1780), 'numpy.asarray', 'np.asarray', (['lr'], {}), '(lr)\n', (1776, 1780), True, 'import numpy as np\n'), ((1834, 1853), 'numpy.clip', 'np.clip', (['sr', '(0)', '(255)'], {}), '(sr, 0, 255)\n', (1841, 1853), True, 'import numpy as np\n'), ((1893, 1912), 'PIL.Image.fromarray', 'Image.fromarray', (['lr'], {}), '(lr)\n', (1908, 1912), False, 'from PIL import Image\n'), ((1923, 1942), 'PIL.Image.fromarray', 'Image.fromarray', (['sr'], {}), '(sr)\n', (1938, 1942), False, 'from PIL import Image\n'), ((991, 1013), 'random.uniform', 'random.uniform', (['(1)', '(1.5)'], {}), '(1, 1.5)\n', (1005, 1013), False, 'import random\n'), ((1032, 1054), 'random.uniform', 'random.uniform', (['(1)', '(1.5)'], {}), '(1, 1.5)\n', (1046, 1054), False, 'import random\n'), ((1125, 1147), 'random.uniform', 'random.uniform', (['(1.5)', '(2)'], {}), '(1.5, 2)\n', (1139, 1147), False, 'import random\n'), ((1166, 1188), 'random.uniform', 'random.uniform', (['(1.5)', '(2)'], {}), '(1.5, 2)\n', (1180, 1188), False, 'import random\n'), ((1805, 1819), 'numpy.array', 'np.array', (['[lr]'], {}), '([lr])\n', (1813, 1819), True, 'import numpy as np\n'), ((1257, 1279), 'random.uniform', 'random.uniform', (['(2)', '(2.5)'], {}), '(2, 2.5)\n', (1271, 1279), False, 'import random\n'), ((1298, 1320), 'random.uniform', 
'random.uniform', (['(2)', '(2.5)'], {}), '(2, 2.5)\n', (1312, 1320), False, 'import random\n'), ((1392, 1414), 'random.uniform', 'random.uniform', (['(2.5)', '(3)'], {}), '(2.5, 3)\n', (1406, 1414), False, 'import random\n'), ((1433, 1455), 'random.uniform', 'random.uniform', (['(2.5)', '(3)'], {}), '(2.5, 3)\n', (1447, 1455), False, 'import random\n')]
import json import logging import os import traceback from string import Formatter, Template from datetime import datetime from dateutil.parser import parse as dateparse from satstac import __version__, STACError, Thing, utils logger = logging.getLogger(__name__) class Item(Thing): def __init__(self, *args, **kwargs): """ Initialize a scene object """ super(Item, self).__init__(*args, **kwargs) # dictionary of assets by eo:band common_name self._assets_by_common_name = None # collection instance self._collection = kwargs.pop('collection', None) # TODO = allow passing in of collection (needed for FC catalogs) def collection(self): """ Get Collection info for this item """ if self._collection is None: if self.filename is None: # TODO - raise exception ? return None link = self.links('collection') if len(link) == 1: self._collection = Collection.open(link[0]) return self._collection @property def eobands(self): """ Get eo:bands from Item or from Collection """ if 'eo:bands' in self.properties: return self.properties['eo:bands'] elif self.collection() is not None and 'eo:bands' in self.collection().properties: return self.collection()['eo:bands'] return [] @property def properties(self): """ Get dictionary of properties """ return self._data.get('properties', {}) def __getitem__(self, key): """ Get key from properties """ val = super(Item, self).__getitem__(key) if val is None: if self.collection() is not None: # load properties from Collection val = self._collection[key] return val @property def date(self): return self.datetime.date() @property def datetime(self): return dateparse(self['datetime']) @property def geometry(self): return self._data['geometry'] @property def bbox(self): """ Get bounding box of scene """ return self._data['bbox'] @property def assets(self): """ Return dictionary of assets """ return self._data.get('assets', {}) @property def assets_by_common_name(self): """ Get assets by common band name (only works for assets containing 1 band """ if 
self._assets_by_common_name is None and len(self.eobands) > 0: self._assets_by_common_name = {} for a in self.assets: bands = self.assets[a].get('eo:bands', []) if len(bands) == 1: eo_band = self.eobands[bands[0]].get('common_name') if eo_band: self._assets_by_common_name[eo_band] = self.assets[a] return self._assets_by_common_name def asset(self, key): """ Get asset for this key OR common_name """ if key in self.assets: return self.assets[key] elif key in self.assets_by_common_name: return self.assets_by_common_name[key] logging.warning('No such asset (%s)' % key) return None def get_filename(self, path='', filename='${id}', extension='.json'): """ Get complete path with filename to this item """ return os.path.join( self.substitute(path), self.substitute(filename) + extension ) def substitute(self, string): """ Substitute envvars in string with Item values """ string = string.replace(':', '_colon_') subs = {} for key in [i[1] for i in Formatter().parse(string.rstrip('/')) if i[1] is not None]: if key == 'id': subs[key] = self.id elif key in ['date', 'year', 'month', 'day']: vals = {'date': self.date, 'year': self.date.year, 'month': self.date.month, 'day': self.date.day} subs[key] = vals[key] else: subs[key] = self[key.replace('_colon_', ':')] return Template(string).substitute(**subs) def download_assets(self, keys=None, **kwargs): """ Download multiple assets """ if keys is None: keys = self._data['assets'].keys() filenames = [] for key in keys: filenames.append(self.download(key, **kwargs)) return filenames def download(self, key, overwrite=False, path='', filename='${id}', requestor_pays=False): """ Download this key (e.g., a band, or metadata file) from the scene """ asset = self.asset(key) if asset is None: return None _path = self.substitute(path) utils.mkdirp(_path) _filename = None try: fname = self.substitute(filename) ext = os.path.splitext(asset['href'])[1] fout = os.path.join(_path, fname + '_' + key + ext) if not os.path.exists(fout) or overwrite: 
_filename = utils.download_file(asset['href'], filename=fout, requestor_pays=requestor_pays) else: _filename = fout except Exception as e: _filename = None logger.error('Unable to download %s: %s' % (asset['href'], str(e))) logger.debug(traceback.format_exc()) return _filename ''' @classmethod def create_derived(cls, scenes): """ Create metadata for dervied scene from multiple input scenes """ # data provenance, iterate through links links = [] for i, scene in enumerate(scenes): links.append({ 'rel': 'derived_from', 'href': scene.links['self']['href'] }) # calculate composite geometry and bbox geom = scenes[0].geometry # properties props = { 'id': '%s_%s' % (scenes[0].date, scenes[0]['eo:platform']), 'datetime': scenes[0]['datetime'] } collections = [s['c:id'] for s in scenes if s['c:id'] is not None] if len(collections) == 1: props['c:id'] = collections[0] item = { 'properties': props, 'geometry': geom, 'links': links, 'assets': {} } return Item(item) ''' # import and end of module prevents problems with circular dependencies. # Catalogs use Items and Items use Collections (which are Catalogs) from .collection import Collection
[ "dateutil.parser.parse", "os.path.join", "string.Formatter", "logging.warning", "os.path.exists", "string.Template", "os.path.splitext", "satstac.utils.download_file", "traceback.format_exc", "satstac.utils.mkdirp", "logging.getLogger" ]
[((240, 267), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (257, 267), False, 'import logging\n'), ((1984, 2011), 'dateutil.parser.parse', 'dateparse', (["self['datetime']"], {}), "(self['datetime'])\n", (1993, 2011), True, 'from dateutil.parser import parse as dateparse\n'), ((3193, 3236), 'logging.warning', 'logging.warning', (["('No such asset (%s)' % key)"], {}), "('No such asset (%s)' % key)\n", (3208, 3236), False, 'import logging\n'), ((4788, 4807), 'satstac.utils.mkdirp', 'utils.mkdirp', (['_path'], {}), '(_path)\n', (4800, 4807), False, 'from satstac import __version__, STACError, Thing, utils\n'), ((4964, 5008), 'os.path.join', 'os.path.join', (['_path', "(fname + '_' + key + ext)"], {}), "(_path, fname + '_' + key + ext)\n", (4976, 5008), False, 'import os\n'), ((4144, 4160), 'string.Template', 'Template', (['string'], {}), '(string)\n', (4152, 4160), False, 'from string import Formatter, Template\n'), ((4910, 4941), 'os.path.splitext', 'os.path.splitext', (["asset['href']"], {}), "(asset['href'])\n", (4926, 4941), False, 'import os\n'), ((5091, 5176), 'satstac.utils.download_file', 'utils.download_file', (["asset['href']"], {'filename': 'fout', 'requestor_pays': 'requestor_pays'}), "(asset['href'], filename=fout, requestor_pays=requestor_pays\n )\n", (5110, 5176), False, 'from satstac import __version__, STACError, Thing, utils\n'), ((5028, 5048), 'os.path.exists', 'os.path.exists', (['fout'], {}), '(fout)\n', (5042, 5048), False, 'import os\n'), ((5388, 5410), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (5408, 5410), False, 'import traceback\n'), ((3714, 3725), 'string.Formatter', 'Formatter', ([], {}), '()\n', (3723, 3725), False, 'from string import Formatter, Template\n')]
# Generated by Django 3.1.5 on 2021-05-30 18:48 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('financeiro', '0021_auto_20210530_1434'), ('vendas', '0002_auto_20210528_1954'), ] operations = [ migrations.AlterField( model_name='venda', name='receita', field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='financeiro.receita', verbose_name='Receita'), ), ]
[ "django.db.models.OneToOneField" ]
[((419, 562), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""financeiro.receita"""', 'verbose_name': '"""Receita"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.SET_NULL, to='financeiro.receita', verbose_name='Receita')\n", (439, 562), False, 'from django.db import migrations, models\n')]
import os import sys from time import sleep from colors import WOOD, BLACK, WHITE import pygame from renderer import Renderer from renderer.Renderer import eventManager from events import EventEnums Renderer.initializeRenderer() from gui.button import Button import json import glob replays = glob.glob('replays/*') f = open(replays[0]) replay = json.load(f) VERSION = 'v0.0.1' FONT = pygame.font.SysFont('calibri', 15) fontColor = BLACK # def versionInfo(dt): # Renderer.screen.blit(FONT.render(VERSION, True, fontColor), (3, Renderer.getScreenResolution()[1] - 15)) def background(dt): Renderer.screen.fill(WOOD) def drawGame(dt): # for t in replay: Renderer.screen.blit(FONT.render(VERSION, True, fontColor), (3, Renderer.getScreenResolution()[1] - 15)) def quitGame(event, dt): global running running = False clock = pygame.time.Clock() eventManager.subscribe(EventEnums.quitGame, quitGame) # def mainMenu(): # global running # Renderer.addCallableToVersionLoop(versionInfo) # running = True # while True: # dt = clock.tick(60) # Renderer.processEventsAndCallables(dt) # if not running: sys.exit() running = True Renderer.addCallableToBackgroundLoop(background) # mainMenu() while running: dt = clock.tick(60) Renderer.processEventsAndCallables(dt) pygame.quit() sys.exit()
[ "renderer.Renderer.getScreenResolution", "pygame.quit", "json.load", "renderer.Renderer.initializeRenderer", "renderer.Renderer.eventManager.subscribe", "renderer.Renderer.processEventsAndCallables", "pygame.font.SysFont", "renderer.Renderer.addCallableToBackgroundLoop", "renderer.Renderer.screen.fi...
[((199, 228), 'renderer.Renderer.initializeRenderer', 'Renderer.initializeRenderer', ([], {}), '()\n', (226, 228), False, 'from renderer import Renderer\n'), ((295, 317), 'glob.glob', 'glob.glob', (['"""replays/*"""'], {}), "('replays/*')\n", (304, 317), False, 'import glob\n'), ((349, 361), 'json.load', 'json.load', (['f'], {}), '(f)\n', (358, 361), False, 'import json\n'), ((390, 424), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""calibri"""', '(15)'], {}), "('calibri', 15)\n", (409, 424), False, 'import pygame\n'), ((856, 875), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (873, 875), False, 'import pygame\n'), ((876, 929), 'renderer.Renderer.eventManager.subscribe', 'eventManager.subscribe', (['EventEnums.quitGame', 'quitGame'], {}), '(EventEnums.quitGame, quitGame)\n', (898, 929), False, 'from renderer.Renderer import eventManager\n'), ((1194, 1242), 'renderer.Renderer.addCallableToBackgroundLoop', 'Renderer.addCallableToBackgroundLoop', (['background'], {}), '(background)\n', (1230, 1242), False, 'from renderer import Renderer\n'), ((1340, 1353), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (1351, 1353), False, 'import pygame\n'), ((1354, 1364), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1362, 1364), False, 'import sys\n'), ((603, 629), 'renderer.Renderer.screen.fill', 'Renderer.screen.fill', (['WOOD'], {}), '(WOOD)\n', (623, 629), False, 'from renderer import Renderer\n'), ((1300, 1338), 'renderer.Renderer.processEventsAndCallables', 'Renderer.processEventsAndCallables', (['dt'], {}), '(dt)\n', (1334, 1338), False, 'from renderer import Renderer\n'), ((740, 770), 'renderer.Renderer.getScreenResolution', 'Renderer.getScreenResolution', ([], {}), '()\n', (768, 770), False, 'from renderer import Renderer\n')]
import os from ..DCGAN import DCGAN from .gan_trainer import GANTrainer from .standard_configurations.dcgan_config import _C class DCGANTrainer(GANTrainer): r""" A trainer structure for the DCGAN and DCGAN product models """ _defaultConfig = _C def getDefaultConfig(self): return DCGANTrainer._defaultConfig def __init__(self, pathdb, **kwargs): r""" Args: pathdb (string): path to the input dataset **kwargs: other arguments specific to the GANTrainer class """ GANTrainer.__init__(self, pathdb, **kwargs) self.lossProfile.append({"iter": [], "scale": 0}) def initModel(self): self.model = DCGAN(useGPU=self.useGPU, **vars(self.modelConfig)) def train(self): shift = 0 if self.startIter >0: shift+= self.startIter if self.checkPointDir is not None: pathBaseConfig = os.path.join(self.checkPointDir, self.modelLabel + "_train_config.json") self.saveBaseConfig(pathBaseConfig) maxShift = int(self.modelConfig.nEpoch * len(self.getDBLoader(0))) for epoch in range(self.modelConfig.nEpoch): dbLoader = self.getDBLoader(0) self.trainOnEpoch(dbLoader, 0, shiftIter=shift) shift += len(dbLoader) if shift > maxShift: break label = self.modelLabel + ("_s%d_i%d" % (0, shift)) self.saveCheckpoint(self.checkPointDir, label, 0, shift) def initializeWithPretrainNetworks(self, pathD, pathGShape, pathGTexture, finetune=True): r""" Initialize a product gan by loading 3 pretrained networks Args: pathD (string): Path to the .pt file where the DCGAN discrimator is saved pathGShape (string): Path to .pt file where the DCGAN shape generator is saved pathGTexture (string): Path to .pt file where the DCGAN texture generator is saved finetune (bool): set to True to reinitialize the first layer of the generator and the last layer of the discriminator """ if not self.modelConfig.productGan: raise ValueError("Only product gan can be cross-initialized") self.model.loadG(pathGShape, pathGTexture, resetFormatLayer=finetune) self.model.load(pathD, loadG=False, loadD=True, loadConfig=False, finetuning=True)
[ "os.path.join" ]
[((1008, 1080), 'os.path.join', 'os.path.join', (['self.checkPointDir', "(self.modelLabel + '_train_config.json')"], {}), "(self.checkPointDir, self.modelLabel + '_train_config.json')\n", (1020, 1080), False, 'import os\n')]
from graphene_django.views import GraphQLView from server.token_auth import TokenAuthMiddleware from server.channels import GraphQLSubscriptionConsumer from channels.routing import ProtocolTypeRouter, URLRouter from channels.http import AsgiHandler from channels.auth import AuthMiddlewareStack from django.conf import settings from django.conf.urls.static import static from django.contrib import admin from django.urls import path from django.views.decorators.csrf import csrf_exempt urlpatterns = [ path('admin/', admin.site.urls), path('graphql/', csrf_exempt(GraphQLView.as_view(graphiql=True))), path('gql/', csrf_exempt(GraphQLView.as_view(batch=True))), ] urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) application = ProtocolTypeRouter({ "websocket": TokenAuthMiddleware( URLRouter([ path('subscriptions', GraphQLSubscriptionConsumer) ]), ), })
[ "graphene_django.views.GraphQLView.as_view", "django.conf.urls.static.static", "django.urls.path" ]
[((697, 758), 'django.conf.urls.static.static', 'static', (['settings.MEDIA_URL'], {'document_root': 'settings.MEDIA_ROOT'}), '(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n', (703, 758), False, 'from django.conf.urls.static import static\n'), ((774, 837), 'django.conf.urls.static.static', 'static', (['settings.STATIC_URL'], {'document_root': 'settings.STATIC_ROOT'}), '(settings.STATIC_URL, document_root=settings.STATIC_ROOT)\n', (780, 837), False, 'from django.conf.urls.static import static\n'), ((511, 542), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (515, 542), False, 'from django.urls import path\n'), ((577, 611), 'graphene_django.views.GraphQLView.as_view', 'GraphQLView.as_view', ([], {'graphiql': '(True)'}), '(graphiql=True)\n', (596, 611), False, 'from graphene_django.views import GraphQLView\n'), ((644, 675), 'graphene_django.views.GraphQLView.as_view', 'GraphQLView.as_view', ([], {'batch': '(True)'}), '(batch=True)\n', (663, 675), False, 'from graphene_django.views import GraphQLView\n'), ((932, 982), 'django.urls.path', 'path', (['"""subscriptions"""', 'GraphQLSubscriptionConsumer'], {}), "('subscriptions', GraphQLSubscriptionConsumer)\n", (936, 982), False, 'from django.urls import path\n')]
import os import time import numpy as np import paddle.fluid as fluid import config as cfg from nets.attention_model import attention_train_net from nets.crnn_ctc_model import ctc_train_net from utils import data_reader from utils.utility import get_ctc_feeder_data, get_attention_feeder_data def main(): """OCR training""" if cfg.use_model == "crnn_ctc": train_net = ctc_train_net get_feeder_data = get_ctc_feeder_data else: train_net = attention_train_net get_feeder_data = get_attention_feeder_data # define network sum_cost, error_evaluator, inference_program, model_average = train_net(cfg, cfg.data_shape, cfg.num_classes) # data reader train_reader = data_reader.train(batch_size=cfg.batch_size, prefix_path=cfg.train_prefix, cycle=cfg.total_step > 0, model=cfg.use_model) test_reader = data_reader.test(prefix_path=cfg.test_prefix, model=cfg.use_model) # prepare environment place = fluid.CUDAPlace(0) if cfg.use_gpu else fluid.CPUPlace() exe = fluid.Executor(place) exe.run(fluid.default_startup_program()) # 加载初始化模型 if cfg.init_model: fluid.load(program=fluid.default_main_program(), model_path=cfg.init_model, executor=exe, var_list=fluid.io.get_program_parameter(fluid.default_main_program())) print("Init model from: %s." 
% cfg.init_model) train_exe = exe error_evaluator.reset(exe) if cfg.parallel: train_exe = fluid.ParallelExecutor(use_cuda=cfg.use_gpu, loss_name=sum_cost.name) fetch_vars = [sum_cost] + error_evaluator.metrics def train_one_batch(data): var_names = [var.name for var in fetch_vars] if cfg.parallel: results = train_exe.run(var_names, feed=get_feeder_data(data, place)) results = [np.array(r).sum() for r in results] else: results = exe.run(program=fluid.default_main_program(), feed=get_feeder_data(data, place), fetch_list=fetch_vars) results = [r[0] for r in results] return results def test(): error_evaluator.reset(exe) for data in test_reader(): exe.run(inference_program, feed=get_feeder_data(data, place)) _, test_seq_error = error_evaluator.eval(exe) return test_seq_error[0] def save_model(): if not os.path.exists(cfg.model_path): os.makedirs(cfg.model_path) fluid.save(program=fluid.default_main_program(), model_path=os.path.join(cfg.model_path, "model")) print("Saved model to: %s" % cfg.model_path) iter_num = 0 stop = False while not stop: total_loss = 0.0 total_seq_error = 0.0 # train a pass for data in train_reader(): if cfg.total_step < iter_num: stop = True break result = train_one_batch(data) total_loss += result[0] total_seq_error += result[2] iter_num += 1 # training log if iter_num % cfg.log_period == 0: print("[%s] - Iter[%d]; Avg loss: %.3f; Avg seq err: %.3f" % (time.asctime(time.localtime(time.time())), iter_num, total_loss / (cfg.log_period * cfg.batch_size), total_seq_error / (cfg.log_period * cfg.batch_size))) total_loss = 0.0 total_seq_error = 0.0 # evaluate if iter_num % cfg.eval_period == 0: if model_average: with model_average.apply(exe): test_seq_error = test() else: test_seq_error = test() print("\n[%s] - Iter[%d]; Test seq error: %.3f\n" % (time.asctime(time.localtime(time.time())), iter_num, test_seq_error)) # save model if iter_num % cfg.save_model_period == 0: if model_average: with model_average.apply(exe): save_model() else: save_model() 
if __name__ == "__main__": main()
[ "paddle.fluid.Executor", "paddle.fluid.CUDAPlace", "paddle.fluid.default_main_program", "utils.data_reader.test", "paddle.fluid.default_startup_program", "os.makedirs", "os.path.exists", "time.time", "utils.data_reader.train", "numpy.array", "paddle.fluid.CPUPlace", "paddle.fluid.ParallelExecu...
[((722, 847), 'utils.data_reader.train', 'data_reader.train', ([], {'batch_size': 'cfg.batch_size', 'prefix_path': 'cfg.train_prefix', 'cycle': '(cfg.total_step > 0)', 'model': 'cfg.use_model'}), '(batch_size=cfg.batch_size, prefix_path=cfg.train_prefix,\n cycle=cfg.total_step > 0, model=cfg.use_model)\n', (739, 847), False, 'from utils import data_reader\n'), ((973, 1039), 'utils.data_reader.test', 'data_reader.test', ([], {'prefix_path': 'cfg.test_prefix', 'model': 'cfg.use_model'}), '(prefix_path=cfg.test_prefix, model=cfg.use_model)\n', (989, 1039), False, 'from utils import data_reader\n'), ((1145, 1166), 'paddle.fluid.Executor', 'fluid.Executor', (['place'], {}), '(place)\n', (1159, 1166), True, 'import paddle.fluid as fluid\n'), ((1079, 1097), 'paddle.fluid.CUDAPlace', 'fluid.CUDAPlace', (['(0)'], {}), '(0)\n', (1094, 1097), True, 'import paddle.fluid as fluid\n'), ((1118, 1134), 'paddle.fluid.CPUPlace', 'fluid.CPUPlace', ([], {}), '()\n', (1132, 1134), True, 'import paddle.fluid as fluid\n'), ((1179, 1210), 'paddle.fluid.default_startup_program', 'fluid.default_startup_program', ([], {}), '()\n', (1208, 1210), True, 'import paddle.fluid as fluid\n'), ((1624, 1693), 'paddle.fluid.ParallelExecutor', 'fluid.ParallelExecutor', ([], {'use_cuda': 'cfg.use_gpu', 'loss_name': 'sum_cost.name'}), '(use_cuda=cfg.use_gpu, loss_name=sum_cost.name)\n', (1646, 1693), True, 'import paddle.fluid as fluid\n'), ((2591, 2621), 'os.path.exists', 'os.path.exists', (['cfg.model_path'], {}), '(cfg.model_path)\n', (2605, 2621), False, 'import os\n'), ((2635, 2662), 'os.makedirs', 'os.makedirs', (['cfg.model_path'], {}), '(cfg.model_path)\n', (2646, 2662), False, 'import os\n'), ((1277, 1305), 'paddle.fluid.default_main_program', 'fluid.default_main_program', ([], {}), '()\n', (1303, 1305), True, 'import paddle.fluid as fluid\n'), ((2690, 2718), 'paddle.fluid.default_main_program', 'fluid.default_main_program', ([], {}), '()\n', (2716, 2718), True, 'import paddle.fluid as fluid\n'), 
((2750, 2787), 'os.path.join', 'os.path.join', (['cfg.model_path', '"""model"""'], {}), "(cfg.model_path, 'model')\n", (2762, 2787), False, 'import os\n'), ((1445, 1473), 'paddle.fluid.default_main_program', 'fluid.default_main_program', ([], {}), '()\n', (1471, 1473), True, 'import paddle.fluid as fluid\n'), ((2088, 2116), 'paddle.fluid.default_main_program', 'fluid.default_main_program', ([], {}), '()\n', (2114, 2116), True, 'import paddle.fluid as fluid\n'), ((2000, 2011), 'numpy.array', 'np.array', (['r'], {}), '(r)\n', (2008, 2011), True, 'import numpy as np\n'), ((3452, 3463), 'time.time', 'time.time', ([], {}), '()\n', (3461, 3463), False, 'import time\n'), ((4091, 4102), 'time.time', 'time.time', ([], {}), '()\n', (4100, 4102), False, 'import time\n')]
from dataclasses import dataclass
from typing import Generic, TypeVar

from eth_typing import NodeID
from eth_utils import int_to_big_endian
import rlp

from ddht.endpoint import Endpoint


class BaseMessage(rlp.Serializable):  # type: ignore
    """Base class for all RLP-serializable wire messages."""

    message_type: int

    def to_bytes(self) -> bytes:
        """Serialize to wire format: big-endian message-type prefix followed by the RLP payload."""
        type_prefix = int_to_big_endian(self.message_type)
        payload = rlp.encode(self)
        return type_prefix + payload


TMessage = TypeVar("TMessage", bound=BaseMessage)
TResponseMessage = TypeVar("TResponseMessage", bound=BaseMessage)


@dataclass(frozen=True)
class OutboundMessage(Generic[TMessage]):
    """A message paired with the endpoint and node id it should be sent to."""

    message: BaseMessage
    receiver_endpoint: Endpoint
    receiver_node_id: NodeID

    def __str__(self) -> str:
        return f"{self.__class__.__name__}[{self.message.__class__.__name__}]"


@dataclass(frozen=True)
class InboundMessage(Generic[TMessage]):
    """A received message paired with the endpoint and node id that sent it."""

    message: TMessage
    sender_endpoint: Endpoint
    sender_node_id: NodeID

    def __str__(self) -> str:
        return f"{self.__class__.__name__}[{self.message.__class__.__name__}]"

    def to_response(
        self, response_message: TResponseMessage
    ) -> OutboundMessage[TResponseMessage]:
        """Wrap *response_message* in an OutboundMessage addressed back at this message's sender."""
        return OutboundMessage(
            message=response_message,
            receiver_endpoint=self.sender_endpoint,
            receiver_node_id=self.sender_node_id,
        )


AnyInboundMessage = InboundMessage[BaseMessage]
AnyOutboundMessage = OutboundMessage[BaseMessage]
[ "rlp.encode", "typing.TypeVar", "eth_utils.int_to_big_endian", "dataclasses.dataclass" ]
[((394, 432), 'typing.TypeVar', 'TypeVar', (['"""TMessage"""'], {'bound': 'BaseMessage'}), "('TMessage', bound=BaseMessage)\n", (401, 432), False, 'from typing import Generic, TypeVar\n'), ((452, 498), 'typing.TypeVar', 'TypeVar', (['"""TResponseMessage"""'], {'bound': 'BaseMessage'}), "('TResponseMessage', bound=BaseMessage)\n", (459, 498), False, 'from typing import Generic, TypeVar\n'), ((502, 524), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (511, 524), False, 'from dataclasses import dataclass\n'), ((766, 788), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (775, 788), False, 'from dataclasses import dataclass\n'), ((324, 360), 'eth_utils.int_to_big_endian', 'int_to_big_endian', (['self.message_type'], {}), '(self.message_type)\n', (341, 360), False, 'from eth_utils import int_to_big_endian\n'), ((362, 378), 'rlp.encode', 'rlp.encode', (['self'], {}), '(self)\n', (372, 378), False, 'import rlp\n')]
""" cloudalbum/api/users.py ~~~~~~~~~~~~~~~~~~~~~~~ REST API for users :description: CloudAlbum is a fully featured sample application for 'Moving to AWS serverless' training course :copyright: © 2019 written by <NAME>, <NAME>. :license: MIT, see LICENSE for more details. """ import hashlib import boto3, hmac, base64 from botocore.exceptions import ClientError from flask import Blueprint, request from flask import current_app as app from flask import jsonify, make_response from flask_restplus import Api, Resource, fields from jsonschema import ValidationError from werkzeug.exceptions import InternalServerError, BadRequest, Conflict from cloudalbum.schemas import validate_user from cloudalbum.solution import solution_signup_cognito from cloudalbum.util.jwt_helper import get_token_from_header, cog_jwt_required users_blueprint = Blueprint('users', __name__) api = Api(users_blueprint, doc='/swagger/', title='Users', description='CloudAlbum-users: \n prefix url "/users" is already exist.', version='0.1') response = api.model('Response', { 'code': fields.Integer, 'message': fields.String, 'data': fields.String }) signup_user = api.model('Signup_user', { 'email': fields.String, 'username': fields.String, 'password': fields.String }) signin_user = api.model('Signin_user', { 'email': fields.String, 'password': fields.String }) @api.route('/ping') class Ping(Resource): @api.doc(responses={200: 'pong!'}) def get(self): """Ping api""" app.logger.debug('success:ping pong!') return make_response({'ok': True, 'Message': 'pong'}, 200) @api.route('/', strict_slashes=False) class UsersList(Resource): @api.doc( responses= { 200: 'Return the whole users list', 500: 'Internal server error' } ) def get(self): """Get all users as list""" try: client = boto3.client('cognito-idp') response = client.list_users( UserPoolId=app.config['COGNITO_POOL_ID'], AttributesToGet=['sub', 'email', 'name'] ) data = [] for user in response['Users']: one_user = {} for attr in user['Attributes']: key = 
attr['Name'] if key == 'sub': key = 'user_id' one_user[key] = attr['Value'] data.append(one_user) app.logger.debug('success:users_list: {0}'.format(data)) return make_response({'ok': True, 'users': data}, 200) except Exception as e: app.logger.error('users list failed') app.logger.error(e) raise InternalServerError('Retrieve user list failed') @api.route('/<user_id>') class Users(Resource): @api.doc(responses={ 200: 'Return a user data', 500: 'Internal server error' }) def get(self, user_id): """Get a single user details""" client = boto3.client('cognito-idp') try: response = client.admin_get_user( UserPoolId=app.config['COGNITO_POOL_ID'], Username=user_id ) user_data ={} for attr in response['UserAttributes']: key = attr['Name'] if key == 'sub': key = 'user_id' val = attr['Value'] user_data[key] = val app.logger.debug('success: get Cognito user data: {}'.format(user_data)) return make_response({'ok': True, 'users': user_data}, 200) except ValueError as e: app.logger.error('ERROR:user_get_by_id:{}'.format(user_id)) app.logger.error(e) raise BadRequest(e) except Exception as e: app.logger.error('ERROR:user_get_by_id:{}'.format(user_id)) app.logger.error(e) raise InternalServerError('Unexpected Error:{0}'.format(e)) def cognito_signup(signup_user): user = signup_user; msg = '{0}{1}'.format(user['email'], app.config['COGNITO_CLIENT_ID']) dig = hmac.new(app.config['COGNITO_CLIENT_SECRET'].encode('utf-8'), msg=msg.encode('utf-8'), digestmod=hashlib.sha256).digest() # TODO 7: Implement following solution code to sign up user into cognito user pool try: return solution_signup_cognito(user, dig) except ClientError as e: if e.response['Error']['Code'] == 'UsernameExistsException': raise Conflict('ERROR: Existed user!') except Exception as e: raise BadRequest(e.response['Error']['Message']) @api.route('/signup') class Signup(Resource): @api.doc(responses={ 201: 'Return a user data', 400: 'Invalidate email/password', 500: 'Internal server error' }) @api.expect(signup_user) 
def post(self): """Enroll a new user""" req_data = request.get_json() try: validated = validate_user(req_data) user_data = validated['data'] user = cognito_signup(user_data) app.logger.debug('success: enroll user into Cognito user pool:{}'.format(user)) return make_response({'ok': True, 'users': user}, 201) except ValidationError as e: app.logger.error('ERROR:invalid signup data format:{0}'.format(req_data)) app.logger.error(e) raise BadRequest(e.message) def cognito_signin(cognito_client, user): msg = '{0}{1}'.format(user['email'], app.config['COGNITO_CLIENT_ID']) dig = hmac.new(app.config['COGNITO_CLIENT_SECRET'].encode('utf-8'), msg=msg.encode('utf-8'), digestmod=hashlib.sha256).digest() auth= base64.b64encode(dig).decode() resp = cognito_client.admin_initiate_auth(UserPoolId=app.config['COGNITO_POOL_ID'], ClientId=app.config['COGNITO_CLIENT_ID'], AuthFlow='ADMIN_NO_SRP_AUTH', AuthParameters={'SECRET_HASH': auth,'USERNAME': user['email'], 'PASSWORD': user['password']}) access_token = resp['AuthenticationResult']['AccessToken'] refresh_token = resp['AuthenticationResult']['RefreshToken'] return access_token, refresh_token @api.route('/signin') class Signin(Resource): @api.doc(responses={ 200: 'login success', 400: 'Invalidate data', 500: 'Internal server error' }) @api.expect(signin_user) def post(self): """user signin""" req_data = request.get_json() client = boto3.client('cognito-idp') try: signin_data = validate_user(req_data)['data'] access_token, refresh_token = cognito_signin(client, signin_data) res = jsonify({'accessToken': access_token, 'refreshToken': refresh_token}) app.logger.debug('success:user signin:access_token:{}, refresh_token:{}'.format(access_token, refresh_token)) return make_response(res, 200) except client.exceptions.UserNotFoundException as e: app.logger.error('User does not exist: {0}'.format(signin_data)) app.logger.error(e) raise BadRequest('User does not exist') except client.exceptions.NotAuthorizedException as e: 
app.logger.error('Password is mismatched or invalid user: {0}'.format(signin_data)) app.logger.error(e) raise BadRequest('Password is mismatched or invalid user') except ValidationError as e: app.logger.error('Invalid data format: {0}'.format(req_data)) app.logger.error(e) raise BadRequest(e.message) except Exception as e: app.logger.error('Unexpected error: {0}'.format(req_data)) app.logger.error(e) raise InternalServerError('Unexpected error: {0}'.format(req_data)) @api.route('/signout') class Signout(Resource): @cog_jwt_required @api.doc(responses={ 200: 'signout success', 500: 'login required' }) def post(self): """user signout""" token = get_token_from_header(request) try: client = boto3.client('cognito-idp') response = client.global_sign_out( AccessToken=token ) app.logger.debug('Access token expired: {}'.format(token)) return make_response({'ok': True}, 200) except Exception as e: app.logger.error('Sign-out:unknown issue:token:{}'.format(token)) app.logger.error(e) raise InternalServerError(e)
[ "werkzeug.exceptions.InternalServerError", "flask.Blueprint", "boto3.client", "flask_restplus.Api", "werkzeug.exceptions.BadRequest", "flask.current_app.logger.error", "flask.current_app.logger.debug", "flask.jsonify", "werkzeug.exceptions.Conflict", "cloudalbum.schemas.validate_user", "base64.b...
[((866, 894), 'flask.Blueprint', 'Blueprint', (['"""users"""', '__name__'], {}), "('users', __name__)\n", (875, 894), False, 'from flask import Blueprint, request\n'), ((901, 1054), 'flask_restplus.Api', 'Api', (['users_blueprint'], {'doc': '"""/swagger/"""', 'title': '"""Users"""', 'description': '"""CloudAlbum-users: \n prefix url "/users" is already exist."""', 'version': '"""0.1"""'}), '(users_blueprint, doc=\'/swagger/\', title=\'Users\', description=\n """CloudAlbum-users: \n prefix url "/users" is already exist.""",\n version=\'0.1\')\n', (904, 1054), False, 'from flask_restplus import Api, Resource, fields\n'), ((1547, 1585), 'flask.current_app.logger.debug', 'app.logger.debug', (['"""success:ping pong!"""'], {}), "('success:ping pong!')\n", (1563, 1585), True, 'from flask import current_app as app\n'), ((1601, 1652), 'flask.make_response', 'make_response', (["{'ok': True, 'Message': 'pong'}", '(200)'], {}), "({'ok': True, 'Message': 'pong'}, 200)\n", (1614, 1652), False, 'from flask import jsonify, make_response\n'), ((3104, 3131), 'boto3.client', 'boto3.client', (['"""cognito-idp"""'], {}), "('cognito-idp')\n", (3116, 3131), False, 'import boto3, hmac, base64\n'), ((4497, 4531), 'cloudalbum.solution.solution_signup_cognito', 'solution_signup_cognito', (['user', 'dig'], {}), '(user, dig)\n', (4520, 4531), False, 'from cloudalbum.solution import solution_signup_cognito\n'), ((5059, 5077), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (5075, 5077), False, 'from flask import Blueprint, request\n'), ((6718, 6736), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (6734, 6736), False, 'from flask import Blueprint, request\n'), ((6754, 6781), 'boto3.client', 'boto3.client', (['"""cognito-idp"""'], {}), "('cognito-idp')\n", (6766, 6781), False, 'import boto3, hmac, base64\n'), ((8294, 8324), 'cloudalbum.util.jwt_helper.get_token_from_header', 'get_token_from_header', (['request'], {}), '(request)\n', (8315, 8324), False, 'from 
cloudalbum.util.jwt_helper import get_token_from_header, cog_jwt_required\n'), ((1977, 2004), 'boto3.client', 'boto3.client', (['"""cognito-idp"""'], {}), "('cognito-idp')\n", (1989, 2004), False, 'import boto3, hmac, base64\n'), ((2612, 2659), 'flask.make_response', 'make_response', (["{'ok': True, 'users': data}", '(200)'], {}), "({'ok': True, 'users': data}, 200)\n", (2625, 2659), False, 'from flask import jsonify, make_response\n'), ((3655, 3707), 'flask.make_response', 'make_response', (["{'ok': True, 'users': user_data}", '(200)'], {}), "({'ok': True, 'users': user_data}, 200)\n", (3668, 3707), False, 'from flask import jsonify, make_response\n'), ((4722, 4764), 'werkzeug.exceptions.BadRequest', 'BadRequest', (["e.response['Error']['Message']"], {}), "(e.response['Error']['Message'])\n", (4732, 4764), False, 'from werkzeug.exceptions import InternalServerError, BadRequest, Conflict\n'), ((5115, 5138), 'cloudalbum.schemas.validate_user', 'validate_user', (['req_data'], {}), '(req_data)\n', (5128, 5138), False, 'from cloudalbum.schemas import validate_user\n'), ((5337, 5384), 'flask.make_response', 'make_response', (["{'ok': True, 'users': user}", '(201)'], {}), "({'ok': True, 'users': user}, 201)\n", (5350, 5384), False, 'from flask import jsonify, make_response\n'), ((5879, 5900), 'base64.b64encode', 'base64.b64encode', (['dig'], {}), '(dig)\n', (5895, 5900), False, 'import boto3, hmac, base64\n'), ((6949, 7018), 'flask.jsonify', 'jsonify', (["{'accessToken': access_token, 'refreshToken': refresh_token}"], {}), "({'accessToken': access_token, 'refreshToken': refresh_token})\n", (6956, 7018), False, 'from flask import jsonify, make_response\n'), ((7160, 7183), 'flask.make_response', 'make_response', (['res', '(200)'], {}), '(res, 200)\n', (7173, 7183), False, 'from flask import jsonify, make_response\n'), ((8359, 8386), 'boto3.client', 'boto3.client', (['"""cognito-idp"""'], {}), "('cognito-idp')\n", (8371, 8386), False, 'import boto3, hmac, base64\n'), 
((8572, 8604), 'flask.make_response', 'make_response', (["{'ok': True}", '(200)'], {}), "({'ok': True}, 200)\n", (8585, 8604), False, 'from flask import jsonify, make_response\n'), ((2704, 2741), 'flask.current_app.logger.error', 'app.logger.error', (['"""users list failed"""'], {}), "('users list failed')\n", (2720, 2741), True, 'from flask import current_app as app\n'), ((2754, 2773), 'flask.current_app.logger.error', 'app.logger.error', (['e'], {}), '(e)\n', (2770, 2773), True, 'from flask import current_app as app\n'), ((2792, 2840), 'werkzeug.exceptions.InternalServerError', 'InternalServerError', (['"""Retrieve user list failed"""'], {}), "('Retrieve user list failed')\n", (2811, 2840), False, 'from werkzeug.exceptions import InternalServerError, BadRequest, Conflict\n'), ((3824, 3843), 'flask.current_app.logger.error', 'app.logger.error', (['e'], {}), '(e)\n', (3840, 3843), True, 'from flask import current_app as app\n'), ((3862, 3875), 'werkzeug.exceptions.BadRequest', 'BadRequest', (['e'], {}), '(e)\n', (3872, 3875), False, 'from werkzeug.exceptions import InternalServerError, BadRequest, Conflict\n'), ((3991, 4010), 'flask.current_app.logger.error', 'app.logger.error', (['e'], {}), '(e)\n', (4007, 4010), True, 'from flask import current_app as app\n'), ((4648, 4680), 'werkzeug.exceptions.Conflict', 'Conflict', (['"""ERROR: Existed user!"""'], {}), "('ERROR: Existed user!')\n", (4656, 4680), False, 'from werkzeug.exceptions import InternalServerError, BadRequest, Conflict\n'), ((5521, 5540), 'flask.current_app.logger.error', 'app.logger.error', (['e'], {}), '(e)\n', (5537, 5540), True, 'from flask import current_app as app\n'), ((5559, 5580), 'werkzeug.exceptions.BadRequest', 'BadRequest', (['e.message'], {}), '(e.message)\n', (5569, 5580), False, 'from werkzeug.exceptions import InternalServerError, BadRequest, Conflict\n'), ((6821, 6844), 'cloudalbum.schemas.validate_user', 'validate_user', (['req_data'], {}), '(req_data)\n', (6834, 6844), False, 'from 
cloudalbum.schemas import validate_user\n'), ((7335, 7354), 'flask.current_app.logger.error', 'app.logger.error', (['e'], {}), '(e)\n', (7351, 7354), True, 'from flask import current_app as app\n'), ((7373, 7406), 'werkzeug.exceptions.BadRequest', 'BadRequest', (['"""User does not exist"""'], {}), "('User does not exist')\n", (7383, 7406), False, 'from werkzeug.exceptions import InternalServerError, BadRequest, Conflict\n'), ((7577, 7596), 'flask.current_app.logger.error', 'app.logger.error', (['e'], {}), '(e)\n', (7593, 7596), True, 'from flask import current_app as app\n'), ((7615, 7667), 'werkzeug.exceptions.BadRequest', 'BadRequest', (['"""Password is mismatched or invalid user"""'], {}), "('Password is mismatched or invalid user')\n", (7625, 7667), False, 'from werkzeug.exceptions import InternalServerError, BadRequest, Conflict\n'), ((7791, 7810), 'flask.current_app.logger.error', 'app.logger.error', (['e'], {}), '(e)\n', (7807, 7810), True, 'from flask import current_app as app\n'), ((7829, 7850), 'werkzeug.exceptions.BadRequest', 'BadRequest', (['e.message'], {}), '(e.message)\n', (7839, 7850), False, 'from werkzeug.exceptions import InternalServerError, BadRequest, Conflict\n'), ((7965, 7984), 'flask.current_app.logger.error', 'app.logger.error', (['e'], {}), '(e)\n', (7981, 7984), True, 'from flask import current_app as app\n'), ((8726, 8745), 'flask.current_app.logger.error', 'app.logger.error', (['e'], {}), '(e)\n', (8742, 8745), True, 'from flask import current_app as app\n'), ((8764, 8786), 'werkzeug.exceptions.InternalServerError', 'InternalServerError', (['e'], {}), '(e)\n', (8783, 8786), False, 'from werkzeug.exceptions import InternalServerError, BadRequest, Conflict\n')]
__author__ = 'JOHNMCL'

import json
import time

import requests

# Wink REST API endpoint; `headers` is populated by set_bearer_token().
baseUrl = "https://winkapi.quirky.com"

headers = {}


class wink_sensor_pod(object):
    """Represents a Wink sensor pod (e.g. a 'Tripper' open/close sensor).

    `jsonState` holds the device's JSON record from the Wink API (the `data`
    object of a `/sensor_pods/<id>` response) and is refreshed by
    updateState(). Key fields used here: `name`, `sensor_pod_id` and
    `last_reading` (which contains `opened`, `battery`, ...).
    """

    def __init__(self, aJSonObj, objectprefix="sensor_pods"):
        self.jsonState = aJSonObj
        self.objectprefix = objectprefix

    def __str__(self):
        return "%s %s %s" % (self.name(), self.deviceId(), self.state())

    def __repr__(self):
        return "<Wink sensor %s %s %s>" % (self.name(), self.deviceId(), self.state())

    @property
    def _last_reading(self):
        # `last_reading` may be absent or null; normalize to an empty dict.
        return self.jsonState.get('last_reading') or {}

    def name(self):
        return self.jsonState.get('name', "Unknown Name")

    def state(self):
        """Return True when the sensor last reported 'opened'."""
        return self._last_reading.get('opened', False)

    def deviceId(self):
        return self.jsonState.get('sensor_pod_id', self.name())

    def refresh_state_at_hub(self):
        """
        Tell hub to query latest status from device and upload to Wink.
        PS: Not sure if this even works..
        """
        urlString = baseUrl + "/%s/%s/refresh" % (self.objectprefix, self.deviceId())
        requests.get(urlString, headers=headers)

    def updateState(self):
        """ Update state with latest info from Wink API. """
        urlString = baseUrl + "/%s/%s" % (self.objectprefix, self.deviceId())
        arequest = requests.get(urlString, headers=headers)
        self._updateStateFromResponse(arequest.json())

    def _updateStateFromResponse(self, response_json):
        """
        :param response_json: the json obj returned from query
        :return:
        """
        self.jsonState = response_json.get('data')


class wink_binary_switch(object):
    """Represents a Wink binary (on/off) switch.

    `jsonState` holds the device's JSON record from the Wink API (the `data`
    object of a `/binary_switches/<id>` response). Key fields used here:
    `name`, `binary_switch_id`, `desired_state.powered` and
    `last_reading.powered`.
    """

    def __init__(self, aJSonObj, objectprefix="binary_switches"):
        self.jsonState = aJSonObj
        self.objectprefix = objectprefix
        # Tuple (time of last setState call, desired state) — used for the
        # optimistic-read window in state().
        self._last_call = (0, None)

    def __str__(self):
        return "%s %s %s" % (self.name(), self.deviceId(), self.state())

    def __repr__(self):
        return "<Wink switch %s %s %s>" % (self.name(), self.deviceId(), self.state())

    @property
    def _last_reading(self):
        return self.jsonState.get('last_reading') or {}

    def name(self):
        return self.jsonState.get('name', "Unknown Name")

    def state(self):
        # Optimistic approach to setState:
        # Within 15 seconds of a call to setState we assume it worked.
        if self._recent_state_set():
            return self._last_call[1]
        return self._last_reading.get('powered', False)

    def deviceId(self):
        return self.jsonState.get('binary_switch_id', self.name())

    def setState(self, state):
        """
        :param state: a boolean of true (on) or false ('off')
        :return: nothing
        """
        urlString = baseUrl + "/%s/%s" % (self.objectprefix, self.deviceId())
        values = {"desired_state": {"powered": state}}
        arequest = requests.put(urlString, data=json.dumps(values), headers=headers)
        self._updateStateFromResponse(arequest.json())
        self._last_call = (time.time(), state)

    def refresh_state_at_hub(self):
        """
        Tell hub to query latest status from device and upload to Wink.
        PS: Not sure if this even works..
        """
        urlString = baseUrl + "/%s/%s/refresh" % (self.objectprefix, self.deviceId())
        requests.get(urlString, headers=headers)

    def updateState(self):
        """ Update state with latest info from Wink API. """
        urlString = baseUrl + "/%s/%s" % (self.objectprefix, self.deviceId())
        arequest = requests.get(urlString, headers=headers)
        self._updateStateFromResponse(arequest.json())

    def wait_till_desired_reached(self):
        """ Wait till desired state reached. Max 10s. """
        if self._recent_state_set():
            return
        # self.refresh_state_at_hub()
        tries = 1
        while True:
            self.updateState()
            last_read = self._last_reading
            # Stop when the hub confirms the desired state, or give up
            # after 5 polls (~10 s with the 2 s sleep below).
            if last_read.get('desired_powered') == last_read.get('powered') \
                    or tries == 5:
                break
            time.sleep(2)
            tries += 1
        self.updateState()
        last_read = self._last_reading

    def _updateStateFromResponse(self, response_json):
        """
        :param response_json: the json obj returned from query
        :return:
        """
        self.jsonState = response_json.get('data')

    def _recent_state_set(self):
        # True while we are inside the 15 s optimistic window after setState.
        return time.time() - self._last_call[0] < 15


class wink_bulb(wink_binary_switch):
    """Represents a Wink light bulb / dimmer.

    `jsonState` holds the device's JSON record from the Wink API (the `data`
    object of a `/light_bulbs/<id>` response). In addition to the switch
    fields, `last_reading.brightness` and `desired_state.brightness` are
    used (brightness is a float in [0, 1]).
    """

    # NOTE: class-level attribute kept for backward compatibility; it is
    # always shadowed by the instance attribute set in __init__.
    jsonState = {}

    def __init__(self, ajsonobj):
        super().__init__(ajsonobj, "light_bulbs")

    def deviceId(self):
        return self.jsonState.get('light_bulb_id', self.name())

    def brightness(self):
        return self._last_reading.get('brightness')

    def setState(self, state, brightness=None):
        """
        :param state: a boolean of true (on) or false ('off')
        :param brightness: optional float in [0, 1]; when given it is sent
                           alongside the power state.
        :return: nothing
        """
        # BUGFIX: urlString was previously computed twice; once is enough.
        urlString = baseUrl + "/light_bulbs/%s" % self.deviceId()
        values = {
            "desired_state": {
                "powered": state
            }
        }
        if brightness is not None:
            values["desired_state"]["brightness"] = brightness
        arequest = requests.put(urlString, data=json.dumps(values), headers=headers)
        self._updateStateFromResponse(arequest.json())
        self._last_call = (time.time(), state)

    def __repr__(self):
        return "<Wink Bulb %s %s %s>" % (
            self.name(), self.deviceId(), self.state())


def get_devices(filter, constructor):
    """Fetch all of the user's Wink devices and wrap those matching *filter*.

    :param filter: JSON key identifying the device type (e.g. 'light_bulb_id');
                   name kept for backward compatibility even though it shadows
                   the builtin.
    :param constructor: class to wrap each matching device record in.
    :return: list of constructed device objects (hidden devices excluded).
    """
    arequestUrl = baseUrl + "/users/me/wink_devices"
    j = requests.get(arequestUrl, headers=headers).json()
    items = j.get('data')
    devices = []
    for item in items:
        # Renamed from `id` to avoid shadowing the builtin.
        device_id = item.get(filter)
        if device_id is not None and item.get("hidden_at") is None:
            devices.append(constructor(item))
    return devices


def get_bulbs():
    return get_devices('light_bulb_id', wink_bulb)


def get_switches():
    return get_devices('binary_switch_id', wink_binary_switch)


def get_sensors():
    return get_devices('sensor_pod_id', wink_sensor_pod)


def is_token_set():
    """ Returns if an auth token has been set. """
    return bool(headers)


def set_bearer_token(token):
    global headers
    headers = {
        "Content-Type": "application/json",
        "Authorization": "Bearer {}".format(token)
    }


if __name__ == "__main__":
    sw = get_bulbs()
    lamp = sw[3]
    lamp.setState(False)
[ "time.sleep", "json.dumps", "requests.get", "time.time" ]
[((3495, 3535), 'requests.get', 'requests.get', (['urlString'], {'headers': 'headers'}), '(urlString, headers=headers)\n', (3507, 3535), False, 'import requests\n'), ((3722, 3762), 'requests.get', 'requests.get', (['urlString'], {'headers': 'headers'}), '(urlString, headers=headers)\n', (3734, 3762), False, 'import requests\n'), ((8017, 8057), 'requests.get', 'requests.get', (['urlString'], {'headers': 'headers'}), '(urlString, headers=headers)\n', (8029, 8057), False, 'import requests\n'), ((8244, 8284), 'requests.get', 'requests.get', (['urlString'], {'headers': 'headers'}), '(urlString, headers=headers)\n', (8256, 8284), False, 'import requests\n'), ((7728, 7739), 'time.time', 'time.time', ([], {}), '()\n', (7737, 7739), False, 'import time\n'), ((8792, 8805), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (8802, 8805), False, 'import time\n'), ((11440, 11451), 'time.time', 'time.time', ([], {}), '()\n', (11449, 11451), False, 'import time\n'), ((11684, 11726), 'requests.get', 'requests.get', (['arequestUrl'], {'headers': 'headers'}), '(arequestUrl, headers=headers)\n', (11696, 11726), False, 'import requests\n'), ((7608, 7626), 'json.dumps', 'json.dumps', (['values'], {}), '(values)\n', (7618, 7626), False, 'import json\n'), ((9164, 9175), 'time.time', 'time.time', ([], {}), '()\n', (9173, 9175), False, 'import time\n'), ((11320, 11338), 'json.dumps', 'json.dumps', (['values'], {}), '(values)\n', (11330, 11338), False, 'import json\n')]
# -*- coding: utf-8 -*-
import contextlib
import operator
import socket
import struct
import threading

from resources.lib.kodi import kodilogging
from resources.lib.kodi.utils import get_setting_as_bool
from resources.lib.tubecast.kodicast import Kodicast
from resources.lib.tubecast.utils import build_template, str_to_bytes, PY3

if PY3:
    from socketserver import DatagramRequestHandler, ThreadingUDPServer
else:
    from SocketServer import DatagramRequestHandler, ThreadingUDPServer

logger = kodilogging.get_logger("ssdp")


def get_interface_address(if_name):
    """Return the IPv4 address (4 packed bytes) of interface *if_name*.

    Unix-only: uses the SIOCGIFADDR ioctl on a throwaway UDP socket.
    """
    import fcntl  # late import as this is only supported on Unix platforms.
    sciocgifaddr = 0x8915  # SIOCGIFADDR
    with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as s:
        return fcntl.ioctl(s.fileno(), sciocgifaddr, struct.pack(b'256s', if_name[:15]))[20:24]


class ControlMixin(object):
    """Adds background start()/stop() control to a SocketServer server."""

    def __init__(self, handler, poll_interval):
        self._thread = None
        self.poll_interval = poll_interval
        self._handler = handler

    def start(self):
        """Run serve_forever() on a daemon thread."""
        self._thread = t = threading.Thread(name=type(self).__name__,
                                          target=self.serve_forever,
                                          args=(self.poll_interval,))
        t.setDaemon(True)
        t.start()

    def stop(self):
        """Shut the server down and join its serving thread."""
        self.shutdown()
        self._thread.join()
        self._thread = None


class MulticastServer(ControlMixin, ThreadingUDPServer):
    """Threaded UDP server that joins an IPv4 multicast group.

    :param addr: (multicast_group, port) tuple to listen on.
    :param chromecast_addr: (ip, port) of the local fake-Chromecast HTTP
                            service, advertised in SSDP replies.
    :param interfaces: optional list of interface names/addresses to join
                       the group on; defaults to INADDR_ANY.
    """
    allow_reuse_address = True

    def __init__(self, addr, handler, chromecast_addr, poll_interval=0.5,
                 bind_and_activate=True, interfaces=None):
        # Bind to the port on all interfaces; group membership is added below.
        ThreadingUDPServer.__init__(self, ('', addr[1]), handler,
                                    bind_and_activate)
        ControlMixin.__init__(self, handler, poll_interval)
        self.chromecast_addr = chromecast_addr
        self._multicast_address = addr
        self._listen_interfaces = interfaces
        self.set_loopback_mode(1)  # localhost
        self.set_ttl(2)  # localhost and local network
        self.handle_membership(socket.IP_ADD_MEMBERSHIP)

    def set_loopback_mode(self, mode):
        """Enable/disable receipt of our own multicast packets."""
        mode = struct.pack("b", operator.truth(mode))
        self.socket.setsockopt(socket.IPPROTO_IP,
                               socket.IP_MULTICAST_LOOP, mode)

    def server_bind(self):
        # Best-effort: allow quick rebinds and port sharing where supported.
        try:
            if hasattr(socket, "SO_REUSEADDR"):
                self.socket.setsockopt(
                    socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        except Exception as e:
            logger.error(e)
        try:
            if hasattr(socket, "SO_REUSEPORT"):
                self.socket.setsockopt(
                    socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
        except Exception as e:
            logger.error(e)
        ThreadingUDPServer.server_bind(self)

    def handle_membership(self, cmd):
        """Join (IP_ADD_MEMBERSHIP) or leave (IP_DROP_MEMBERSHIP) the group."""
        if self._listen_interfaces is None:
            mreq = struct.pack(
                str("4sI"), socket.inet_aton(self._multicast_address[0]),
                socket.INADDR_ANY)
            self.socket.setsockopt(socket.IPPROTO_IP, cmd, mreq)
        else:
            for interface in self._listen_interfaces:
                # Accept either a dotted-quad address or an interface name.
                try:
                    if_addr = socket.inet_aton(interface)
                except socket.error:
                    if_addr = get_interface_address(interface)
                mreq = socket.inet_aton(self._multicast_address[0]) + if_addr
                self.socket.setsockopt(socket.IPPROTO_IP, cmd, mreq)

    def set_ttl(self, ttl):
        """Limit how many router hops multicast packets may travel."""
        ttl = struct.pack("B", ttl)
        self.socket.setsockopt(socket.IPPROTO_IP,
                               socket.IP_MULTICAST_TTL, ttl)

    def server_close(self):
        self.handle_membership(socket.IP_DROP_MEMBERSHIP)


class SSDPHandler(DatagramRequestHandler):
    """Answers DIAL/SSDP M-SEARCH queries with our device-description URL."""

    # Jinja-style template rendered via build_template(); {{ ip }}/{{ port }}
    # point at the fake-Chromecast HTTP service, {{ uuid }} is its device id.
    header = '''\
HTTP/1.1 200 OK\r
LOCATION: http://{{ ip }}:{{ port }}/ssdp/device-desc.xml\r
CACHE-CONTROL: max-age=1800\r
EXT: \r
SERVER: UPnP/1.0\r
BOOTID.UPNP.ORG: 1\r
USN: uuid:{{ uuid }}\r
ST: urn:dial-multiscreen-org:service:dial:1\r
\r
'''

    def handle(self):
        data = self.request[0].strip()
        self.datagram_received(data, self.client_address)

    def reply(self, data, address):
        # Renamed local from `socket` to avoid shadowing the socket module.
        sock = self.request[1]
        sock.sendto(str_to_bytes(data), address)

    @staticmethod
    def get_remote_ip(address):
        # Create a socket to determine what address the client should use
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.connect(address)
        iface = s.getsockname()[0]
        return iface if PY3 else unicode(iface)

    def datagram_received(self, datagram, address):
        if get_setting_as_bool('debug-ssdp'):
            logger.debug('Datagram received. Address:{}; Content:{}'.format(address, datagram))
        # Only answer DIAL discovery searches; ignore other SSDP traffic.
        if b"urn:dial-multiscreen-org:service:dial:1" in datagram and b"M-SEARCH" in datagram:
            if get_setting_as_bool('debug-ssdp'):
                logger.debug("Answering datagram")
            _, port = self.server.chromecast_addr
            data = build_template(self.header).render(
                ip=self.get_remote_ip(address),
                port=port,
                uuid=Kodicast.uuid
            )
            self.reply(data, address)


class SSDPserver(object):
    """Lifecycle wrapper around the SSDP MulticastServer."""

    # BUGFIX: 239.255.255.250:1900 is the standard SSDP multicast group/port
    # (UPnP Device Architecture). The previous literal was a private unicast
    # address, so IP_ADD_MEMBERSHIP could not succeed and discovery requests
    # would never be received.
    SSDP_ADDR = '239.255.255.250'
    SSDP_PORT = 1900

    def start(self, chromecast_addr, interfaces=None):
        logger.info('Starting SSDP server')
        self.server = MulticastServer((self.SSDP_ADDR, self.SSDP_PORT),
                                       SSDPHandler,
                                       chromecast_addr=chromecast_addr,
                                       interfaces=interfaces)
        self.server.start()

    def shutdown(self):
        logger.info('Stopping SSDP server')
        self.server.server_close()
        self.server.stop()
[ "resources.lib.kodi.utils.get_setting_as_bool", "SocketServer.ThreadingUDPServer.__init__", "socket.socket", "resources.lib.tubecast.utils.build_template", "struct.pack", "operator.truth", "socket.inet_aton", "SocketServer.ThreadingUDPServer.server_bind", "resources.lib.kodi.kodilogging.get_logger",...
[((502, 532), 'resources.lib.kodi.kodilogging.get_logger', 'kodilogging.get_logger', (['"""ssdp"""'], {}), "('ssdp')\n", (524, 532), False, 'from resources.lib.kodi import kodilogging\n'), ((1632, 1708), 'SocketServer.ThreadingUDPServer.__init__', 'ThreadingUDPServer.__init__', (['self', "('', addr[1])", 'handler', 'bind_and_activate'], {}), "(self, ('', addr[1]), handler, bind_and_activate)\n", (1659, 1708), False, 'from SocketServer import DatagramRequestHandler, ThreadingUDPServer\n'), ((2820, 2856), 'SocketServer.ThreadingUDPServer.server_bind', 'ThreadingUDPServer.server_bind', (['self'], {}), '(self)\n', (2850, 2856), False, 'from SocketServer import DatagramRequestHandler, ThreadingUDPServer\n'), ((3657, 3678), 'struct.pack', 'struct.pack', (['"""B"""', 'ttl'], {}), "('B', ttl)\n", (3668, 3678), False, 'import struct\n'), ((4520, 4568), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (4533, 4568), False, 'import socket\n'), ((4743, 4776), 'resources.lib.kodi.utils.get_setting_as_bool', 'get_setting_as_bool', (['"""debug-ssdp"""'], {}), "('debug-ssdp')\n", (4762, 4776), False, 'from resources.lib.kodi.utils import get_setting_as_bool\n'), ((702, 750), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (715, 750), False, 'import socket\n'), ((2203, 2223), 'operator.truth', 'operator.truth', (['mode'], {}), '(mode)\n', (2217, 2223), False, 'import operator\n'), ((4354, 4372), 'resources.lib.tubecast.utils.str_to_bytes', 'str_to_bytes', (['data'], {}), '(data)\n', (4366, 4372), False, 'from resources.lib.tubecast.utils import build_template, str_to_bytes, PY3\n'), ((4984, 5017), 'resources.lib.kodi.utils.get_setting_as_bool', 'get_setting_as_bool', (['"""debug-ssdp"""'], {}), "('debug-ssdp')\n", (5003, 5017), False, 'from resources.lib.kodi.utils import get_setting_as_bool\n'), ((811, 845), 'struct.pack', 'struct.pack', 
(["b'256s'", 'if_name[:15]'], {}), "(b'256s', if_name[:15])\n", (822, 845), False, 'import struct\n'), ((3000, 3044), 'socket.inet_aton', 'socket.inet_aton', (['self._multicast_address[0]'], {}), '(self._multicast_address[0])\n', (3016, 3044), False, 'import socket\n'), ((3300, 3327), 'socket.inet_aton', 'socket.inet_aton', (['interface'], {}), '(interface)\n', (3316, 3327), False, 'import socket\n'), ((3451, 3495), 'socket.inet_aton', 'socket.inet_aton', (['self._multicast_address[0]'], {}), '(self._multicast_address[0])\n', (3467, 3495), False, 'import socket\n'), ((5140, 5167), 'resources.lib.tubecast.utils.build_template', 'build_template', (['self.header'], {}), '(self.header)\n', (5154, 5167), False, 'from resources.lib.tubecast.utils import build_template, str_to_bytes, PY3\n')]
import logging as lg
from threading import Thread

from thrift.protocol import TBinaryProtocol
from thrift.server import TServer
from thrift.transport import TSocket
from thrift.transport import TTransport

from ...thrift.worker.service import WorkerService
from .handler import WorkerServiceHandler
from .worker import Worker
from ...common.config.configuration import TanitConfiguration
from ...common.config.configuration_keys import Keys

_logger = lg.getLogger(__name__)


class WorkerServer(object):
    """Thrift RPC server exposing the Tanit ``WorkerService``.

    ``start()`` launches the Thrift server in a daemon thread, starts the
    local worker services, then blocks until stopped or interrupted.
    """

    def __init__(self):
        configuration = TanitConfiguration.getInstance()
        self.bind_address = configuration.get(Keys.WORKER_RPC_BIND_HOST)
        self.bind_port = configuration.get(Keys.WORKER_RPC_PORT)
        self.worker = Worker()
        self.stopped = False

    def stop(self):
        """Request the serving loop in :meth:`start` to exit."""
        self.stopped = True

    def _run(self):
        """Build and serve the threaded Thrift server (blocking)."""
        # Create Service handler
        handler = WorkerServiceHandler(self.worker)
        server = TServer.TThreadedServer(
            WorkerService.Processor(handler),
            TSocket.TServerSocket(self.bind_address, self.bind_port),
            TTransport.TBufferedTransportFactory(),
            TBinaryProtocol.TBinaryProtocolFactory(),
            daemon=True,
        )
        # Start Tanit server
        server.serve()

    def start(self):
        """Start the RPC server and worker services, then supervise them."""
        self.stopped = False
        _logger.info("Stating Tanit worker server.")
        # FIX: pass daemon=True instead of the deprecated setDaemon().
        self.daemon = Thread(target=self._run, args=(), daemon=True)
        self.daemon.start()
        _logger.info(
            "Tanit worker server started, listening at %s:%s",
            self.bind_address,
            self.bind_port,
        )
        # Start worker services
        try:
            self.worker.start()
        except Exception:
            _logger.exception("Failed to start Tanit worker services.")
            # FIX: exit() comes from the `site` module and may be missing
            # (python -S, frozen apps); SystemExit is always available.
            raise SystemExit(1)
        try:
            while self.daemon.is_alive():
                # Try to join the child thread back to parent for 0.5 seconds
                self.daemon.join(0.5)
                if self.stopped:
                    _logger.info("Tanit worker server stopped, exiting.")
                    break
        except (KeyboardInterrupt, SystemExit):
            _logger.info("Received KeyboardInterrupt Signal.")
        except Exception as e:
            _logger.exception("Fatal server exception : %s, exiting", e)
        finally:
            _logger.info("Stopping Tanit worker server.")
            self.worker.stop()
            _logger.info("Tanit worker server stopped.")
[ "threading.Thread", "thrift.protocol.TBinaryProtocol.TBinaryProtocolFactory", "thrift.transport.TSocket.TServerSocket", "thrift.transport.TTransport.TBufferedTransportFactory", "logging.getLogger" ]
[((454, 476), 'logging.getLogger', 'lg.getLogger', (['__name__'], {}), '(__name__)\n', (466, 476), True, 'import logging as lg\n'), ((1422, 1455), 'threading.Thread', 'Thread', ([], {'target': 'self._run', 'args': '()'}), '(target=self._run, args=())\n', (1428, 1455), False, 'from threading import Thread\n'), ((1042, 1098), 'thrift.transport.TSocket.TServerSocket', 'TSocket.TServerSocket', (['self.bind_address', 'self.bind_port'], {}), '(self.bind_address, self.bind_port)\n', (1063, 1098), False, 'from thrift.transport import TSocket\n'), ((1112, 1150), 'thrift.transport.TTransport.TBufferedTransportFactory', 'TTransport.TBufferedTransportFactory', ([], {}), '()\n', (1148, 1150), False, 'from thrift.transport import TTransport\n'), ((1164, 1204), 'thrift.protocol.TBinaryProtocol.TBinaryProtocolFactory', 'TBinaryProtocol.TBinaryProtocolFactory', ([], {}), '()\n', (1202, 1204), False, 'from thrift.protocol import TBinaryProtocol\n')]
'''
Build script for package's wheel and zip files. Look in the `dist` folder
for the output.

Usage:
    Run as follows:
    >python setup.py sdist --formats=zip bdist_wheel
'''
from pathlib import Path
import json
import os
import setuptools
import shutil
import sys
from platform import python_revision


def cleanup(name: str):
    '''
    Remove leftover build artifacts.

    Args:
        name (str): The top-level package name.
    '''
    shutil.rmtree('./build', ignore_errors=True)
    shutil.rmtree('./src/{}.egg-info'.format(name), ignore_errors=True)


# Metadata is scraped from the package __init__.py rather than duplicated.
package_dict = {'name': '', '__version__': '', '__author__': ''}
init_path = Path("src") / 'pdf_utils' / '__init__.py'
print('Retrieved the following settings from __init__.py')
with open(init_path) as init:
    for line in init:
        for key in package_dict:
            if not line.startswith(f'{key} = '):
                continue
            package_dict[key] = line.split('=')[1].strip().strip("\"'")
            print(f' >> {key}: {package_dict[key]}')

# Remove any cruft left over from a previous build before starting.
cleanup(package_dict['name'])

with open('README.md') as fh:
    long_description = fh.read()

dependencies = setuptools.find_packages('src')
print('Package dependencies: {}'.format(dependencies))

with open('requirements.txt') as fh:
    requirements = fh.read().splitlines()
print('Package requirements: {}'.format(requirements))

setup_kwargs = dict(
    name=package_dict['name'],
    version=package_dict['__version__'],
    author=package_dict['__author__'],
    description='PDF utility classes',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/gismaps/PDF_Utils',
    packages=dependencies,
    install_requires=requirements,
    package_dir={'': 'src'},
    data_files=[],  # [('license', ['LICENSE.txt'])], # done via MANIFEST.in
    include_package_data=True,
    license="MIT",
    classifiers=[
        'Development Status :: 4 - Beta',
        'Programming Language :: Python :: 3.6',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
    ],
    python_requires='>=3.6',
)
setuptools.setup(**setup_kwargs)

cleanup(package_dict['name'])  # remove build cruft
print('setup.py ended normally')
[ "shutil.rmtree", "pathlib.Path", "setuptools.setup", "setuptools.find_packages" ]
[((1182, 1213), 'setuptools.find_packages', 'setuptools.find_packages', (['"""src"""'], {}), "('src')\n", (1206, 1213), False, 'import setuptools\n'), ((1406, 2056), 'setuptools.setup', 'setuptools.setup', ([], {'name': "package_dict['name']", 'version': "package_dict['__version__']", 'author': "package_dict['__author__']", 'description': '"""PDF utility classes"""', 'long_description': 'long_description', 'long_description_content_type': '"""text/markdown"""', 'url': '"""https://github.com/gismaps/PDF_Utils"""', 'packages': 'dependencies', 'install_requires': 'requirements', 'package_dir': "{'': 'src'}", 'data_files': '[]', 'include_package_data': '(True)', 'license': '"""MIT"""', 'classifiers': "['Development Status :: 4 - Beta', 'Programming Language :: Python :: 3.6',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent']", 'python_requires': '""">=3.6"""'}), "(name=package_dict['name'], version=package_dict[\n '__version__'], author=package_dict['__author__'], description=\n 'PDF utility classes', long_description=long_description,\n long_description_content_type='text/markdown', url=\n 'https://github.com/gismaps/PDF_Utils', packages=dependencies,\n install_requires=requirements, package_dir={'': 'src'}, data_files=[],\n include_package_data=True, license='MIT', classifiers=[\n 'Development Status :: 4 - Beta',\n 'Programming Language :: Python :: 3.6',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent'], python_requires='>=3.6')\n", (1422, 2056), False, 'import setuptools\n'), ((474, 518), 'shutil.rmtree', 'shutil.rmtree', (['"""./build"""'], {'ignore_errors': '(True)'}), "('./build', ignore_errors=True)\n", (487, 518), False, 'import shutil\n'), ((670, 681), 'pathlib.Path', 'Path', (['"""src"""'], {}), "('src')\n", (674, 681), False, 'from pathlib import Path\n')]
# coding=utf-8
"""Dump recent messages from a Telegram channel into a local sqlite DB."""
import configparser
import datetime as dt
import json
import sqlite3
import os

from telethon.sync import TelegramClient
from telethon import connection

# for correct serialization of message timestamps to json
from datetime import date, datetime

# classes for working with channels
from telethon.tl.functions.channels import GetParticipantsRequest
from telethon.tl.types import ChannelParticipantsSearch

# class for working with messages
from telethon.tl.functions.messages import GetHistoryRequest

# Read credentials
config = configparser.ConfigParser()
config.read("config.ini")

# Assign values to internal variables
api_id = config['Telegram']['api_id']
api_hash = config['Telegram']['api_hash']
username = config['Telegram']['username']

client = TelegramClient(username, api_id, api_hash)
client.start()


async def dump_all_messages(channel):
    """Fetch channel/chat history and insert the messages into sqlite."""
    offset_msg = 0     # record id the next page of history starts from
    limit_msg = 100    # maximum number of records fetched per request
    all_messages = []  # list of all messages
    total_messages = 0
    total_count_limit = 100  # number of messages to fetch (0 = no limit)

    class DateTimeEncoder(json.JSONEncoder):
        '''JSON encoder able to serialize datetime and bytes values.'''
        def default(self, o):
            if isinstance(o, datetime):
                return o.isoformat()
            if isinstance(o, bytes):
                return list(o)
            return json.JSONEncoder.default(self, o)

    while True:
        history = await client(GetHistoryRequest(
            peer=channel,
            offset_id=offset_msg,
            offset_date=None,
            add_offset=0,
            limit=limit_msg,
            max_id=0,
            min_id=0,
            hash=0))
        if not history.messages:
            break
        messages = history.messages
        for message in messages:
            all_messages.append(message.to_dict())
        offset_msg = messages[len(messages) - 1].id
        total_messages = len(all_messages)
        if total_count_limit != 0 and total_messages >= total_count_limit:
            break

    db_name = 'db.sqlite'
    con = sqlite3.connect(db_name)
    cur = con.cursor()
    for line in all_messages:
        try:
            msg_id = line['id']  # renamed: `id` shadowed the builtin
            message = line['message']
            if not message:
                continue
            # NOTE(review): "%I" is the 12-hour clock with no AM/PM marker;
            # "%H" was probably intended — kept as-is to preserve behavior.
            pub_date = line['date'].strftime("%Y-%m-%d %I:%M:%S")
            # FIX: parameterized query. The original interpolated the message
            # text directly into the SQL string, which broke on any message
            # containing a quote and was open to SQL injection.
            cur.execute(
                "INSERT INTO news(id,message,pub_date) VALUES(?, ?, ?)",
                (msg_id, message, pub_date))
        except sqlite3.IntegrityError:
            # duplicate id: we reached already-imported history, stop
            break
        except Exception:
            # best-effort import: skip malformed records (was a bare except)
            continue
    con.commit()
    con.close()


async def main():
    url = "https://t.me/QryaProDucktion"
    channel = await client.get_entity(url)
    await dump_all_messages(channel)


with client:
    client.loop.run_until_complete(main())
[ "telethon.sync.TelegramClient", "telethon.tl.functions.messages.GetHistoryRequest", "sqlite3.connect", "configparser.ConfigParser", "json.JSONEncoder.default" ]
[((543, 570), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (568, 570), False, 'import configparser\n'), ((775, 817), 'telethon.sync.TelegramClient', 'TelegramClient', (['username', 'api_id', 'api_hash'], {}), '(username, api_id, api_hash)\n', (789, 817), False, 'from telethon.sync import TelegramClient\n'), ((2207, 2231), 'sqlite3.connect', 'sqlite3.connect', (['db_name'], {}), '(db_name)\n', (2222, 2231), False, 'import sqlite3\n'), ((1536, 1569), 'json.JSONEncoder.default', 'json.JSONEncoder.default', (['self', 'o'], {}), '(self, o)\n', (1560, 1569), False, 'import json\n'), ((1618, 1752), 'telethon.tl.functions.messages.GetHistoryRequest', 'GetHistoryRequest', ([], {'peer': 'channel', 'offset_id': 'offset_msg', 'offset_date': 'None', 'add_offset': '(0)', 'limit': 'limit_msg', 'max_id': '(0)', 'min_id': '(0)', 'hash': '(0)'}), '(peer=channel, offset_id=offset_msg, offset_date=None,\n add_offset=0, limit=limit_msg, max_id=0, min_id=0, hash=0)\n', (1635, 1752), False, 'from telethon.tl.functions.messages import GetHistoryRequest\n')]
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# Copyright 2021- QuOCS Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
import logging

from qtpy import QtCore


class HandleExitBasic(QtCore.QObject):
    """Tracks the optimization run state and reports when it should stop.

    Notifies both the Client Interface and the optimization code about the
    current status via :meth:`check_communication`.
    """

    logger = logging.getLogger("oc_logger")
    is_user_running: bool

    @QtCore.Slot(bool)
    def set_is_user_running(self, is_running: bool):
        """
        Slot connected to the Client Interface GUI. Stops the communication
        when the user presses the Stop button.

        :param bool is_running:
        :return:
        """
        self.is_user_running = is_running

    def check_communication(self, communication: dict) -> bool:
        """
        Inspect the communication dictionary and report the running status.

        :param dict communication:
        :return: bool : True if still running, False if stopped by the
            interface or by the optimization code
        """
        if not self.is_user_running:
            return False
        # server_number values -1 and 4 both signal the end of the run.
        if communication["server_number"] in (-1, 4):
            self.logger.info("End of communications")
            return False
        return True

    def get_terminate_reason(self) -> str:
        """
        Get the ending reason.

        :return: str : terminate reason
        """
        print("Something to write here")
        return "No idea"
[ "qtpy.QtCore.Slot", "logging.getLogger" ]
[((1005, 1035), 'logging.getLogger', 'logging.getLogger', (['"""oc_logger"""'], {}), "('oc_logger')\n", (1022, 1035), False, 'import logging\n'), ((1069, 1086), 'qtpy.QtCore.Slot', 'QtCore.Slot', (['bool'], {}), '(bool)\n', (1080, 1086), False, 'from qtpy import QtCore\n')]
import re

import spacy
import torch


class Tree:
    """Formula tree: parses strings like ``&(a,|(b,c))`` into nodes."""

    def __init__(self, val=None, formula=None):
        if val:
            self.val = val
        self.children = []
        if formula:
            root = self.parse(formula)
            self.val = root.val
            self.children = root.children

    def parse(self, formula):
        """Parse ``op(arg,...)`` recursively, splitting args at depth 0."""
        m = re.search('(.*?)\\((.*)\\)', formula)
        if m:
            # NOTE: an earlier version returned early here when the operator
            # was one of '&|$~'; that short-circuit was incorrect and removed.
            # if m.group(1) not in '&|$~':
            #     return Tree(val=m.group(0))
            root = Tree(val=m.group(1))
            subformula = m.group(2)
            splits = []
            paren_count = 0
            start = 0
            # Split the argument list only on commas at parenthesis depth 0.
            for end in range(len(subformula)):
                if subformula[end] == '(':
                    paren_count += 1
                elif subformula[end] == ')':
                    paren_count -= 1
                elif paren_count == 0:
                    if subformula[end] == ',':
                        splits.append(subformula[start:end])
                        start = end + 1
            splits.append(subformula[start:end + 1])
            root.children = [self.parse(sub) for sub in splits]
            return root
        return Tree(val=formula)  # terminal: no parentheses

    def flatten(self):
        """ flatten a tree into string """
        if len(self.children) == 0:
            return self.val
        params = (',').join([child.flatten() for child in self.children])
        return '%s(%s)' % (self.val, params)

    def inorder(self):
        """ generate the sequence inorder (include the non-terminals) """
        if len(self.children) == 0:
            return ['%s' % self.val]
        params = (' , ').join([' <N> '] * len(self.children))
        inorder = ['%s ( %s ) ' % (self.val, params)]
        for child in self.children:
            inorder += child.inorder()
        return inorder

    def __str__(self):
        """ print a tree in hierarchical structure """
        if len(self.children) == 0:
            return self.val
        ret = [self.val]
        for child in self.children:
            ret += ['\t' + child_s for child_s in str(child).split('\n')]
        return ('\n').join(ret)


nlp = spacy.load('en')


class DepTree:
    """spaCy dependency-parse tree with vocab indices as torch tensors."""

    def __init__(self, sent=None, node=None, src_vocab=None, device='cpu'):
        self.device = device
        self.src_vocab = src_vocab
        if sent:
            # FIX: `unicode` does not exist on Python 3 (this module already
            # depends on torch/spacy, i.e. py3) and raised a NameError.
            doc = nlp(str(sent))
            node = self.get_root(doc)
        if node:
            self.val = node.text.lower()
            if self.val not in self.src_vocab.vocab:
                self.val = '<UNK>'
            self.idx = src_vocab.word_to_index[self.val]
            self.input = torch.tensor(self.idx, dtype=torch.long,
                                      device=self.device)
            self.children = []
            for child in node.children:
                self.children.append(DepTree(node=child, src_vocab=src_vocab,
                                             device=self.device))

    def get_root(self, doc):
        """Return the ROOT token of *doc*.

        Raises RuntimeError if no ROOT is present (the original used a bare
        ``raise`` outside an except block, which produced an unhelpful
        "No active exception to re-raise" RuntimeError).
        """
        for token in doc:
            if token.dep_ == 'ROOT':
                return token
        raise RuntimeError('no ROOT token found in document')
[ "spacy.load", "re.search", "torch.tensor" ]
[((2270, 2286), 'spacy.load', 'spacy.load', (['"""en"""'], {}), "('en')\n", (2280, 2286), False, 'import spacy\n'), ((347, 384), 're.search', 're.search', (['"""(.*?)\\\\((.*)\\\\)"""', 'formula'], {}), "('(.*?)\\\\((.*)\\\\)', formula)\n", (356, 384), False, 'import re\n'), ((2765, 2825), 'torch.tensor', 'torch.tensor', (['self.idx'], {'dtype': 'torch.long', 'device': 'self.device'}), '(self.idx, dtype=torch.long, device=self.device)\n', (2777, 2825), False, 'import torch\n')]
""" Copyright (2017) <NAME> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import logging import smtplib from email.mime.text import MIMEText from email.mime.multipart import MIMEMultipart from alarm_central_station_receiver.config import AlarmConfig def create_message(events): """ Build the message body. The first event's timestamp is included in the message body as well. When sending this email to an SMS bridge, sometimes the time that the SMS is received is well after the event occurred and there is no clear way to know when the message was actually sent. 
""" messages = [] timestamp = '' for event in events: rtype = event.get('type') desc = event.get('description') if not timestamp: timestamp = event.get('timestamp') messages.append('%s: %s' % (rtype, desc)) return '%s:\n%s' % (timestamp, '\n'.join(messages)) def notify(events): if not events: return if 'EmailNotification' not in AlarmConfig.config: return logging.info("Sending email...") username = AlarmConfig.config.get('EmailNotification', 'username') password = AlarmConfig.config.get('EmailNotification', 'password') to_addr = AlarmConfig.config.get('EmailNotification', 'notification_email') subject = AlarmConfig.config.get('EmailNotification', 'notification_subject') tls = AlarmConfig.config.getboolean('EmailNotification', 'tls') server = AlarmConfig.config.get('EmailNotification', 'server_address') server_port = AlarmConfig.config.get('EmailNotification', 'port') msg = MIMEMultipart('alternative') msg['From'] = username msg['To'] = to_addr msg['Subject'] = subject body = create_message(events) msg.attach(MIMEText(body, 'plain')) msg.attach(MIMEText(body, 'html')) try: s = smtplib.SMTP(server, server_port) s.ehlo() if tls: s.starttls() s.ehlo() s.login(username, password) s.sendmail(username, [to_addr], msg.as_string()) s.quit() logging.info("Email send complete") except smtplib.SMTPException as exc: logging.error("Error sending email: %s", str(exc))
[ "alarm_central_station_receiver.config.AlarmConfig.config.getboolean", "smtplib.SMTP", "email.mime.text.MIMEText", "email.mime.multipart.MIMEMultipart", "alarm_central_station_receiver.config.AlarmConfig.config.get", "logging.info" ]
[((1527, 1559), 'logging.info', 'logging.info', (['"""Sending email..."""'], {}), "('Sending email...')\n", (1539, 1559), False, 'import logging\n'), ((1575, 1630), 'alarm_central_station_receiver.config.AlarmConfig.config.get', 'AlarmConfig.config.get', (['"""EmailNotification"""', '"""username"""'], {}), "('EmailNotification', 'username')\n", (1597, 1630), False, 'from alarm_central_station_receiver.config import AlarmConfig\n'), ((1646, 1701), 'alarm_central_station_receiver.config.AlarmConfig.config.get', 'AlarmConfig.config.get', (['"""EmailNotification"""', '"""password"""'], {}), "('EmailNotification', 'password')\n", (1668, 1701), False, 'from alarm_central_station_receiver.config import AlarmConfig\n'), ((1716, 1781), 'alarm_central_station_receiver.config.AlarmConfig.config.get', 'AlarmConfig.config.get', (['"""EmailNotification"""', '"""notification_email"""'], {}), "('EmailNotification', 'notification_email')\n", (1738, 1781), False, 'from alarm_central_station_receiver.config import AlarmConfig\n'), ((1796, 1863), 'alarm_central_station_receiver.config.AlarmConfig.config.get', 'AlarmConfig.config.get', (['"""EmailNotification"""', '"""notification_subject"""'], {}), "('EmailNotification', 'notification_subject')\n", (1818, 1863), False, 'from alarm_central_station_receiver.config import AlarmConfig\n'), ((1874, 1931), 'alarm_central_station_receiver.config.AlarmConfig.config.getboolean', 'AlarmConfig.config.getboolean', (['"""EmailNotification"""', '"""tls"""'], {}), "('EmailNotification', 'tls')\n", (1903, 1931), False, 'from alarm_central_station_receiver.config import AlarmConfig\n'), ((1945, 2006), 'alarm_central_station_receiver.config.AlarmConfig.config.get', 'AlarmConfig.config.get', (['"""EmailNotification"""', '"""server_address"""'], {}), "('EmailNotification', 'server_address')\n", (1967, 2006), False, 'from alarm_central_station_receiver.config import AlarmConfig\n'), ((2025, 2076), 
'alarm_central_station_receiver.config.AlarmConfig.config.get', 'AlarmConfig.config.get', (['"""EmailNotification"""', '"""port"""'], {}), "('EmailNotification', 'port')\n", (2047, 2076), False, 'from alarm_central_station_receiver.config import AlarmConfig\n'), ((2088, 2116), 'email.mime.multipart.MIMEMultipart', 'MIMEMultipart', (['"""alternative"""'], {}), "('alternative')\n", (2101, 2116), False, 'from email.mime.multipart import MIMEMultipart\n'), ((2246, 2269), 'email.mime.text.MIMEText', 'MIMEText', (['body', '"""plain"""'], {}), "(body, 'plain')\n", (2254, 2269), False, 'from email.mime.text import MIMEText\n'), ((2286, 2308), 'email.mime.text.MIMEText', 'MIMEText', (['body', '"""html"""'], {}), "(body, 'html')\n", (2294, 2308), False, 'from email.mime.text import MIMEText\n'), ((2332, 2365), 'smtplib.SMTP', 'smtplib.SMTP', (['server', 'server_port'], {}), '(server, server_port)\n', (2344, 2365), False, 'import smtplib\n'), ((2559, 2594), 'logging.info', 'logging.info', (['"""Email send complete"""'], {}), "('Email send complete')\n", (2571, 2594), False, 'import logging\n')]
# -*- coding: utf-8 -*-
"""bioimageit_core Allgo process service.

This module implements a service to run a process using the AllGo client
API (allgo18.inria.fr).

Classes
-------
ProcessServiceProvider

"""

import os
import ntpath

import allgo as ag

from bioimageit_core.config import ConfigAccess
from bioimageit_core.core.utils import Observable
from bioimageit_core.processes.containers import ProcessContainer


class AllgoRunnerServiceBuilder:
    """Service builder for the runner service"""

    def __init__(self):
        self._instance = None

    def __call__(self, **_ignored):
        # Lazily create a single shared service instance.
        if not self._instance:
            self._instance = AllgoRunnerService()
        return self._instance


class AllgoRunnerService(Observable):
    """Service for runner exec using AllGo client API"""

    def __init__(self):
        super().__init__()
        self.service_name = 'AllgoRunnerService'

    def set_up(self, process: ProcessContainer):
        """setup the runner

        Add here the code to initialize the runner

        Parameters
        ----------
        process
            Metadata of the process
        """
        pass

    def exec(self, process: ProcessContainer, args):
        """Execute a process

        Parameters
        ----------
        process
            Metadata of the process
        args
            list of arguments
        """
        token = None
        config = ConfigAccess.instance().config['runner']
        if 'token' in config:
            token = config['token']
        client = ag.Client(token)

        # exec the process: the remote job sees only file basenames,
        # so rewrite each local path in the parameter string.
        params = ' '.join(args[1:])
        files = []
        for input_ in process.inputs:
            if input_.is_data:
                filename = ntpath.basename(input_.value)
                params = params.replace(input_.value, filename)
                files.append(input_.value)
        for output in process.outputs:
            if output.is_data:
                filename = ntpath.basename(output.value)
                params = params.replace(output.value, filename)

        try:
            out_dict = client.run_job(process.id, files=files, params=params)
        except ag.StatusError as e:
            print('API status Error:', e.status_code)
            print('API status Error:', e.msg)
            # FIX: the original fell through after logging and then crashed
            # with a NameError on the unbound `out_dict`; propagate instead.
            raise

        # download the job outputs next to the requested output paths
        job_id = out_dict['id']
        for output in process.outputs:
            output_filename = ntpath.basename(output.value)
            output_dir = os.path.dirname(os.path.abspath(output.value))
            url = out_dict[str(job_id)][output_filename]
            client.download_file(file_url=url, outdir=output_dir, force=True)

    def tear_down(self, process: ProcessContainer):
        """tear down the runner

        Add here the code to down/clean the runner

        Parameters
        ----------
        process
            Metadata of the process
        """
        pass
[ "bioimageit_core.config.ConfigAccess.instance", "os.path.abspath", "ntpath.basename", "allgo.Client" ]
[((1545, 1561), 'allgo.Client', 'ag.Client', (['token'], {}), '(token)\n', (1554, 1561), True, 'import allgo as ag\n'), ((2522, 2551), 'ntpath.basename', 'ntpath.basename', (['output.value'], {}), '(output.value)\n', (2537, 2551), False, 'import ntpath\n'), ((1421, 1444), 'bioimageit_core.config.ConfigAccess.instance', 'ConfigAccess.instance', ([], {}), '()\n', (1442, 1444), False, 'from bioimageit_core.config import ConfigAccess\n'), ((1741, 1770), 'ntpath.basename', 'ntpath.basename', (['input_.value'], {}), '(input_.value)\n', (1756, 1770), False, 'import ntpath\n'), ((1976, 2005), 'ntpath.basename', 'ntpath.basename', (['output.value'], {}), '(output.value)\n', (1991, 2005), False, 'import ntpath\n'), ((2593, 2622), 'os.path.abspath', 'os.path.abspath', (['output.value'], {}), '(output.value)\n', (2608, 2622), False, 'import os\n')]
from setuptools import setup, find_packages setup( name='newpackage', version='0.10', packages=find_packages(exclude=['tests*']), license='MIT', description='Another EDSA example python packages', long_description=open('README.md').read(), install_requirements=['numpy'], url='https://github.com/Muzix1/newpackage', author='<NAME>', author_email='<EMAIL>' )
[ "setuptools.find_packages" ]
[((108, 141), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests*']"}), "(exclude=['tests*'])\n", (121, 141), False, 'from setuptools import setup, find_packages\n')]
#!/usr/bin/env python import json import sys def validate(input): normalized = normalize_internal(input) if normalized != input: sys.stderr.write("Input is not normalized.\n") sys.exit(1) def normalize_internal(input): db = json.loads(input) string = json.dumps(normalize_db(db), ensure_ascii=False, allow_nan=False, sort_keys=False, indent=2) return string + "\n" def normalize(input): open("db.json", "w").write(normalize_internal(input)) def normalize_db(db): for (name, normalize_algorithm, sort_key) in [ ("workstreams", normalize_workstream, "name"), ("ideas", normalize_workstream_standard_or_idea, "name"), ("biblio", normalize_reference, "title") ]: db[name] = normalize_list(db[name], normalize_algorithm, sort_key) return db def normalize_list(input, normalize_algorithm, sort_key): output = [normalize_algorithm(item) for item in input] output.sort(key=lambda item: item[sort_key]) return output def normalize_workstream(workstream): return { "id": workstream["id"], "name": workstream["name"], "scope": workstream["scope"], "editors": normalize_list(workstream["editors"], normalize_person, "name"), "standards": normalize_list(workstream["standards"], normalize_workstream_standard, "name") } def normalize_person(editor): return { "name": editor["name"], "email": editor.get("email", None) } def normalize_workstream_standard(document): output = normalize_workstream_standard_or_idea(document) output["review_draft_schedule"] = document["review_draft_schedule"] output["twitter"] = document["twitter"] return output def normalize_workstream_standard_or_idea(document): return { "name": document["name"], "href": document["href"], "description": document["description"], "authors": normalize_list(document["authors"], normalize_person, "name"), "reference": document["reference"] } def normalize_reference(reference): output = { "title": reference["title"], "href": reference["href"], "authors": normalize_list(reference["authors"], normalize_person, "name"), "reference": 
reference["reference"] } if "obsoletedBy" in reference: output["obsoletedBy"] = reference["obsoletedBy"] return output def usage(): sys.stderr.write( """Usage: %s [command] Commands: * validate -- Checks that db.json is normalized. * normalize -- Normalizes db.json. """ % sys.argv[0] ) sys.exit(0) def main(): command = None try: command = sys.argv[1] except IndexError: usage() if command not in ["validate", "normalize"]: usage() else: input = open("db.json", "r").read() if command == "validate": validate(input) elif command == "normalize": normalize(input) else: assert False, "Unreachable code." main()
[ "sys.stderr.write", "json.loads", "sys.exit" ]
[((255, 272), 'json.loads', 'json.loads', (['input'], {}), '(input)\n', (265, 272), False, 'import json\n'), ((2408, 2574), 'sys.stderr.write', 'sys.stderr.write', (['("""Usage: %s [command]\n\nCommands:\n\n* validate -- Checks that db.json is normalized.\n* normalize -- Normalizes db.json.\n\n"""\n % sys.argv[0])'], {}), '(\n """Usage: %s [command]\n\nCommands:\n\n* validate -- Checks that db.json is normalized.\n* normalize -- Normalizes db.json.\n\n"""\n % sys.argv[0])\n', (2424, 2574), False, 'import sys\n'), ((2583, 2594), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2591, 2594), False, 'import sys\n'), ((147, 193), 'sys.stderr.write', 'sys.stderr.write', (['"""Input is not normalized.\n"""'], {}), "('Input is not normalized.\\n')\n", (163, 193), False, 'import sys\n'), ((202, 213), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (210, 213), False, 'import sys\n')]
#!/usr/bin/env python # Copyright 2016 <NAME> # Apache 2.0. from __future__ import print_function import argparse import sys def GetArgs(): parser = argparse.ArgumentParser(description = "Apply an lexicon edits file (output from subtools/kaldi/steps/dict/select_prons_bayesian.py)to an input lexicon" "to produce a learned lexicon.", epilog = "See subtools/kaldi/steps/dict/learn_lexicon_greedy.sh for example") parser.add_argument("in_lexicon", metavar='<in-lexicon>', type = str, help = "Input lexicon. Each line must be <word> <phones>.") parser.add_argument("lexicon_edits_file", metavar='<lexicon-edits-file>', type = str, help = "Input lexicon edits file containing human-readable & editable" "pronounciation info. The info for each word is like:" "------------ an 4086.0 --------------" "R | Y | 2401.6 | AH N" "R | Y | 640.8 | AE N" "P | Y | 1035.5 | IH N" "R(ef), P(hone-decoding) represents the pronunciation source" "Y/N means the recommended decision of including this pron or not" "and the numbers are soft counts accumulated from lattice-align-word outputs. 
See subtools/kaldi/steps/dict/select_prons_bayesian.py for more details.") parser.add_argument("out_lexicon", metavar='<out-lexicon>', type = str, help = "Output lexicon to this file.") print (' '.join(sys.argv), file=sys.stderr) args = parser.parse_args() args = CheckArgs(args) return args def CheckArgs(args): if args.in_lexicon == "-": args.in_lexicon = sys.stdin else: args.in_lexicon_handle = open(args.in_lexicon) args.lexicon_edits_file_handle = open(args.lexicon_edits_file) if args.out_lexicon == "-": args.out_lexicon_handle = sys.stdout else: args.out_lexicon_handle = open(args.out_lexicon, "w") return args def ReadLexicon(lexicon_file_handle): lexicon = set() if lexicon_file_handle: for line in lexicon_file_handle.readlines(): splits = line.strip().split() if len(splits) == 0: continue if len(splits) < 2: raise Exception('Invalid format of line ' + line + ' in lexicon file.') word = splits[0] phones = ' '.join(splits[1:]) lexicon.add((word, phones)) return lexicon def ApplyLexiconEdits(lexicon, lexicon_edits_file_handle): if lexicon_edits_file_handle: for line in lexicon_edits_file_handle.readlines(): # skip all commented lines if line.startswith('#'): continue # read a word from a line like "---- MICROPHONES 200.0 ----". 
if line.startswith('---'): splits = line.strip().strip('-').strip().split() if len(splits) != 2: print(splits, file=sys.stderr) raise Exception('Invalid format of line ' + line + ' in lexicon edits file.') word = splits[0].strip() else: # parse the pron and decision 'Y/N' of accepting the pron or not, # from a line like: 'P | Y | 42.0 | M AY K R AH F OW N Z' splits = line.split('|') if len(splits) != 4: raise Exception('Invalid format of line ' + line + ' in lexicon edits file.') pron = splits[3].strip() if splits[1].strip() == 'Y': lexicon.add((word, pron)) elif splits[1].strip() == 'N': lexicon.discard((word, pron)) else: raise Exception('Invalid format of line ' + line + ' in lexicon edits file.') return lexicon def WriteLexicon(lexicon, out_lexicon_handle): for word, pron in lexicon: print('{0} {1}'.format(word, pron), file=out_lexicon_handle) out_lexicon_handle.close() def Main(): args = GetArgs() lexicon = ReadLexicon(args.in_lexicon_handle) ApplyLexiconEdits(lexicon, args.lexicon_edits_file_handle) WriteLexicon(lexicon, args.out_lexicon_handle) if __name__ == "__main__": Main()
[ "argparse.ArgumentParser" ]
[((157, 427), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Apply an lexicon edits file (output from subtools/kaldi/steps/dict/select_prons_bayesian.py)to an input lexiconto produce a learned lexicon."""', 'epilog': '"""See subtools/kaldi/steps/dict/learn_lexicon_greedy.sh for example"""'}), "(description=\n 'Apply an lexicon edits file (output from subtools/kaldi/steps/dict/select_prons_bayesian.py)to an input lexiconto produce a learned lexicon.'\n , epilog=\n 'See subtools/kaldi/steps/dict/learn_lexicon_greedy.sh for example')\n", (180, 427), False, 'import argparse\n')]
import numpy import qm3 import qm3.engines.gaussian import io import os import sys cwd = os.path.abspath( os.path.dirname( sys.argv[0] ) ) + os.sep mol = qm3.molecule() mol.pdb_read( open( cwd + "charmm.pdb" ) ) mol.psf_read( open( cwd + "charmm.psf" ) ) mol.guess_atomic_numbers() print( mol.anum ) print( mol.chrg ) sqm = mol.resn == "WAT" for a in [ "C6", "C9", "H11", "H12", "H13", "H14", "H15" ]: sqm[mol.indx["A"][1][a]] = True sqm = numpy.logical_not( sqm ) smm = mol.sph_sel( sqm, 12 ) sla = [ ( mol.indx["A"][1]["C10"], mol.indx["A"][1]["C6"] ) ] f = io.StringIO( """%chk=gauss.chk %mem=2048mb %nproc=2 #p b3lyp/def2svp qm3_job qm3_guess charge prop=(field,read) scf=direct nosymm fchk . 1 1 qm3_atoms qm3_charges qm3_field """ ) mol.engines["qm"] = qm3.engines.gaussian.run( mol, f, sqm, smm, sla ) mol.engines["qm"].exe = ". ~/Devel/g09/pgi.imac64/g09.profile; g09 gauss.com" mol.get_grad() print( mol.func ) assert( numpy.fabs( mol.func - -697633.7375 ) < 0.001 ), "function error" print( numpy.linalg.norm( mol.grad ) ) assert( numpy.fabs( numpy.linalg.norm( mol.grad ) - 575.7341 ) < 0.001 ), "gradient error" print( numpy.linalg.norm( mol.grad[mol.indx["A"][1]["C10"]] ) ) assert( numpy.fabs( numpy.linalg.norm( mol.grad[mol.indx["A"][1]["C10"]] ) - 68.4270 ) < 0.001 ), "QM-LA gradient error"
[ "io.StringIO", "qm3.engines.gaussian.run", "os.path.dirname", "numpy.logical_not", "qm3.molecule", "numpy.fabs", "numpy.linalg.norm" ]
[((163, 177), 'qm3.molecule', 'qm3.molecule', ([], {}), '()\n', (175, 177), False, 'import qm3\n'), ((454, 476), 'numpy.logical_not', 'numpy.logical_not', (['sqm'], {}), '(sqm)\n', (471, 476), False, 'import numpy\n'), ((575, 765), 'io.StringIO', 'io.StringIO', (['"""%chk=gauss.chk\n%mem=2048mb\n%nproc=2\n#p b3lyp/def2svp qm3_job qm3_guess charge prop=(field,read) scf=direct nosymm fchk\n\n.\n\n1 1\nqm3_atoms\n\nqm3_charges\n\nqm3_field\n"""'], {}), '(\n """%chk=gauss.chk\n%mem=2048mb\n%nproc=2\n#p b3lyp/def2svp qm3_job qm3_guess charge prop=(field,read) scf=direct nosymm fchk\n\n.\n\n1 1\nqm3_atoms\n\nqm3_charges\n\nqm3_field\n"""\n )\n', (586, 765), False, 'import io\n'), ((778, 825), 'qm3.engines.gaussian.run', 'qm3.engines.gaussian.run', (['mol', 'f', 'sqm', 'smm', 'sla'], {}), '(mol, f, sqm, smm, sla)\n', (802, 825), False, 'import qm3\n'), ((948, 983), 'numpy.fabs', 'numpy.fabs', (['(mol.func - -697633.7375)'], {}), '(mol.func - -697633.7375)\n', (958, 983), False, 'import numpy\n'), ((1021, 1048), 'numpy.linalg.norm', 'numpy.linalg.norm', (['mol.grad'], {}), '(mol.grad)\n', (1038, 1048), False, 'import numpy\n'), ((1151, 1203), 'numpy.linalg.norm', 'numpy.linalg.norm', (["mol.grad[mol.indx['A'][1]['C10']]"], {}), "(mol.grad[mol.indx['A'][1]['C10']])\n", (1168, 1203), False, 'import numpy\n'), ((113, 141), 'os.path.dirname', 'os.path.dirname', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (128, 141), False, 'import os\n'), ((1073, 1100), 'numpy.linalg.norm', 'numpy.linalg.norm', (['mol.grad'], {}), '(mol.grad)\n', (1090, 1100), False, 'import numpy\n'), ((1228, 1280), 'numpy.linalg.norm', 'numpy.linalg.norm', (["mol.grad[mol.indx['A'][1]['C10']]"], {}), "(mol.grad[mol.indx['A'][1]['C10']])\n", (1245, 1280), False, 'import numpy\n')]
"""Unittests for module 'immaculater' that cannot live peaceably with immaculater_test. This unittest is separate because once you call SetAllowOversizeProtos(True) you cannot change it. """ from __future__ import absolute_import from __future__ import unicode_literals from __future__ import print_function import pipes import gflags as flags # https://github.com/gflags/python-gflags from pyatdllib.ui import serialization from pyatdllib.ui import test_helper FLAGS = flags.FLAGS class TooBigProtobufTestCase(test_helper.TestHelper): def setUp(self): super().setUp() FLAGS.pyatdl_allow_infinite_memory_for_protobuf = True def testTooBigToSaveError(self): FLAGS.pyatdl_allow_infinite_memory_for_protobuf = False save_path = self._CreateTmpFile('') inputs = ['loadtest -n 100', 'save %s' % pipes.quote(save_path) ] self.helpTest(inputs, serialization.TooBigToSaveError) if __name__ == '__main__': test_helper.main()
[ "pipes.quote", "pyatdllib.ui.test_helper.main" ]
[((968, 986), 'pyatdllib.ui.test_helper.main', 'test_helper.main', ([], {}), '()\n', (984, 986), False, 'from pyatdllib.ui import test_helper\n'), ((839, 861), 'pipes.quote', 'pipes.quote', (['save_path'], {}), '(save_path)\n', (850, 861), False, 'import pipes\n')]
""" The Envisage core plugin. """ # Enthought library imports. from envisage.api import ExtensionPoint, Plugin, ServiceOffer from traits.api import List, Instance, on_trait_change, Str class CorePlugin(Plugin): """ The Envisage core plugin. The core plugin offers facilities that are generally useful when building extensible applications such as adapters, categories and hooks etc. It does not contain anything to do with user interfaces! The core plugin should be started before any other plugin. It is up to the plugin manager to do this. """ # Extension point Ids. CATEGORIES = 'envisage.categories' CLASS_LOAD_HOOKS = 'envisage.class_load_hooks' PREFERENCES = 'envisage.preferences' SERVICE_OFFERS = 'envisage.service_offers' #### 'IPlugin' interface ################################################## # The plugin's unique identifier. id = 'envisage.core' # The plugin's name (suitable for displaying to the user). name = 'Core' #### Extension points offered by this plugin ############################## # Categories are actually implemented via standard 'ClassLoadHooks', but # for (hopefully) readability and convenience we have a specific extension # point. categories = ExtensionPoint( List(Instance('envisage.category.Category')), id = CATEGORIES, desc = """ Traits categories allow you to dynamically extend a Python class with extra attributes, methods and events. Contributions to this extension point allow you to import categories *lazily* when the class to be extended is imported or created. Each contribution contains the name of the category class that you want to add (the 'class_name') and the name of the class that you want to extend (the 'target_class_name'). e.g. To add the 'FooCategory' category to the 'Foo' class:: Category( class_name = 'foo_category.FooCategory', target_class_name = 'foo.Foo' ) """ ) @on_trait_change('categories_items') def _categories_items_changed(self, event): """ React to new categories being *added*. 
Note that we don't currently do anything if categories are *removed*. """ self._add_category_class_load_hooks(event.added) return class_load_hooks = ExtensionPoint( List(Instance('envisage.class_load_hook.ClassLoadHook')), id = CLASS_LOAD_HOOKS, desc = """ Class load hooks allow you to be notified when any 'HasTraits' class is imported or created. See the documentation for 'ClassLoadHook' for more details. """ ) @on_trait_change('class_load_hooks_items') def _class_load_hooks_changed(self, event): """ React to new class load hooks being *added*. Note that we don't currently do anything if class load hooks are *removed*. """ self._connect_class_load_hooks(event.added) return preferences = ExtensionPoint( List(Str), id = PREFERENCES, desc = """ Preferences files allow plugins to contribute default values for user preferences. Each contributed string must be the URL of a file-like object that contains preferences values. e.g. 'pkgfile://envisage/preferences.ini' - this looks for the 'preferences.ini' file in the 'envisage' package. 'file://C:/tmp/preferences.ini' - this looks for the 'preferences.ini' file in 'C:/tmp' 'http://some.website/preferences.ini' - this looks for the 'preferences.ini' document on the 'some.website' web site! The files themselves are parsed using the excellent 'ConfigObj' package. For detailed documentation please go to:- http://www.voidspace.org.uk/python/configobj.html """ ) @on_trait_change('preferences_items') def _preferences_changed(self, event): """ React to new preferencess being *added*. Note that we don't currently do anything if preferences are *removed*. """ self._load_preferences(event.added) return service_offers = ExtensionPoint( List(ServiceOffer), id = SERVICE_OFFERS, desc = """ Services are simply objects that a plugin wants to make available to other plugins. This extension point allows you to offer services that are created 'on-demand'. e.g. 
my_service_offer = ServiceOffer( protocol = 'acme.IMyService', factory = an_object_or_a_callable_that_creates_one, properties = {'a dictionary' : 'that is passed to the factory'} ) See the documentation for 'ServiceOffer' for more details. """ ) @on_trait_change('service_offers_items') def _service_offers_changed(self, event): """ React to new service offers being *added*. Note that we don't currently do anything if services are *removed* as we have no facility to let users of the service know that the offer has been retracted. """ for service in event.added: self._register_service_offer(service) return #### Contributions to extension points made by this plugin ################ # None. ########################################################################### # 'IPlugin' interface. ########################################################################### def start(self): """ Start the plugin. """ # Load all contributed preferences files into the application's root # preferences node. self._load_preferences(self.preferences) # Connect all class load hooks. self._connect_class_load_hooks(self.class_load_hooks) # Add class load hooks for all of the contributed categories. The # category will be imported and added when the associated target class # is imported/created. self._add_category_class_load_hooks(self.categories) # Register all service offers. # # These services are unregistered by the default plugin activation # strategy (due to the fact that we store the service ids in this # specific trait!). self._service_ids = self._register_service_offers(self.service_offers) return ########################################################################### # Private interface. ########################################################################### def _add_category_class_load_hooks(self, categories): """ Add class load hooks for a list of categories. 
""" for category in categories: class_load_hook = self._create_category_class_load_hook(category) class_load_hook.connect() return def _connect_class_load_hooks(self, class_load_hooks): """ Connect all class load hooks. """ for class_load_hook in class_load_hooks: class_load_hook.connect() return def _create_category_class_load_hook(self, category): """ Create a category class load hook. """ # Local imports. from .class_load_hook import ClassLoadHook def import_and_add_category(cls): """ Import a category and add it to a class. This is a closure that binds 'self' and 'category'. """ category_cls = self.application.import_symbol(category.class_name) cls.add_trait_category(category_cls) return category_class_load_hook = ClassLoadHook( class_name = category.target_class_name, on_load = import_and_add_category ) return category_class_load_hook def _load_preferences(self, preferences): """ Load all contributed preferences into a preferences node. """ # Enthought library imports. from envisage.resource.api import ResourceManager # We add the plugin preferences to the default scope. The default scope # is a transient scope which means that (quite nicely ;^) we never # save the actual default plugin preference values. They will only get # saved if a value has been set in another (persistent) scope - which # is exactly what happens in the preferences UI. default = self.application.preferences.node('default/') # The resource manager is used to find the preferences files. resource_manager = ResourceManager() for resource_name in preferences: f = resource_manager.file(resource_name) try: default.load(f) finally: f.close() return def _register_service_offers(self, service_offers): """ Register a list of service offers. """ return list(map(self._register_service_offer, service_offers)) def _register_service_offer(self, service_offer): """ Register a service offer. 
""" service_id = self.application.register_service( protocol = service_offer.protocol, obj = service_offer.factory, properties = service_offer.properties ) return service_id ### EOF ######################################################################
[ "traits.api.List", "traits.api.Instance", "envisage.resource.api.ResourceManager", "traits.api.on_trait_change" ]
[((2124, 2159), 'traits.api.on_trait_change', 'on_trait_change', (['"""categories_items"""'], {}), "('categories_items')\n", (2139, 2159), False, 'from traits.api import List, Instance, on_trait_change, Str\n'), ((2786, 2827), 'traits.api.on_trait_change', 'on_trait_change', (['"""class_load_hooks_items"""'], {}), "('class_load_hooks_items')\n", (2801, 2827), False, 'from traits.api import List, Instance, on_trait_change, Str\n'), ((4026, 4062), 'traits.api.on_trait_change', 'on_trait_change', (['"""preferences_items"""'], {}), "('preferences_items')\n", (4041, 4062), False, 'from traits.api import List, Instance, on_trait_change, Str\n'), ((4963, 5002), 'traits.api.on_trait_change', 'on_trait_change', (['"""service_offers_items"""'], {}), "('service_offers_items')\n", (4978, 5002), False, 'from traits.api import List, Instance, on_trait_change, Str\n'), ((3151, 3160), 'traits.api.List', 'List', (['Str'], {}), '(Str)\n', (3155, 3160), False, 'from traits.api import List, Instance, on_trait_change, Str\n'), ((4359, 4377), 'traits.api.List', 'List', (['ServiceOffer'], {}), '(ServiceOffer)\n', (4363, 4377), False, 'from traits.api import List, Instance, on_trait_change, Str\n'), ((8724, 8741), 'envisage.resource.api.ResourceManager', 'ResourceManager', ([], {}), '()\n', (8739, 8741), False, 'from envisage.resource.api import ResourceManager\n'), ((1324, 1362), 'traits.api.Instance', 'Instance', (['"""envisage.category.Category"""'], {}), "('envisage.category.Category')\n", (1332, 1362), False, 'from traits.api import List, Instance, on_trait_change, Str\n'), ((2478, 2528), 'traits.api.Instance', 'Instance', (['"""envisage.class_load_hook.ClassLoadHook"""'], {}), "('envisage.class_load_hook.ClassLoadHook')\n", (2486, 2528), False, 'from traits.api import List, Instance, on_trait_change, Str\n')]
from celery import chain from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField from django.db.models.signals import post_save from django.dispatch import receiver from .tasks.run_spider import run_spider from .tasks.load_data_to_db import load_data_to_db class Spider(Model): name = CharField (max_length=128, unique=True, help_text='use spider from apps/spiders/spiders/<name>.py') desc = TextField (default='') def __str__(self): return f"{self.name}" class Session(Model): spider = ForeignKey (Spider, on_delete=CASCADE) started = DateTimeField (auto_now_add=True) finished = DateTimeField (blank=True, null=True) load_started = DateTimeField (blank=True, null=True) load_finished = DateTimeField (blank=True, null=True) def __str__(self): return f"{self.spider.name} {self.started} - {self.finished}" @receiver(post_save, sender=Session) def run_spider_if_session_was_created(sender, instance, created, **kwargs): if (created): run_spider.delay(instance.id) # run_spider.apply_async(instance.id, link=load_data_to_db.s(instance.id)) (run_spider.s(instance.id) | load_data_to_db.s(instance.id)).apply_async() class Site(Model): url = CharField (max_length=256, unique=True) desc = TextField (default='') def __str__(self): return self.url class Page(Model): site = ForeignKey (Site, on_delete=CASCADE) url = CharField (max_length=512, unique=True, help_text='Url without domain. 
You can find domain in site.url .') last_visit = DateTimeField ( help_text='When spider was on the page in last time.') def __str__(self): return "%s%s"%(self.site.url, self.url) class Article(Model): site = ForeignKey (Site, on_delete=CASCADE) idx = CharField (max_length=256, help_text='ID or Slug.') last_updated= DateTimeField (help_text='Datetime from ArticleSnapshot.timestamp') title = TextField (help_text='Title.') body = TextField (help_text='Main text of article') publish_date= DateTimeField (blank=True, null=True, help_text='') class Meta: unique_together = (("site", "idx"),) class ArticleSnapshot(Model): session = ForeignKey (Session, on_delete=CASCADE) page = ForeignKey (Page , on_delete=CASCADE) article = ForeignKey (Article, on_delete=CASCADE) timestamp = DateTimeField ( help_text='Datetime when data was read from page.') title = CharField (default='', blank=True, max_length=256, help_text='Title.') body = JSONField (default=dict, help_text='Desc that was scriped from page.') publish_date= DateTimeField (blank=True, null=True, help_text='') class Meta: unique_together = (("session", "page", "article"),)
[ "django.db.models.TextField", "django.db.models.CharField", "django.db.models.ForeignKey", "django.dispatch.receiver", "django.db.models.JSONField", "django.db.models.DateTimeField" ]
[((985, 1020), 'django.dispatch.receiver', 'receiver', (['post_save'], {'sender': 'Session'}), '(post_save, sender=Session)\n', (993, 1020), False, 'from django.dispatch import receiver\n'), ((376, 479), 'django.db.models.CharField', 'CharField', ([], {'max_length': '(128)', 'unique': '(True)', 'help_text': '"""use spider from apps/spiders/spiders/<name>.py"""'}), "(max_length=128, unique=True, help_text=\n 'use spider from apps/spiders/spiders/<name>.py')\n", (385, 479), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((500, 521), 'django.db.models.TextField', 'TextField', ([], {'default': '""""""'}), "(default='')\n", (509, 521), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((623, 660), 'django.db.models.ForeignKey', 'ForeignKey', (['Spider'], {'on_delete': 'CASCADE'}), '(Spider, on_delete=CASCADE)\n', (633, 660), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((683, 715), 'django.db.models.DateTimeField', 'DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (696, 715), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((735, 771), 'django.db.models.DateTimeField', 'DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (748, 771), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((793, 829), 'django.db.models.DateTimeField', 'DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (806, 829), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, 
BooleanField, IntegerField, JSONField\n'), ((851, 887), 'django.db.models.DateTimeField', 'DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (864, 887), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((1358, 1396), 'django.db.models.CharField', 'CharField', ([], {'max_length': '(256)', 'unique': '(True)'}), '(max_length=256, unique=True)\n', (1367, 1396), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((1420, 1441), 'django.db.models.TextField', 'TextField', ([], {'default': '""""""'}), "(default='')\n", (1429, 1441), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((1535, 1570), 'django.db.models.ForeignKey', 'ForeignKey', (['Site'], {'on_delete': 'CASCADE'}), '(Site, on_delete=CASCADE)\n', (1545, 1570), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((1593, 1703), 'django.db.models.CharField', 'CharField', ([], {'max_length': '(512)', 'unique': '(True)', 'help_text': '"""Url without domain. You can find domain in site.url ."""'}), "(max_length=512, unique=True, help_text=\n 'Url without domain. 
You can find domain in site.url .')\n", (1602, 1703), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((1724, 1792), 'django.db.models.DateTimeField', 'DateTimeField', ([], {'help_text': '"""When spider was on the page in last time."""'}), "(help_text='When spider was on the page in last time.')\n", (1737, 1792), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((1939, 1974), 'django.db.models.ForeignKey', 'ForeignKey', (['Site'], {'on_delete': 'CASCADE'}), '(Site, on_delete=CASCADE)\n', (1949, 1974), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((1997, 2047), 'django.db.models.CharField', 'CharField', ([], {'max_length': '(256)', 'help_text': '"""ID or Slug."""'}), "(max_length=256, help_text='ID or Slug.')\n", (2006, 2047), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((2071, 2137), 'django.db.models.DateTimeField', 'DateTimeField', ([], {'help_text': '"""Datetime from ArticleSnapshot.timestamp"""'}), "(help_text='Datetime from ArticleSnapshot.timestamp')\n", (2084, 2137), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((2157, 2186), 'django.db.models.TextField', 'TextField', ([], {'help_text': '"""Title."""'}), "(help_text='Title.')\n", (2166, 2186), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((2210, 2253), 'django.db.models.TextField', 'TextField', ([], {'help_text': '"""Main text of article"""'}), "(help_text='Main text of article')\n", (2219, 2253), False, 'from 
django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((2277, 2327), 'django.db.models.DateTimeField', 'DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'help_text': '""""""'}), "(blank=True, null=True, help_text='')\n", (2290, 2327), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((2441, 2479), 'django.db.models.ForeignKey', 'ForeignKey', (['Session'], {'on_delete': 'CASCADE'}), '(Session, on_delete=CASCADE)\n', (2451, 2479), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((2502, 2537), 'django.db.models.ForeignKey', 'ForeignKey', (['Page'], {'on_delete': 'CASCADE'}), '(Page, on_delete=CASCADE)\n', (2512, 2537), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((2563, 2601), 'django.db.models.ForeignKey', 'ForeignKey', (['Article'], {'on_delete': 'CASCADE'}), '(Article, on_delete=CASCADE)\n', (2573, 2601), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((2624, 2689), 'django.db.models.DateTimeField', 'DateTimeField', ([], {'help_text': '"""Datetime when data was read from page."""'}), "(help_text='Datetime when data was read from page.')\n", (2637, 2689), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((2728, 2797), 'django.db.models.CharField', 'CharField', ([], {'default': '""""""', 'blank': '(True)', 'max_length': '(256)', 'help_text': '"""Title."""'}), "(default='', blank=True, max_length=256, help_text='Title.')\n", (2737, 2797), False, 'from django.db.models import Model, CASCADE, ForeignKey, 
CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((2821, 2891), 'django.db.models.JSONField', 'JSONField', ([], {'default': 'dict', 'help_text': '"""Desc that was scriped from page."""'}), "(default=dict, help_text='Desc that was scriped from page.')\n", (2830, 2891), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n'), ((2920, 2970), 'django.db.models.DateTimeField', 'DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'help_text': '""""""'}), "(blank=True, null=True, help_text='')\n", (2933, 2970), False, 'from django.db.models import Model, CASCADE, ForeignKey, CharField, TextField, DateTimeField, BooleanField, IntegerField, JSONField\n')]
#!/usr/bin/env python """ pytest plugin script. This script is an extension to py.test which installs SQLAlchemy's testing plugin into the local environment. """ import sys from os import path for pth in ['../lib']: sys.path.insert(0, path.join(path.dirname(path.abspath(__file__)), pth)) from sqlalchemy.testing.plugin.pytestplugin import *
[ "os.path.abspath" ]
[((265, 287), 'os.path.abspath', 'path.abspath', (['__file__'], {}), '(__file__)\n', (277, 287), False, 'from os import path\n')]
import torch import torch.nn as nn from models.neural import aeq from models.neural import gumbel_softmax class Generator(nn.Module): def __init__(self, vocab_size, dec_hidden_size, pad_idx): super(Generator, self).__init__() self.linear = nn.Linear(dec_hidden_size, vocab_size) self.softmax = nn.LogSoftmax(dim=-1) self.pad_idx = pad_idx def forward(self, x, use_gumbel_softmax=False): output = self.linear(x) output[:, self.pad_idx] = -float('inf') if use_gumbel_softmax: output = gumbel_softmax(output, log_mode=True, dim=-1) else: output = self.softmax(output) return output class CopyGenerator(nn.Module): """An implementation of pointer-generator networks :cite:`DBLP:journals/corr/SeeLM17`. These networks consider copying words directly from the source sequence. The copy generator is an extended version of the standard generator that computes three values. * :math:`p_{softmax}` the standard softmax over `tgt_dict` * :math:`p(z)` the probability of copying a word from the source * :math:`p_{copy}` the probility of copying a particular word. taken from the attention distribution directly. The model returns a distribution over the extend dictionary, computed as :math:`p(w) = p(z=1) p_{copy}(w) + p(z=0) p_{softmax}(w)` .. mermaid:: graph BT A[input] S[src_map] B[softmax] BB[switch] C[attn] D[copy] O[output] A --> B A --> BB S --> D C --> D D --> O B --> O BB --> O Args: input_size (int): size of input representation output_size (int): size of output vocabulary pad_idx (int) """ def __init__(self, output_size, input_size, pad_idx): super(CopyGenerator, self).__init__() self.linear = nn.Linear(input_size, output_size) self.linear_copy = nn.Linear(input_size, 1) self.softmax = nn.LogSoftmax(dim=-1) self.pad_idx = pad_idx def forward(self, hidden, attn, src_map, use_gumbel_softmax=False): """ Compute a distribution over the target dictionary extended by the dynamic dictionary implied by copying source words. 
Args: hidden (FloatTensor): hidden outputs ``(batch x tlen, input_size)`` attn (FloatTensor): attn for each ``(batch x tlen, input_size)`` src_map (FloatTensor): A sparse indicator matrix mapping each source word to its index in the "extended" vocab containing. ``(batch, src_len, extra_words)`` """ # CHECKS batch_by_tlen, _ = hidden.size() batch_by_tlen_, slen = attn.size() batch, slen_, cvocab = src_map.size() aeq(batch_by_tlen, batch_by_tlen_) aeq(slen, slen_) # Original probabilities. logits = self.linear(hidden) logits[:, self.pad_idx] = -float('inf') if use_gumbel_softmax: prob = gumbel_softmax(logits, log_mode=False, dim=1) else: prob = torch.softmax(logits, 1) # Probability of copying p(z=1) batch. p_copy = torch.sigmoid(self.linear_copy(hidden)) # Probability of not copying: p_{word}(w) * (1 - p(z)) out_prob = torch.mul(prob, 1 - p_copy) mul_attn = torch.mul(attn, p_copy) copy_prob = torch.bmm( mul_attn.view(batch, -1, slen), src_map ) copy_prob = copy_prob.contiguous().view(-1, cvocab) return torch.cat([out_prob, copy_prob], 1) def collapse_copy_scores(scores, batch, tgt_vocab, src_vocabs=None, batch_dim=0, batch_offset=None, beam_size=1, segs_index=None): """ Given scores from an expanded dictionary corresponeding to a batch, sums together copies, with a dictionary word when it is ambiguous. 
""" offset = len(tgt_vocab) if segs_index is None: segs_index = torch.repeat_interleave(torch.arange(len(batch.ex_segs), dtype=torch.long), torch.tensor(batch.ex_segs) * beam_size, dim=0) for b in range(scores.size(batch_dim)): blank = [] fill = [] if src_vocabs is None: src_vocab = batch.src_ex_vocab[segs_index[b]] else: batch_id = batch_offset[b] if batch_offset is not None else b index = batch.indices.data[batch_id] src_vocab = src_vocabs[index] for i in range(1, len(src_vocab)): sw = src_vocab.itos[i] ti = tgt_vocab[sw] if ti != 0: blank.append(offset + i) fill.append(ti) if blank: blank = torch.tensor(blank, device=scores.device) fill = torch.tensor(fill, device=scores.device) score = scores[:, b] if batch_dim == 1 else scores[b] score.index_add_(1, fill, score.index_select(1, blank)) score.index_fill_(1, blank, 0.) return scores
[ "torch.nn.LogSoftmax", "models.neural.gumbel_softmax", "torch.cat", "torch.mul", "torch.softmax", "torch.nn.Linear", "torch.tensor", "models.neural.aeq" ]
[((262, 300), 'torch.nn.Linear', 'nn.Linear', (['dec_hidden_size', 'vocab_size'], {}), '(dec_hidden_size, vocab_size)\n', (271, 300), True, 'import torch.nn as nn\n'), ((324, 345), 'torch.nn.LogSoftmax', 'nn.LogSoftmax', ([], {'dim': '(-1)'}), '(dim=-1)\n', (337, 345), True, 'import torch.nn as nn\n'), ((1977, 2011), 'torch.nn.Linear', 'nn.Linear', (['input_size', 'output_size'], {}), '(input_size, output_size)\n', (1986, 2011), True, 'import torch.nn as nn\n'), ((2039, 2063), 'torch.nn.Linear', 'nn.Linear', (['input_size', '(1)'], {}), '(input_size, 1)\n', (2048, 2063), True, 'import torch.nn as nn\n'), ((2087, 2108), 'torch.nn.LogSoftmax', 'nn.LogSoftmax', ([], {'dim': '(-1)'}), '(dim=-1)\n', (2100, 2108), True, 'import torch.nn as nn\n'), ((2917, 2951), 'models.neural.aeq', 'aeq', (['batch_by_tlen', 'batch_by_tlen_'], {}), '(batch_by_tlen, batch_by_tlen_)\n', (2920, 2951), False, 'from models.neural import aeq\n'), ((2960, 2976), 'models.neural.aeq', 'aeq', (['slen', 'slen_'], {}), '(slen, slen_)\n', (2963, 2976), False, 'from models.neural import aeq\n'), ((3438, 3465), 'torch.mul', 'torch.mul', (['prob', '(1 - p_copy)'], {}), '(prob, 1 - p_copy)\n', (3447, 3465), False, 'import torch\n'), ((3485, 3508), 'torch.mul', 'torch.mul', (['attn', 'p_copy'], {}), '(attn, p_copy)\n', (3494, 3508), False, 'import torch\n'), ((3689, 3724), 'torch.cat', 'torch.cat', (['[out_prob, copy_prob]', '(1)'], {}), '([out_prob, copy_prob], 1)\n', (3698, 3724), False, 'import torch\n'), ((562, 607), 'models.neural.gumbel_softmax', 'gumbel_softmax', (['output'], {'log_mode': '(True)', 'dim': '(-1)'}), '(output, log_mode=True, dim=-1)\n', (576, 607), False, 'from models.neural import gumbel_softmax\n'), ((3147, 3192), 'models.neural.gumbel_softmax', 'gumbel_softmax', (['logits'], {'log_mode': '(False)', 'dim': '(1)'}), '(logits, log_mode=False, dim=1)\n', (3161, 3192), False, 'from models.neural import gumbel_softmax\n'), ((3226, 3250), 'torch.softmax', 'torch.softmax', (['logits', 
'(1)'], {}), '(logits, 1)\n', (3239, 3250), False, 'import torch\n'), ((4887, 4928), 'torch.tensor', 'torch.tensor', (['blank'], {'device': 'scores.device'}), '(blank, device=scores.device)\n', (4899, 4928), False, 'import torch\n'), ((4948, 4988), 'torch.tensor', 'torch.tensor', (['fill'], {'device': 'scores.device'}), '(fill, device=scores.device)\n', (4960, 4988), False, 'import torch\n'), ((4243, 4270), 'torch.tensor', 'torch.tensor', (['batch.ex_segs'], {}), '(batch.ex_segs)\n', (4255, 4270), False, 'import torch\n')]
import torch import torch.nn as nn import torch.optim as optim import torch.utils.data import torchvision.datasets as dset import torchvision.transforms as transforms import torchvision.utils as utils import matplotlib.pyplot as plt import numpy as np from torch.utils.tensorboard import SummaryWriter from celeba_dataset import CelebaDataset from pathlib import Path from generator import Generator, getImage, CGenerator, cDCGenerator from discriminator import Discriminator, CDiscriminator, cDCDiscriminator from sys import argv, exit, stderr from datetime import date BS = 128 # Batch size LR = 0.0002 # Learning Rate IMG_SIZE = 64 N_CLASSES = 10 def loadMnistDataset(): return torch.utils.data.DataLoader( # Load MNIST DATASET dset.MNIST( './dataset', train=True, download=True, transform=transforms.Compose([ transforms.Resize(IMG_SIZE), transforms.ToTensor(), transforms.Normalize((0.5,), (0.5,)) ]) ), batch_size=BS, shuffle=True ) class CDCTrainer(): def __init__(self, ngpu): print(torch.cuda.is_available()) device_type = "cuda:0" if torch.cuda.is_available() and ngpu >= 0 else "cpu" self.device = torch.device(device_type) self.GNet = cDCGenerator(ngpu).to(self.device) self.DNet = cDCDiscriminator(ngpu).to(self.device) print(self.device.type) if self.device.type == "cuda" and ngpu > 1: device_ids = list(range(ngpu)) self.GNet = nn.DataParallel(self.GNet, device_ids=device_ids) self.DNet = nn.DataParallel(self.DNet, device_ids=device_ids) print("GPU OK") self.GNet.init_weight() self.DNet.init_weight() self.GOpti = optim.Adam(self.GNet.parameters(), lr=LR) self.DOpti = optim.Adam(self.DNet.parameters(), lr=LR) # Adam optimizer -> Stochastic Optimization self.loss_fun = torch.nn.BCELoss() # Error calculation concerning GAN self.writter = SummaryWriter(log_dir='log/loss', comment='Training loss') # logger pour tensorboard self.fill = torch.zeros(10, 10, IMG_SIZE, IMG_SIZE, device=self.device) for i in range(0, N_CLASSES): self.fill[i, i , :, :] = 1 def __del__(self): 
self.writter.close() # Train generator model def trainGNet(self): self.GOpti.zero_grad() fake_labels = gen_fake_labels(BS, self.device) fake_labels_fill = self.fill[fake_labels] fake_imgs = self.GNet(self.createNoise(fake_labels.size(0)), fake_labels) validity = self.DNet(fake_imgs, fake_labels_fill) g_loss = self.loss_fun(validity, torch.ones(BS, 1, 1, 1, device=self.device)) g_loss.backward() self.GOpti.step() return g_loss # Train discriminator model def trainDNet(self, fake_data, fake_labels, real_data , labels): self.DOpti.zero_grad() labels_fill = self.fill[labels] fake_labels_fill = self.fill[fake_labels] # Train with real pictures real_validity = self.DNet(real_data, labels_fill) real_loss = self.loss_fun(real_validity, torch.ones(real_data.shape[0], 1, 1, 1, device=self.device)) # Train with generated pictures fake_validity = self.DNet(fake_data, fake_labels_fill) fake_loss = self.loss_fun(fake_validity, torch.zeros(BS, 1, 1, 1, device=self.device)) d_loss = real_loss + fake_loss d_loss.backward() self.DOpti.step() return { "error": d_loss, "realRes": real_validity, "fakeRes": fake_validity } def __call__(self, epoch, loader): for e in range(epoch): i = 0 for i, (batch, labels) in enumerate(loader): print("iteration = ", i) # Transform batch in order to make it use the right device and get his real size real_imgs = batch.to(self.device) s = real_imgs.size(0) # Prepare generated pictures and lables sets fake_labels = gen_fake_labels(BS, self.device) fake_imgs = self.GNet(self.createNoise(BS), fake_labels).detach() DResult = self.trainDNet(fake_imgs, fake_labels, real_imgs, labels.cuda() if torch.cuda.is_available() else labels) for j in range(0, 2) : GError = self.trainGNet() self.log(e, DResult['error'], GError) print(f"Epoch {e + 1} done", file=stderr) self.save("./models/default/" + str(date.today()) + "_g_" + str(e + 1), "./models/default/" + str(date.today()) + "_d_" + str(e + 1)) def log(self, epoch, DLoss, GLoss): print(f"epoch: {epoch}") 
print(f"Discriminator Loss : {DLoss}") print(f"Generator Loss : {GLoss}") print("==========================================") self.writter.add_scalar('Loss/Generator', GLoss, epoch) self.writter.add_scalar('Loss/Discriminator', DLoss, epoch) self.writter.add_scalars('Loss/Generator+Discriminator', { 'Generator': GLoss, 'Discriminator': DLoss }, epoch) def save(self, Gpath, Dpath): torch.save(self.GNet.state_dict(), Gpath) torch.save(self.DNet.state_dict(), Dpath) # Return a normalized vector of shape (1, BS)used as input generator def createNoise(self, n): return torch.randn(n, 100, 1, 1, device=self.device) def preprocess(self, rawData, nout): return rawData.view(rawData.size(0), nout) def reveal(self, data, i, j): return data.view(data.size(0), 1, i, j) def gen_fake_labels(n, device='cpu') : fake_labels = torch.randint(0, 10, (n,) , device=device) return (fake_labels) def loadModel(path, Model): model = Model(0) try: model.load_state_dict(torch.load(path, map_location='cpu')) except Exception as error: exit(f"Error : {path} : {error}") return model def load_and_show(path, label): GNet = loadModel(path, CGenerator) rand_tensor = torch.randn(1, 100, 1, 1) res = GNet(rand_tensor, label).squeeze() img = getImage(res) plt.imshow(img) plt.show() def make_grid(modelPath): Gnet = loadModel(modelPath, CGenerator) rand_labels = gen_fake_labels(64) rand_tensor = torch.randn(64, 100, 1, 1) output = Gnet(rand_tensor, rand_labels).squeeze() plt.figure(figsize=(8, 8)) plt.axis("off") plt.title("Generated images") grid = utils.make_grid(output, padding=2, normalize=True) image = ((grid.permute(1, 2, 0).detach().numpy())) plt.imshow(image) plt.show()
[ "matplotlib.pyplot.title", "torch.randn", "matplotlib.pyplot.figure", "torch.device", "torchvision.transforms.Normalize", "torch.ones", "torch.nn.BCELoss", "matplotlib.pyplot.imshow", "torch.load", "discriminator.cDCDiscriminator", "torch.utils.tensorboard.SummaryWriter", "torch.zeros", "tor...
[((5837, 5878), 'torch.randint', 'torch.randint', (['(0)', '(10)', '(n,)'], {'device': 'device'}), '(0, 10, (n,), device=device)\n', (5850, 5878), False, 'import torch\n'), ((6212, 6237), 'torch.randn', 'torch.randn', (['(1)', '(100)', '(1)', '(1)'], {}), '(1, 100, 1, 1)\n', (6223, 6237), False, 'import torch\n'), ((6293, 6306), 'generator.getImage', 'getImage', (['res'], {}), '(res)\n', (6301, 6306), False, 'from generator import Generator, getImage, CGenerator, cDCGenerator\n'), ((6311, 6326), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {}), '(img)\n', (6321, 6326), True, 'import matplotlib.pyplot as plt\n'), ((6331, 6341), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6339, 6341), True, 'import matplotlib.pyplot as plt\n'), ((6469, 6495), 'torch.randn', 'torch.randn', (['(64)', '(100)', '(1)', '(1)'], {}), '(64, 100, 1, 1)\n', (6480, 6495), False, 'import torch\n'), ((6554, 6580), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (6564, 6580), True, 'import matplotlib.pyplot as plt\n'), ((6585, 6600), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (6593, 6600), True, 'import matplotlib.pyplot as plt\n'), ((6605, 6634), 'matplotlib.pyplot.title', 'plt.title', (['"""Generated images"""'], {}), "('Generated images')\n", (6614, 6634), True, 'import matplotlib.pyplot as plt\n'), ((6646, 6696), 'torchvision.utils.make_grid', 'utils.make_grid', (['output'], {'padding': '(2)', 'normalize': '(True)'}), '(output, padding=2, normalize=True)\n', (6661, 6696), True, 'import torchvision.utils as utils\n'), ((6756, 6773), 'matplotlib.pyplot.imshow', 'plt.imshow', (['image'], {}), '(image)\n', (6766, 6773), True, 'import matplotlib.pyplot as plt\n'), ((6778, 6788), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6786, 6788), True, 'import matplotlib.pyplot as plt\n'), ((1289, 1314), 'torch.device', 'torch.device', (['device_type'], {}), '(device_type)\n', (1301, 1314), False, 'import 
torch\n'), ((2003, 2021), 'torch.nn.BCELoss', 'torch.nn.BCELoss', ([], {}), '()\n', (2019, 2021), False, 'import torch\n'), ((2080, 2138), 'torch.utils.tensorboard.SummaryWriter', 'SummaryWriter', ([], {'log_dir': '"""log/loss"""', 'comment': '"""Training loss"""'}), "(log_dir='log/loss', comment='Training loss')\n", (2093, 2138), False, 'from torch.utils.tensorboard import SummaryWriter\n'), ((2186, 2245), 'torch.zeros', 'torch.zeros', (['(10)', '(10)', 'IMG_SIZE', 'IMG_SIZE'], {'device': 'self.device'}), '(10, 10, IMG_SIZE, IMG_SIZE, device=self.device)\n', (2197, 2245), False, 'import torch\n'), ((5557, 5602), 'torch.randn', 'torch.randn', (['n', '(100)', '(1)', '(1)'], {'device': 'self.device'}), '(n, 100, 1, 1, device=self.device)\n', (5568, 5602), False, 'import torch\n'), ((1155, 1180), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1178, 1180), False, 'import torch\n'), ((1582, 1631), 'torch.nn.DataParallel', 'nn.DataParallel', (['self.GNet'], {'device_ids': 'device_ids'}), '(self.GNet, device_ids=device_ids)\n', (1597, 1631), True, 'import torch.nn as nn\n'), ((1656, 1705), 'torch.nn.DataParallel', 'nn.DataParallel', (['self.DNet'], {'device_ids': 'device_ids'}), '(self.DNet, device_ids=device_ids)\n', (1671, 1705), True, 'import torch.nn as nn\n'), ((2758, 2801), 'torch.ones', 'torch.ones', (['BS', '(1)', '(1)', '(1)'], {'device': 'self.device'}), '(BS, 1, 1, 1, device=self.device)\n', (2768, 2801), False, 'import torch\n'), ((3246, 3305), 'torch.ones', 'torch.ones', (['real_data.shape[0]', '(1)', '(1)', '(1)'], {'device': 'self.device'}), '(real_data.shape[0], 1, 1, 1, device=self.device)\n', (3256, 3305), False, 'import torch\n'), ((3460, 3504), 'torch.zeros', 'torch.zeros', (['BS', '(1)', '(1)', '(1)'], {'device': 'self.device'}), '(BS, 1, 1, 1, device=self.device)\n', (3471, 3504), False, 'import torch\n'), ((5994, 6030), 'torch.load', 'torch.load', (['path'], {'map_location': '"""cpu"""'}), "(path, map_location='cpu')\n", 
(6004, 6030), False, 'import torch\n'), ((6071, 6104), 'sys.exit', 'exit', (['f"""Error : {path} : {error}"""'], {}), "(f'Error : {path} : {error}')\n", (6075, 6104), False, 'from sys import argv, exit, stderr\n'), ((1216, 1241), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1239, 1241), False, 'import torch\n'), ((1336, 1354), 'generator.cDCGenerator', 'cDCGenerator', (['ngpu'], {}), '(ngpu)\n', (1348, 1354), False, 'from generator import Generator, getImage, CGenerator, cDCGenerator\n'), ((1391, 1413), 'discriminator.cDCDiscriminator', 'cDCDiscriminator', (['ngpu'], {}), '(ngpu)\n', (1407, 1413), False, 'from discriminator import Discriminator, CDiscriminator, cDCDiscriminator\n'), ((893, 920), 'torchvision.transforms.Resize', 'transforms.Resize', (['IMG_SIZE'], {}), '(IMG_SIZE)\n', (910, 920), True, 'import torchvision.transforms as transforms\n'), ((938, 959), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (957, 959), True, 'import torchvision.transforms as transforms\n'), ((977, 1013), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['(0.5,)', '(0.5,)'], {}), '((0.5,), (0.5,))\n', (997, 1013), True, 'import torchvision.transforms as transforms\n'), ((4404, 4429), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (4427, 4429), False, 'import torch\n'), ((4681, 4693), 'datetime.date.today', 'date.today', ([], {}), '()\n', (4691, 4693), False, 'from datetime import date\n'), ((4759, 4771), 'datetime.date.today', 'date.today', ([], {}), '()\n', (4769, 4771), False, 'from datetime import date\n')]
import asyncio import xplane.autopilot import xplane.io class MyProtocol(xplane.io.Protocol, xplane.autopilot.TakeoffMixin): def __init__(self, remote_addr): super().__init__(remote_addr) def got_data_packet(self, packet, address): self.take_off_got_data_packet(packet, address) def mainloop(local_addr, remote_addr, action): loop = asyncio.get_event_loop() connect = loop.create_datagram_endpoint(lambda: MyProtocol(remote_addr), local_addr=local_addr) transport, protocol = loop.run_until_complete(connect) loop.call_soon(lambda: getattr(protocol, action)()) loop.run_forever() transport.close() loop.close() def main(): from argparse import ArgumentParser parser = ArgumentParser() parser.add_argument('send_host', type=str) parser.add_argument('--send-port', '-p', type=int, default=49000) parser.add_argument('--listen-host', '-b', type=str, default='0.0.0.0') parser.add_argument('--listen-port', '-P', type=int, default=49000) parser.add_argument('action', type=str, choices=['takeoff']) args = parser.parse_args() local_addr = (args.listen_host, args.listen_port) remote_addr = (args.send_host, args.send_port) mainloop(local_addr, remote_addr, args.action)
[ "asyncio.get_event_loop", "argparse.ArgumentParser" ]
[((367, 391), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (389, 391), False, 'import asyncio\n'), ((783, 799), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (797, 799), False, 'from argparse import ArgumentParser\n')]
import asyncio import datetime import json import logging import os from collections import Counter from copy import deepcopy as dc from io import BytesIO from random import choice as randchoice from random import randrange as rrange from re import match, sub from statistics import mean as avg from time import time as timetime import aiohttp import requests from discord import Embed as discordembed from discord import File as discordfile from lzl import lzfloat, lzint, lzlist from PIL import Image as img from PIL import ImageDraw as imgdraw from PIL import ImageFont as imgfont # Set color variables BACKGROUND = (10, 10, 10) BLUE = (0, 255, 255) RED = (255, 211, 0) WHITE = (255, 255, 255) # set font variables defaultfont = imgfont.truetype("fonts/death.ttf", 70) namesfont = imgfont.truetype("fonts/lk.ttf", 70) namesfont_small = imgfont.truetype("fonts/guild.ttf", 50) systemfont = imgfont.truetype("fonts/systemtext.ttf", 35) largesystemfont = imgfont.truetype("fonts/systemtext.ttf", 60) infotext = imgfont.truetype("fonts/info.ttf", 35) # set formatted items used for items_get function formatteditems = requests.get( "https://raw.githubusercontent.com/broderickhyman/ao-bin-dumps/master/formatted/items.json" ).json() # get accurate plotting locations and names. with open("data.json") as file: data = json.load(file) # possibleprefixes is used for formatting the embed with open("possibleprefixes.json") as file: prefixes = json.load(file) # substituting useless items in the item name def substitute(name): # Crystal league tokens m = match(r"T4_TOKEN_CRYSTALLEAGUE_LVL_(\d{1,2})_S(\d{1,2})", name) # Trade missions k = match(r"QUESTITEM_CARAVAN_TRADEPACK_([A-Z]{5,8})_(LIGHT|MEDIUM|HEAVY)", name) # HCE Maps h = match(r"QUESTITEM_EXP_TOKEN_D(\d{1,2})_T\d.+", name) if m: return f"S{m.group(2)} Crystal League Token (Lvl. 
{m.group(1)})" elif k: location = { "SWAMP": "Thetford", "FOREST": "Lymhurst", "STEPPE": "Bridgewatch", "HIGHLAND": "Martlock", "MOUNTAIN": "Fort Sterling" }[k.group(1)] tier = {"LIGHT": 1, "MEDIUM": 2, "HEAVY": 3}[k.group(2)] return f"Tier {tier} {location}'s Faction Transport" elif h: return f"HCE Map (Lvl. {h.group(1)})" ls = [ "Beginner's ", "Novice's ", "Journeyman's ", "Adept's ", "Expert's ", "Master's ", "Grandmaster's ", "Elder's ", "Uncommon ", "Rare ", "Exceptional ", "Novice ", "Journeyman ", "Adept ", "Expert ", "Master ", "Grandmaster ", "Elder ", "Major ", "Minor ", "Danglemouth " ] for items in ls: name = sub(items, "", name) name = sub("Partially Full", "Half", name) return name # get tier of items def loadtier(i): tier = None try: tier = match(r"T([1-8]).*", i).group(1) enchantment = match(r".+@([1-3])", i).group(1) except AttributeError: enchantment = 0 if not tier: return "" if not enchantment: enchantment = 0 return f"[{tier}.{enchantment}]" # Return item name from given unique key def items_get(items, quality=1): try: return loadtier(items) + substitute([ i["LocalizedNames"]["EN-US"] for i in formatteditems if i["UniqueName"] == str(items) ][0]) + '{}'.format({ 0: "", 1: "(NM)", 2: "(GD)", 3: "(OT)", 4: "(EX)", 5: "(MP)" }[quality]) except Exception as e: return substitute(str(items)) # convert the transparent nodes into orange nodes like the background # Parameters: imageobj, transparent color to be converted into. def convert_to_transparent(imageobj, transparent): # convert image object into rgba format imageobj = imageobj.convert("RGBA") # get image data from the image object data = imageobj.getdata() newData = [] # loop through data items for i in data: # i[3] is the alpha channel for the image if i[3] <= 10: newData.append(transparent) else: newData.append(i) # Convert the list of data into an image. imageobj.putdata(newData) return imageobj # async function to check if itemid is a double handed weapon. 
# parameter: name -> unique key of weapon async def is_two_main_hand(name): if name is None or name == "": return False try: async with aiohttp.ClientSession( headers={"Connection": "close"}) as session: async with session.get( f"http://gameinfo.albiononline.com/api/gameinfo/items/{name}/data" ) as resp: respond = await resp.json() return respond["twoHanded"] except Exception as e: # Print error type logging.warning(f"in is_two_main_hand, {e.__class__.__name__}") return False # async function to get image from the given link async def get_image(link, item, session, quality=1, debugchannel=None, count=0): async with session.get(link + item + f".png?quality={quality}") as resp: ''' use bytesio object to load the respond conetent from online∂ -> use image module to load the image from bytesio object -> resize the image object to 180x180 size -> convert the image to transparent ''' try: tobyte = BytesIO(await resp.content.read()) tobyte.seek(0) return convert_to_transparent( img.open(tobyte).resize((180, 180), img.ANTIALIAS), BACKGROUND) except Exception as e: await asyncio.sleep(1) if debugchannel: await debugchannel.send( debugchannel.guild.owner.mention + f"{e.__class__.__name__} in get_image. 
Keys include {item}, {quality}" ) if count == 10: return False return await get_image(link, item, session, quality, debugchannel, count + 1) async def get_iw_json(items, session, count=0, sendchannel=None): # Lambda function to return the api link getlink = lambda x: "https://www.albion-online-data.com/api/v2/stats/prices/" + x try: async with session.get(getlink(items)) as resp: return await resp.json(content_type=None) # happens when the returned item is 404 error except json.decoder.JSONDecodeError: if count == 0: logging.warning("Gearworth Error {}".format(items)) if sendchannel: await sendchannel.send(sendchannel.guild.owner.mention + f" Gearworth error for {items}") if count > 5: count = 5 if count == 5: try: return requests.get(getlink(items)).json() except Exception as e: return await get_iw_json(items, session, 1, sendchannel=sendchannel) await asyncio.sleep(count) return await get_iw_json(items, session, count + 1, sendchannel=sendchannel) # Function to get the average price from Lymhurst, Martlock, Bridgewatch, FortSterling and Thetford def _getaverage(x, y): fnl = [] for i in x: if i['quality'] == y and i["sell_price_min"] != 0: fnl.append(i["sell_price_min"]) if len(fnl) == 0: fnl = [i["sell_price_min"] for i in x if i["sell_price_min"] != 0] return 0 if len(fnl) == 0 else fnl[0] if len(fnl) == 1 else avg( [i for i in fnl if i <= 3 * avg(sorted(fnl)[:-1])]) # determines gear worth async def calculate_gearworth(person, session, debugchannel=None): # initialise list of inventory and total value loi = [] total = 0 # unpack user items, get gear for position, gear in person["Equipment"].items(): # Gear is sometimes None if the user did not use the value if gear is not None: loi.append((gear["Type"], gear['Quality'])) # looping through items in counter for items, count in Counter(loi).items(): try: total += _getaverage( await get_iw_json(items[0], session, sendchannel=debugchannel), items[1], ) * count except KeyError: logging.info("Time: {0:20} KeyError: 
Item {1}".format( datetime.datetime.now().strftime("%x %X:%f"), items[0])) return total async def drawplayer(player, kav, totaldamage=0, killer=True, peoplegettingfame=0, debugchannel=None): # Base image link to load the images _baseimagelink = "https://render.albiononline.com/v1/item/" # kav is used to determine if hte player is killer, assist or victim # Create a new image playerimg = img.new("RGBA", (600, 1200), BACKGROUND) # set lambda functions to put text in the middle of width and height wmiddle = lambda x: (600 - x) / 2 hmiddle = lambda x, y, z: x + (y - z) / 2 # set drawing image for putting text drawimg = imgdraw.Draw(playerimg) # Get width and height of text width, height = drawimg.textsize(kav, font=defaultfont) # Set a text for the heading, padding of 10. drawimg.text((wmiddle(width), hmiddle(10, 50, height)), text=kav, font=defaultfont, fill=RED) # height after this starts from 65.0 width, height = drawimg.textsize(player["Name"], font=namesfont) drawimg.text((wmiddle(width), hmiddle(65, 50, height)), text=player["Name"], font=namesfont, fill=WHITE) # After this line of the text will start at height 140 # Get user guild name as shown in the game fullguildname = "{0}{1}".format(player["AllianceName"], player["GuildName"]) # Get the width and height of the guild name in text width, height = drawimg.textsize(fullguildname, font=namesfont_small) drawimg.text((wmiddle(width), hmiddle(150, 25, height)), text=fullguildname, font=namesfont_small, fill=BLUE) # set a variable for easy access equipments = player["Equipment"] """ File structure for data.json: [itemname, photolocation, textspace] itemname: UNIQUE_NAME key for the using item, can be used as a key to look for the image online from the database photolocation: location data on the image, it is a 2 point tuple that helps determine the x and y value of the upper left corner of the photo textspace: location data for the count of the item. 
Usually only useful in potion slot and food slot, used to determine the count of the item. """ async with aiohttp.ClientSession( headers={"Connection": "keep-alive"}) as session: # unpacks the data for item, imgspace, textspace in data: # check if the item exists if equipments[item]: # downloads image loadingimg = await get_image(_baseimagelink, equipments[item]["Type"], session, equipments[item]["Quality"], debugchannel) if loadingimg == False: return False # puts the image on the background using the given data playerimg.paste(loadingimg, imgspace) # put the count on the pasted image using the given data drawimg.text(textspace, text=str(equipments[item]["Count"]), font=systemfont, fill=WHITE) # Check if user is using a two-handed weapon try: twohand = await is_two_main_hand(equipments["MainHand"]["Type"]) except (AttributeError, TypeError) as e: twohand = False if twohand and equipments["MainHand"]: # downloads the image again from the database async with aiohttp.ClientSession( headers={"Connection": "close"}) as session: content = await get_image(_baseimagelink, equipments["MainHand"]["Type"], session, equipments["MainHand"]["Count"], debugchannel=debugchannel) # make the image transparent content.putalpha(100) playerimg.paste(content, (400, 380)) # provides the count drawimg.text((533, 490), text=str(equipments["MainHand"]["Count"]), font=systemfont, fill=WHITE) # Calculate their gear worth async with aiohttp.ClientSession( headers={"Connection": "close"}) as session: gearworth = await calculate_gearworth(player, session, debugchannel) # Set IP width, height = drawimg.textsize("IP: {}".format( round(player["AverageItemPower"], 2)), font=largesystemfont) drawimg.text((wmiddle(width), 930), "IP: {}".format(round(player["AverageItemPower"], 2)), font=largesystemfont, fill=WHITE) if killer: damageline = "Damage done:\n{}%[{}/{}]".format( lzfloat(player["DamageDone"] / totaldamage * 100).round_sf(4), round(int(player["DamageDone"])), totaldamage) else: damageline = 
"Death Fame: {} [{}]\n ={}/particiant".format( player["DeathFame"], peoplegettingfame, player["DeathFame"] // peoplegettingfame) width, height = drawimg.textsize(damageline, font=infotext) # Both death fame and the damage done are multiline texts. drawimg.multiline_text((wmiddle(width), hmiddle(1000, 70, height)), damageline, font=infotext, fill=WHITE) # Convert the gear worth into integer and round the gear worth to 5 signifacant figures gearworthline = "Estimated Gear Worth: {:,}".format( lzint(gearworth).round_sf(5)) width, height = drawimg.textsize(gearworthline, font=infotext) # Set gear worth drawimg.text((wmiddle(width), hmiddle(1120, 40, height)), gearworthline, font=infotext, fill=(RED if gearworth >= 1000000 else WHITE)) return playerimg class kill: def __init__(self, kd, debugchannel=None): """ Usage: variable = kill(kill json item) """ self.debugchannel = debugchannel self.starttime = timetime() kd = dc(kd) self.kd = kd self.killer = kd["Killer"] # Track killer for i in kd["Participants"]: if i["Id"] == kd["Killer"]["Id"]: self.killer = dc(i) break try: if not self.killer["DamageDone"]: self.killer["DamageDone"] = 0 except KeyError: self.killer["DamageDone"] = 0 # track victim self.victim = kd["Victim"] # Get the people who did the most damage try: self.assist = sorted( [i for i in kd["Participants"] if i["DamageDone"] > 0], key=lambda x: x["DamageDone"], reverse=True)[0] # Happens when the amount of participants is less than 1(even though I don't know how did it happen) except IndexError: self.assist = dc(self.killer) # Set type of solo kill or group kill # Is used to show if 3 people is shown on the final kill or 2 self.solokill = (self.killer["Id"] == self.assist["Id"]) # Set alliance names to the one similar in game if self.killer["AllianceName"]: self.killer["AllianceName"] = "[{}]".format( self.killer["AllianceName"]) if self.assist["AllianceName"]: self.assist["AllianceName"] = "[{}]".format( self.assist["AllianceName"]) if 
self.victim["AllianceName"]: self.victim["AllianceName"] = "[{}]".format( self.victim["AllianceName"]) # Set victim guild if victim does not have a guild if not self.killer["GuildName"]: self.killer["GuildName"] = "- - - - -" if not self.assist["GuildName"]: self.assist["GuildName"] = "- - - - -" if not self.victim["GuildName"]: self.victim["GuildName"] = "- - - - -" self.totaldamage = int( sum([i["DamageDone"] for i in kd["Participants"]])) self.peoplegettingfame = len( [i for i in kd["GroupMembers"] if i["KillFame"] > 0]) # Get the list of participants that dealt damage self.participants = sorted( [i for i in kd["Participants"] if i["DamageDone"] != 0], key=lambda x: x["DamageDone"], reverse=True) for i in self.participants: if i["AllianceName"] and not match(r"\[.*\]", i["AllianceName"]): i["AllianceName"] = "[{}]".format(i["AllianceName"]) self.eventid = kd["EventId"] # Use regex and datetime module to get the time of killing in UTC dt = match( r"(\d{4})\-(\d{2})\-(\d{2})T(\d{2})\:(\d{2})\:(\d{2}\:*)\.(\d+)Z", kd["TimeStamp"]) self.eventtime = datetime.datetime(int(dt.group(1)), int(dt.group(2)), int(dt.group(3)), int(dt.group(4)), int(dt.group(5)), int(dt.group(6)), int(dt.group(7)[:6])) if self.peoplegettingfame == 0: self.peoplegettingfame = len(kd["GroupMembers"]) logging.warning("Peoplegetting fame error: {}".format(self.eventid)) if self.totaldamage == 0: self.totaldamage = 100 logging.warning("totaldamage error: {}".format(self.eventid)) self.gw = 0 # Function to draw a whole set of gear on a blank template async def draw(self): background = img.new("RGBA", (1800, 1200), BACKGROUND) killer_pic = False victim_pic = False while int(bool(killer_pic)) + int(bool(victim_pic)) < 2: # load pictures for each player killer_pic = await drawplayer(self.killer, "Killer", totaldamage=self.totaldamage, killer=True, debugchannel=self.debugchannel) victim_pic = await drawplayer( self.victim, "Victim", killer=False, peoplegettingfame=self.peoplegettingfame, 
debugchannel=self.debugchannel) if self.solokill: background.paste(killer_pic, (150, 0)) background.paste(victim_pic, (1050, 0)) else: assist_pic = False while bool(assist_pic) != True: assist_pic = await drawplayer(self.assist, "Assist", killer=True, totaldamage=self.totaldamage, debugchannel=self.debugchannel) background.paste(killer_pic, (0, 0)) background.paste(assist_pic, (600, 0)) background.paste(victim_pic, (1200, 0)) self.fileloc = f"temp/{self.eventid}.png" background.save(self.fileloc, "png") # returns gear worth async with aiohttp.ClientSession( headers={"Connection": "close"}) as session: self.gw = round(await calculate_gearworth(self.victim, session, self.debugchannel)) self.inv = await self.inventory() return background # returns a tuple of 3 values, [kill/assist] name, guild(allliance) and damage[percentage] @property def assists(self): # list of names for participants fn = [i["Name"] for i in self.participants] # List of guild naems guild = [(i["AllianceName"] + i["GuildName"] if i["GuildName"] else "- - - - -") for i in self.participants] # list of damage/percent of total damage perc = [ "{:4}[{}%]".format( round(i["DamageDone"]), round(i['DamageDone'] / self.totaldamage * 100, 2)) for i in self.participants ] # return joins return ("\n".join(fn), "\n".join(guild), "\n".join(perc)) def gettype(self, iskiller=False, isvictim=False, isassist=False): if (iskiller or isassist) and isvictim: useitem = prefixes["ffire"] color = 0xae00ff elif self.solokill and len(self.participants) == 1 and iskiller: useitem = prefixes["solo"] color = 0x00ff00 if self.gw <= 2500000 else 0xfa77aa elif (iskiller or isassist) and self.gw > 2500000: useitem = prefixes["juicyk"] color = 0xfa77aa elif iskiller: useitem = prefixes["kill"] color = 0x00ff00 elif isassist: useitem = prefixes["assist"] color = 0x00ff00 elif isvictim and self.gw > 2500000: useitem = prefixes["juicyd"] color = 0x3131b2 elif isvictim: useitem = prefixes["death"] color = 0xd42f2f else: useitem = 
prefixes["juicy"] color = 0x00ffff return ( f"{self.victim['Name']} was killed by {self.killer['Name']} for {self.victim['DeathFame']} kill fame" if isvictim else f"{self.killer['Name']} killed {self.victim['Name']} for {self.victim['DeathFame']} kill fame. :{randchoice(useitem['emoji'])}:", f"{randchoice(useitem['choices'])}", color) async def inventory(self): stuff = [] async with aiohttp.ClientSession( headers={"Connection": "close"}) as session: for i in [j for j in self.victim["Inventory"] if j is not None]: itemworth = _getaverage( await get_iw_json(i["Type"], session, sendchannel=self.debugchannel), i["Quality"]) stuff.append( (items_get(i["Type"], i["Quality"]), int(i["Count"]), int(itemworth))) for i in stuff: self.gw += i[2] * i[1] sortedstuff = sorted(stuff, key=lambda x: x[2], reverse=True) rs = lambda x, y: "\n".join([str(i[int(x)]) for i in tuple(y)]) if any(len(rs(0, sortedstuff)) > 1024 for x in range(0, 3)): s0, s1 = (lzlist(sortedstuff).split_to(2)) return (rs(0, s0), rs(1, s0), rs(2, s0), rs(0, s1), rs(1, s1), rs(2, s1), True) return (rs(0, sortedstuff), rs(1, sortedstuff), rs(2, sortedstuff), "", "", "", False) def create_embed(self, followinglists): self.file = discordfile(self.fileloc, filename=f"{self.eventid}.png") # find kill type iskiller = self.killer["Id"] in followinglists or self.killer[ "GuildId"] in followinglists isvictim = self.victim["Id"] in followinglists or self.victim[ "GuildId"] in followinglists isassist = False for i in [i for i in self.kd["Participants"] if i["DamageDone"] > 0]: if i["Id"] in followinglists or i["GuildId"] in followinglists: isassist = True localtitle, localdescription, color = self.gettype( iskiller, isvictim, isassist) # Create discord embed object self.embed = None self.embed = discordembed( title=localtitle, url=f"https://albiononline.com/en/killboard/kill/{self.eventid}", description=localdescription + "!" 
* rrange(1, 3), color=color, timestamp=self.eventtime) # derives image link from eventid as uploads are done in draw() function self.embed.set_image(url=f"attachment://{self.eventid}.png") self.embed.set_footer(text="Local Kill time: ") # get an assist list self.assistlist = self.assists # This step may encounter an error where no one dealt damage # These two lines fixes the output and prevent httperror where value is None if self.assistlist == ("", "", ""): # Forcibly set assistlist to a tuple self.assistlist = (self.killer["Name"], self.killer["GuildName"], "100[100%]") # Add in values for the embed self.embed.add_field(name="Killers", value=self.assistlist[0]) self.embed.add_field(name="Guild", value=self.assistlist[1], inline=True) self.embed.add_field(name="Damage", value=self.assistlist[2], inline=True) # check if victim's inventory is empty if len([i for i in self.victim["Inventory"] if i is not None]) > 0: i0, c0, v0, i1, c1, v1, lis2 = self.inv # adds embed field for victim's inventory self.embed.add_field(name="Victim's Inventory:", value=i0, inline=True) self.embed.add_field(name="Amount", value=c0, inline=True) self.embed.add_field(name="Worth est.", value=v0, inline=True) if lis2: self.embed.add_field(name="Inventory", value=i1, inline=True) self.embed.add_field(name="Amount", value=c1, inline=True) self.embed.add_field(name="Worth est.", value=v1, inline=True) # adds embed field for the total gear worth. self.embed.add_field(name="Estimated Victim's Total Worth:", value="{:,}".format(self.gw), inline=False) ''' returns two items: self.embed: the embed file to be sent self.file: the file object that has to be sent together with the embed ''' return (self.embed, self.file)
[ "PIL.Image.new", "aiohttp.ClientSession", "lzl.lzfloat", "discord.File", "logging.warning", "requests.get", "PIL.ImageDraw.Draw", "collections.Counter", "re.sub", "lzl.lzlist", "datetime.datetime.now", "copy.deepcopy", "asyncio.sleep", "re.match", "lzl.lzint", "json.load", "random.ch...
[((735, 774), 'PIL.ImageFont.truetype', 'imgfont.truetype', (['"""fonts/death.ttf"""', '(70)'], {}), "('fonts/death.ttf', 70)\n", (751, 774), True, 'from PIL import ImageFont as imgfont\n'), ((787, 823), 'PIL.ImageFont.truetype', 'imgfont.truetype', (['"""fonts/lk.ttf"""', '(70)'], {}), "('fonts/lk.ttf', 70)\n", (803, 823), True, 'from PIL import ImageFont as imgfont\n'), ((842, 881), 'PIL.ImageFont.truetype', 'imgfont.truetype', (['"""fonts/guild.ttf"""', '(50)'], {}), "('fonts/guild.ttf', 50)\n", (858, 881), True, 'from PIL import ImageFont as imgfont\n'), ((895, 939), 'PIL.ImageFont.truetype', 'imgfont.truetype', (['"""fonts/systemtext.ttf"""', '(35)'], {}), "('fonts/systemtext.ttf', 35)\n", (911, 939), True, 'from PIL import ImageFont as imgfont\n'), ((958, 1002), 'PIL.ImageFont.truetype', 'imgfont.truetype', (['"""fonts/systemtext.ttf"""', '(60)'], {}), "('fonts/systemtext.ttf', 60)\n", (974, 1002), True, 'from PIL import ImageFont as imgfont\n'), ((1014, 1052), 'PIL.ImageFont.truetype', 'imgfont.truetype', (['"""fonts/info.ttf"""', '(35)'], {}), "('fonts/info.ttf', 35)\n", (1030, 1052), True, 'from PIL import ImageFont as imgfont\n'), ((1329, 1344), 'json.load', 'json.load', (['file'], {}), '(file)\n', (1338, 1344), False, 'import json\n'), ((1457, 1472), 'json.load', 'json.load', (['file'], {}), '(file)\n', (1466, 1472), False, 'import json\n'), ((1579, 1643), 're.match', 'match', (['"""T4_TOKEN_CRYSTALLEAGUE_LVL_(\\\\d{1,2})_S(\\\\d{1,2})"""', 'name'], {}), "('T4_TOKEN_CRYSTALLEAGUE_LVL_(\\\\d{1,2})_S(\\\\d{1,2})', name)\n", (1584, 1643), False, 'from re import match, sub\n'), ((1672, 1748), 're.match', 'match', (['"""QUESTITEM_CARAVAN_TRADEPACK_([A-Z]{5,8})_(LIGHT|MEDIUM|HEAVY)"""', 'name'], {}), "('QUESTITEM_CARAVAN_TRADEPACK_([A-Z]{5,8})_(LIGHT|MEDIUM|HEAVY)', name)\n", (1677, 1748), False, 'from re import match, sub\n'), ((1787, 1840), 're.match', 'match', (['"""QUESTITEM_EXP_TOKEN_D(\\\\d{1,2})_T\\\\d.+"""', 'name'], {}), 
"('QUESTITEM_EXP_TOKEN_D(\\\\d{1,2})_T\\\\d.+', name)\n", (1792, 1840), False, 'from re import match, sub\n'), ((2729, 2764), 're.sub', 'sub', (['"""Partially Full"""', '"""Half"""', 'name'], {}), "('Partially Full', 'Half', name)\n", (2732, 2764), False, 'from re import match, sub\n'), ((9183, 9223), 'PIL.Image.new', 'img.new', (['"""RGBA"""', '(600, 1200)', 'BACKGROUND'], {}), "('RGBA', (600, 1200), BACKGROUND)\n", (9190, 9223), True, 'from PIL import Image as img\n'), ((9436, 9459), 'PIL.ImageDraw.Draw', 'imgdraw.Draw', (['playerimg'], {}), '(playerimg)\n', (9448, 9459), True, 'from PIL import ImageDraw as imgdraw\n'), ((1121, 1236), 'requests.get', 'requests.get', (['"""https://raw.githubusercontent.com/broderickhyman/ao-bin-dumps/master/formatted/items.json"""'], {}), "(\n 'https://raw.githubusercontent.com/broderickhyman/ao-bin-dumps/master/formatted/items.json'\n )\n", (1133, 1236), False, 'import requests\n'), ((2697, 2717), 're.sub', 'sub', (['items', '""""""', 'name'], {}), "(items, '', name)\n", (2700, 2717), False, 'from re import match, sub\n'), ((11097, 11156), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {'headers': "{'Connection': 'keep-alive'}"}), "(headers={'Connection': 'keep-alive'})\n", (11118, 11156), False, 'import aiohttp\n'), ((13174, 13228), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {'headers': "{'Connection': 'close'}"}), "(headers={'Connection': 'close'})\n", (13195, 13228), False, 'import aiohttp\n'), ((15086, 15096), 'time.time', 'timetime', ([], {}), '()\n', (15094, 15096), True, 'from time import time as timetime\n'), ((15110, 15116), 'copy.deepcopy', 'dc', (['kd'], {}), '(kd)\n', (15112, 15116), True, 'from copy import deepcopy as dc\n'), ((17729, 17840), 're.match', 'match', (['"""(\\\\d{4})\\\\-(\\\\d{2})\\\\-(\\\\d{2})T(\\\\d{2})\\\\:(\\\\d{2})\\\\:(\\\\d{2}\\\\:*)\\\\.(\\\\d+)Z"""', "kd['TimeStamp']"], {}), "(\n 
'(\\\\d{4})\\\\-(\\\\d{2})\\\\-(\\\\d{2})T(\\\\d{2})\\\\:(\\\\d{2})\\\\:(\\\\d{2}\\\\:*)\\\\.(\\\\d+)Z'\n , kd['TimeStamp'])\n", (17734, 17840), False, 'from re import match, sub\n'), ((18602, 18643), 'PIL.Image.new', 'img.new', (['"""RGBA"""', '(1800, 1200)', 'BACKGROUND'], {}), "('RGBA', (1800, 1200), BACKGROUND)\n", (18609, 18643), True, 'from PIL import Image as img\n'), ((23804, 23861), 'discord.File', 'discordfile', (['self.fileloc'], {'filename': 'f"""{self.eventid}.png"""'}), "(self.fileloc, filename=f'{self.eventid}.png')\n", (23815, 23861), True, 'from discord import File as discordfile\n'), ((4508, 4562), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {'headers': "{'Connection': 'close'}"}), "(headers={'Connection': 'close'})\n", (4529, 4562), False, 'import aiohttp\n'), ((4888, 4951), 'logging.warning', 'logging.warning', (['f"""in is_two_main_hand, {e.__class__.__name__}"""'], {}), "(f'in is_two_main_hand, {e.__class__.__name__}')\n", (4903, 4951), False, 'import logging\n'), ((8374, 8386), 'collections.Counter', 'Counter', (['loi'], {}), '(loi)\n', (8381, 8386), False, 'from collections import Counter\n'), ((12418, 12472), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {'headers': "{'Connection': 'close'}"}), "(headers={'Connection': 'close'})\n", (12439, 12472), False, 'import aiohttp\n'), ((20190, 20244), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {'headers': "{'Connection': 'close'}"}), "(headers={'Connection': 'close'})\n", (20211, 20244), False, 'import aiohttp\n'), ((22637, 22691), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {'headers': "{'Connection': 'close'}"}), "(headers={'Connection': 'close'})\n", (22658, 22691), False, 'import aiohttp\n'), ((2860, 2882), 're.match', 'match', (['"""T([1-8]).*"""', 'i'], {}), "('T([1-8]).*', i)\n", (2865, 2882), False, 'from re import match, sub\n'), ((2915, 2937), 're.match', 'match', (['""".+@([1-3])"""', 'i'], {}), "('.+@([1-3])', i)\n", (2920, 2937), False, 'from 
re import match, sub\n'), ((7211, 7231), 'asyncio.sleep', 'asyncio.sleep', (['count'], {}), '(count)\n', (7224, 7231), False, 'import asyncio\n'), ((14549, 14565), 'lzl.lzint', 'lzint', (['gearworth'], {}), '(gearworth)\n', (14554, 14565), False, 'from lzl import lzfloat, lzint, lzlist\n'), ((15309, 15314), 'copy.deepcopy', 'dc', (['i'], {}), '(i)\n', (15311, 15314), True, 'from copy import deepcopy as dc\n'), ((15977, 15992), 'copy.deepcopy', 'dc', (['self.killer'], {}), '(self.killer)\n', (15979, 15992), True, 'from copy import deepcopy as dc\n'), ((5709, 5725), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (5722, 5725), False, 'import asyncio\n'), ((13767, 13816), 'lzl.lzfloat', 'lzfloat', (["(player['DamageDone'] / totaldamage * 100)"], {}), "(player['DamageDone'] / totaldamage * 100)\n", (13774, 13816), False, 'from lzl import lzfloat, lzint, lzlist\n'), ((17499, 17535), 're.match', 'match', (['"""\\\\[.*\\\\]"""', "i['AllianceName']"], {}), "('\\\\[.*\\\\]', i['AllianceName'])\n", (17504, 17535), False, 'from re import match, sub\n'), ((22526, 22556), 'random.choice', 'randchoice', (["useitem['choices']"], {}), "(useitem['choices'])\n", (22536, 22556), True, 'from random import choice as randchoice\n'), ((23483, 23502), 'lzl.lzlist', 'lzlist', (['sortedstuff'], {}), '(sortedstuff)\n', (23489, 23502), False, 'from lzl import lzfloat, lzint, lzlist\n'), ((5596, 5612), 'PIL.Image.open', 'img.open', (['tobyte'], {}), '(tobyte)\n', (5604, 5612), True, 'from PIL import Image as img\n'), ((22478, 22506), 'random.choice', 'randchoice', (["useitem['emoji']"], {}), "(useitem['emoji'])\n", (22488, 22506), True, 'from random import choice as randchoice\n'), ((24680, 24692), 'random.randrange', 'rrange', (['(1)', '(3)'], {}), '(1, 3)\n', (24686, 24692), True, 'from random import randrange as rrange\n'), ((8679, 8702), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (8700, 8702), False, 'import datetime\n')]
""" Variant of the base class with parallelized, pipelined, and vectorized operations. The technique pertaining to convolution was reused from https://stackoverflow.com/a/36968434. Try executing this program with (the other variants will crawl): python3 conway_v3.py --board-size 160 --interval 20 --configuration patterns/garden-of-eden.cells 30 30 """ import numpy as np import dask.array as da from scipy.ndimage import convolve from conway_base import Cell, ConwayBase class ConwayV3(ConwayBase): def create_buffers(self): self.board = da.from_array(self.board, chunks=("auto", "auto")) self._mask = np.ones((3, 3)) self._mask[1, 1] = 0 def _process_cell(self, block, block_id=None): rows, cols = block.shape start_row = block_id[0] * rows start_col = block_id[1] * cols # We presume that this slicing will fit into memory. board_slice = self.board[start_row:(start_row + rows), start_col:(start_col + cols)].compute() # Apply the rules of the game. block[np.logical_or(block < 2, block > 3)] = Cell.DEAD block[block == 3] = Cell.LIVE block[block == 2] = board_slice[block == 2] return block def prepare_next_board(self, steps): for _ in range(steps): num_live_neighbors = self.board.map_overlap(convolve, depth=1, boundary='none', weights=self._mask, mode='constant', cval=0) next_board = num_live_neighbors.map_blocks(self._process_cell, dtype=np.int).compute() self.board = da.from_array(next_board, chunks=("auto", "auto")) return next_board if __name__ == '__main__': game = ConwayV3(ConwayV3.parse_command_line_args()) game.simulate()
[ "dask.array.from_array", "numpy.logical_or", "numpy.ones" ]
[((557, 607), 'dask.array.from_array', 'da.from_array', (['self.board'], {'chunks': "('auto', 'auto')"}), "(self.board, chunks=('auto', 'auto'))\n", (570, 607), True, 'import dask.array as da\n'), ((629, 644), 'numpy.ones', 'np.ones', (['(3, 3)'], {}), '((3, 3))\n', (636, 644), True, 'import numpy as np\n'), ((1055, 1090), 'numpy.logical_or', 'np.logical_or', (['(block < 2)', '(block > 3)'], {}), '(block < 2, block > 3)\n', (1068, 1090), True, 'import numpy as np\n'), ((1606, 1656), 'dask.array.from_array', 'da.from_array', (['next_board'], {'chunks': "('auto', 'auto')"}), "(next_board, chunks=('auto', 'auto'))\n", (1619, 1656), True, 'import dask.array as da\n')]
import unittest
from unittest.mock import MagicMock, patch

from oil.barrels.aws import AutoScalingBarrel


class AutoScalingBarrelTestCase(unittest.TestCase):
    def client_mock(self, fixture):
        """Build a fake boto3 client whose paginator yields ``fixture`` pages."""
        fake_paginator = MagicMock()
        fake_paginator.paginate.return_value = fixture
        fake_client = MagicMock()
        fake_client.get_paginator.return_value = fake_paginator
        return fake_client

    def test_has_correct_supported_regions(self):
        expected_regions = {
            'us-east-2',
            'us-east-1',
            'us-west-2',
            'us-west-1',
            'ap-northeast-1',
            'ap-northeast-2',
            'ap-south-1',
            'ap-southheast-1',
            'ap-southheast-2',
            'ca-central-1',
            'cn-north-1',
            'cn-northwest-1',
            'eu-central-1',
            'eu-west-1',
            'eu-west-2',
            'eu-west-3',
            'sa-east-1',
        }
        barrel = AutoScalingBarrel({}, clients={})
        self.assertEqual(expected_regions, barrel.supported_regions)

    @patch("boto3.client")
    def test_default_clients(self, mock_client):
        mock_client.return_value = MagicMock()
        barrel = AutoScalingBarrel({})
        # Every auto-created client must belong to a supported region.
        for region in barrel.clients:
            self.assertIn(region, barrel.supported_regions)

    def test_tap_functions_with_describe_auto_scaling_groups(self):
        pages = [
            {
                'AutoScalingGroups': [
                    {'AutoScalingGroupName': 'a_group'},
                ],
            },
        ]
        barrel = AutoScalingBarrel(
            {}, clients={'us-east-1': self.client_mock(pages)})
        # tap() must dispatch to the same implementation as the direct call.
        via_tap = barrel.tap('describe_auto_scaling_groups')
        via_method = barrel.describe_auto_scaling_groups()
        self.assertEqual(via_method, via_tap)

    def test_tap_throws_error_with_unsupported_call(self):
        barrel = AutoScalingBarrel({})
        self.assertRaises(RuntimeError, barrel.tap, 'unsupported_call')

    def test_auto_scaling_returns_groups_by_region(self):
        east1_pages = [
            {
                'AutoScalingGroups': [
                    {'AutoScalingGroupName': 'a_group'},
                ],
            },
        ]
        east2_pages = [
            {
                'AutoScalingGroups': [
                    {'AutoScalingGroupName': 'a_group_2'},
                ],
            },
        ]
        barrel = AutoScalingBarrel({}, clients={
            'us-east-1': self.client_mock(east1_pages),
            'us-east-2': self.client_mock(east2_pages),
        })
        expected = {
            'us-east-1': [{'AutoScalingGroupName': 'a_group'}],
            'us-east-2': [{'AutoScalingGroupName': 'a_group_2'}],
        }
        self.assertEqual(barrel.describe_auto_scaling_groups(), expected)

    def test_describe_auto_scaling_groups_empty(self):
        # A page with no groups must still yield an (empty) entry for the region.
        empty_pages = [
            {
                'AutoScalingGroups': [],
            },
        ]
        barrel = AutoScalingBarrel(
            {}, clients={'us-east-1': self.client_mock(empty_pages)})
        self.assertEqual(barrel.describe_auto_scaling_groups(),
                         {'us-east-1': []})
[ "unittest.mock.patch", "oil.barrels.aws.AutoScalingBarrel", "unittest.mock.MagicMock" ]
[((1113, 1134), 'unittest.mock.patch', 'patch', (['"""boto3.client"""'], {}), "('boto3.client')\n", (1118, 1134), False, 'from unittest.mock import MagicMock, patch\n'), ((212, 223), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (221, 223), False, 'from unittest.mock import MagicMock, patch\n'), ((244, 255), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (253, 255), False, 'from unittest.mock import MagicMock, patch\n'), ((1003, 1036), 'oil.barrels.aws.AutoScalingBarrel', 'AutoScalingBarrel', (['{}'], {'clients': '{}'}), '({}, clients={})\n', (1020, 1036), False, 'from oil.barrels.aws import AutoScalingBarrel\n'), ((1219, 1230), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1228, 1230), False, 'from unittest.mock import MagicMock, patch\n'), ((1248, 1269), 'oil.barrels.aws.AutoScalingBarrel', 'AutoScalingBarrel', (['{}'], {}), '({})\n', (1265, 1269), False, 'from oil.barrels.aws import AutoScalingBarrel\n'), ((1770, 1808), 'oil.barrels.aws.AutoScalingBarrel', 'AutoScalingBarrel', (['{}'], {'clients': 'clients'}), '({}, clients=clients)\n', (1787, 1808), False, 'from oil.barrels.aws import AutoScalingBarrel\n'), ((2077, 2098), 'oil.barrels.aws.AutoScalingBarrel', 'AutoScalingBarrel', (['{}'], {}), '({})\n', (2094, 2098), False, 'from oil.barrels.aws import AutoScalingBarrel\n'), ((2845, 2883), 'oil.barrels.aws.AutoScalingBarrel', 'AutoScalingBarrel', (['{}'], {'clients': 'clients'}), '({}, clients=clients)\n', (2862, 2883), False, 'from oil.barrels.aws import AutoScalingBarrel\n'), ((3581, 3619), 'oil.barrels.aws.AutoScalingBarrel', 'AutoScalingBarrel', (['{}'], {'clients': 'clients'}), '({}, clients=clients)\n', (3598, 3619), False, 'from oil.barrels.aws import AutoScalingBarrel\n')]
import manifests

# Load the deployment manifest and report its name.
deployment = manifests.read("deploy.yml")
print(deployment.name)
[ "manifests.read" ]
[((22, 50), 'manifests.read', 'manifests.read', (['"""deploy.yml"""'], {}), "('deploy.yml')\n", (36, 50), False, 'import manifests\n')]
from math import log2, ceil


def find(gen, arr):
    """Double the label list gen-1 times: prefix '0' to every entry and '1'
    to every entry of the reversed list (Gray-code-style expansion)."""
    while gen > 1:
        arr = ["0" + s for s in arr] + ["1" + s for s in reversed(arr)]
        gen -= 1
    return arr


R, C = map(int, input().split())
# ceil(log2(n+1)) doublings give enough distinct labels for n rows/columns.
rows = find(ceil(log2(R + 1)), ["0", "1"])
cols = find(ceil(log2(C + 1)), ["0", "1"])
for i in range(R):
    for j in range(C):
        # Cell value = row bits concatenated with column bits, read as binary.
        print(int(rows[i] + cols[j], 2), end=" ")
    print()
[ "math.log2" ]
[((284, 295), 'math.log2', 'log2', (['(R + 1)'], {}), '(R + 1)\n', (288, 295), False, 'from math import log2, ceil\n'), ((325, 336), 'math.log2', 'log2', (['(C + 1)'], {}), '(C + 1)\n', (329, 336), False, 'from math import log2, ceil\n')]
""" created by chen 该模块用于密码加密 基本思路为 : 调用 hashlib 库中的 md5 加密方法, 将用户密码 与 专门的 key 进行 update 运算 返回一个 32 位 长的密码 """ import hashlib import sys sys.path.append("..") from config import SECRET_KEY def encryption(pwd): """ 用于加密的函数 :param pwd: 用户密码 (str) :return: 32位长度字符串 """ pwd = str(pwd) password = hashlib.md5(pwd.encode()) password.update(SECRET_KEY['KEY']) # 第三次加密 password.update(SECRET_KEY['KEY2']) return password.hexdigest() if __name__ == '__main__': print(encryption("a")) print(encryption(123)) print(encryption(None))
[ "sys.path.append" ]
[((165, 186), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (180, 186), False, 'import sys\n')]
from app import db
from app.dao.dao_utils import autocommit
from app.models import EmailBranding


def dao_get_email_branding_options():
    """Return every EmailBranding row."""
    return EmailBranding.query.all()


def dao_get_email_branding_by_id(email_branding_id):
    """Fetch one branding by primary key; raises if absent or ambiguous."""
    return EmailBranding.query.filter_by(id=email_branding_id).one()


def dao_get_email_branding_by_name(email_branding_name):
    """Fetch a branding by name, or None when no row matches."""
    return EmailBranding.query.filter_by(name=email_branding_name).first()


@autocommit
def dao_create_email_branding(email_branding):
    """Stage a new branding row; @autocommit commits the session."""
    db.session.add(email_branding)


@autocommit
def dao_update_email_branding(email_branding, **kwargs):
    """Overwrite the given attributes, coercing falsy values to NULL."""
    for attribute, new_value in kwargs.items():
        setattr(email_branding, attribute, new_value or None)
    db.session.add(email_branding)
[ "app.models.EmailBranding.query.filter_by", "app.models.EmailBranding.query.all", "app.db.session.add" ]
[((148, 173), 'app.models.EmailBranding.query.all', 'EmailBranding.query.all', ([], {}), '()\n', (171, 173), False, 'from app.models import EmailBranding\n'), ((497, 527), 'app.db.session.add', 'db.session.add', (['email_branding'], {}), '(email_branding)\n', (511, 527), False, 'from app import db\n'), ((693, 723), 'app.db.session.add', 'db.session.add', (['email_branding'], {}), '(email_branding)\n', (707, 723), False, 'from app import db\n'), ((240, 291), 'app.models.EmailBranding.query.filter_by', 'EmailBranding.query.filter_by', ([], {'id': 'email_branding_id'}), '(id=email_branding_id)\n', (269, 291), False, 'from app.models import EmailBranding\n'), ((368, 423), 'app.models.EmailBranding.query.filter_by', 'EmailBranding.query.filter_by', ([], {'name': 'email_branding_name'}), '(name=email_branding_name)\n', (397, 423), False, 'from app.models import EmailBranding\n')]
from django.shortcuts import render
from django.utils.safestring import mark_safe
from django.http import HttpResponse
import json

# In-memory registry of aliases currently in use (lost on process restart,
# and not shared between worker processes).
alias_set = set()


def home(request):
    """Render the landing page."""
    return render(request, 'home.html', {})


def room(request, room_name, user):
    """Render a chat room, exposing the user's alias to the template as JSON.

    NOTE(review): mark_safe on json.dumps output can allow script injection
    when ``user`` is attacker-controlled (e.g. contains "</script>") — confirm
    how the template embeds it.
    """
    context = {'alias': mark_safe(json.dumps(user))}
    return render(request, 'room.html', context)


def check_alias(request, alias):
    """Claim an alias: 400 if already taken, otherwise register it and 200."""
    print(alias_set)
    if alias in alias_set:
        return HttpResponse(status=400)
    alias_set.add(alias)
    return HttpResponse(status=200)
[ "django.shortcuts.render", "django.http.HttpResponse", "json.dumps" ]
[((180, 212), 'django.shortcuts.render', 'render', (['request', '"""home.html"""', '{}'], {}), "(request, 'home.html', {})\n", (186, 212), False, 'from django.shortcuts import render\n'), ((440, 464), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(400)'}), '(status=400)\n', (452, 464), False, 'from django.http import HttpResponse\n'), ((519, 543), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(200)'}), '(status=200)\n', (531, 543), False, 'from django.http import HttpResponse\n'), ((318, 334), 'json.dumps', 'json.dumps', (['user'], {}), '(user)\n', (328, 334), False, 'import json\n')]
""" Parses the Jetbrains based IDEs recent projects list """ import glob import os import re import xml.etree.ElementTree as ET class RecentProjectsParser(): """ Processes the "Recent projects" file from Jetbrains IDEs """ @staticmethod def parse(file_path, query): """ Parses the recent projects file passed as argument and returns a list of projects @param str file_path The path to the file which holds the recent open projects by the IDE @param str query Optional search query to filter the results """ if not os.path.isfile(file_path): return [] root = ET.parse(file_path).getroot() recent_projects = root.findall( # recent projects in products version 2020.2 and below './/component[@name="RecentProjectsManager"][1]/option[@name="recentPaths"]/list/option' ) + root.findall( # recent projects in products version 2020.2 and below './/component[@name="RecentDirectoryProjectsManager"][1]/option[@name="recentPaths"]/list/option' ) + root.findall( # projects in groups in products version 2020.3+ './/component[@name="RecentProjectsManager"][1]/option[@name="groups"]/list/ProjectGroup/option' '[@name="projects"]/list/option' ) + root.findall( # recent projects in products version 2020.3+ './/component[@name="RecentProjectsManager"][1]/option[@name="additionalInfo"]/map/entry' ) home = os.path.expanduser('~') query = query.lower() if query else '' # extract all the words (delimited by " " or "/") from the query. # we will match them against the title and the path of the project. 
words = [word.lower() for word in re.split('[ /]+', query)] result = [] already_matched = [] for project in recent_projects: title = '' path = (project.attrib['value' if 'value' in project.attrib else 'key']).replace('$USER_HOME$', home) title_file = path + '/.idea/.name' if os.path.exists(title_file): with open(title_file, 'r') as file: title = file.read().replace('\n', '').lower() icons = glob.glob(os.path.join(path, '.idea', 'icon.*')) # match all words from the query to the path and the title of the project matched_words = [word for word in words if word in '{} {}'.format(title, path)] if query and len(matched_words) < len(words): continue # prevent duplicate results, because from version 2020.3, a project can appear more than once in the XML # (in the option[@name="groups"] section and in the option[@name="additionalInfo"] section) if path in already_matched: continue already_matched.append(path) result.append({ 'name': title or os.path.basename(path).lower(), 'path': path, 'icon': icons[0] if len(icons) > 0 else None }) return result[:8]
[ "xml.etree.ElementTree.parse", "re.split", "os.path.join", "os.path.basename", "os.path.exists", "os.path.isfile", "os.path.expanduser" ]
[((1493, 1516), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (1511, 1516), False, 'import os\n'), ((577, 602), 'os.path.isfile', 'os.path.isfile', (['file_path'], {}), '(file_path)\n', (591, 602), False, 'import os\n'), ((2073, 2099), 'os.path.exists', 'os.path.exists', (['title_file'], {}), '(title_file)\n', (2087, 2099), False, 'import os\n'), ((642, 661), 'xml.etree.ElementTree.parse', 'ET.parse', (['file_path'], {}), '(file_path)\n', (650, 661), True, 'import xml.etree.ElementTree as ET\n'), ((1756, 1780), 're.split', 're.split', (['"""[ /]+"""', 'query'], {}), "('[ /]+', query)\n", (1764, 1780), False, 'import re\n'), ((2250, 2287), 'os.path.join', 'os.path.join', (['path', '""".idea"""', '"""icon.*"""'], {}), "(path, '.idea', 'icon.*')\n", (2262, 2287), False, 'import os\n'), ((2943, 2965), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (2959, 2965), False, 'import os\n')]
from aiolegomac.app import run run()
[ "aiolegomac.app.run" ]
[((33, 38), 'aiolegomac.app.run', 'run', ([], {}), '()\n', (36, 38), False, 'from aiolegomac.app import run\n')]
# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. import unittest import azure.functions as func from azure.functions._http_asgi import ( AsgiMiddleware ) class MockAsgiApplication: response_code = 200 response_body = b'' response_headers = [ [b"content-type", b"text/plain"], ] async def __call__(self, scope, receive, send): self.received_scope = scope # Verify against ASGI specification assert scope['type'] == 'http' assert isinstance(scope['type'], str) assert scope['asgi.spec_version'] in ['2.0', '2.1'] assert isinstance(scope['asgi.spec_version'], str) assert scope['asgi.version'] in ['2.0', '2.1', '2.2'] assert isinstance(scope['asgi.version'], str) assert scope['http_version'] in ['1.0', '1.1', '2'] assert isinstance(scope['http_version'], str) assert scope['method'] in ['POST', 'GET', 'PUT', 'DELETE', 'PATCH'] assert isinstance(scope['method'], str) assert scope['scheme'] in ['http', 'https'] assert isinstance(scope['scheme'], str) assert isinstance(scope['path'], str) assert isinstance(scope['raw_path'], bytes) assert isinstance(scope['query_string'], bytes) assert isinstance(scope['root_path'], str) assert hasattr(scope['headers'], '__iter__') for k, v in scope['headers']: assert isinstance(k, bytes) assert isinstance(v, bytes) assert scope['client'] is None or hasattr(scope['client'], '__iter__') if scope['client']: assert len(scope['client']) == 2 assert isinstance(scope['client'][0], str) assert isinstance(scope['client'][1], int) assert scope['server'] is None or hasattr(scope['server'], '__iter__') if scope['server']: assert len(scope['server']) == 2 assert isinstance(scope['server'][0], str) assert isinstance(scope['server'][1], int) self.received_request = await receive() assert self.received_request['type'] == 'http.request' assert isinstance(self.received_request['body'], bytes) assert isinstance(self.received_request['more_body'], bool) await send( { "type": "http.response.start", "status": 
self.response_code, "headers": self.response_headers, } ) await send( { "type": "http.response.body", "body": self.response_body, } ) class TestHttpAsgiMiddleware(unittest.TestCase): def _generate_func_request( self, method="POST", url="https://function.azurewebsites.net/api/http?firstname=rt", headers={ "Content-Type": "application/json", "x-ms-site-restricted-token": "xmsrt" }, params={ "firstname": "roger" }, route_params={}, body=b'{ "lastname": "tsang" }' ) -> func.HttpRequest: return func.HttpRequest( method=method, url=url, headers=headers, params=params, route_params=route_params, body=body ) def _generate_func_context( self, invocation_id='123e4567-e89b-12d3-a456-426655440000', function_name='httptrigger', function_directory='/home/roger/wwwroot/httptrigger' ) -> func.Context: class MockContext(func.Context): def __init__(self, ii, fn, fd): self._invocation_id = ii self._function_name = fn self._function_directory = fd @property def invocation_id(self): return self._invocation_id @property def function_name(self): return self._function_name @property def function_directory(self): return self._function_directory return MockContext(invocation_id, function_name, function_directory) def test_middleware_calls_app(self): app = MockAsgiApplication() test_body = b'Hello world!' app.response_body = test_body app.response_code = 200 req = func.HttpRequest(method='get', url='/test', body=b'') response = AsgiMiddleware(app).handle(req) # Verify asserted self.assertEqual(response.status_code, 200) self.assertEqual(response.get_body(), test_body) def test_middleware_calls_app_with_context(self): """Test if the middleware can be used by exposing the .handle method, specifically when the middleware is used as def main(req, context): return AsgiMiddleware(app).handle(req, context) """ app = MockAsgiApplication() test_body = b'Hello world!' 
app.response_body = test_body app.response_code = 200 req = self._generate_func_request() ctx = self._generate_func_context() response = AsgiMiddleware(app).handle(req, ctx) # Verify asserted self.assertEqual(response.status_code, 200) self.assertEqual(response.get_body(), test_body) def test_middleware_wrapper(self): """Test if the middleware can be used by exposing the .main property, specifically when the middleware is used as main = AsgiMiddleware(app).main """ app = MockAsgiApplication() test_body = b'Hello world!' app.response_body = test_body app.response_code = 200 req = self._generate_func_request() ctx = self._generate_func_context() main = AsgiMiddleware(app).main response = main(req, ctx) # Verify asserted self.assertEqual(response.status_code, 200) self.assertEqual(response.get_body(), test_body)
[ "azure.functions.HttpRequest", "azure.functions._http_asgi.AsgiMiddleware" ]
[((3184, 3298), 'azure.functions.HttpRequest', 'func.HttpRequest', ([], {'method': 'method', 'url': 'url', 'headers': 'headers', 'params': 'params', 'route_params': 'route_params', 'body': 'body'}), '(method=method, url=url, headers=headers, params=params,\n route_params=route_params, body=body)\n', (3200, 3298), True, 'import azure.functions as func\n'), ((4415, 4468), 'azure.functions.HttpRequest', 'func.HttpRequest', ([], {'method': '"""get"""', 'url': '"""/test"""', 'body': "b''"}), "(method='get', url='/test', body=b'')\n", (4431, 4468), True, 'import azure.functions as func\n'), ((5835, 5854), 'azure.functions._http_asgi.AsgiMiddleware', 'AsgiMiddleware', (['app'], {}), '(app)\n', (5849, 5854), False, 'from azure.functions._http_asgi import AsgiMiddleware\n'), ((4488, 4507), 'azure.functions._http_asgi.AsgiMiddleware', 'AsgiMiddleware', (['app'], {}), '(app)\n', (4502, 4507), False, 'from azure.functions._http_asgi import AsgiMiddleware\n'), ((5194, 5213), 'azure.functions._http_asgi.AsgiMiddleware', 'AsgiMiddleware', (['app'], {}), '(app)\n', (5208, 5213), False, 'from azure.functions._http_asgi import AsgiMiddleware\n')]
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # # Run with: # PYTHONPATH=.. python3 aiohttp_s3_put_get_redis.py # """ This example is exactly the same as aiohttp_s3_put_get.py, but has the Redis endpoint_url uncommented so it uses the Redis client facade instead of HTTP/S3. This example illustrates using aiohttp/aiosonic for native asyncio s3.put_object and s3.get_object. The approach taken is to create a "aioboto3lite" library which re-implements the main boto3/aioboto3 S3 CRUD methods by directly invoking the underlying HTTP requests using aiohttp or alternatively aiosonic clients. This approach avoids much of the many levels of indirection that can slow down boto3 invocations, though it can get quite complicated due to the header signing that AWS APIs require hence wrapping all of those gory details in a library. Another advantage of creating a "fake" aioboto3 is that it is basically possible to do a "plug in replacement" where instead of doing: import botocore, aioboto3 we can do: import aioboto3lite as aioboto3 import aioboto3lite as botocore and the actual application code can remain the same for both. N.B. 
At the moment aioboto3lite is very much a proof of concept and is fairly limited, only supporting a few of the basic S3 CRUD methods however the performance difference is *significant* Using a single instance Docker minio and the default overlayfs data directory with 5KB objects the put_object rate seems to be around 1429 items/s and the get_object rate seems to be around 3238 items/s using aiosonic - that's around 2.65x the write performance of aioboto3 and 4.5x the read performance of aioboto3. """ import sys assert sys.version_info >= (3, 8) # Bomb out if not running Python3.8 import asyncio, os, time, uuid import aioboto3lite as aioboto3 import aioboto3lite as aiobotocore #import aiobotocore, aioboto3 # Uncomment this line to use the real aioboto3/aiobotocore from utils.logger import init_logging from s3_utils_asyncio import ( create_configured_session, create_bucket, purge_and_delete_bucket, put_object, get_object ) async def aiohttp_launch_as_tasks_in_batches(): """ Creates aiohttp.Session() configured from environment variables or users's profile or minio profile. The easiest way to use with minio is to add the following to ~/.aws/credentials (setting the key_id and key used to set MINIO_ROOT_USER and MINIO_ROOT_PASSWORD. 
[minio] aws_access_key_id = <id> aws_secret_access_key = <key> """ session = create_configured_session(aioboto3) config=aiobotocore.config.AioConfig(max_pool_connections=MAX_CONNECTIONS) #config.http_client = "aiohttp" # Defaults to "aiosonic" #client = session.client("s3", endpoint_url="http://localhost:9001", config=config) client = session.client("s3", endpoint_url="redis://localhost:6379", config=config) async with client as s3: # In aioboto3 client and resource are context managers await create_bucket(s3, bucket_name) content = "x" * 5000 print() print(__file__) print(f"Testing {ITERATIONS} iterations, with an item size of {len(content)}") #------------------------- Test writing objects ------------------------ start = time.time() overall_start = start # Used to time aggregate put then get time object_refs = [] tasks = [] for i in range(ITERATIONS): s3_uri = f"s3://{bucket_name}/{uuid.uuid4()}" #print(s3_uri) tasks.append(put_object(s3, s3_uri, content)) if len(tasks) == MAX_CONNECTIONS: await asyncio.gather(*tasks) tasks = [] object_refs.append(s3_uri) #print(len(tasks)) await asyncio.gather(*tasks) # await any outstanding tasks end = time.time() rate = ITERATIONS/(end - start) bandwidth = rate * len(content)/1024 print(f"put_object: rate {rate} items/s, {bandwidth} KiB/s") #------------------------ Test reading objects ------------------------- start = time.time() tasks = [] for s3_uri in object_refs: tasks.append(get_object(s3, s3_uri)) if len(tasks) == MAX_CONNECTIONS: results = await asyncio.gather(*tasks) tasks = [] results = await asyncio.gather(*tasks) # await any outstanding tasks #print(results) end = time.time() rate = ITERATIONS/(end - start) bandwidth = rate * len(content)/1000 print(f"get_object: rate {rate} items/s, {bandwidth} KiB/s") print() rate = ITERATIONS/(end - overall_start) bandwidth = rate * len(content)/1000 print(f"Overall put_object then get_object: rate {rate} items/s, {bandwidth} KiB/s") print() #------------------ Test writing 
then reading objects ------------------ async def put_then_get(s3, s3_uri, body): # put followed by get as a task await put_object(s3, s3_uri, body) return await get_object(s3, s3_uri) start = time.time() object_refs = [] tasks = [] for i in range(ITERATIONS): s3_uri = f"s3://{bucket_name}/{uuid.uuid4()}" #print(s3_uri) tasks.append(put_then_get(s3, s3_uri, content)) if len(tasks) == MAX_CONNECTIONS * 2: await asyncio.gather(*tasks) tasks = [] object_refs.append(s3_uri) #print(len(tasks)) await asyncio.gather(*tasks) # await any outstanding tasks end = time.time() rate = ITERATIONS/(end - start) bandwidth = rate * len(content)/1024 print(f"put_then_get: rate {rate} items/s, {bandwidth} KiB/s") # Delete the objects we created then the bucket to tidy things up up await purge_and_delete_bucket(s3, bucket_name) if __name__ == '__main__': """ Attempt to use uvloop libuv based event loop if available https://github.com/MagicStack/uvloop """ try: import uvloop uvloop.install() except: # Fall back to standard library asyncio epoll event loop pass ITERATIONS = 10000 MAX_CONNECTIONS = 1000 # Create bucket to use in this test bucket_name = "aiohttp-s3-put-get" # Initialise logger logger = init_logging(log_name=bucket_name) loop = asyncio.get_event_loop() loop.run_until_complete(aiohttp_launch_as_tasks_in_batches())
[ "asyncio.gather", "s3_utils_asyncio.create_bucket", "asyncio.get_event_loop", "uuid.uuid4", "aioboto3lite.config.AioConfig", "utils.logger.init_logging", "s3_utils_asyncio.get_object", "s3_utils_asyncio.create_configured_session", "time.time", "uvloop.install", "s3_utils_asyncio.put_object", "...
[((3273, 3308), 's3_utils_asyncio.create_configured_session', 'create_configured_session', (['aioboto3'], {}), '(aioboto3)\n', (3298, 3308), False, 'from s3_utils_asyncio import create_configured_session, create_bucket, purge_and_delete_bucket, put_object, get_object\n'), ((3321, 3387), 'aioboto3lite.config.AioConfig', 'aiobotocore.config.AioConfig', ([], {'max_pool_connections': 'MAX_CONNECTIONS'}), '(max_pool_connections=MAX_CONNECTIONS)\n', (3349, 3387), True, 'import aioboto3lite as aiobotocore\n'), ((7164, 7198), 'utils.logger.init_logging', 'init_logging', ([], {'log_name': 'bucket_name'}), '(log_name=bucket_name)\n', (7176, 7198), False, 'from utils.logger import init_logging\n'), ((7211, 7235), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (7233, 7235), False, 'import asyncio, os, time, uuid\n'), ((4020, 4031), 'time.time', 'time.time', ([], {}), '()\n', (4029, 4031), False, 'import asyncio, os, time, uuid\n'), ((4600, 4611), 'time.time', 'time.time', ([], {}), '()\n', (4609, 4611), False, 'import asyncio, os, time, uuid\n'), ((4872, 4883), 'time.time', 'time.time', ([], {}), '()\n', (4881, 4883), False, 'import asyncio, os, time, uuid\n'), ((5234, 5245), 'time.time', 'time.time', ([], {}), '()\n', (5243, 5245), False, 'import asyncio, os, time, uuid\n'), ((5895, 5906), 'time.time', 'time.time', ([], {}), '()\n', (5904, 5906), False, 'import asyncio, os, time, uuid\n'), ((6407, 6418), 'time.time', 'time.time', ([], {}), '()\n', (6416, 6418), False, 'import asyncio, os, time, uuid\n'), ((6895, 6911), 'uvloop.install', 'uvloop.install', ([], {}), '()\n', (6909, 6911), False, 'import uvloop\n'), ((3725, 3755), 's3_utils_asyncio.create_bucket', 'create_bucket', (['s3', 'bucket_name'], {}), '(s3, bucket_name)\n', (3738, 3755), False, 'from s3_utils_asyncio import create_configured_session, create_bucket, purge_and_delete_bucket, put_object, get_object\n'), ((4531, 4553), 'asyncio.gather', 'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', 
(4545, 4553), False, 'import asyncio, os, time, uuid\n'), ((5141, 5163), 'asyncio.gather', 'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', (5155, 5163), False, 'import asyncio, os, time, uuid\n'), ((6338, 6360), 'asyncio.gather', 'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', (6352, 6360), False, 'import asyncio, os, time, uuid\n'), ((6667, 6707), 's3_utils_asyncio.purge_and_delete_bucket', 'purge_and_delete_bucket', (['s3', 'bucket_name'], {}), '(s3, bucket_name)\n', (6690, 6707), False, 'from s3_utils_asyncio import create_configured_session, create_bucket, purge_and_delete_bucket, put_object, get_object\n'), ((4298, 4329), 's3_utils_asyncio.put_object', 'put_object', (['s3', 's3_uri', 'content'], {}), '(s3, s3_uri, content)\n', (4308, 4329), False, 'from s3_utils_asyncio import create_configured_session, create_bucket, purge_and_delete_bucket, put_object, get_object\n'), ((4964, 4986), 's3_utils_asyncio.get_object', 'get_object', (['s3', 's3_uri'], {}), '(s3, s3_uri)\n', (4974, 4986), False, 'from s3_utils_asyncio import create_configured_session, create_bucket, purge_and_delete_bucket, put_object, get_object\n'), ((5801, 5829), 's3_utils_asyncio.put_object', 'put_object', (['s3', 's3_uri', 'body'], {}), '(s3, s3_uri, body)\n', (5811, 5829), False, 'from s3_utils_asyncio import create_configured_session, create_bucket, purge_and_delete_bucket, put_object, get_object\n'), ((5855, 5877), 's3_utils_asyncio.get_object', 'get_object', (['s3', 's3_uri'], {}), '(s3, s3_uri)\n', (5865, 5877), False, 'from s3_utils_asyncio import create_configured_session, create_bucket, purge_and_delete_bucket, put_object, get_object\n'), ((4230, 4242), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (4240, 4242), False, 'import asyncio, os, time, uuid\n'), ((4399, 4421), 'asyncio.gather', 'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', (4413, 4421), False, 'import asyncio, os, time, uuid\n'), ((5066, 5088), 'asyncio.gather', 'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', (5080, 
5088), False, 'import asyncio, os, time, uuid\n'), ((6031, 6043), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (6041, 6043), False, 'import asyncio, os, time, uuid\n'), ((6206, 6228), 'asyncio.gather', 'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', (6220, 6228), False, 'import asyncio, os, time, uuid\n')]
import os import shutil import tempfile from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises from crds.submit import Submission, NoFilesSelected from crds.tests import test_config import mock # To run: # nosetests -v unit_tests.py TEMPFILES = ['ipppssoot_ccd.fits', 'opppssoot_bia.fits'] # Mocked urllib.request to .../redcat_description.yml: FORM_DESCRIPTION_YML = '''\ - {help_text: 'Who are you?', key: deliverer, label: Name of deliverer, required: true, type: CharField} - {help_text: Comma-delimited list (optional), key: other_email, label: Other e-mail adresses to send notifications, required: false, type: CharField} - choices: [acs, cos, nicmos, stis, synphot, wfc3, wfpc2] initial: acs key: instrument label: Instrument (All submitted files should match this instrument. This instrument will be locked for your submission exclusively) required: true type: TypedChoiceField - {key: file_type, label: 'Type of files (Bias, Dark, etc.)', required: true, type: CharField} - choices: [false, true] initial: false key: history_updated label: Has HISTORY section in the primary header been updated to describe in detail the reason for delivery and how the files were created? required: false type: BooleanField - choices: [false, true] initial: false key: pedigree_updated label: Has PEDIGREE keyword been checked and updated as necessary? required: false type: BooleanField - choices: [false, true] initial: false key: keywords_checked label: Has COMMENT been checked? required: false type: BooleanField - choices: [false, true] initial: false key: descrip_updated label: Was the DESCRIP keyword updated with a summary of why the files were updated or created? required: false type: BooleanField - choices: [false, true] initial: false key: useafter_updated label: Has the USEAFTER keyword been checked, and if necessary, updated? 
required: false type: BooleanField - choices: [N/A, 'No', 'Yes'] help_text: N/A for ETC Files Only initial: N/A key: useafter_matches label: If the reference files are replacing previous versions, do the new USEAFTER dates exactly match the old ones? required: true type: TypedChoiceField - choices: [N/A, 'No', 'Yes'] help_text: optional initial: N/A key: compliance_verified label: Verification for compliance complete (fits, certify, etc. or N/A) required: true type: TypedChoiceField - choices: [false, true] initial: false key: ingest_files label: Should the affected files be reprocessed? required: false type: BooleanField - choices: [false, true] initial: false key: etc_delivery label: Should the files be submitted to the ETC? required: false type: BooleanField - choices: [false, true] initial: false key: jwst_etc label: Are these JWST ETC files? required: false type: BooleanField - {key: calpipe_version, label: Files run through the current version of the calibration software being used by the pipeline or PYSYNPHOT and ETC (yes/no and version number), required: true, type: CharField} - choices: [false, true] initial: false key: replacement_files label: Are any files replacing old reference files (deliveries can be a mix of files that are or are not replacing old files) (yes/no) required: false type: BooleanField - {key: old_reference_files, label: 'If yes, list them here', required: false, type: CharField} - choices: [N/A, 'No', 'Yes'] initial: N/A key: replacing_badfiles label: If the files being replaced are bad, and should not be used with any data, please indicate this here required: true type: TypedChoiceField - {help_text: Comma-delimited list (optional), key: jira_issue, label: Any JIRA issues filed in regard to the references being delivered (e.g. 
"REDCAT-25"), required: false, type: CharField} - {key: table_rows_changed, label: 'If files are tables, please indicate exactly which rows have changed', required: false, type: CharField} - {key: modes_affected, label: 'Please indicate which modes (e.g. all the STIS, FUVMAMA, E140L modes) are affected by the changes in the files', required: true, type: CharField} - {key: correctness_testing, label: Description of how the files were tested for correctness, required: true, type: CharField} - {key: additional_considerations, label: Additional considerations, required: false, type: CharField} ''' def touch(path): with open(path, 'a'): os.utime(path, None) class TestSubmission(object): @classmethod @mock.patch('crds.submit.rc_submit.urllib.request.urlopen', autospec=True) def setup_class(cls, urlopen): '''This method is run once for each class before any tests are run.''' cls.old_state = test_config.setup() # Create a temporary directory: cls.tmpdir = tempfile.mkdtemp(prefix='tmp_rc_submit_') # Create empty test files in the temporary directory: cls.tempfiles = [os.path.join(cls.tmpdir, x) for x in TEMPFILES] for filename in cls.tempfiles: touch(filename) # Create a file handle to use as a mockup of the urllib.request object: cls.mockup_form = os.path.join(cls.tmpdir, 'mocked_redcat_description.yml') with open(cls.mockup_form, 'w') as f: f.write(FORM_DESCRIPTION_YML) urlopen.return_value = open(cls.mockup_form) # Instantiate the Submission object used in these tests: cls.s = Submission('hst', 'dev', context='hst_0723.pmap') @classmethod def teardown_class(cls): '''This method is run once for each class after all tests are run.''' # Remove temporary directory and all files contained therein: shutil.rmtree(cls.tmpdir) test_config.cleanup(cls.old_state) @raises(KeyError) def test_badkey(self): self.s['bad_key'] = 'some value' def test_goodvalue_char(self): self.s['file_type'] = 'bias' def test_goodvalue_bool(self, key='history_updated'): self.s[key] = True 
assert_is(self.s[key], True) self.s[key] = False assert_is(self.s[key], False) def test_goodvalue_trinary(self, key='compliance_verified'): # Set with Booleans: self.s[key] = True assert_equals(self.s[key], 'Yes') self.s[key] = False assert_equals(self.s[key], 'No') # Set with strings: self.s[key] = 'Yes' assert_equals(self.s[key], 'Yes') self.s[key] = 'yes' # Handle different case assert_equals(self.s[key], 'Yes') self.s[key] = 'No' assert_equals(self.s[key], 'No') self.s[key] = 'n/a' # Handle different case assert_equals(self.s[key], 'N/A') @raises(ValueError) def test_badtype(self): self.s['calpipe_version'] = 123 # Expects a str @raises(ValueError) def test_badvalue_trinary(self, key='compliance_verified'): self.s[key] = 'bad value' @raises(ValueError) def test_badvalue_choices(self): self.s['change_level'] = 'bad choice' @raises(ValueError) def test_emptyvalue_char(self): self.s['file_type'] = '' @raises(ValueError) def test_emptyvalue_char(self, key='file_type'): self.s[key] = '' # Required field def test_emptyvalue_optional(self): self.s['additional_considerations'] = '' # Optional field def test_resetfield(self, key='deliverer'): new_value = 'Wombat' self.s[key] = new_value assert_equals(self.s[key], new_value) del self.s[key] assert_not_equals(self.s[key], new_value) # Also assumes KeyError is not thrown! 
def test_addfiles(self): for filename in self.tempfiles: self.s.add_file(filename) @raises(FileNotFoundError) def test_addbadfile(self): self.s.add_file(os.path.join(self.tmpdir, 'missing_file.fits')) def test_rmfile(self): for filename in self.tempfiles: self.s.add_file(filename) self.s.remove_file(list(self.tempfiles)[0]) assert_equals( len(self.s.files), len(self.tempfiles)-1 ) @raises(KeyError) def test_rmbadfile(self): for filename in self.tempfiles: self.s.add_file(filename) self.s.remove_file('bad_filename.fits') def test_yaml(self): assert_true(self.s.yaml) def test_help(self): self.s.help() # Prints stuff @raises(ValueError) def test_validate_emptykey(self, key='file_type'): del self.s[key] # Resets to empty str self.s.validate() @raises(NoFilesSelected) def test_validate_emptyfiles(self): for filename in self.s.files: self.s.remove_file(filename) # Do something here to pass field validation checks: self.s['file_type'] = 'value' self.s['correctness_testing'] = 'value' self.s['deliverer'] = 'value' self.s['description'] = 'value' self.s['calpipe_version'] = 'value' self.s['modes_affected'] = 'value' self.s['instrument'] = 'stis' # Only works for HST self.s.validate() def test_validate(self): self.s.add_file(list(self.tempfiles)[0]) # Do something here to pass field validation checks: self.s['file_type'] = 'value' self.s['correctness_testing'] = 'value' self.s['deliverer'] = 'value' self.s['description'] = 'value' self.s['calpipe_version'] = 'value' self.s['modes_affected'] = 'value' self.s['instrument'] = 'stis' # Only works for HST self.s.validate()
[ "nose.tools.assert_equals", "crds.tests.test_config.cleanup", "os.path.join", "shutil.rmtree", "nose.tools.assert_true", "mock.patch", "crds.submit.Submission", "tempfile.mkdtemp", "nose.tools.assert_not_equals", "nose.tools.assert_is", "nose.tools.raises", "crds.tests.test_config.setup", "o...
[((4581, 4654), 'mock.patch', 'mock.patch', (['"""crds.submit.rc_submit.urllib.request.urlopen"""'], {'autospec': '(True)'}), "('crds.submit.rc_submit.urllib.request.urlopen', autospec=True)\n", (4591, 4654), False, 'import mock\n'), ((5836, 5852), 'nose.tools.raises', 'raises', (['KeyError'], {}), '(KeyError)\n', (5842, 5852), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((6779, 6797), 'nose.tools.raises', 'raises', (['ValueError'], {}), '(ValueError)\n', (6785, 6797), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((6889, 6907), 'nose.tools.raises', 'raises', (['ValueError'], {}), '(ValueError)\n', (6895, 6907), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((7012, 7030), 'nose.tools.raises', 'raises', (['ValueError'], {}), '(ValueError)\n', (7018, 7030), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((7120, 7138), 'nose.tools.raises', 'raises', (['ValueError'], {}), '(ValueError)\n', (7126, 7138), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((7214, 7232), 'nose.tools.raises', 'raises', (['ValueError'], {}), '(ValueError)\n', (7220, 7232), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((7821, 7846), 'nose.tools.raises', 'raises', (['FileNotFoundError'], {}), '(FileNotFoundError)\n', (7827, 7846), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((8180, 8196), 'nose.tools.raises', 'raises', (['KeyError'], {}), '(KeyError)\n', (8186, 8196), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((8482, 8500), 'nose.tools.raises', 'raises', (['ValueError'], {}), '(ValueError)\n', (8488, 8500), False, 'from nose.tools import assert_equals, 
assert_not_equals, assert_is, assert_true, raises\n'), ((8635, 8658), 'nose.tools.raises', 'raises', (['NoFilesSelected'], {}), '(NoFilesSelected)\n', (8641, 8658), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((4507, 4527), 'os.utime', 'os.utime', (['path', 'None'], {}), '(path, None)\n', (4515, 4527), False, 'import os\n'), ((4793, 4812), 'crds.tests.test_config.setup', 'test_config.setup', ([], {}), '()\n', (4810, 4812), False, 'from crds.tests import test_config\n'), ((4875, 4916), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'prefix': '"""tmp_rc_submit_"""'}), "(prefix='tmp_rc_submit_')\n", (4891, 4916), False, 'import tempfile\n'), ((5227, 5284), 'os.path.join', 'os.path.join', (['cls.tmpdir', '"""mocked_redcat_description.yml"""'], {}), "(cls.tmpdir, 'mocked_redcat_description.yml')\n", (5239, 5284), False, 'import os\n'), ((5508, 5557), 'crds.submit.Submission', 'Submission', (['"""hst"""', '"""dev"""'], {'context': '"""hst_0723.pmap"""'}), "('hst', 'dev', context='hst_0723.pmap')\n", (5518, 5557), False, 'from crds.submit import Submission, NoFilesSelected\n'), ((5761, 5786), 'shutil.rmtree', 'shutil.rmtree', (['cls.tmpdir'], {}), '(cls.tmpdir)\n', (5774, 5786), False, 'import shutil\n'), ((5795, 5829), 'crds.tests.test_config.cleanup', 'test_config.cleanup', (['cls.old_state'], {}), '(cls.old_state)\n', (5814, 5829), False, 'from crds.tests import test_config\n'), ((6088, 6116), 'nose.tools.assert_is', 'assert_is', (['self.s[key]', '(True)'], {}), '(self.s[key], True)\n', (6097, 6116), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((6153, 6182), 'nose.tools.assert_is', 'assert_is', (['self.s[key]', '(False)'], {}), '(self.s[key], False)\n', (6162, 6182), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((6313, 6346), 'nose.tools.assert_equals', 'assert_equals', (['self.s[key]', '"""Yes"""'], 
{}), "(self.s[key], 'Yes')\n", (6326, 6346), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((6383, 6415), 'nose.tools.assert_equals', 'assert_equals', (['self.s[key]', '"""No"""'], {}), "(self.s[key], 'No')\n", (6396, 6415), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((6481, 6514), 'nose.tools.assert_equals', 'assert_equals', (['self.s[key]', '"""Yes"""'], {}), "(self.s[key], 'Yes')\n", (6494, 6514), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((6576, 6609), 'nose.tools.assert_equals', 'assert_equals', (['self.s[key]', '"""Yes"""'], {}), "(self.s[key], 'Yes')\n", (6589, 6609), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((6645, 6677), 'nose.tools.assert_equals', 'assert_equals', (['self.s[key]', '"""No"""'], {}), "(self.s[key], 'No')\n", (6658, 6677), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((6739, 6772), 'nose.tools.assert_equals', 'assert_equals', (['self.s[key]', '"""N/A"""'], {}), "(self.s[key], 'N/A')\n", (6752, 6772), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((7555, 7592), 'nose.tools.assert_equals', 'assert_equals', (['self.s[key]', 'new_value'], {}), '(self.s[key], new_value)\n', (7568, 7592), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((7625, 7666), 'nose.tools.assert_not_equals', 'assert_not_equals', (['self.s[key]', 'new_value'], {}), '(self.s[key], new_value)\n', (7642, 7666), False, 'from nose.tools import assert_equals, assert_not_equals, assert_is, assert_true, raises\n'), ((8387, 8411), 'nose.tools.assert_true', 'assert_true', (['self.s.yaml'], {}), '(self.s.yaml)\n', (8398, 8411), False, 'from nose.tools import assert_equals, assert_not_equals, 
assert_is, assert_true, raises\n'), ((5005, 5032), 'os.path.join', 'os.path.join', (['cls.tmpdir', 'x'], {}), '(cls.tmpdir, x)\n', (5017, 5032), False, 'import os\n'), ((7902, 7948), 'os.path.join', 'os.path.join', (['self.tmpdir', '"""missing_file.fits"""'], {}), "(self.tmpdir, 'missing_file.fits')\n", (7914, 7948), False, 'import os\n')]
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Software License Agreement (Apache 2.0)

Copyright (c) 2020, The MITRE Corporation. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    https://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

This project was developed by The MITRE Corporation. If this code is used
in a deployment or embedded within another project, it is requested that
you send an email to <EMAIL> in order to let us know where this software
is being used.
"""

# this setup.py was created from this template below. See it for more features.
# https://github.com/kennethreitz/setup.py/blob/master/setup.py

import io
import os
#from shutil import rmtree

from setuptools import find_packages, setup

# Package meta-data.
NAME = 'demodocusfw'
DESCRIPTION = 'demodocusfw generates a full state graph for a web site'
URL = 'https://gitlab.mitre.org/demodocus/demodocus-framework'
EMAIL = '<EMAIL>'
AUTHOR = '<NAME>'
REQUIRES_PYTHON = '>=3.8.0'
VERSION = '0.1.0'

here = os.path.abspath(os.path.dirname(__file__))

# Loading required packages from 'requirements.txt'.
# FIX: read with an explicit encoding so installation does not depend on the
# platform's default locale.
with open(os.path.join(here, 'requirements.txt'), encoding='utf-8') as f:
    required = f.read().splitlines()

# What packages are required for this module to be executed?
REQUIRED = required

# What packages are optional?
EXTRAS = {
    # 'fancy feature': ['django'],
}

# The rest you shouldn't have to touch too much :)
# ------------------------------------------------
# Except, perhaps the License and Trove Classifiers!
# If you do change the License, remember to change the Trove Classifier for that!

# Import the README and use it as the long-description.
# Note: this will only work if 'README.md' is present in your MANIFEST.in file!
try:
    with io.open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
        long_description = '\n' + f.read()
except FileNotFoundError:
    long_description = DESCRIPTION

# Where the magic happens:
setup(
    name=NAME,
    version=VERSION,
    description=DESCRIPTION,
    long_description=long_description,
    long_description_content_type='text/markdown',
    author=AUTHOR,
    author_email=EMAIL,
    python_requires=REQUIRES_PYTHON,
    url=URL,
    #package_dir={'demodocusfw': 'demodocusfw'},
    packages=find_packages(),
    install_requires=REQUIRED,
    extras_require=EXTRAS,
    include_package_data=True,
    # FIX: the license metadata said 'MIT' (a leftover from the setup.py
    # template), contradicting the Apache 2.0 header of this very file.
    license='Apache-2.0',
    classifiers=[
        # Trove classifiers
        # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy'
    ],
)
[ "os.path.dirname", "os.path.join", "setuptools.find_packages" ]
[((1443, 1468), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1458, 1468), False, 'import os\n'), ((1533, 1571), 'os.path.join', 'os.path.join', (['here', '"""requirements.txt"""'], {}), "(here, 'requirements.txt')\n", (1545, 1571), False, 'import os\n'), ((2680, 2695), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (2693, 2695), False, 'from setuptools import find_packages, setup\n'), ((2174, 2205), 'os.path.join', 'os.path.join', (['here', '"""README.md"""'], {}), "(here, 'README.md')\n", (2186, 2205), False, 'import os\n')]
""" This script utilises the ground truth label's 2D bounding box to crop out the the points of interest and feed it into the model so that it can predict a 3D bounding box for each of the 2D detections The script will plot the results of the 3D bounding box onto the image and display them alongside the groundtruth image and it's 3D bounding box. This is to help with qualitative assesment. Images to be evaluated should be placed in eval/image_2 folder Eval Results for each file in the eval/image_2 folder will be saved to "eval/eval-results/" FLAGS: --show-single Show 3D BoundingBox detections one at a time --hide-imgs Hides Display of ground truth and bounding box """ import os import cv2 import errno import argparse import torch import torch.nn as nn import numpy as np from torch.autograd import Variable import torchvision.models as models from lib.DataUtils import * from lib.Utils import * from tqdm import tqdm from lib import Model, ClassAverages def main(): exp_no = 34 print ("Generating evaluation results for experiment No. 
",exp_no) use_cuda = torch.cuda.is_available() device = torch.device("cuda" if use_cuda else "cpu") weights_path = os.path.abspath(os.path.dirname(__file__)) + '/weights/exp_' + str(exp_no) + '/' weight_list = [x for x in sorted(os.listdir(weights_path)) if x.endswith('.pkl')] # Create out folder for pred-labels and pred-imgs for x in range(len(weight_list)): check_and_make_dir('Kitti/results/validation/labels/exp_' + str(exp_no) +"/epoch_%s/" % str(x+1)) check_and_make_dir('Kitti/results/validation/pred_imgs/exp_' + str(exp_no) ) if len(weight_list) == 0: print('We could not find any model weights to load, please train the model first!') exit() for model_weight in weight_list: epoch_no = model_weight.split(".")[0].split('_')[-1] print ("Evaluating for Epoch: ",epoch_no) print ('Loading model with %s'%model_weight) my_vgg = models.vgg19_bn(pretrained=True) model = Model.Model(features=my_vgg.features, bins=2) if use_cuda: checkpoint = torch.load(weights_path + model_weight) else: checkpoint = torch.load(weights_path + model_weight) model.load_state_dict(checkpoint['model_state_dict']) model.eval() # Load Test Images from eval folder dataset = Dataset(os.path.abspath(os.path.dirname(__file__)) + 'Kitti/validation') all_images = dataset.all_objects() print ("Length of eval data",len(all_images)) averages = ClassAverages.ClassAverages() all_images = dataset.all_objects() print ("Model is commencing predictions.....") for key in tqdm(sorted(all_images.keys())): data = all_images[key] truth_img = data['Image'] img = np.copy(truth_img) imgGT = np.copy(truth_img) objects = data['Objects'] cam_to_img = data['Calib'] filename = "Kitti/results/validation/labels/exp_" +str(exp_no) + '/epoch_' + str(epoch_no) + "/" +str(key)+".txt" check_and_make_dir(filename) file = open(filename,"w") for object in objects: label = object.label theta_ray = object.theta_ray input_img = object.img input_tensor = torch.zeros([1,3,224,224]) input_tensor[0,:,:,:] = input_img input_tensor.cuda() 
[orient, conf, dim] = model(input_tensor) orient = orient.cpu().data.numpy()[0, :, :] conf = conf.cpu().data.numpy()[0, :] dim = dim.cpu().data.numpy()[0, :] dim += averages.get_item(label['Class']) argmax = np.argmax(conf) orient = orient[argmax, :] cos = orient[0] sin = orient[1] alpha = np.arctan2(sin, cos) alpha += dataset.angle_bins[argmax] alpha -= np.pi location = plot_regressed_3d_bbox_2(img, truth_img, cam_to_img, label['Box_2D'], dim, alpha, theta_ray) locationGT = plot_regressed_3d_bbox_2(imgGT, truth_img, cam_to_img, label['Box_2D'], label['Dimensions'], label['Alpha'], theta_ray) file.write( \ # Class label str(label['Class']) + " -1 -1 " + \ # Alpha str(round(alpha,2)) + " " + \ # 2D Bounding box coordinates str(label['Box_2D'][0][0]) + " " + str(label['Box_2D'][0][1]) + " " + \ str(label['Box_2D'][1][0]) + " " + str(label['Box_2D'][1][1]) + " " + \ # 3D Box Dimensions str(' '.join(str(round(e,2)) for e in dim)) + " " + \ # 3D Box Location str(' '.join(str(round(e,2)) for e in location)) + " 0.0 " + \ # Ry str(round(theta_ray + alpha ,2)) + " " + \ # Confidence str( round(max(softmax(conf)),2) ) + "\n" ) # print('Estimated pose: %s'%location) # print('Truth pose: %s'%label['Location']) # print('-------------') file.close() numpy_vertical = np.concatenate((truth_img,imgGT, img), axis=0) image_name = 'Kitti/results/validation/pred_imgs/exp_' + str(exp_no) + '/' + str(key) + "/epoch_" + epoch_no + '_' + str(key) + '.jpg' check_and_make_dir(image_name) cv2.imwrite(image_name, numpy_vertical) print ("Finished.") if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument("--show-single", action="store_true", help="Show 3D BoundingBox detecions one at a time") parser.add_argument("--hide-imgs", action="store_true", help="Hide display of visual results") FLAGS = parser.parse_args() main()
[ "torchvision.models.vgg19_bn", "argparse.ArgumentParser", "numpy.copy", "numpy.concatenate", "numpy.argmax", "cv2.imwrite", "torch.load", "numpy.arctan2", "lib.ClassAverages.ClassAverages", "os.path.dirname", "torch.cuda.is_available", "torch.device", "torch.zeros", "os.listdir", "lib.Mo...
[((1096, 1121), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1119, 1121), False, 'import torch\n'), ((1135, 1178), 'torch.device', 'torch.device', (["('cuda' if use_cuda else 'cpu')"], {}), "('cuda' if use_cuda else 'cpu')\n", (1147, 1178), False, 'import torch\n'), ((5836, 5861), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (5859, 5861), False, 'import argparse\n'), ((2011, 2043), 'torchvision.models.vgg19_bn', 'models.vgg19_bn', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (2026, 2043), True, 'import torchvision.models as models\n'), ((2060, 2105), 'lib.Model.Model', 'Model.Model', ([], {'features': 'my_vgg.features', 'bins': '(2)'}), '(features=my_vgg.features, bins=2)\n', (2071, 2105), False, 'from lib import Model, ClassAverages\n'), ((2608, 2637), 'lib.ClassAverages.ClassAverages', 'ClassAverages.ClassAverages', ([], {}), '()\n', (2635, 2637), False, 'from lib import Model, ClassAverages\n'), ((2153, 2192), 'torch.load', 'torch.load', (['(weights_path + model_weight)'], {}), '(weights_path + model_weight)\n', (2163, 2192), False, 'import torch\n'), ((2233, 2272), 'torch.load', 'torch.load', (['(weights_path + model_weight)'], {}), '(weights_path + model_weight)\n', (2243, 2272), False, 'import torch\n'), ((2881, 2899), 'numpy.copy', 'np.copy', (['truth_img'], {}), '(truth_img)\n', (2888, 2899), True, 'import numpy as np\n'), ((2920, 2938), 'numpy.copy', 'np.copy', (['truth_img'], {}), '(truth_img)\n', (2927, 2938), True, 'import numpy as np\n'), ((5473, 5520), 'numpy.concatenate', 'np.concatenate', (['(truth_img, imgGT, img)'], {'axis': '(0)'}), '((truth_img, imgGT, img), axis=0)\n', (5487, 5520), True, 'import numpy as np\n'), ((5726, 5765), 'cv2.imwrite', 'cv2.imwrite', (['image_name', 'numpy_vertical'], {}), '(image_name, numpy_vertical)\n', (5737, 5765), False, 'import cv2\n'), ((1317, 1341), 'os.listdir', 'os.listdir', (['weights_path'], {}), '(weights_path)\n', (1327, 1341), False, 'import 
os\n'), ((3417, 3446), 'torch.zeros', 'torch.zeros', (['[1, 3, 224, 224]'], {}), '([1, 3, 224, 224])\n', (3428, 3446), False, 'import torch\n'), ((3837, 3852), 'numpy.argmax', 'np.argmax', (['conf'], {}), '(conf)\n', (3846, 3852), True, 'import numpy as np\n'), ((3984, 4004), 'numpy.arctan2', 'np.arctan2', (['sin', 'cos'], {}), '(sin, cos)\n', (3994, 4004), True, 'import numpy as np\n'), ((1215, 1240), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1230, 1240), False, 'import os\n'), ((2443, 2468), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2458, 2468), False, 'import os\n')]
from gdsctools.readers import GenomicFeatures, IC50, DrugDecode
from gdsctools.readers import Reader, drug_name_to_int
from easydev import TempFile
from gdsctools import ic50_test, gdsctools_data
import pandas as pd
from gdsctools.datasets import testing


def test_readers():
    """Reader() works bare and rejects invalid inputs."""
    a = Reader()

    # FIX: the original pattern
    #     try: Reader('stupido'); assert False
    #     except: assert True
    # could never fail -- the bare ``except`` also swallowed the
    # AssertionError raised by ``assert False``, so the test passed even if
    # no exception was raised.  try/except/else enforces the expectation.
    try:
        Reader('stupido')
    except Exception:
        pass
    else:
        raise AssertionError("Reader('stupido') should have raised")

    try:
        Reader(1)
    except Exception:
        pass
    else:
        raise AssertionError("Reader(1) should have raised")


def test_read_ic50():
    # -------------------------------- functionalities
    r = IC50(ic50_test)
    # we can also instanciate from a valid dataframe
    r = IC50(r)

    # test repr
    r
    # and print statement
    print(r)
    # the copy method
    assert r == r.copy()
    r.hist()
    r.plot_ic50_count()
    r.cosmicIds

    f = TempFile()
    r.to_csv(f.name)
    f.delete()

    # columns may be duplicated; building an IC50 from such a frame must fail
    r = IC50(ic50_test)
    df = pd.concat([r.df, r.df[999]], axis=1)
    # FIX: same bare-except/assert-False antipattern as in test_readers.
    try:
        IC50(df)
    except Exception:
        pass
    else:
        raise AssertionError("IC50 with duplicated columns should have raised")

    # ---------------------------------------- different IC50 formats
    # test all files available
    for key in testing.keys():
        filename = testing[key].location
        if filename.startswith('ic50_test'):
            ic = IC50(filename)

    # some specific checks:
    ic = IC50(testing['ic50_test_header_no_drug_prefix'].location)
    assert ic.drugIds == [1, 2]

    ic = IC50(testing['ic50_test_header_drug_prefix_only'].location)
    assert ic.drugIds == [1, 2]

    ic = IC50(testing['ic50_test_header_mixed_drug_prefix'].location)
    assert ic.drugIds == [1, 2]


def test_read_gf():
    # Reads a default file
    r = GenomicFeatures()
    # we can also instanciate from another GenomicFeatures instance
    r = GenomicFeatures(r)
    # we can also instanciate from a valid dataframe
    r = GenomicFeatures(r.df)

    # test repr
    r
    # and print statement
    print(r)

    r.features
    r.tissues
    r.plot()

    r.drop_tissue_in('breast')
    r.drop_tissue_in(['skin', 'bone'])
    r.keep_tissue_in(['cervix', 'lung'])
    assert r.shift == 2
    assert len(r.unique_tissues) == 2

    # dropping the extra cosmic ids and syncing features makes both equal
    gf1 = GenomicFeatures()
    gf2 = GenomicFeatures(testing.genomic_features_csv)
    to_drop = [x for x in gf1.df.index if x not in gf2.df.index]
    gf1.drop_cosmic(to_drop)
    gf1.features = gf2.features
    assert gf2 == gf1

    gf = GenomicFeatures(testing.genomic_features_bare_csv)
    assert gf.shift == 1
    gf.get_TCGA()


def test_gf_compress():
    gf = GenomicFeatures()
    gf.compress_identical_features()


def test_drugs():
    r1 = DrugDecode(testing.drug_test_csv)
    r1.drugIds
    r2 = DrugDecode(testing.drug_test_tsv)
    r2.drugIds
    assert r1 == r2
    # r1.get_info() this example fails because all webrelease are NAN
    assert len(r1) == 11

    dd = DrugDecode(gdsctools_data("test_drug_decode_comp.csv"))
    assert dd.companies == ["ME"]
    assert dd.is_public(5) == 'Y'
    dd.check()
    assert dd.get_info()['N_prop'] == 1
    # test repr and print
    print(dd)
    dd
    # test __add__
    assert dd + dd == dd
    assert len(dd.get_public_and_one_company("ME")) == 10

    dd = DrugDecode(testing.drug_test_csv)
    # FIX: the comparison result was silently discarded (missing assert).
    assert dd.get_name("Drug_1047_IC50") == "Nutlin-3a"


def test_readers_tabs():
    # If the file ends in csv but its content is tsv, this must be rejected.
    # FIX: bare-except/assert-False antipattern replaced (see test_readers).
    try:
        IC50(gdsctools_data("test_IC50_tabs.csv"))
    except Exception:
        pass
    else:
        raise AssertionError("tab-separated content in a .csv should have raised")


def test_reader_long_strings():
    assert 10 == drug_name_to_int(10)
    assert drug_name_to_int("1234567890123456789") == 1234567890123456789
    assert drug_name_to_int(str(2**63)) == 9223372036854775808
[ "gdsctools.readers.Reader", "gdsctools.readers.drug_name_to_int", "gdsctools.datasets.testing.keys", "gdsctools.readers.DrugDecode", "gdsctools.gdsctools_data", "easydev.TempFile", "gdsctools.readers.GenomicFeatures", "gdsctools.readers.IC50", "pandas.concat" ]
[((287, 295), 'gdsctools.readers.Reader', 'Reader', ([], {}), '()\n', (293, 295), False, 'from gdsctools.readers import Reader, drug_name_to_int\n'), ((560, 575), 'gdsctools.readers.IC50', 'IC50', (['ic50_test'], {}), '(ic50_test)\n', (564, 575), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((637, 644), 'gdsctools.readers.IC50', 'IC50', (['r'], {}), '(r)\n', (641, 644), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((821, 831), 'easydev.TempFile', 'TempFile', ([], {}), '()\n', (829, 831), False, 'from easydev import TempFile\n'), ((909, 924), 'gdsctools.readers.IC50', 'IC50', (['ic50_test'], {}), '(ic50_test)\n', (913, 924), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((934, 970), 'pandas.concat', 'pd.concat', (['[r.df, r.df[999]]'], {'axis': '(1)'}), '([r.df, r.df[999]], axis=1)\n', (943, 970), True, 'import pandas as pd\n'), ((1220, 1234), 'gdsctools.datasets.testing.keys', 'testing.keys', ([], {}), '()\n', (1232, 1234), False, 'from gdsctools.datasets import testing\n'), ((1539, 1596), 'gdsctools.readers.IC50', 'IC50', (["testing['ic50_test_header_no_drug_prefix'].location"], {}), "(testing['ic50_test_header_no_drug_prefix'].location)\n", (1543, 1596), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((1639, 1698), 'gdsctools.readers.IC50', 'IC50', (["testing['ic50_test_header_drug_prefix_only'].location"], {}), "(testing['ic50_test_header_drug_prefix_only'].location)\n", (1643, 1698), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((1741, 1801), 'gdsctools.readers.IC50', 'IC50', (["testing['ic50_test_header_mixed_drug_prefix'].location"], {}), "(testing['ic50_test_header_mixed_drug_prefix'].location)\n", (1745, 1801), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((1891, 1908), 'gdsctools.readers.GenomicFeatures', 'GenomicFeatures', ([], {}), '()\n', (1906, 1908), False, 'from 
gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((1986, 2004), 'gdsctools.readers.GenomicFeatures', 'GenomicFeatures', (['r'], {}), '(r)\n', (2001, 2004), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((2068, 2089), 'gdsctools.readers.GenomicFeatures', 'GenomicFeatures', (['r.df'], {}), '(r.df)\n', (2083, 2089), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((2381, 2398), 'gdsctools.readers.GenomicFeatures', 'GenomicFeatures', ([], {}), '()\n', (2396, 2398), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((2410, 2455), 'gdsctools.readers.GenomicFeatures', 'GenomicFeatures', (['testing.genomic_features_csv'], {}), '(testing.genomic_features_csv)\n', (2425, 2455), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((2616, 2666), 'gdsctools.readers.GenomicFeatures', 'GenomicFeatures', (['testing.genomic_features_bare_csv'], {}), '(testing.genomic_features_bare_csv)\n', (2631, 2666), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((2746, 2763), 'gdsctools.readers.GenomicFeatures', 'GenomicFeatures', ([], {}), '()\n', (2761, 2763), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((2831, 2864), 'gdsctools.readers.DrugDecode', 'DrugDecode', (['testing.drug_test_csv'], {}), '(testing.drug_test_csv)\n', (2841, 2864), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((2889, 2922), 'gdsctools.readers.DrugDecode', 'DrugDecode', (['testing.drug_test_tsv'], {}), '(testing.drug_test_tsv)\n', (2899, 2922), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((3402, 3435), 'gdsctools.readers.DrugDecode', 'DrugDecode', (['testing.drug_test_csv'], {}), '(testing.drug_test_csv)\n', (3412, 3435), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((318, 335), 'gdsctools.readers.Reader', 'Reader', (['"""stupido"""'], 
{}), "('stupido')\n", (324, 335), False, 'from gdsctools.readers import Reader, drug_name_to_int\n'), ((410, 419), 'gdsctools.readers.Reader', 'Reader', (['(1)'], {}), '(1)\n', (416, 419), False, 'from gdsctools.readers import Reader, drug_name_to_int\n'), ((1041, 1049), 'gdsctools.readers.IC50', 'IC50', (['df'], {}), '(df)\n', (1045, 1049), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((3075, 3118), 'gdsctools.gdsctools_data', 'gdsctools_data', (['"""test_drug_decode_comp.csv"""'], {}), "('test_drug_decode_comp.csv')\n", (3089, 3118), False, 'from gdsctools import ic50_test, gdsctools_data\n'), ((3752, 3772), 'gdsctools.readers.drug_name_to_int', 'drug_name_to_int', (['(10)'], {}), '(10)\n', (3768, 3772), False, 'from gdsctools.readers import Reader, drug_name_to_int\n'), ((3784, 3823), 'gdsctools.readers.drug_name_to_int', 'drug_name_to_int', (['"""1234567890123456789"""'], {}), "('1234567890123456789')\n", (3800, 3823), False, 'from gdsctools.readers import Reader, drug_name_to_int\n'), ((1340, 1354), 'gdsctools.readers.IC50', 'IC50', (['filename'], {}), '(filename)\n', (1344, 1354), False, 'from gdsctools.readers import GenomicFeatures, IC50, DrugDecode\n'), ((3610, 3646), 'gdsctools.gdsctools_data', 'gdsctools_data', (['"""test_IC50_tabs.csv"""'], {}), "('test_IC50_tabs.csv')\n", (3624, 3646), False, 'from gdsctools import ic50_test, gdsctools_data\n')]
# -*- coding: utf-8 -*-
"""Class-based views and mixins for the ticketing app."""
from datetime import timedelta

from django.contrib.auth.models import Group
from django.contrib.auth.decorators import login_required
from django.contrib.messages.views import SuccessMessageMixin
from django.core.urlresolvers import reverse_lazy
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _
from django.views.generic import View
from django.views.generic.edit import CreateView, DeleteView, UpdateView
from django.views.generic.list import ListView

from .models import Ticket  # noqa
from .forms import TicketFormUser, TicketFormStaff
from .helpers import monitor, monitorfile
from .settings import (BASE_TEMPLATE, ST_FL_MNTR_OWNER, ST_FL_MNTR_STAFF,
                       ST_SETT_NUMBERS_STAFF, ST_SETT_NUMBERS_OWNER,
                       ST_SETT_TIMES_STAFF, ST_SETT_TIMES_OWNER,
                       ST_SETT_MAIN_TASKBAR, ST_STAFF_GNAME, ST_ADMIN_GNAME
                       )


# MIXINS
class ContextMixin(SuccessMessageMixin, View):
    """Inject page title, base template and ticket statistics into context.

    Staff and owners get different monitor/statistics settings; when the
    relevant setting is enabled, solved/pending percentages and resolution
    times are computed and exposed to the template.
    """

    def get_context_data(self, **kwargs):
        context = super(ContextMixin, self).get_context_data(**kwargs)
        context['title'] = self.title
        context['base_template'] = BASE_TEMPLATE
        is_staff = self.request.user.is_staff
        context['ST_MNTR'] = is_staff and ST_FL_MNTR_STAFF or ST_FL_MNTR_OWNER
        stt_numb = is_staff and ST_SETT_NUMBERS_STAFF or ST_SETT_NUMBERS_OWNER
        stt_times = is_staff and ST_SETT_TIMES_STAFF or ST_SETT_TIMES_OWNER
        context['ST_SETT_MAIN_TASKBAR'] = ST_SETT_MAIN_TASKBAR
        if stt_numb:
            context['stt_numb'] = stt_numb
            n_solved = Ticket.objects.n_solved(self.request.user)
            n_total = Ticket.objects.n_total(self.request.user)
            if n_solved and n_total:
                context['porc_solved'] = n_solved * 100 / n_total
                context['porc_pending'] = 100 - context['porc_solved']
            elif n_total:
                # tickets exist but none solved yet
                context['porc_solved'] = 0
                context['porc_pending'] = 100
            else:
                # no tickets at all: report everything as solved
                context['porc_solved'] = 100
                context['porc_pending'] = 0
        if stt_times:
            context['statistic_times'] = stt_times
            all_tickets = Ticket.objects.all()
            # state > 7 means the ticket has been resolved
            if all_tickets.filter(state__gt=7):
                context['fastest'] = all_tickets.filter(state__gt=7
                                     ).order_by('resolution_delta'
                                     )[0].humanized_delta()
                # NOTE(review): n_solved is only bound when stt_numb is
                # truthy; if the *_TIMES setting can be enabled while the
                # *_NUMBERS one is off, this raises NameError -- confirm.
                if n_solved:
                    context['media'] = timedelta(
                        seconds=sum(
                            [t.resolution_delta for t in all_tickets.filter(
                                state__gt=7)]) / n_solved
                    )
                else:
                    context['media'] = 'N/A'
        return context


class Login_required_mixin(View):
    """Wrap the generated view callable in ``login_required``."""

    @classmethod
    def as_view(self, **kwargs):
        return login_required(
            super(Login_required_mixin, self).as_view(**kwargs)
        )


class TicketMixin(object):
    """Role-aware queryset filtering for ticket views."""

    # NOTE(review): these class attributes run a DB query at import time
    # (the class body executes on module import) -- confirm this is
    # acceptable, e.g. for migrations against an empty database.
    staff_group = Group.objects.get_or_create(name=ST_STAFF_GNAME)[0]
    admin_group = Group.objects.get_or_create(name=ST_ADMIN_GNAME)[0]

    def get_queryset(self):
        user = self.request.user
        is_ticket_manager = user.groups.filter(
            name=self.staff_group.name
        ).exists()
        is_ticket_admin = user.groups.filter(
            name=self.admin_group.name
        ).exists()
        if is_ticket_admin:
            # admins see new tickets plus anything not yet closed (state < 9)
            return Ticket.objects.filter(
                Q(state=1) | Q(state__lt=9)
            )
        elif is_ticket_manager:
            # managers see new tickets plus their own open assignments
            return Ticket.objects.filter(
                Q(state=1) | Q(staff=user, state__lt=9)
            )
        # plain users only see their own tickets
        return Ticket.objects.filter(user=user)

    def get_object(self):
        return self.get_queryset().get(id=self.kwargs['ST_id'])
# END MIXINS


class TicketCreate(ContextMixin, Login_required_mixin, CreateView):
    title = _('Edit ticket')
    model = Ticket
    fields = ['ticket_type', 'severity', 'description', 'attachment', ]
    success_message = _('Ticket was successfully created')
    error_message = _('Please check the failures bellow')
    success_url = reverse_lazy('ticketList')

    def form_valid(self, form):
        # stamp the ticket with its author before saving
        form.instance.user = self.request.user
        return super(TicketCreate, self).form_valid(form)


class TicketDelete(ContextMixin, Login_required_mixin, TicketMixin,
                   DeleteView):
    title = _('Delete ticket')
    model = Ticket
    success_message = _('Ticket was successfully deleted')
    # FIX: success_url was assigned twice ('home', then 'ticketList');
    # the first assignment was dead code and has been removed.
    success_url = reverse_lazy('ticketList')


class TicketUpdate(ContextMixin, Login_required_mixin, TicketMixin,
                   UpdateView):
    title = _('Edit ticket')
    model = Ticket
    success_message = _('Ticket was successfully updated')
    # FIX: dead duplicate success_url assignment removed (was 'home',
    # immediately overwritten by 'ticketList').
    success_url = reverse_lazy('ticketList')

    def getHeader(self, date, user):
        """Build the "<date> [user: <name>] " prefix used by the monitor."""
        header_msg = _('{date} [user: {user}] ').format(date=date, user=user)
        return header_msg

    def get_form_class(self):
        # staff get the extended form, everyone else the restricted one
        if self.request.user.is_staff:
            return TicketFormStaff
        return TicketFormUser

    def form_valid(self, form):
        ticket = form.instance
        header_msg = self.getHeader(
            ticket.creation_date,
            self.request.user.username
        )
        # first staff member touching an unassigned ticket claims it
        if not ticket.staff and self.request.user.is_staff:
            ticket.staff = self.request.user
        # an owner editing a solved ticket (state 8) reopens it
        if not self.request.user.is_staff and ticket.state == 8:
            ticket.state = 2
            ticket.resolution_date = None
            ticket.resolution_delta = None
        monitor(monitorfile(ticket), header_msg)
        return super(TicketUpdate, self).form_valid(form)


class TicketList(ContextMixin, Login_required_mixin, TicketMixin, ListView):
    model = Ticket
    title = _('Ticket list')
[ "django.db.models.Q", "django.utils.translation.ugettext_lazy", "django.core.urlresolvers.reverse_lazy", "django.contrib.auth.models.Group.objects.get_or_create" ]
[((4069, 4085), 'django.utils.translation.ugettext_lazy', '_', (['"""Edit ticket"""'], {}), "('Edit ticket')\n", (4070, 4085), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4202, 4238), 'django.utils.translation.ugettext_lazy', '_', (['"""Ticket was successfully created"""'], {}), "('Ticket was successfully created')\n", (4203, 4238), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4260, 4297), 'django.utils.translation.ugettext_lazy', '_', (['"""Please check the failures bellow"""'], {}), "('Please check the failures bellow')\n", (4261, 4297), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4317, 4343), 'django.core.urlresolvers.reverse_lazy', 'reverse_lazy', (['"""ticketList"""'], {}), "('ticketList')\n", (4329, 4343), False, 'from django.core.urlresolvers import reverse_lazy\n'), ((4594, 4612), 'django.utils.translation.ugettext_lazy', '_', (['"""Delete ticket"""'], {}), "('Delete ticket')\n", (4595, 4612), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4652, 4672), 'django.core.urlresolvers.reverse_lazy', 'reverse_lazy', (['"""home"""'], {}), "('home')\n", (4664, 4672), False, 'from django.core.urlresolvers import reverse_lazy\n'), ((4696, 4732), 'django.utils.translation.ugettext_lazy', '_', (['"""Ticket was successfully deleted"""'], {}), "('Ticket was successfully deleted')\n", (4697, 4732), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4752, 4778), 'django.core.urlresolvers.reverse_lazy', 'reverse_lazy', (['"""ticketList"""'], {}), "('ticketList')\n", (4764, 4778), False, 'from django.core.urlresolvers import reverse_lazy\n'), ((4887, 4903), 'django.utils.translation.ugettext_lazy', '_', (['"""Edit ticket"""'], {}), "('Edit ticket')\n", (4888, 4903), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4943, 4963), 'django.core.urlresolvers.reverse_lazy', 'reverse_lazy', (['"""home"""'], {}), "('home')\n", (4955, 4963), False, 
'from django.core.urlresolvers import reverse_lazy\n'), ((4987, 5023), 'django.utils.translation.ugettext_lazy', '_', (['"""Ticket was successfully updated"""'], {}), "('Ticket was successfully updated')\n", (4988, 5023), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5043, 5069), 'django.core.urlresolvers.reverse_lazy', 'reverse_lazy', (['"""ticketList"""'], {}), "('ticketList')\n", (5055, 5069), False, 'from django.core.urlresolvers import reverse_lazy\n'), ((6067, 6083), 'django.utils.translation.ugettext_lazy', '_', (['"""Ticket list"""'], {}), "('Ticket list')\n", (6068, 6083), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3090, 3138), 'django.contrib.auth.models.Group.objects.get_or_create', 'Group.objects.get_or_create', ([], {'name': 'ST_STAFF_GNAME'}), '(name=ST_STAFF_GNAME)\n', (3117, 3138), False, 'from django.contrib.auth.models import Group\n'), ((3161, 3209), 'django.contrib.auth.models.Group.objects.get_or_create', 'Group.objects.get_or_create', ([], {'name': 'ST_ADMIN_GNAME'}), '(name=ST_ADMIN_GNAME)\n', (3188, 3209), False, 'from django.contrib.auth.models import Group\n'), ((5132, 5159), 'django.utils.translation.ugettext_lazy', '_', (['"""{date} [user: {user}] """'], {}), "('{date} [user: {user}] ')\n", (5133, 5159), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3593, 3603), 'django.db.models.Q', 'Q', ([], {'state': '(1)'}), '(state=1)\n', (3594, 3603), False, 'from django.db.models import Q\n'), ((3623, 3637), 'django.db.models.Q', 'Q', ([], {'state__lt': '(9)'}), '(state__lt=9)\n', (3624, 3637), False, 'from django.db.models import Q\n'), ((3750, 3760), 'django.db.models.Q', 'Q', ([], {'state': '(1)'}), '(state=1)\n', (3751, 3760), False, 'from django.db.models import Q\n'), ((3780, 3806), 'django.db.models.Q', 'Q', ([], {'staff': 'user', 'state__lt': '(9)'}), '(staff=user, state__lt=9)\n', (3781, 3806), False, 'from django.db.models import Q\n')]
from django.contrib import admin from .models import Author, Recode, Tag, Division, Language # Register your models here. admin.site.register(Author) admin.site.register(Recode) admin.site.register(Tag) admin.site.register(Division) admin.site.register(Language)
[ "django.contrib.admin.site.register" ]
[((123, 150), 'django.contrib.admin.site.register', 'admin.site.register', (['Author'], {}), '(Author)\n', (142, 150), False, 'from django.contrib import admin\n'), ((151, 178), 'django.contrib.admin.site.register', 'admin.site.register', (['Recode'], {}), '(Recode)\n', (170, 178), False, 'from django.contrib import admin\n'), ((179, 203), 'django.contrib.admin.site.register', 'admin.site.register', (['Tag'], {}), '(Tag)\n', (198, 203), False, 'from django.contrib import admin\n'), ((204, 233), 'django.contrib.admin.site.register', 'admin.site.register', (['Division'], {}), '(Division)\n', (223, 233), False, 'from django.contrib import admin\n'), ((234, 263), 'django.contrib.admin.site.register', 'admin.site.register', (['Language'], {}), '(Language)\n', (253, 263), False, 'from django.contrib import admin\n')]
# Generated by Django 3.2.3 on 2021-06-20 16:54 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('publishers', '0002_auto_20210608_2151'), ('writers', '0004_auto_20210621_0054'), ('books', '0007_alter_books_title'), ] operations = [ migrations.AlterField( model_name='books', name='classification', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='books.classification', verbose_name='Category'), ), migrations.AlterField( model_name='books', name='edition', field=models.TextField(null=True, verbose_name='Edition'), ), migrations.AlterField( model_name='books', name='price', field=models.DecimalField(decimal_places=2, max_digits=6, verbose_name='Price'), ), migrations.AlterField( model_name='books', name='price_vip', field=models.DecimalField(decimal_places=2, max_digits=6, verbose_name='VIP Price'), ), migrations.AlterField( model_name='books', name='publish_date', field=models.DateField(verbose_name='Publish Date'), ), migrations.AlterField( model_name='books', name='publishers', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='publishers.publishers', verbose_name='Publisher'), ), migrations.AlterField( model_name='books', name='storage', field=models.PositiveIntegerField(default=0, verbose_name='Storage'), ), migrations.AlterField( model_name='books', name='sub_classification', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='books.classificationsub', verbose_name='Sub-Category'), ), migrations.AlterField( model_name='books', name='title', field=models.CharField(max_length=64, verbose_name='Title'), ), migrations.AlterField( model_name='books', name='writers', field=models.ManyToManyField(to='writers.Writers', verbose_name='Author/Translator'), ), migrations.AddIndex( model_name='books', index=models.Index(fields=['title'], name='books_books_title_5d5dc9_idx'), ), ]
[ "django.db.models.TextField", "django.db.models.ManyToManyField", "django.db.models.ForeignKey", "django.db.models.CharField", "django.db.models.PositiveIntegerField", "django.db.models.Index", "django.db.models.DecimalField", "django.db.models.DateField" ]
[((472, 591), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""books.classification"""', 'verbose_name': '"""Category"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'books.classification', verbose_name='Category')\n", (489, 591), False, 'from django.db import migrations, models\n'), ((708, 759), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'verbose_name': '"""Edition"""'}), "(null=True, verbose_name='Edition')\n", (724, 759), False, 'from django.db import migrations, models\n'), ((879, 952), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(6)', 'verbose_name': '"""Price"""'}), "(decimal_places=2, max_digits=6, verbose_name='Price')\n", (898, 952), False, 'from django.db import migrations, models\n'), ((1076, 1153), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(6)', 'verbose_name': '"""VIP Price"""'}), "(decimal_places=2, max_digits=6, verbose_name='VIP Price')\n", (1095, 1153), False, 'from django.db import migrations, models\n'), ((1280, 1325), 'django.db.models.DateField', 'models.DateField', ([], {'verbose_name': '"""Publish Date"""'}), "(verbose_name='Publish Date')\n", (1296, 1325), False, 'from django.db import migrations, models\n'), ((1450, 1571), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""publishers.publishers"""', 'verbose_name': '"""Publisher"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'publishers.publishers', verbose_name='Publisher')\n", (1467, 1571), False, 'from django.db import migrations, models\n'), ((1688, 1750), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'default': '(0)', 'verbose_name': '"""Storage"""'}), "(default=0, verbose_name='Storage')\n", (1715, 1750), False, 'from django.db import migrations, 
models\n'), ((1883, 2009), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""books.classificationsub"""', 'verbose_name': '"""Sub-Category"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'books.classificationsub', verbose_name='Sub-Category')\n", (1900, 2009), False, 'from django.db import migrations, models\n'), ((2124, 2177), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)', 'verbose_name': '"""Title"""'}), "(max_length=64, verbose_name='Title')\n", (2140, 2177), False, 'from django.db import migrations, models\n'), ((2299, 2377), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'to': '"""writers.Writers"""', 'verbose_name': '"""Author/Translator"""'}), "(to='writers.Writers', verbose_name='Author/Translator')\n", (2321, 2377), False, 'from django.db import migrations, models\n'), ((2469, 2536), 'django.db.models.Index', 'models.Index', ([], {'fields': "['title']", 'name': '"""books_books_title_5d5dc9_idx"""'}), "(fields=['title'], name='books_books_title_5d5dc9_idx')\n", (2481, 2536), False, 'from django.db import migrations, models\n')]
import collections
import datetime
import logging
import os

import syncfin.db.model as mydb
import syncfin.utils.common as common
import syncfin.core.config as config

from prettytable import PrettyTable

log = logging.getLogger(__name__)


class Positions(object):
    """Imports ETF holdings ("positions") from CSV files into the Positions
    DB and reports, per ticker, the funds that hold it."""

    def parse_and_save(self, fpath):
        """
        Parses the file and saves to Positions DB.

        ``fpath`` is a CSV with a one-line header and rows of the form
        ``date,fund,company,ticker,<ignored>,shares,mvalue,weight``.
        Malformed rows are logged and skipped; rows already present for the
        same (date, fund, ticker) triple are ignored to avoid duplicates.
        """
        data = []
        with open(fpath, 'r') as fp:
            data = fp.readlines()
        data = [_.strip() for _ in data]

        with mydb.PositionsDB() as _db:
            _db.table = _db.TABLE
            # data[0] is the CSV header, so start at the second line.
            for line in data[1:]:
                try:
                    date, fund, company, ticker, _, shares, mvalue, weight = line.split(',')
                    # TODO: Convert date to standard format.
                    # Reverses '/'-separated parts, e.g. 'a/b/c' -> 'c-b-a'.
                    date = '-'.join(date.split('/')[::-1])
                    if _db.read(date=date, fund=fund, ticker=ticker):
                        # If entry is already present for a ticker in a fund on a given date,
                        # do not create duplicate entry and ignore it.
                        continue
                    _db.write(date=date, fund=fund, company=company,
                              ticker=ticker, shares=shares,
                              mvalue=mvalue, weight=weight
                              )
                except Exception as err:
                    # Best effort: one bad line must not abort the whole import.
                    log.info("Skipped line : %s", line)
                    log.error('%r' % err)

    def _update_from_file(self, fund_file):
        """Import a single fund CSV; failures are logged, never raised."""
        try:
            self.parse_and_save(fund_file)
        except Exception as _:
            log.error("Error in updating - %s", fund_file)

    def _update_from_files(self):
        """Import every file named in the ';'-separated SYNCFIN_FUND_FILES param."""
        fund_files = config.get_param('SYNCFIN_FUND_FILES')
        if not fund_files:
            return
        # Drop empty entries produced by leading/trailing/double ';'.
        fund_files = [x for x in fund_files.split(';') if x]
        for fund_file in fund_files:
            self._update_from_file(fund_file)

    def _update_from_dirs(self):
        """Recursively import every non-hidden file under each directory in
        the ';'-separated SYNCFIN_FUND_DIRS param."""
        funds_dirs = config.get_param('SYNCFIN_FUND_DIRS')
        if not funds_dirs:
            return
        funds_dirs = [x for x in funds_dirs.split(';') if x]
        # Add info from all the files in directory.
        for funds_dir in funds_dirs:
            for root, _, files in os.walk(funds_dir):
                for fpath in files:
                    # Skip hidden files such as .DS_Store.
                    if fpath.startswith('.'):
                        continue
                    self._update_from_file(os.path.join(root, fpath))

    def update(self):
        """
        Update database from CSV files of respective ETFs.
        ETFs must be in ./data/sample_etf.csv format.
        """
        self._update_from_files()
        self._update_from_dirs()

    def report(self, tickers):
        """Print a PrettyTable of every DB record for each ticker in
        ``tickers``, with the market-value column formatted as currency."""
        results = collections.defaultdict(list)
        with mydb.PositionsDB() as _db:
            _db.table = _db.TABLE
            for ticker in tickers:
                records = _db.read(ticker=ticker)
                results[ticker].extend(sorted(records))

        t = PrettyTable(['Date','Fund','Company','Ticker','Shares', 'Market value($) of holding', 'Weight(%)', 'Note'])
        print ("=" * 70)
        print (" " * 30, " Holdings (in ETFs) ")
        print ("=" * 70)
        for ticker in sorted(results):
            for holding in results[ticker]:
                # Records are tuples; copy to a list so column 5 can be reformatted.
                holding = list(holding)
                holding[5] = common.format_currency(holding[5])
                t.add_row(holding)
        print(t)
[ "syncfin.db.model.PositionsDB", "syncfin.utils.common.format_currency", "syncfin.core.config.get_param", "os.walk", "collections.defaultdict", "prettytable.PrettyTable", "os.path.join", "logging.getLogger" ]
[((214, 241), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (231, 241), False, 'import logging\n'), ((1861, 1899), 'syncfin.core.config.get_param', 'config.get_param', (['"""SYNCFIN_FUND_FILES"""'], {}), "('SYNCFIN_FUND_FILES')\n", (1877, 1899), True, 'import syncfin.core.config as config\n'), ((2146, 2183), 'syncfin.core.config.get_param', 'config.get_param', (['"""SYNCFIN_FUND_DIRS"""'], {}), "('SYNCFIN_FUND_DIRS')\n", (2162, 2183), True, 'import syncfin.core.config as config\n'), ((2897, 2926), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (2920, 2926), False, 'import collections\n'), ((3156, 3271), 'prettytable.PrettyTable', 'PrettyTable', (["['Date', 'Fund', 'Company', 'Ticker', 'Shares',\n 'Market value($) of holding', 'Weight(%)', 'Note']"], {}), "(['Date', 'Fund', 'Company', 'Ticker', 'Shares',\n 'Market value($) of holding', 'Weight(%)', 'Note'])\n", (3167, 3271), False, 'from prettytable import PrettyTable\n'), ((531, 549), 'syncfin.db.model.PositionsDB', 'mydb.PositionsDB', ([], {}), '()\n', (547, 549), True, 'import syncfin.db.model as mydb\n'), ((2415, 2433), 'os.walk', 'os.walk', (['funds_dir'], {}), '(funds_dir)\n', (2422, 2433), False, 'import os\n'), ((2940, 2958), 'syncfin.db.model.PositionsDB', 'mydb.PositionsDB', ([], {}), '()\n', (2956, 2958), True, 'import syncfin.db.model as mydb\n'), ((3541, 3575), 'syncfin.utils.common.format_currency', 'common.format_currency', (['holding[5]'], {}), '(holding[5])\n', (3563, 3575), True, 'import syncfin.utils.common as common\n'), ((2593, 2618), 'os.path.join', 'os.path.join', (['root', 'fpath'], {}), '(root, fpath)\n', (2605, 2618), False, 'import os\n')]
import argparse
import os
import sys

from src import config


def process_arguments():
    """Parse command-line options, copy them into the ``src.config`` module,
    validate directories/counts, and print a summary when verbose.

    Side effects: mutates ``config`` module attributes and calls
    ``sys.exit(1)`` on invalid input.
    """
    # Take arguments from commandline and parse them
    parser = argparse.ArgumentParser()
    parser.add_argument("--page-id", required=True, help="your facebook page-id")
    parser.add_argument("--pdir", required=True, help="directory of frames for main posts")
    parser.add_argument("--cdir", help="directory of frames to post as comments under main posts")
    parser.add_argument("--palbum-id", help="album-id to post frames from --pdir")
    parser.add_argument("--calbum-id", help="album-id to post frames from --cdir")
    parser.add_argument("--token", required=True, help="your facebook page access-token")
    parser.add_argument("--start", type=int, required=True, help="starting number of the frame to post")
    parser.add_argument("--count", type=int, help="how many frames to post starting from --start")
    parser.add_argument("--delay", type=int, help="delay between two frame-posts in seconds")
    parser.add_argument("--use-timestamp", action="store_true", help="parse timestamp from filename")
    parser.add_argument("-v", "--verbose", action="store_true", help="turns on verbosity")
    parser.add_argument("-n", "--dry-run", action="store_true", help="offline testing, no web request made")
    args = parser.parse_args()

    # Store the values from commandline into variables
    config.page_id = args.page_id
    config.pdir = args.pdir
    config.cdir = args.cdir
    config.palbum_id = args.palbum_id
    config.calbum_id = args.calbum_id
    config.token = args.token
    config.start = args.start
    config.count = args.count
    config.delay = args.delay
    config.use_timestamp = args.use_timestamp
    config.verbose = args.verbose
    config.dry_run = args.dry_run

    # Dry-run implies verbose so the user can see what would be posted.
    if config.dry_run:
        config.verbose = True
        print("DRY RUN MODE")
        print("No web request will be made, a dummy response will be returned for offline app testing.\n")

    if not os.path.isdir(config.pdir):
        print("Photo-frames directory is not valid.")
        sys.exit(1)
    if config.cdir:
        if not os.path.isdir(config.cdir):
            print("Comment-frames directory is not valid.")
            sys.exit(1)

    if not config.count:
        # If --count is not provided in commandline, adjust the count variable from the remaining number of frames
        config.count = len(os.listdir(config.pdir)) - config.start + 1
        # If count is less than 0, then that means start-number is greater than total frame-counts
        if config.count < 0:
            print(f"Invalid start-number. There are less than {config.start} frames in your directory.")
            sys.exit(1)

    if not config.delay:
        config.delay = 120  # Default delay is 120 seconds or 2 minutes

    if config.verbose:
        print(f"Page-id: {config.page_id}")
        print(f"Access-token: {config.token}")
        print(f"Photo-frames directory: {config.pdir}")
        if config.cdir:
            print(f"Comment-frames directory: {config.cdir}")
        else:
            print("Warning: Comment-frames directory is not provided, nothing will be posted in comments.")
        if config.palbum_id:
            print(f"Album-id for photo-frames: {config.palbum_id}")
        else:
            print("Warning: album-id for photo-frames is not provided, photo-frames will not be added to album")
        if config.calbum_id:
            # In case comment-album-id is provided but comment-photo directory isn't
            # NOTE(review): this check runs only in verbose/dry-run mode, so the
            # inconsistent-arguments error is skipped in normal runs — confirm intent.
            if not config.cdir:
                print("ERROR: Comment-frames directory not provided, not possible to post in album.")
                sys.exit(1)
            else:
                print(f"Album-id for comment-frames: {config.calbum_id}")
        else:
            print("Warning: Album-id for comment-frames is not provided, comment-frames will not be added to album")
        print(f"Starting frame-number: {config.start}")
        print(f"Number of frames to post: {config.count}")
        print(f"Delay: {config.delay} seconds")
[ "os.path.isdir", "os.listdir", "argparse.ArgumentParser", "sys.exit" ]
[((153, 178), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (176, 178), False, 'import argparse\n'), ((1998, 2024), 'os.path.isdir', 'os.path.isdir', (['config.pdir'], {}), '(config.pdir)\n', (2011, 2024), False, 'import os\n'), ((2088, 2099), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2096, 2099), False, 'import sys\n'), ((2136, 2162), 'os.path.isdir', 'os.path.isdir', (['config.cdir'], {}), '(config.cdir)\n', (2149, 2162), False, 'import os\n'), ((2236, 2247), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2244, 2247), False, 'import sys\n'), ((2705, 2716), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2713, 2716), False, 'import sys\n'), ((3686, 3697), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3694, 3697), False, 'import sys\n'), ((2416, 2439), 'os.listdir', 'os.listdir', (['config.pdir'], {}), '(config.pdir)\n', (2426, 2439), False, 'import os\n')]
#!/usr/bin/env python # coding: utf-8 # In[9]: #setup for implementing gpu brought from given file import numpy as np import os import torch import torch.nn as nn import torchvision import torchvision.transforms as transforms import torch.optim as optim import h5py import time # In[3]: #data augmentation transform_train = transforms.Compose([ transforms.RandomCrop(32, padding=4), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)), ]) transform = transforms.Compose([ transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)) ]) # In[5]: train_data = torchvision.datasets.CIFAR10(root='./data', train=True, download=True, transform=transform_train) test_data = torchvision.datasets.CIFAR10(root='./data', train=False, download=False, transform=transform) train_loader = torch.utils.data.DataLoader(train_data, batch_size= 100, shuffle=True, num_workers=2) test_loader = torch.utils.data.DataLoader(test_data, batch_size=100, shuffle=False, num_workers=2) # In[19]: class CNN(nn.Module): def __init__(self): super(CNN, self).__init__() self.convlayer1 = nn.Sequential( nn.Conv2d(3,64,4,1,2), nn.ReLU(inplace = True), nn.BatchNorm2d(64), nn.Conv2d(64,64,4,1,2), nn.ReLU(inplace = True), nn.BatchNorm2d(64), nn.MaxPool2d(2,2), nn.Dropout(p=0.5) ) self.convlayer2 = nn.Sequential( nn.Conv2d(64,128,4,1,2), nn.ReLU(inplace = True), nn.BatchNorm2d(128), nn.Conv2d(128,128,4,1,2), nn.ReLU(inplace = True), nn.BatchNorm2d(128), nn.MaxPool2d(2,2), nn.Dropout(p=0.5) ) self.convlayer3 = nn.Sequential( nn.Conv2d(128,256,4,1,2), nn.ReLU(inplace = True), nn.BatchNorm2d(256), nn.Conv2d(256,256,4,1,2), nn.ReLU(inplace = True), nn.BatchNorm2d(256), nn.MaxPool2d(2,2), nn.Dropout(p=0.5) ) self.fulllayer = nn.Sequential( nn.Linear(256*5*5,500), nn.ReLU(inplace = True), nn.Linear(500,10) ) def forward(self,x): x = self.convlayer1(x) x = self.convlayer2(x) x = self.convlayer3(x) x = x.view(-1,256*5*5) x = self.fulllayer(x) 
return x # In[20]: device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") print(device) model = CNN().to(device) criterion = nn.CrossEntropyLoss() optimizer = torch.optim.Adam(model.parameters(), lr = 0.0005) scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer,milestones = [60,120,160],gamma=0.1, last_epoch=-1) # In[ ]: epoch = 9 time1 = time.time() for epoch in range(epoch): scheduler.step() running_loss = 0.0 for i, data in enumerate(train_loader, 0): inputs, labels = data inputs, labels = inputs.to(device), labels.to(device) optimizer.zero_grad() outputs = model(inputs) loss = criterion(outputs, labels) loss.backward() optimizer.step() running_loss += loss.item() if i % 100 == 99: print ("Epoch", epoch+1, "%s minibatches"%i, "loss: %.4f"%(running_loss/1000.)) running_loss = 0.0 print('Complete') time2 = time.time() print(time2-time1) # In[22]: with torch.no_grad(): correct = 0 total = 0 for images, labels in test_loader: images = images.to(device) labels = labels.to(device) outputs = model(images) _, predicted = torch.max(outputs.data, 1) total += labels.size(0) correct += (predicted == labels).sum().item() print("Test accuracy:", (100 * correct / total)) # In[ ]:
[ "torch.nn.Dropout", "torch.nn.ReLU", "torch.utils.data.DataLoader", "torchvision.transforms.RandomHorizontalFlip", "torch.nn.Conv2d", "torch.nn.CrossEntropyLoss", "time.time", "torchvision.datasets.CIFAR10", "torchvision.transforms.ToTensor", "torch.nn.BatchNorm2d", "torch.cuda.is_available", ...
[((675, 776), 'torchvision.datasets.CIFAR10', 'torchvision.datasets.CIFAR10', ([], {'root': '"""./data"""', 'train': '(True)', 'download': '(True)', 'transform': 'transform_train'}), "(root='./data', train=True, download=True,\n transform=transform_train)\n", (703, 776), False, 'import torchvision\n'), ((825, 922), 'torchvision.datasets.CIFAR10', 'torchvision.datasets.CIFAR10', ([], {'root': '"""./data"""', 'train': '(False)', 'download': '(False)', 'transform': 'transform'}), "(root='./data', train=False, download=False,\n transform=transform)\n", (853, 922), False, 'import torchvision\n'), ((973, 1061), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['train_data'], {'batch_size': '(100)', 'shuffle': '(True)', 'num_workers': '(2)'}), '(train_data, batch_size=100, shuffle=True,\n num_workers=2)\n', (1000, 1061), False, 'import torch\n'), ((1116, 1204), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['test_data'], {'batch_size': '(100)', 'shuffle': '(False)', 'num_workers': '(2)'}), '(test_data, batch_size=100, shuffle=False,\n num_workers=2)\n', (1143, 1204), False, 'import torch\n'), ((2809, 2830), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (2828, 2830), True, 'import torch.nn as nn\n'), ((2905, 3009), 'torch.optim.lr_scheduler.MultiStepLR', 'torch.optim.lr_scheduler.MultiStepLR', (['optimizer'], {'milestones': '[60, 120, 160]', 'gamma': '(0.1)', 'last_epoch': '(-1)'}), '(optimizer, milestones=[60, 120, 160],\n gamma=0.1, last_epoch=-1)\n', (2941, 3009), False, 'import torch\n'), ((3035, 3046), 'time.time', 'time.time', ([], {}), '()\n', (3044, 3046), False, 'import time\n'), ((3626, 3637), 'time.time', 'time.time', ([], {}), '()\n', (3635, 3637), False, 'import time\n'), ((3676, 3691), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3689, 3691), False, 'import torch\n'), ((356, 392), 'torchvision.transforms.RandomCrop', 'transforms.RandomCrop', (['(32)'], {'padding': '(4)'}), '(32, padding=4)\n', 
(377, 392), True, 'import torchvision.transforms as transforms\n'), ((399, 432), 'torchvision.transforms.RandomHorizontalFlip', 'transforms.RandomHorizontalFlip', ([], {}), '()\n', (430, 432), True, 'import torchvision.transforms as transforms\n'), ((441, 462), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (460, 462), True, 'import torchvision.transforms as transforms\n'), ((468, 522), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['(0.5, 0.5, 0.5)', '(0.5, 0.5, 0.5)'], {}), '((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))\n', (488, 522), True, 'import torchvision.transforms as transforms\n'), ((564, 585), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (583, 585), True, 'import torchvision.transforms as transforms\n'), ((591, 645), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['(0.5, 0.5, 0.5)', '(0.5, 0.5, 0.5)'], {}), '((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))\n', (611, 645), True, 'import torchvision.transforms as transforms\n'), ((2720, 2745), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2743, 2745), False, 'import torch\n'), ((3887, 3913), 'torch.max', 'torch.max', (['outputs.data', '(1)'], {}), '(outputs.data, 1)\n', (3896, 3913), False, 'import torch\n'), ((1391, 1416), 'torch.nn.Conv2d', 'nn.Conv2d', (['(3)', '(64)', '(4)', '(1)', '(2)'], {}), '(3, 64, 4, 1, 2)\n', (1400, 1416), True, 'import torch.nn as nn\n'), ((1426, 1447), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1433, 1447), True, 'import torch.nn as nn\n'), ((1463, 1481), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(64)'], {}), '(64)\n', (1477, 1481), True, 'import torch.nn as nn\n'), ((1495, 1521), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(64)', '(4)', '(1)', '(2)'], {}), '(64, 64, 4, 1, 2)\n', (1504, 1521), True, 'import torch.nn as nn\n'), ((1531, 1552), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1538, 1552), True, 'import 
torch.nn as nn\n'), ((1568, 1586), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(64)'], {}), '(64)\n', (1582, 1586), True, 'import torch.nn as nn\n'), ((1600, 1618), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)', '(2)'], {}), '(2, 2)\n', (1612, 1618), True, 'import torch.nn as nn\n'), ((1631, 1648), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.5)'}), '(p=0.5)\n', (1641, 1648), True, 'import torch.nn as nn\n'), ((1712, 1739), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(128)', '(4)', '(1)', '(2)'], {}), '(64, 128, 4, 1, 2)\n', (1721, 1739), True, 'import torch.nn as nn\n'), ((1749, 1770), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1756, 1770), True, 'import torch.nn as nn\n'), ((1786, 1805), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(128)'], {}), '(128)\n', (1800, 1805), True, 'import torch.nn as nn\n'), ((1819, 1847), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128)', '(128)', '(4)', '(1)', '(2)'], {}), '(128, 128, 4, 1, 2)\n', (1828, 1847), True, 'import torch.nn as nn\n'), ((1857, 1878), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1864, 1878), True, 'import torch.nn as nn\n'), ((1894, 1913), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(128)'], {}), '(128)\n', (1908, 1913), True, 'import torch.nn as nn\n'), ((1927, 1945), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)', '(2)'], {}), '(2, 2)\n', (1939, 1945), True, 'import torch.nn as nn\n'), ((1958, 1975), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.5)'}), '(p=0.5)\n', (1968, 1975), True, 'import torch.nn as nn\n'), ((2039, 2067), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128)', '(256)', '(4)', '(1)', '(2)'], {}), '(128, 256, 4, 1, 2)\n', (2048, 2067), True, 'import torch.nn as nn\n'), ((2077, 2098), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2084, 2098), True, 'import torch.nn as nn\n'), ((2114, 2133), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(256)'], {}), '(256)\n', (2128, 2133), True, 'import torch.nn as 
nn\n'), ((2147, 2175), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(256)', '(4)', '(1)', '(2)'], {}), '(256, 256, 4, 1, 2)\n', (2156, 2175), True, 'import torch.nn as nn\n'), ((2185, 2206), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2192, 2206), True, 'import torch.nn as nn\n'), ((2222, 2241), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(256)'], {}), '(256)\n', (2236, 2241), True, 'import torch.nn as nn\n'), ((2255, 2273), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)', '(2)'], {}), '(2, 2)\n', (2267, 2273), True, 'import torch.nn as nn\n'), ((2286, 2303), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.5)'}), '(p=0.5)\n', (2296, 2303), True, 'import torch.nn as nn\n'), ((2366, 2393), 'torch.nn.Linear', 'nn.Linear', (['(256 * 5 * 5)', '(500)'], {}), '(256 * 5 * 5, 500)\n', (2375, 2393), True, 'import torch.nn as nn\n'), ((2402, 2423), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2409, 2423), True, 'import torch.nn as nn\n'), ((2439, 2457), 'torch.nn.Linear', 'nn.Linear', (['(500)', '(10)'], {}), '(500, 10)\n', (2448, 2457), True, 'import torch.nn as nn\n')]
# Minimal qiskit demo: builds a Bell state on qubits 0 and 1 and samples it
# 1000 times on the local QASM simulator.  Qubit/clbit 2 are allocated but
# never used.

import numpy as np
from qiskit import (
    QuantumCircuit,
    execute,
    Aer)
from qiskit.visualization import plot_histogram

# Use Aer's qasm_simulator
simulator = Aer.get_backend('qasm_simulator')

# Create a Quantum Circuit acting on the q register
circuit = QuantumCircuit(3, 3)

# Add a H gate on qubit 0
circuit.h(0)

# Add a CX (CNOT) gate on control qubit 0 and target qubit 1
circuit.cx(0, 1)

# Map the quantum measurement to the classical bits
circuit.measure([0, 1], [0, 1])

# Execute the circuit on the qasm simulator
job = execute(circuit, simulator, shots=1000)

# Grab results from the job
result = job.result()

# Returns counts
counts = result.get_counts(circuit)
print("\nTotal count for 00 and 11 are:", counts)

# Draw the circuit
# NOTE(review): in a plain script the return values of draw() and
# plot_histogram() are discarded; they only render inline in a notebook.
circuit.draw()

plot_histogram(counts)
[ "qiskit.execute", "qiskit.visualization.plot_histogram", "qiskit.QuantumCircuit", "qiskit.Aer.get_backend" ]
[((170, 203), 'qiskit.Aer.get_backend', 'Aer.get_backend', (['"""qasm_simulator"""'], {}), "('qasm_simulator')\n", (185, 203), False, 'from qiskit import QuantumCircuit, execute, Aer\n'), ((267, 287), 'qiskit.QuantumCircuit', 'QuantumCircuit', (['(3)', '(3)'], {}), '(3, 3)\n', (281, 287), False, 'from qiskit import QuantumCircuit, execute, Aer\n'), ((543, 582), 'qiskit.execute', 'execute', (['circuit', 'simulator'], {'shots': '(1000)'}), '(circuit, simulator, shots=1000)\n', (550, 582), False, 'from qiskit import QuantumCircuit, execute, Aer\n'), ((773, 795), 'qiskit.visualization.plot_histogram', 'plot_histogram', (['counts'], {}), '(counts)\n', (787, 795), False, 'from qiskit.visualization import plot_histogram\n')]
from urllib.request import urlopen # if has Chinese, apply decode() html = urlopen("https://mofanpy.com/static/scraping/basic-structure.html").read().decode('utf-8') print(html) import re res = re.findall(r"<title>(.+?)</title>", html) print("\nPage title is: ", res[0]) # Page title is: Scraping tutorial 1 | 莫烦Python res = re.findall(r"<p>(.*?)</p>", html, flags=re.DOTALL) # re.DOTALL if multi line print("\nPage paragraph is: ", res[0]) # Page paragraph is: # 这是一个在 <a href="https://mofanpy.com/">莫烦Python</a> # <a href="https://mofanpy.com/tutorials/scraping">爬虫教程</a> 中的简单测试. res = re.findall(r'href="(.*?)"', html) print("\nAll links: ", res) # All links: ['https://mofanpy.com/static/img/description/tab_icon.png', 'https://mofanpy.com/', 'https://mofanpy.com/tutorials/scraping']
[ "re.findall", "urllib.request.urlopen" ]
[((197, 237), 're.findall', 're.findall', (['"""<title>(.+?)</title>"""', 'html'], {}), "('<title>(.+?)</title>', html)\n", (207, 237), False, 'import re\n'), ((331, 380), 're.findall', 're.findall', (['"""<p>(.*?)</p>"""', 'html'], {'flags': 're.DOTALL'}), "('<p>(.*?)</p>', html, flags=re.DOTALL)\n", (341, 380), False, 'import re\n'), ((607, 639), 're.findall', 're.findall', (['"""href="(.*?)\\""""', 'html'], {}), '(\'href="(.*?)"\', html)\n', (617, 639), False, 'import re\n'), ((76, 143), 'urllib.request.urlopen', 'urlopen', (['"""https://mofanpy.com/static/scraping/basic-structure.html"""'], {}), "('https://mofanpy.com/static/scraping/basic-structure.html')\n", (83, 143), False, 'from urllib.request import urlopen\n')]
from collections import deque

# Tree "meeting point" problem: for each query pair (c, d), two people walk
# toward each other along the unique tree path; they meet at a node ("Town")
# when the path length is even, i.e. when both endpoints have the same depth
# parity, and on an edge ("Road") otherwise.

# Read the tree: n nodes, n-1 edges (1-based in the input, 0-based here).
node_count, query_count = map(int, input().split())
adjacency = [[] for _ in range(node_count)]
for _ in range(node_count - 1):
    u, v = map(int, input().split())
    adjacency[u - 1].append(v - 1)
    adjacency[v - 1].append(u - 1)
queries = [list(map(int, input().split())) for _ in range(query_count)]

# BFS from node 0 recording each node's depth parity; -1 marks "unvisited".
parity = [-1] * node_count
parity[0] = 0
frontier = deque([0])
while frontier:
    current = frontier.popleft()
    for neighbor in adjacency[current]:
        if parity[neighbor] == -1:
            parity[neighbor] = (parity[current] + 1) % 2
            frontier.append(neighbor)

# Same parity -> even path length -> they meet in a town.
for c, d in queries:
    print("Town" if parity[c - 1] == parity[d - 1] else "Road")
[ "collections.deque" ]
[((281, 288), 'collections.deque', 'deque', ([], {}), '()\n', (286, 288), False, 'from collections import deque\n')]
#!/usr/bin python3

# "What's new" banners: pulls the CMS change log from blob storage and
# returns the entries that should be shown as banners for a given day.

# Python:
import re
from json import loads
from datetime import datetime

# Internal:
from app.storage import AsyncStorageClient
from app.caching import from_cache_or_func


__all__ = [
    'get_whats_new_banners'
]


# Blob-storage location of the CMS change log.
BANNER_DATA = dict(
    container="publicdata",
    path="assets/cms/changeLog.json"
)

# Characters stripped from / collapsed in headlines when building anchors.
special_chars_pattern = re.compile(r"[\"')]")
to_underscore_pattern = re.compile(r"[\s.(&,]+")


async def _get_whats_new_banners(timestamp: str):
    """Download the change log and return the banner entries whose date
    matches the date part of ``timestamp``, each annotated with a slug-style
    ``anchor`` and a human-readable ``formatted_date``."""
    async with AsyncStorageClient(**BANNER_DATA) as client:
        data_io = await client.download()
        raw_data = await data_io.readall()

    full_data = loads(raw_data.decode())
    if full_data is None:
        full_data = dict()
    entries = full_data.get("changeLog", list())

    # Compare on the calendar date only (drop the time component).
    datestamp = timestamp.split("T")[0]

    results = list()
    for banner in entries:
        if banner["date"] != datestamp or not banner.get("displayBanner", False):
            continue
        # Slugify the headline: drop quotes/parens, collapse separators to "_".
        raw_anchor = special_chars_pattern.sub("", banner["headline"].lower())
        banner['anchor'] = to_underscore_pattern.sub("_", raw_anchor)
        parsed_date = datetime.strptime(banner['date'], '%Y-%m-%d')
        # %-d (no zero padding) is a glibc extension — fine on Linux.
        banner['formatted_date'] = f"{parsed_date:%-d %B %Y}"
        results.append(banner)

    return results


async def get_whats_new_banners(request, timestamp):
    """Cached wrapper around :func:`_get_whats_new_banners` (15-minute TTL)."""
    return await from_cache_or_func(
        request=request,
        func=_get_whats_new_banners,
        prefix="FRONTEND::CL::",
        expire=60 * 15,
        timestamp=timestamp
    )
[ "app.storage.AsyncStorageClient", "app.caching.from_cache_or_func", "datetime.datetime.strptime", "re.compile" ]
[((556, 578), 're.compile', 're.compile', (['"""[\\\\"\')]"""'], {}), '(\'[\\\\"\\\')]\')\n', (566, 578), False, 'import re\n'), ((602, 626), 're.compile', 're.compile', (['"""[\\\\s.(&,]+"""'], {}), "('[\\\\s.(&,]+')\n", (612, 626), False, 'import re\n'), ((1565, 1696), 'app.caching.from_cache_or_func', 'from_cache_or_func', ([], {'request': 'request', 'func': '_get_whats_new_banners', 'prefix': '"""FRONTEND::CL::"""', 'expire': '(60 * 15)', 'timestamp': 'timestamp'}), "(request=request, func=_get_whats_new_banners, prefix=\n 'FRONTEND::CL::', expire=60 * 15, timestamp=timestamp)\n", (1583, 1696), False, 'from app.caching import from_cache_or_func\n'), ((694, 727), 'app.storage.AsyncStorageClient', 'AsyncStorageClient', ([], {}), '(**BANNER_DATA)\n', (712, 727), False, 'from app.storage import AsyncStorageClient\n'), ((1386, 1431), 'datetime.datetime.strptime', 'datetime.strptime', (["banner['date']", '"""%Y-%m-%d"""'], {}), "(banner['date'], '%Y-%m-%d')\n", (1403, 1431), False, 'from datetime import datetime\n')]
import logging import traceback import pkg_resources from gi.repository import Gtk, GdkPixbuf from .devicestatusbar import DeviceStatusBar from .logtreeview import LogTreeView from ..accounting import UserManager, ProjectManager from ..core import ToolWindow, error_message, ToolFrame, question_message from ..devices import Motors, GeniX, TPG201, HaakePhoenix, Pilatus, DeviceConnections from ..diagnostics import ResourceUsage from ..measurement import ScanMeasurement, SingleExposure, TransmissionMeasurement, ScriptMeasurement, CommandHelpDialog from ..setup import EditConfig, SampleEdit, DefineGeometry, Calibration from ..toolframes import ResourceUsageFrame, NextFSN, ShutterBeamstop, AccountingFrame from ..tools import ExposureViewer, CapillaryMeasurement, ScanViewer, MaskEditor, DataReduction, OptimizeGeometry from ...core.commands.command import CommandError from ...core.instrument.instrument import Instrument from ...core.services.interpreter import Interpreter # initialize the logger for the main window level. 
logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) class CollectingHandler(logging.Handler): instance = None def __init__(self): self.collected = [] if self.__class__.instance is not None: raise RuntimeError('This is a singleton class!') super().__init__() self.__class__.instance = self @classmethod def get_default(cls): return cls.instance def emit(self, record): self.collected.append(record) class MainWindow(object): toolwindow_registry = [ ('sampleeditor', SampleEdit, 'samplesetup', 'setup_sampleedit.glade', {}), ('definegeometry', DefineGeometry, 'definegeometry', 'setup_definegeometry.glade', {}), ('editconfig', EditConfig, 'editconfig', 'setup_editconfig.glade', {}), ('calibration', Calibration, 'calibration', 'setup_calibration.glade', {}), ('xraysource', GeniX, 'genix', 'devices_genix.glade', {}), ('detector', Pilatus, 'pilatus', 'devices_pilatus.glade', {}), ('motors', Motors, 'motoroverview', 'devices_motors.glade', {}), ('vacgauge', TPG201, 'vacgauge', 'devices_tpg201.glade', {}), ('temperaturestage', HaakePhoenix, 'haakephoenix', 'devices_haakephoenix.glade', {}), ('connections', DeviceConnections, 'deviceconnections', 'devices_connection.glade', {}), ('scanmeasurement', ScanMeasurement, 'scan', 'measurement_scan.glade', {}), ('singleexposure', SingleExposure, 'singleexposure', 'measurement_singleexposure.glade', {}), ('transmission', TransmissionMeasurement, 'measuretransmission', 'measurement_transmission.glade', {}), ('scriptmeasurement', ScriptMeasurement, 'script', 'measurement_script.glade', {}), ('maskeditor', MaskEditor, 'maskeditor', 'tools_maskeditor.glade', {}), ('imgviewer', ExposureViewer, 'calibration', 'setup_calibration.glade', {}), ('viewscans', ScanViewer, 'scanviewer', 'tools_scanviewer.glade', {}), ('capillarymeasurement', CapillaryMeasurement, 'capillarymeasurement', 'tools_capillarymeasurement.glade', {}), ('datareduction', DataReduction, 'datareduction', 'tools_datareduction.glade', {}), ('resourceusage', 
ResourceUsage, 'resourceusagewindow', 'diagnostics_resourceusage.glade', {}), ('commandhelp', CommandHelpDialog, 'commandhelpbrowser', 'help_commandhelpbrowser.glade', {'insert': 'on_insert_command'}), ('users', UserManager, 'usermanager', 'accounting_usermanager.glade', {}), ('projects', ProjectManager, 'projectmanager', 'accounting_projectmanager.glade', {}), ('optimizegeometry', OptimizeGeometry, 'optimizegeometry', 'tools_optimizegeometry.glade', {}), ] class LogHandler(logging.Handler): def __init__(self, mainwindow): super().__init__() self.mw = mainwindow def emit(self, record): message = self.format(record) # GLib.idle_add(lambda msg=message, rec=record: self.mw.writelogline(msg, rec) and False) self.mw.writelogline(message, record) def __init__(self, instrument: Instrument): # initialize the main window self.builder = Gtk.Builder.new_from_file( pkg_resources.resource_filename('cct', 'resource/glade/mainwindow.glade')) self.builder.set_application(Gtk.Application.get_default()) self.widget = self.builder.get_object('mainwindow') self.builder.connect_signals(self) self.widget.set_show_menubar(True) self.widget.connect('delete-event', self.on_delete_event) self.widget.set_default_icon_list([GdkPixbuf.Pixbuf.new_from_file_at_size( pkg_resources.resource_filename('cct', 'resource/icons/scalable/cctlogo.svg'), sz, sz) for sz in [16, 32, 48, 64, 128, 256]]) self.widget.show_all() # Initialize the log textbuffer self._logtags = self.builder.get_object('log_texttags') self._logbuffer = self.builder.get_object('logbuffer') self._logbuffer.create_mark( 'log_end', self._logbuffer.get_end_iter(), False) self._logview = self.builder.get_object('logtext') self._logview2 = LogTreeView() self.builder.get_object('logviewer_stack').add_titled(self._logview2.widget, 'treelogviewer', 'Log tree') # initialize custom log handler for the root logger. This is responsible for printing # all log records in the main window. 
self._loghandler = self.LogHandler(self) self._loghandler.setLevel(logging.DEBUG) logging.root.addHandler(self._loghandler) self._loghandler.setFormatter(logging.Formatter( '%(asctime)s: %(levelname)s: %(message)s (Origin: %(name)s:%(lineno)d)')) ch = CollectingHandler.get_default() for record in ch.collected: self._loghandler.emit(record) logging.root.removeHandler(ch) del ch.collected self._toolwindows = {} self._toolwindow_connections = {} self.instrument = instrument self._instrumentconnections = [ self.instrument.connect('shutdown', self.on_instrument_shutdown), self.instrument.connect('device-connected', lambda i, d: self.set_menu_sensitivity()), self.instrument.connect('device-disconnected', lambda i, d, b: self.set_menu_sensitivity()), ] if self.instrument.online: self.instrument.connect_devices() logger.debug('Mainwindow: devices connected.') self._devicestatus = DeviceStatusBar(self.instrument) logger.debug('DeviceStatusBar initialized') self.builder.get_object('devicestatus_box').pack_start(self._devicestatus, True, True, 0) self._toolframes = {} for framename, cls, gladefile, mainwidget in [ ('resourceusage', ResourceUsageFrame, 'toolframe_telemetry.glade', 'telemetryframe'), ('nextfsn', NextFSN, 'toolframe_nextfsn.glade', 'nextfsnframe'), ('shutterbeamstop', ShutterBeamstop, 'toolframe_shutter.glade', 'shutterframe'), ('accounting', AccountingFrame, 'toolframe_accounting.glade', 'accountingframe') ]: try: self._toolframes[framename] = cls(gladefile, mainwidget, self.instrument) self.builder.get_object('toolbox').pack_end(self._toolframes[framename].widget, False, True, 0) except Exception: logger.error('Cannot open toolframe ' + framename) logger.debug('Initializing toolframes done.') self.widget.show_all() self.widget.set_title('Credo Control Tool v{}'.format(pkg_resources.get_distribution('cct').version)) logger.debug('Connecting to interpreter') interpreter = self.instrument.services['interpreter'] self._interpreterconnections = [ 
interpreter.connect('cmd-return', self.on_interpreter_cmd_return), interpreter.connect('cmd-fail', self.on_interpreter_cmd_fail), interpreter.connect('pulse', self.on_interpreter_cmd_pulse), interpreter.connect('progress', self.on_interpreter_cmd_progress), interpreter.connect('cmd-message', self.on_interpreter_cmd_message), interpreter.connect('idle-changed', self.on_interpreter_idle_changed), ] self._commandhistory = [] self._historyindex = None self.on_change_logviewer(self.builder.get_object('menuitem_advancedlogviewer')) self.set_menu_sensitivity() def on_change_logviewer(self, checkmenuitem: Gtk.CheckMenuItem): if checkmenuitem.get_active(): self.builder.get_object('logviewer_stack').set_visible_child_name('treelogviewer') else: self.builder.get_object('logviewer_stack').set_visible_child_name('textlogviewer') def on_command_entry_keyevent(self, entry: Gtk.Entry, event): if event.hardware_keycode == 111: # cursor up key if self._commandhistory: if self._historyindex is None: self._historyindex = len(self._commandhistory) self._historyindex = max(0, self._historyindex - 1) entry.set_text(self._commandhistory[self._historyindex]) return True # inhibit further processing of this key event elif event.hardware_keycode == 116: # cursor down key if self._commandhistory: if self._historyindex is None: self._historyindex = -1 self._historyindex = min(self._historyindex + 1, len(self._commandhistory) - 1) entry.set_text(self._commandhistory[self._historyindex]) return True # inhibit further processing of this key event return False def on_interpreter_idle_changed(self, interpreter: Instrument, idle: bool): if not idle: self.builder.get_object('command_entry').set_sensitive(idle) if self.builder.get_object('execute_button').get_label() == 'Execute': self.builder.get_object('execute_button').set_sensitive(idle) if idle: self.builder.get_object('command_entry').set_sensitive(idle) self.builder.get_object('execute_button').set_sensitive(idle) def on_command_execute(self, 
button: Gtk.Button): if button.get_label() == 'Execute': cmd = self.builder.get_object('command_entry').get_text() try: self.instrument.services['interpreter'].execute_command(cmd) except CommandError as ce: error_message(self.widget, 'Cannot execute command', str(ce)) else: button.set_label('Stop') if (not self._commandhistory) or (self._commandhistory and self._commandhistory[-1] != cmd): self._commandhistory.append(self.builder.get_object('command_entry').get_text()) elif button.get_label() == 'Stop': self.instrument.services['interpreter'].kill() else: raise ValueError(button.get_label()) # noinspection PyUnusedLocal def on_interpreter_cmd_return(self, interpreter: Interpreter, commandname: str, returnvalue: object): self.builder.get_object('command_entry').set_sensitive(True) self.builder.get_object('command_entry').set_progress_fraction(0) self.builder.get_object('command_entry').set_text('') self.builder.get_object('command_entry').grab_focus() self.builder.get_object('execute_button').set_label('Execute') self._historyindex = None self.builder.get_object('statusbar').pop(1) # noinspection PyUnusedLocal,PyMethodMayBeStatic def on_interpreter_cmd_fail(self, interpreter, commandname, exc, tb): logger.error('Command {} failed: {} {}'.format(commandname, str(exc), tb)) # noinspection PyUnusedLocal def on_interpreter_cmd_message(self, interpreter, commandname, message): self.builder.get_object('statusbar').pop(1) self.builder.get_object('statusbar').push(1, message) logger.info('Command {} :: {}'.format(commandname, message)) # noinspection PyUnusedLocal def on_interpreter_cmd_pulse(self, interpreter, commandname, message): self.builder.get_object('command_entry').progress_pulse() self.builder.get_object('statusbar').pop(1) self.builder.get_object('statusbar').push(1, message) # noinspection PyUnusedLocal def on_interpreter_cmd_progress(self, interpreter, commandname, message, fraction): self.builder.get_object('command_entry').set_progress_fraction(fraction) 
self.builder.get_object('statusbar').pop(1) self.builder.get_object('statusbar').push(1, message) def on_delete_event(self, window, event): return self.on_quit() def writelogline(self, message: str, record: logging.LogRecord): assert hasattr(record, 'message') if record.levelno >= logging.CRITICAL: tag = self._logtags.lookup('critical') elif record.levelno >= logging.ERROR: tag = self._logtags.lookup('error') elif record.levelno >= logging.WARNING: tag = self._logtags.lookup('warning') else: tag = self._logtags.lookup('normal') enditer = self._logbuffer.get_end_iter() self._logbuffer.insert_with_tags(enditer, message + '\n', tag) self._logview.scroll_to_mark( self._logbuffer.get_mark('log_end'), 0.1, False, 0, 0) if record.levelno >= logging.INFO: self.builder.get_object('statusbar').pop(0) self.builder.get_object('statusbar').push(0, record.message.split('\n')[0]) self._logview2.add_logentry(record) return False def construct_and_run_dialog(self, windowclass, toplevelname, gladefile, windowtitle, connections): assert issubclass(windowclass, ToolWindow) key = str(windowclass) + str(toplevelname) logger.debug('Construct & run dialog: ' + gladefile) if key not in self._toolwindows: logger.debug('Constructing needed for dialog ' + gladefile) try: self._toolwindows[key] = windowclass(gladefile, toplevelname, self.instrument, windowtitle) except ToolFrame.DeviceException as ex: error_message(self.widget, 'Could not open window {}'.format(windowtitle), 'Missing required device: {}'.format(ex.args[0])) return except Exception as exc: error_message(self.widget, 'Could not open window {}'.format(windowtitle), '{}\n{}'.format(str(exc), traceback.format_exc())) return # if self._toolwindows[key].widget.destroyed(): # logger.error('Error while constructing dialog ' + gladefile) # del self._toolwindows[key] logger.debug('Successful construction of dialog ' + gladefile) assert key not in self._toolwindow_connections logger.debug('Connecting signals for dialog ' + gladefile) try: 
self._toolwindow_connections[key] = [ self._toolwindows[key].connect('destroy', self.on_toolwindow_destroyed, key)] for signal in connections: self._toolwindow_connections[key].append( self._toolwindows[key].connect(signal, getattr(self, connections[signal]))) except Exception as exc: logger.error('Error connecting signals to dialog ' + gladefile) try: for c in self._toolwindow_connections[key]: self._toolwindows[key].disconnect(c) self._toolwindows[key].destroy() raise finally: del self._toolwindow_connections[key] del self._toolwindows[key] logger.debug('Dialog should be up and running: ' + gladefile) logger.debug('Presenting dialog ' + gladefile) return self._toolwindows[key].widget.present() def on_toolwindow_destroyed(self, toolwindow: ToolWindow, key): logger.debug('Dialog destroyed: ' + toolwindow.gladefile) assert key in self._toolwindow_connections for c in self._toolwindow_connections[key]: toolwindow.disconnect(c) del self._toolwindow_connections[key] del self._toolwindows[key] logger.debug('Mainwindow keeps no reference for dialog ' + toolwindow.gladefile) def on_quit(self): if self.instrument.is_busy(): if not question_message(self.widget, 'Confirm quit', 'The instrument is busy. 
Do you still want to quit?'): return True logger.info('Shutdown requested.') self.instrument.save_state() self.instrument.shutdown() return True def on_instrument_shutdown(self, instrument): logger.info('Instrument shutdown finished.') for c in self._instrumentconnections: instrument.disconnect(c) self._instrumentconnections = [] logging.root.removeHandler(self._loghandler) self.widget.destroy() Gtk.Application.get_default().quit() def on_menu(self, menuitem: Gtk.MenuItem): name = menuitem.get_name() if not (name.startswith('menuitem') or name.startswith('toolitem')): raise ValueError('Invalid menu item name: {}'.format(name)) name = name.split('_', 1)[1] if name == 'quit': return self.on_quit() elif name == 'savesettings': self.instrument.save_state() elif name == 'about': builder = Gtk.Builder.new_from_file( pkg_resources.resource_filename('cct', 'resource/glade/help_about.glade')) ad = builder.get_object('aboutdialog') ad.set_version(pkg_resources.get_distribution('cct').version) ad.set_logo(GdkPixbuf.Pixbuf.new_from_file_at_size( pkg_resources.resource_filename('cct', 'resource/icons/scalable/cctlogo.svg'), 256, 256)) ad.run() ad.destroy() del ad else: for nm, cls, toplevelname, gladefile, connections in self.toolwindow_registry: if nm != name: continue self.construct_and_run_dialog(cls, toplevelname, gladefile, menuitem.get_label().replace('_', ''), connections) return False raise ValueError(name) def on_insert_command(self, commandhelpdialog: CommandHelpDialog, command: str): self.builder.get_object('command_entry').set_text(command) def on_toolbar(self, toolbutton): return self.on_menu(toolbutton) def set_menu_sensitivity(self): for nm, cls, toplevelname, gladefile, connections in self.toolwindow_registry: requirementsmet = cls.requirements_met(self.instrument) for what in ['menuitem', 'toolitem']: try: self.builder.get_object(what + '_' + nm).set_sensitive(requirementsmet) except AttributeError: pass
[ "pkg_resources.get_distribution", "gi.repository.Gtk.Application.get_default", "logging.getLogger", "pkg_resources.resource_filename", "logging.Formatter", "traceback.format_exc", "logging.root.removeHandler", "logging.root.addHandler" ]
[((1042, 1069), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1059, 1069), False, 'import logging\n'), ((5673, 5714), 'logging.root.addHandler', 'logging.root.addHandler', (['self._loghandler'], {}), '(self._loghandler)\n', (5696, 5714), False, 'import logging\n'), ((5991, 6021), 'logging.root.removeHandler', 'logging.root.removeHandler', (['ch'], {}), '(ch)\n', (6017, 6021), False, 'import logging\n'), ((17349, 17393), 'logging.root.removeHandler', 'logging.root.removeHandler', (['self._loghandler'], {}), '(self._loghandler)\n', (17375, 17393), False, 'import logging\n'), ((4327, 4400), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['"""cct"""', '"""resource/glade/mainwindow.glade"""'], {}), "('cct', 'resource/glade/mainwindow.glade')\n", (4358, 4400), False, 'import pkg_resources\n'), ((4439, 4468), 'gi.repository.Gtk.Application.get_default', 'Gtk.Application.get_default', ([], {}), '()\n', (4466, 4468), False, 'from gi.repository import Gtk, GdkPixbuf\n'), ((5753, 5849), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s: %(levelname)s: %(message)s (Origin: %(name)s:%(lineno)d)"""'], {}), "(\n '%(asctime)s: %(levelname)s: %(message)s (Origin: %(name)s:%(lineno)d)')\n", (5770, 5849), False, 'import logging\n'), ((17432, 17461), 'gi.repository.Gtk.Application.get_default', 'Gtk.Application.get_default', ([], {}), '()\n', (17459, 17461), False, 'from gi.repository import Gtk, GdkPixbuf\n'), ((4777, 4854), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['"""cct"""', '"""resource/icons/scalable/cctlogo.svg"""'], {}), "('cct', 'resource/icons/scalable/cctlogo.svg')\n", (4808, 4854), False, 'import pkg_resources\n'), ((7763, 7800), 'pkg_resources.get_distribution', 'pkg_resources.get_distribution', (['"""cct"""'], {}), "('cct')\n", (7793, 7800), False, 'import pkg_resources\n'), ((17973, 18046), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['"""cct"""', 
'"""resource/glade/help_about.glade"""'], {}), "('cct', 'resource/glade/help_about.glade')\n", (18004, 18046), False, 'import pkg_resources\n'), ((14858, 14880), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (14878, 14880), False, 'import traceback\n'), ((18126, 18163), 'pkg_resources.get_distribution', 'pkg_resources.get_distribution', (['"""cct"""'], {}), "('cct')\n", (18156, 18163), False, 'import pkg_resources\n'), ((18253, 18330), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['"""cct"""', '"""resource/icons/scalable/cctlogo.svg"""'], {}), "('cct', 'resource/icons/scalable/cctlogo.svg')\n", (18284, 18330), False, 'import pkg_resources\n')]
#! /usr/bin/env python # -*-coding:UTF-8-*- # __author__ : pighui # __time__ : 2019-11-2 上午11:51 import requests def get_proxies(params: dict = {}): ''' :param params: 参数字典 默认为空 :return: 返回一个包含多条代理信息的列表,列表的每一个元素是一个字典 ''' try: response = requests.get('http://127.0.0.1:8888/ip/', params=params) if response.status_code == 200: result = response.json() return [{d['protocol']: 'http://' + d['ip'] + ':' + d['port']} for d in result] except ConnectionError: return None def random_proxy(): ''' :return: 返回一个代理信息字典 ''' try: response = requests.get('http://127.0.0.1:8888/ip/random/') if response.status_code == 200: result = response.json() return result except ConnectionError: return None if __name__ == '__main__': # 获取一条代理 ip1 = get_proxies() print(ip1) # 获取多条代理 ip2 = get_proxies({'count': 3}) print(ip2) # 获取匿名代理 ip3 = get_proxies({'anonymity': 1}) print(ip3) # 获取https代理 ip4 = get_proxies({'protocol': 'https'}) print(ip4) # 获取多条匿名的https代理 ip5 = get_proxies({'count': 3, 'anonymity': 1, 'protocol': 'https'}) print(ip5) # 随机获取一条代理 ip6 = random_proxy() print(ip6)
[ "requests.get" ]
[((269, 325), 'requests.get', 'requests.get', (['"""http://127.0.0.1:8888/ip/"""'], {'params': 'params'}), "('http://127.0.0.1:8888/ip/', params=params)\n", (281, 325), False, 'import requests\n'), ((633, 681), 'requests.get', 'requests.get', (['"""http://127.0.0.1:8888/ip/random/"""'], {}), "('http://127.0.0.1:8888/ip/random/')\n", (645, 681), False, 'import requests\n')]
import numpy as np from autocnet.camera.utils import crossform from cv2 import triangulatePoints def compute_epipoles(f): """ Compute the epipole and epipolar prime Parameters ---------- f : ndarray (3,3) fundamental matrix or autocnet Fundamental Matrix object Returns ------- e : ndarray (3,1) epipole e1 : ndarray (3,3) epipolar prime matrix """ u, _, _ = np.linalg.svd(f) e = u[:, -1] e1 = crossform(e) return e, e1 def idealized_camera(): """ Create an idealized camera transformation matrix Returns ------- : ndarray (3,4) with diagonal 1 """ i = np.eye(3, 4) i[:,-1] = 0 return i def camera_from_f(F): """ Estimate a camera matrix using a fundamental matrix. Parameters ---------- f : ndarray (3,3) fundamental matrix or autocnet Fundamental Matrix object Returns ------- p1 : ndarray Estimated camera matrix """ e, e1 = compute_epipoles(F) p1 = np.empty((3, 4)) p1[:, :3] = -e1.dot(F) p1[:, 3] = e return p1 def triangulate(pt, pt1, p, p1): """ Given two sets of homogeneous coordinates and two camera matrices, triangulate the 3D coordinates. The image correspondences are assumed to be implicitly ordered. References ---------- [Hartley2003]_ Parameters ---------- pt : ndarray (n, 3) array of homogeneous correspondences pt1 : ndarray (n, 3) array of homogeneous correspondences p : ndarray (3, 4) camera matrix p1 : ndarray (3, 4) camera matrix Returns ------- coords : ndarray (4, n) projection matrix """ pt = np.asarray(pt) pt1 = np.asarray(pt1) # Transpose for the openCV call if needed if pt.shape[0] != 3: pt = pt.T if pt1.shape[0] != 3: pt1 = pt1.T X = triangulatePoints(p, p1, pt[:2], pt1[:2]) X /= X[3] # Homogenize return X def projection_error(p1, p, pt, pt1): """ Based on Hartley and Zisserman p.285 this function triangulates image correspondences and computes the reprojection error by back-projecting the points into the image. This is the classic cost function (minimization problem) into the gold standard method for fundamental matrix estimation. 
Parameters ----------- p1 : ndarray (3,4) camera matrix p : ndarray (3,4) idealized camera matrix in the form np.eye(3,4) pt : dataframe or ndarray of homogeneous coordinates in the form (x_{i}, y_{i}, 1) pt1 : dataframe or ndarray of homogeneous coordinates in the form (x_{i}, y_{i}, 1) Returns ------- reproj_error : ndarray (n, 1) vector of reprojection errors """ # SciPy least squares solver needs a vector, so reshape back to a 3x4 c # camera matrix at each iteration if p1.shape != (3,4): p1 = p1.reshape(3,4) # Triangulate the correspondences xhat = triangulate(pt, pt1, p, p1) xhat1 = xhat[:3] / xhat[2] xhat2 = p1.dot(xhat) xhat2 /= xhat2[2] # Compute error cost = (pt - xhat1)**2 + (pt1 - xhat2)**2 cost = np.sqrt(np.sum(cost, axis=0)) return cost
[ "numpy.sum", "autocnet.camera.utils.crossform", "numpy.asarray", "numpy.empty", "numpy.linalg.svd", "numpy.eye", "cv2.triangulatePoints" ]
[((433, 449), 'numpy.linalg.svd', 'np.linalg.svd', (['f'], {}), '(f)\n', (446, 449), True, 'import numpy as np\n'), ((476, 488), 'autocnet.camera.utils.crossform', 'crossform', (['e'], {}), '(e)\n', (485, 488), False, 'from autocnet.camera.utils import crossform\n'), ((678, 690), 'numpy.eye', 'np.eye', (['(3)', '(4)'], {}), '(3, 4)\n', (684, 690), True, 'import numpy as np\n'), ((1051, 1067), 'numpy.empty', 'np.empty', (['(3, 4)'], {}), '((3, 4))\n', (1059, 1067), True, 'import numpy as np\n'), ((1766, 1780), 'numpy.asarray', 'np.asarray', (['pt'], {}), '(pt)\n', (1776, 1780), True, 'import numpy as np\n'), ((1791, 1806), 'numpy.asarray', 'np.asarray', (['pt1'], {}), '(pt1)\n', (1801, 1806), True, 'import numpy as np\n'), ((1952, 1993), 'cv2.triangulatePoints', 'triangulatePoints', (['p', 'p1', 'pt[:2]', 'pt1[:2]'], {}), '(p, p1, pt[:2], pt1[:2])\n', (1969, 1993), False, 'from cv2 import triangulatePoints\n'), ((3275, 3295), 'numpy.sum', 'np.sum', (['cost'], {'axis': '(0)'}), '(cost, axis=0)\n', (3281, 3295), True, 'import numpy as np\n')]
from mock import mock_open, patch import sure from keeper.core import get_bucket, use_file @patch("keeper.core.get_bucket") def test_basic_decorator(get_bucket): class FakeKey(object): def __init__(self, keyname): self.keyname = keyname def get_contents_as_string(self): return "test1\ntest2" fake_key = FakeKey('other.csv') class FakeBucket(object): vals = {'foobar.csv': fake_key} def get_key(self, keyname): return self.vals.get(keyname) fake_bucket = FakeBucket() get_bucket.return_value = fake_bucket @use_file('foobar.csv') def a_handle(keeper_file, *args, **options): lines = [] for line in keeper_file: lines.append(line) return lines a_handle.when.called_with().should.return_value(["test1", "test2"]) @patch("keeper.core.get_bucket") def test_result_csv(get_bucket): class FakeKey(object): def __init__(self, keyname): self.keyname = keyname def get_contents_as_string(self): return "test1,test2\nblue,green\n" fake_key = FakeKey('other.csv') class FakeBucket(object): vals = {'foobar.csv': fake_key} def get_key(self, keyname): return self.vals.get(keyname) fake_bucket = FakeBucket() get_bucket.return_value = fake_bucket @use_file('foobar.csv') def a_handle(keeper_file, *args, **options): lines = [] for line in keeper_file.csv: lines.append(tuple(line)) return lines a_handle.when.called_with().should.return_value([("test1", "test2"), ("blue", "green")]) @patch("keeper.core.get_bucket") def test_result_json(get_bucket): class FakeKey(object): def __init__(self, keyname): self.keyname = keyname def get_contents_as_string(self): return '{"test1": "test2",\n"blue":{\n"a":"green"}}' fake_key = FakeKey('other.json') class FakeBucket(object): vals = {'foobar.json': fake_key} def get_key(self, keyname): return self.vals.get(keyname) fake_bucket = FakeBucket() get_bucket.return_value = fake_bucket @use_file('foobar.json') def a_handle(keeper_file, *args, **options): return keeper_file.json a_handle.when.called_with().should.return_value({ 'test1': 'test2', 'blue': { 'a': 'green' 
} }) @patch("keeper.core.get_bucket") def test_key_doesnt_exist(get_bucket): get_bucket.return_value.get_key.return_value = None @use_file('foobar.csv') def a_handle(keeper_file, *args, **options): pass a_handle.when.called_with().should.throw(IOError, 'The file foobar.csv cannot be found on S3.') @patch("keeper.core.settings") @patch("keeper.core.boto.connect_s3") def test_get_bucket(connect_s3, settings): connect_s3.return_value.get_bucket.return_value = "foobar" get_bucket.when.called_with().should.return_value("foobar") @patch('keeper.core.open', mock_open(read_data='local contents\nand some more'), create=True) @patch("keeper.core.get_bucket") def test_local_result(get_bucket): get_bucket.return_value.get_key.return_value = None @use_file('foobar.csv') def a_handle(keeper_file, *args, **options): lines = [] for line in keeper_file: lines.append(line) return lines a_handle.when.called_with(local=True).should.return_value(['local contents', 'and some more'])
[ "mock.mock_open", "keeper.core.use_file", "mock.patch", "keeper.core.get_bucket.when.called_with" ]
[((94, 125), 'mock.patch', 'patch', (['"""keeper.core.get_bucket"""'], {}), "('keeper.core.get_bucket')\n", (99, 125), False, 'from mock import mock_open, patch\n'), ((858, 889), 'mock.patch', 'patch', (['"""keeper.core.get_bucket"""'], {}), "('keeper.core.get_bucket')\n", (863, 889), False, 'from mock import mock_open, patch\n'), ((1662, 1693), 'mock.patch', 'patch', (['"""keeper.core.get_bucket"""'], {}), "('keeper.core.get_bucket')\n", (1667, 1693), False, 'from mock import mock_open, patch\n'), ((2452, 2483), 'mock.patch', 'patch', (['"""keeper.core.get_bucket"""'], {}), "('keeper.core.get_bucket')\n", (2457, 2483), False, 'from mock import mock_open, patch\n'), ((2782, 2811), 'mock.patch', 'patch', (['"""keeper.core.settings"""'], {}), "('keeper.core.settings')\n", (2787, 2811), False, 'from mock import mock_open, patch\n'), ((2813, 2849), 'mock.patch', 'patch', (['"""keeper.core.boto.connect_s3"""'], {}), "('keeper.core.boto.connect_s3')\n", (2818, 2849), False, 'from mock import mock_open, patch\n'), ((3117, 3148), 'mock.patch', 'patch', (['"""keeper.core.get_bucket"""'], {}), "('keeper.core.get_bucket')\n", (3122, 3148), False, 'from mock import mock_open, patch\n'), ((606, 628), 'keeper.core.use_file', 'use_file', (['"""foobar.csv"""'], {}), "('foobar.csv')\n", (614, 628), False, 'from keeper.core import get_bucket, use_file\n'), ((1378, 1400), 'keeper.core.use_file', 'use_file', (['"""foobar.csv"""'], {}), "('foobar.csv')\n", (1386, 1400), False, 'from keeper.core import get_bucket, use_file\n'), ((2203, 2226), 'keeper.core.use_file', 'use_file', (['"""foobar.json"""'], {}), "('foobar.json')\n", (2211, 2226), False, 'from keeper.core import get_bucket, use_file\n'), ((2585, 2607), 'keeper.core.use_file', 'use_file', (['"""foobar.csv"""'], {}), "('foobar.csv')\n", (2593, 2607), False, 'from keeper.core import get_bucket, use_file\n'), ((3246, 3268), 'keeper.core.use_file', 'use_file', (['"""foobar.csv"""'], {}), "('foobar.csv')\n", (3254, 3268), False, 
'from keeper.core import get_bucket, use_file\n'), ((3049, 3104), 'mock.mock_open', 'mock_open', ([], {'read_data': '"""local contents\nand some more"""'}), '(read_data="""local contents\nand some more""")\n', (3058, 3104), False, 'from mock import mock_open, patch\n'), ((2960, 2989), 'keeper.core.get_bucket.when.called_with', 'get_bucket.when.called_with', ([], {}), '()\n', (2987, 2989), False, 'from keeper.core import get_bucket, use_file\n')]
# -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2018-04-15 11:47 from __future__ import unicode_literals import django.db.models.deletion from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('das', '0004_auto_20180415_1926'), ] operations = [ migrations.AlterField( model_name='category', name='parent', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='das.Category'), ), migrations.AlterField( model_name='comment', name='parent', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='das.Comment'), ), ]
[ "django.db.models.ForeignKey" ]
[((431, 565), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""children"""', 'to': '"""das.Category"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, related_name='children', to='das.Category')\n", (448, 565), False, 'from django.db import migrations, models\n'), ((719, 852), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""children"""', 'to': '"""das.Comment"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, related_name='children', to='das.Comment')\n", (736, 852), False, 'from django.db import migrations, models\n')]
# -*- coding: utf-8 -*- """ plugin tests ~~~~~~~~~~~~ """ # (c) 2014-2020 <NAME> <<EMAIL>> import glob import os import sys import pytest pytest_plugins = "pytester" MOCK_PIPELINE = """ #!/usr/bin/env python if __name__ == "__main__": import os import sys OUT_DIR = "output_dir" if len(sys.argv) > 1: sys.exit(1) sys.stdout.write("stdout stream") sys.stderr.write("stderr stream") with open("log.txt", "w") as log: log.write("not really\\n") if not os.path.exists(OUT_DIR): os.makedirs(OUT_DIR) with open(os.path.join(OUT_DIR, "results.txt"), "w") as result: result.write("42\\n") """ @pytest.fixture(scope="function") def mockpipe(request, testdir): """Mock pipeline script""" mp = testdir.makefile("", pipeline=MOCK_PIPELINE) return mp TEST_OK = f""" import os, shutil, unittest import pytest from pytest_pipeline import PipelineRun, mark class MyRun(PipelineRun): @mark.before_run def prep_executable(self): shutil.copy2("../pipeline", "pipeline") assert os.path.exists("pipeline") run = MyRun.make_fixture("class", "{sys.executable} pipeline") @pytest.mark.usefixtures("run") class TestMyPipeline(unittest.TestCase): def test_exit_code(self): assert self.run_fixture.exit_code == 0 """ def test_pipeline_basic(mockpipe, testdir): """Test for basic run""" test = testdir.makepyfile(TEST_OK) result = testdir.inline_run( "-v", f"--base-pipeline-dir={test.dirname}", test ) passed, skipped, failed = result.listoutcomes() assert len(passed) == 1 assert len(skipped) == 0 assert len(failed) == 0 TEST_OK_CLASS_FIXTURE = f""" import os, shutil, unittest import pytest from pytest_pipeline import PipelineRun, mark class MyRun(PipelineRun): @mark.before_run def prep_executable(self): shutil.copy2("../pipeline", "pipeline") assert os.path.exists("pipeline") run = MyRun.class_fixture("{sys.executable} pipeline") @pytest.mark.usefixtures("run") class TestMyPipelineAgain(unittest.TestCase): def test_exit_code(self): assert self.run_fixture.exit_code == 0 """ def 
test_pipeline_class_fixture(mockpipe, testdir): """Test for basic run""" test = testdir.makepyfile(TEST_OK_CLASS_FIXTURE) result = testdir.inline_run( "-v", f"--base-pipeline-dir={test.dirname}", test ) passed, skipped, failed = result.listoutcomes() assert len(passed) == 1 assert len(skipped) == 0 assert len(failed) == 0 TEST_REDIRECTION = f""" import os, shutil, unittest import pytest from pytest_pipeline import PipelineRun, mark class MyRun(PipelineRun): @mark.before_run def prep_executable(self): shutil.copy2("../pipeline", "pipeline") assert os.path.exists("pipeline") run = MyRun.make_fixture( "class", cmd="{sys.executable} pipeline", stdout="stream.out", stderr="stream.err", ) @pytest.mark.usefixtures("run") class TestMyPipeline(unittest.TestCase): def test_exit_code(self): assert self.run_fixture.exit_code == 0 """ def test_pipeline_redirection(mockpipe, testdir): test = testdir.makepyfile(TEST_REDIRECTION) result = testdir.inline_run( "-v", f"--base-pipeline-dir={test.dirname}", test ) passed, skipped, failed = result.listoutcomes() assert len(passed) == 1 assert len(skipped) == 0 assert len(failed) == 0 testdir_matches = glob.glob(os.path.join(test.dirname, "MyRun*")) assert len(testdir_matches) == 1 testdir_pipeline = testdir_matches[0] stdout = os.path.join(testdir_pipeline, "stream.out") assert os.path.exists(stdout) assert open(stdout).read() == "stdout stream" stderr = os.path.join(testdir_pipeline, "stream.err") assert os.path.exists(stderr) assert open(stderr).read() == "stderr stream" TEST_REDIRECTION_MEM = f""" import os, shutil, unittest import pytest from pytest_pipeline import PipelineRun, mark class MyRun(PipelineRun): @mark.before_run def prep_executable(self): shutil.copy2("../pipeline", "pipeline") assert os.path.exists("pipeline") run = MyRun.make_fixture( "class", cmd="{sys.executable} pipeline", stdout=True, stderr=True, ) @pytest.mark.usefixtures("run") class TestMyPipeline(unittest.TestCase): def test_exit_code(self): assert 
self.run_fixture.exit_code == 0 def test_stdout(self): assert self.run_fixture.stdout == b"stdout stream" def test_stderr(self): assert self.run_fixture.stderr == b"stderr stream" """ def test_pipeline_redirection_mem(mockpipe, testdir): test = testdir.makepyfile(TEST_REDIRECTION_MEM) result = testdir.inline_run( "-v", f"--base-pipeline-dir={test.dirname}", test ) passed, skipped, failed = result.listoutcomes() assert len(passed) == 3 assert len(skipped) == 0 assert len(failed) == 0 testdir_matches = glob.glob(os.path.join(test.dirname, "MyRun*")) assert len(testdir_matches) == 1 TEST_AS_NONCLASS_FIXTURE = f""" import os, shutil, unittest import pytest from pytest_pipeline import PipelineRun, mark class MyRun(PipelineRun): @mark.before_run def prep_executable(self): shutil.copy2("../pipeline", "pipeline") assert os.path.exists("pipeline") run = MyRun.make_fixture("module", "{sys.executable} pipeline") def test_exit_code(run): assert run.exit_code == 0 """ def test_pipeline_as_nonclass_fixture(mockpipe, testdir): """Test for PipelineTest classes without run attribute""" test = testdir.makepyfile(TEST_AS_NONCLASS_FIXTURE) result = testdir.inline_run( "-v", f"--base-pipeline-dir={test.dirname}", test ) passed, skipped, failed = result.listoutcomes() assert len(passed) == 1 assert len(skipped) == 0 assert len(failed) == 0 TEST_OK_GRANULAR = f""" import os, shutil, unittest import pytest from pytest_pipeline import PipelineRun, mark class MyRun(PipelineRun): @mark.before_run(order=2) def prep_executable(self): shutil.copy2("../pipeline", "pipeline") assert os.path.exists("pipeline") @mark.before_run(order=1) def check_init_condition(self): assert not os.path.exists("pipeline") run = MyRun.make_fixture("class", cmd="{sys.executable} pipeline") @pytest.mark.usefixtures("run") class TestMyPipeline(unittest.TestCase): def test_exit_code(self): assert self.run_fixture.exit_code == 0 def test_output_file(self): assert os.path.exists(os.path.join("output_dir", "results.txt")) 
""" def test_pipeline_granular(mockpipe, testdir): """Test for execution with 'order' specified in before_run and after_run""" test = testdir.makepyfile(TEST_OK_GRANULAR) result = testdir.inline_run( "-v", f"--base-pipeline-dir={test.dirname}", test ) passed, skipped, failed = result.listoutcomes() assert len(passed) == 2 assert len(skipped) == 0 assert len(failed) == 0 MOCK_PIPELINE_TIMEOUT = """ #!/usr/bin/env python if __name__ == "__main__": import time time.sleep(10) """ TEST_TIMEOUT = f""" import os, shutil, unittest import pytest from pytest_pipeline import PipelineRun, mark class MyRun(PipelineRun): @mark.before_run def test_and_prep_executable(self): shutil.copy2("../pipeline", "pipeline") assert os.path.exists("pipeline") run = PipelineRun.make_fixture( "class", cmd="{sys.executable} pipeline", timeout=0.01, ) @pytest.mark.usefixtures("run") class TestMyPipeline(unittest.TestCase): def test_exit_code(self): assert self.run_fixture.exit_code != 0 """ @pytest.fixture(scope="function") def mockpipe_timeout(request, testdir): """Mock pipeline script with timeout""" mp = testdir.makefile("", pipeline=MOCK_PIPELINE_TIMEOUT) return mp def test_pipeline_timeout(mockpipe_timeout, testdir): """Test for execution with timeout""" test = testdir.makepyfile(TEST_TIMEOUT) result = testdir.inline_run( "-v", f"--base-pipeline-dir={test.dirname}", test ) passed, skipped, failed = result.listoutcomes() assert len(passed) == 0 assert len(skipped) == 0 assert len(failed) == 1 MOCK_PIPELINE_FMT = """ #!/usr/bin/env python import sys if __name__ == "__main__": print(sys.argv[1]) """ TEST_FMT = f""" import os, shutil, unittest import pytest from pytest_pipeline import PipelineRun, mark class MyRun(PipelineRun): @mark.before_run def prep_executable(self): shutil.copy2("../pipeline", "pipeline") assert os.path.exists("pipeline") run = MyRun.make_fixture( "class", "{sys.executable} pipeline {{run_dir}}", stdout=True, ) @pytest.mark.usefixtures("run") class 
TestMyPipeline(unittest.TestCase): def test_exit_code(self): assert self.run_fixture.exit_code == 0 def test_stdout(self): stdout = self.run_fixture.stdout.decode("utf-8").strip() assert self.run_fixture.run_dir == stdout """ @pytest.fixture(scope="function") def mockpipe_fmt(request, testdir): """Mock pipeline script with timeout""" mp = testdir.makefile("", pipeline=MOCK_PIPELINE_FMT) return mp def test_pipeline_fmt(mockpipe_fmt, testdir): """Test for run with templated command""" test = testdir.makepyfile(TEST_FMT) result = testdir.inline_run( "-v", f"--base-pipeline-dir={test.dirname}", test ) passed, skipped, failed = result.listoutcomes() assert len(passed) == 2 assert len(skipped) == 0 assert len(failed) == 0
[ "pytest.fixture", "os.path.join", "os.path.exists" ]
[((678, 710), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (692, 710), False, 'import pytest\n'), ((7869, 7901), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (7883, 7901), False, 'import pytest\n'), ((9258, 9290), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (9272, 9290), False, 'import pytest\n'), ((3695, 3739), 'os.path.join', 'os.path.join', (['testdir_pipeline', '"""stream.out"""'], {}), "(testdir_pipeline, 'stream.out')\n", (3707, 3739), False, 'import os\n'), ((3752, 3774), 'os.path.exists', 'os.path.exists', (['stdout'], {}), '(stdout)\n', (3766, 3774), False, 'import os\n'), ((3839, 3883), 'os.path.join', 'os.path.join', (['testdir_pipeline', '"""stream.err"""'], {}), "(testdir_pipeline, 'stream.err')\n", (3851, 3883), False, 'import os\n'), ((3896, 3918), 'os.path.exists', 'os.path.exists', (['stderr'], {}), '(stderr)\n', (3910, 3918), False, 'import os\n'), ((3563, 3599), 'os.path.join', 'os.path.join', (['test.dirname', '"""MyRun*"""'], {}), "(test.dirname, 'MyRun*')\n", (3575, 3599), False, 'import os\n'), ((5097, 5133), 'os.path.join', 'os.path.join', (['test.dirname', '"""MyRun*"""'], {}), "(test.dirname, 'MyRun*')\n", (5109, 5133), False, 'import os\n')]
import aoc_utils import datetime si = aoc_utils.read() starting_nos = list(map(int,si.split(','))) x = 1 p = [-1 for x in range(30000000)] previous = 0 for sno in starting_nos: p[sno] = x previous = sno x+=1 first = True while True: if first: nextout = 0 else: nextout = x-p[previous]-1 if p[nextout] == -1: first = True else: first = False p[previous] = x-1 previous = nextout if x == 2020: print(nextout) if x == 30000000: print(nextout) break x+= 1
[ "aoc_utils.read" ]
[((39, 55), 'aoc_utils.read', 'aoc_utils.read', ([], {}), '()\n', (53, 55), False, 'import aoc_utils\n')]
import os MODULE_PATH = os.path.dirname(__file__) VERBOSE = True from ita import loader_file from ita import parser from ita import generator FileLoader = loader_file.FileLoader Loader = loader_file.FileLoader Parser = parser.Parser Generator = generator.Generator GeneratorException = generator.GeneratorException __all__ = ["VERBOSE", "MODULE_PATH", "web", "cli", "Generator", "Parser", "Loader", "FileLoader", "GeneratorException"]
[ "os.path.dirname" ]
[((24, 49), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (39, 49), False, 'import os\n')]
import paramiko import sys import os import time import re class CiscoExecBySSH: def __init__(self): self.input_file = self.get_input_file() self.login_params = {'host': '', 'user': '', 'password': '', 'port': 22} self.get_login_info() self.client = self.login() def get_login_info(self): host, user, password = self.input_file.readline().strip().split(',') self.login_params['host'] = host self.login_params['user'] = user self.login_params['password'] = password def login(self): client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) client.connect(hostname=self.login_params['host'], username=self.login_params['user'], password=self.login_params['password'], port=self.login_params['port'], look_for_keys=False, allow_agent=False) return client def exec_commands(self): buffer = 2048 delay = 1 channel = self.client.invoke_shell() self._print_channel_output(channel.recv(buffer)) channel.send(self.login_params['user'] + '\n') channel.send(self.login_params['password'] + '\n') self._print_channel_output(channel.recv(buffer)) for command in self.input_file: channel.send(command) time.sleep(delay) output = channel.recv(buffer) if re.search('\(y/n\)', output.decode(), re.IGNORECASE): channel.send('y') time.sleep(delay) self._print_channel_output(output) output = channel.recv(buffer) self._print_channel_output(output) time.sleep(delay) channel.close() self.client.close() @staticmethod def _print_channel_output(output): print(output.decode(), end='') @staticmethod def get_input_file(): try: input_file_path = sys.argv[1] if not os.path.isfile(input_file_path): print('No such file {}'.format(input_file_path)) sys.exit(1) return open(input_file_path) except IndexError: print('Please enter input file') print('python3 <scrip name>.py <input file>') sys.exit(1) CiscoExecBySSH().exec_commands()
[ "paramiko.SSHClient", "time.sleep", "os.path.isfile", "paramiko.AutoAddPolicy", "sys.exit" ]
[((579, 599), 'paramiko.SSHClient', 'paramiko.SSHClient', ([], {}), '()\n', (597, 599), False, 'import paramiko\n'), ((1722, 1739), 'time.sleep', 'time.sleep', (['delay'], {}), '(delay)\n', (1732, 1739), False, 'import time\n'), ((643, 667), 'paramiko.AutoAddPolicy', 'paramiko.AutoAddPolicy', ([], {}), '()\n', (665, 667), False, 'import paramiko\n'), ((1373, 1390), 'time.sleep', 'time.sleep', (['delay'], {}), '(delay)\n', (1383, 1390), False, 'import time\n'), ((1552, 1569), 'time.sleep', 'time.sleep', (['delay'], {}), '(delay)\n', (1562, 1569), False, 'import time\n'), ((2008, 2039), 'os.path.isfile', 'os.path.isfile', (['input_file_path'], {}), '(input_file_path)\n', (2022, 2039), False, 'import os\n'), ((2122, 2133), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2130, 2133), False, 'import sys\n'), ((2317, 2328), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2325, 2328), False, 'import sys\n')]
""" Copyright 2021 <NAME> """ from pathlib import Path from tempfile import TemporaryDirectory from textwrap import TextWrapper from time import sleep from typing import Optional import typer from moviepy.video.io.VideoFileClip import VideoFileClip from speech_recognition import Recognizer, AudioFile, UnknownValueError, RequestError def clear_console(): typer.clear() def video_to_text_converter(path_of_video: Optional[Path], retries: Optional[int], transcription_file_dir_path: Optional[Path], transcription_file_name: Optional[str], skip_bad_chunks: Optional[bool], abort_on_bad_chunk: Optional[bool], language: Optional[str]) -> None: with TemporaryDirectory( prefix="Joitek_Video_To_Text_Of_Video_", suffix=f"{path_of_video.stem.capitalize()}") as temp_folder_path: clear_console() typer.echo("Determining the total minutes that video has...") with VideoFileClip(path_of_video.__str__()) as original_videoFC: video_seconds_duration = int(float(original_videoFC.duration)) if video_seconds_duration <= 60: minutes_on_seconds_range = list(range(0, video_seconds_duration + 1, video_seconds_duration)) else: minutes_on_seconds_range = list(range(0, video_seconds_duration + 1, 60)) length_of_list = len(minutes_on_seconds_range) sleep(2) clear_console() extracted_words_container = {} text_wrapper = TextWrapper() text_wrapper.width = 80 try: for i in range(length_of_list - 1): typer.echo(f"Video will be divided in: {length_of_list - 1} chunks...") typer.echo("Original video is not affected...") typer.echo() temp_audio_start_time = (minutes_on_seconds_range[i] - 1 * (minutes_on_seconds_range[i] != 0)) temp_audio_end_time = minutes_on_seconds_range[i + 1] name_temp_audio = f"tempA-{i + 1}.wav" dic_key = f"text-{i + 1}" temp_audio_abs_path = Path(temp_folder_path, name_temp_audio).__str__() audio_sub = original_videoFC.audio.subclip(temp_audio_start_time, temp_audio_end_time) audio_sub.write_audiofile(temp_audio_abs_path, logger=None) attempt_number = 1 exit_loop = False text_to_save 
= "" while not exit_loop: typer.echo(f"Chunk {(i + 1)} of {length_of_list - 1}") typer.echo() typer.echo(f"Attempt {attempt_number} of {retries}") typer.echo("Please wait...") r = Recognizer() try: audio = AudioFile(temp_audio_abs_path) with audio as source: r.adjust_for_ambient_noise(source) audio_file = r.record(source) result = r.recognize_google(audio_file, language=language) typer.echo("Extraction success...") text_to_save = text_wrapper.fill(result) exit_loop = True except UnknownValueError: typer.echo("Can't understand this chunk...") except RequestError: typer.echo("Text extractor failure ") typer.echo("or there is no active internet connection...") typer.echo() if exit_loop: typer.echo("Saving results...") clear_console() else: if abort_on_bad_chunk: typer.echo("Bad chunk found.\n Aborting conversion...") extracted_words_container[dic_key] = text_to_save exit_loop = True else: if skip_bad_chunks: typer.echo("Bad fragment...") typer.echo("Ignoring...") actual_seconds_range = f"[{temp_audio_start_time}-{temp_audio_end_time}]" text_to_save = f"Bad fragment {actual_seconds_range}" extracted_words_container[dic_key] = text_to_save exit_loop = True else: if attempt_number <= retries: attempt_number += 1 else: typer.echo("Retry limit reached...") actual_seconds_range = f"[{temp_audio_start_time}-{temp_audio_end_time}]" text_to_save = f"Retry limit reached for chunk on {actual_seconds_range}" extracted_words_container[dic_key] = text_to_save exit_loop = True if abort_on_bad_chunk: break else: continue typer.echo("Retrieving previously saved results...") list_with_all_text = [] for k, v in extracted_words_container.items(): list_with_all_text.append(v) separator = "\n\n========\n\n" text = separator.join(list_with_all_text) clear_console() typer.echo("Saving...") file = Path(transcription_file_dir_path, transcription_file_name) file.write_text(text) typer.echo() typer.echo("Text extraction complete!") typer.echo() input("Press ENTER to exit...") except Exception as 
ex: typer.echo('\n') typer.echo("A very serious error occurred\n" "Can't continue...") typer.echo(ex) typer.echo(ex.__cause__)
[ "typer.echo", "time.sleep", "pathlib.Path", "textwrap.TextWrapper", "speech_recognition.AudioFile", "typer.clear", "speech_recognition.Recognizer" ]
[((363, 376), 'typer.clear', 'typer.clear', ([], {}), '()\n', (374, 376), False, 'import typer\n'), ((927, 988), 'typer.echo', 'typer.echo', (['"""Determining the total minutes that video has..."""'], {}), "('Determining the total minutes that video has...')\n", (937, 988), False, 'import typer\n'), ((1471, 1479), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (1476, 1479), False, 'from time import sleep\n'), ((1579, 1592), 'textwrap.TextWrapper', 'TextWrapper', ([], {}), '()\n', (1590, 1592), False, 'from textwrap import TextWrapper\n'), ((5610, 5662), 'typer.echo', 'typer.echo', (['"""Retrieving previously saved results..."""'], {}), "('Retrieving previously saved results...')\n", (5620, 5662), False, 'import typer\n'), ((5971, 5994), 'typer.echo', 'typer.echo', (['"""Saving..."""'], {}), "('Saving...')\n", (5981, 5994), False, 'import typer\n'), ((6018, 6076), 'pathlib.Path', 'Path', (['transcription_file_dir_path', 'transcription_file_name'], {}), '(transcription_file_dir_path, transcription_file_name)\n', (6022, 6076), False, 'from pathlib import Path\n'), ((6131, 6143), 'typer.echo', 'typer.echo', ([], {}), '()\n', (6141, 6143), False, 'import typer\n'), ((6161, 6200), 'typer.echo', 'typer.echo', (['"""Text extraction complete!"""'], {}), "('Text extraction complete!')\n", (6171, 6200), False, 'import typer\n'), ((6217, 6229), 'typer.echo', 'typer.echo', ([], {}), '()\n', (6227, 6229), False, 'import typer\n'), ((1719, 1790), 'typer.echo', 'typer.echo', (['f"""Video will be divided in: {length_of_list - 1} chunks..."""'], {}), "(f'Video will be divided in: {length_of_list - 1} chunks...')\n", (1729, 1790), False, 'import typer\n'), ((1811, 1858), 'typer.echo', 'typer.echo', (['"""Original video is not affected..."""'], {}), "('Original video is not affected...')\n", (1821, 1858), False, 'import typer\n'), ((1879, 1891), 'typer.echo', 'typer.echo', ([], {}), '()\n', (1889, 1891), False, 'import typer\n'), ((6331, 6347), 'typer.echo', 'typer.echo', 
(['"""\n"""'], {}), "('\\n')\n", (6341, 6347), False, 'import typer\n'), ((6364, 6429), 'typer.echo', 'typer.echo', (['"""A very serious error occurred\nCan\'t continue..."""'], {}), '("""A very serious error occurred\nCan\'t continue...""")\n', (6374, 6429), False, 'import typer\n'), ((6473, 6487), 'typer.echo', 'typer.echo', (['ex'], {}), '(ex)\n', (6483, 6487), False, 'import typer\n'), ((6504, 6528), 'typer.echo', 'typer.echo', (['ex.__cause__'], {}), '(ex.__cause__)\n', (6514, 6528), False, 'import typer\n'), ((2648, 2700), 'typer.echo', 'typer.echo', (['f"""Chunk {i + 1} of {length_of_list - 1}"""'], {}), "(f'Chunk {i + 1} of {length_of_list - 1}')\n", (2658, 2700), False, 'import typer\n'), ((2727, 2739), 'typer.echo', 'typer.echo', ([], {}), '()\n', (2737, 2739), False, 'import typer\n'), ((2764, 2816), 'typer.echo', 'typer.echo', (['f"""Attempt {attempt_number} of {retries}"""'], {}), "(f'Attempt {attempt_number} of {retries}')\n", (2774, 2816), False, 'import typer\n'), ((2841, 2869), 'typer.echo', 'typer.echo', (['"""Please wait..."""'], {}), "('Please wait...')\n", (2851, 2869), False, 'import typer\n'), ((2898, 2910), 'speech_recognition.Recognizer', 'Recognizer', ([], {}), '()\n', (2908, 2910), False, 'from speech_recognition import Recognizer, AudioFile, UnknownValueError, RequestError\n'), ((3796, 3808), 'typer.echo', 'typer.echo', ([], {}), '()\n', (3806, 3808), False, 'import typer\n'), ((2229, 2268), 'pathlib.Path', 'Path', (['temp_folder_path', 'name_temp_audio'], {}), '(temp_folder_path, name_temp_audio)\n', (2233, 2268), False, 'from pathlib import Path\n'), ((2976, 3006), 'speech_recognition.AudioFile', 'AudioFile', (['temp_audio_abs_path'], {}), '(temp_audio_abs_path)\n', (2985, 3006), False, 'from speech_recognition import Recognizer, AudioFile, UnknownValueError, RequestError\n'), ((3301, 3336), 'typer.echo', 'typer.echo', (['"""Extraction success..."""'], {}), "('Extraction success...')\n", (3311, 3336), False, 'import typer\n'), ((3875, 
3906), 'typer.echo', 'typer.echo', (['"""Saving results..."""'], {}), "('Saving results...')\n", (3885, 3906), False, 'import typer\n'), ((3529, 3573), 'typer.echo', 'typer.echo', (['"""Can\'t understand this chunk..."""'], {}), '("Can\'t understand this chunk...")\n', (3539, 3573), False, 'import typer\n'), ((3647, 3684), 'typer.echo', 'typer.echo', (['"""Text extractor failure """'], {}), "('Text extractor failure ')\n", (3657, 3684), False, 'import typer\n'), ((3713, 3771), 'typer.echo', 'typer.echo', (['"""or there is no active internet connection..."""'], {}), "('or there is no active internet connection...')\n", (3723, 3771), False, 'import typer\n'), ((4064, 4122), 'typer.echo', 'typer.echo', (['"""Bad chunk found.\n Aborting conversion..."""'], {}), '("""Bad chunk found.\n Aborting conversion...""")\n', (4074, 4122), False, 'import typer\n'), ((4373, 4402), 'typer.echo', 'typer.echo', (['"""Bad fragment..."""'], {}), "('Bad fragment...')\n", (4383, 4402), False, 'import typer\n'), ((4439, 4464), 'typer.echo', 'typer.echo', (['"""Ignoring..."""'], {}), "('Ignoring...')\n", (4449, 4464), False, 'import typer\n'), ((5050, 5086), 'typer.echo', 'typer.echo', (['"""Retry limit reached..."""'], {}), "('Retry limit reached...')\n", (5060, 5086), False, 'import typer\n')]
from django.conf import settings from django.db import models from django.contrib.auth.models import User from django.db.models.signals import post_save from django.dispatch import receiver class UserProfile(models.Model): user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) name = models.CharField(max_length=45, blank=True) photo = models.ImageField(default="man.png", upload_to='admin/', null=True, blank=True) gender_select = ( ('male', 'Male'), ('female', 'Female') ) gender = models.CharField(choices=gender_select, max_length=6, blank=True) employee_select = ( ('admin', 'Admin'), ('professor', 'Professor'), ('teacher', 'Teacher'), ('register', 'Register'), ('student', 'Student'), ) employee_type = models.CharField(choices=employee_select, max_length=15, blank=True) def __str__(self): return str(self.user) @receiver(post_save, sender=User) def update_user_profile(sender, instance, created, **kwargs): if created: UserProfile.objects.create(user=instance) instance.userprofile.save()
[ "django.db.models.CharField", "django.db.models.OneToOneField", "django.dispatch.receiver", "django.db.models.ImageField" ]
[((244, 316), 'django.db.models.OneToOneField', 'models.OneToOneField', (['settings.AUTH_USER_MODEL'], {'on_delete': 'models.CASCADE'}), '(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)\n', (264, 316), False, 'from django.db import models\n'), ((329, 372), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(45)', 'blank': '(True)'}), '(max_length=45, blank=True)\n', (345, 372), False, 'from django.db import models\n'), ((386, 465), 'django.db.models.ImageField', 'models.ImageField', ([], {'default': '"""man.png"""', 'upload_to': '"""admin/"""', 'null': '(True)', 'blank': '(True)'}), "(default='man.png', upload_to='admin/', null=True, blank=True)\n", (403, 465), False, 'from django.db import models\n'), ((567, 632), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'gender_select', 'max_length': '(6)', 'blank': '(True)'}), '(choices=gender_select, max_length=6, blank=True)\n', (583, 632), False, 'from django.db import models\n'), ((853, 921), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'employee_select', 'max_length': '(15)', 'blank': '(True)'}), '(choices=employee_select, max_length=15, blank=True)\n', (869, 921), False, 'from django.db import models\n'), ((987, 1019), 'django.dispatch.receiver', 'receiver', (['post_save'], {'sender': 'User'}), '(post_save, sender=User)\n', (995, 1019), False, 'from django.dispatch import receiver\n')]
import matplotlib.pyplot as plt with open('benchmark_local.txt', 'rt') as fd: lines = fd.readlines() data = {} for line in lines: line = line.strip() if not line.endswith('ns/op'): continue split = line.split() benchmark_name, record_len = split[0].split('-')[:2] benchmark_name = benchmark_name.split('/')[1] ns_per_op = int(split[2]) if benchmark_name not in data: data[benchmark_name] = { 'record_len': [], 'ns_per_op': [], } data[benchmark_name]['record_len'].append(record_len) data[benchmark_name]['ns_per_op'].append(ns_per_op) benchmarks = sorted(list(data.keys())) print(benchmarks) for benchmark in benchmarks: value = data[benchmark] record_len = [l[:-3] + 'k' for l in value['record_len']] ns_per_op = value['ns_per_op'] plt.plot(record_len, ns_per_op, '-o', label=benchmark) plt.grid(True) plt.xlabel('records number') plt.ylabel('ns/op') plt.yscale('log') plt.legend() plt.title('In-memory TopN Benchmark') plt.show()
[ "matplotlib.pyplot.title", "matplotlib.pyplot.yscale", "matplotlib.pyplot.show", "matplotlib.pyplot.plot", "matplotlib.pyplot.legend", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.grid" ]
[((897, 911), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (905, 911), True, 'import matplotlib.pyplot as plt\n'), ((912, 940), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""records number"""'], {}), "('records number')\n", (922, 940), True, 'import matplotlib.pyplot as plt\n'), ((941, 960), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""ns/op"""'], {}), "('ns/op')\n", (951, 960), True, 'import matplotlib.pyplot as plt\n'), ((961, 978), 'matplotlib.pyplot.yscale', 'plt.yscale', (['"""log"""'], {}), "('log')\n", (971, 978), True, 'import matplotlib.pyplot as plt\n'), ((979, 991), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (989, 991), True, 'import matplotlib.pyplot as plt\n'), ((992, 1029), 'matplotlib.pyplot.title', 'plt.title', (['"""In-memory TopN Benchmark"""'], {}), "('In-memory TopN Benchmark')\n", (1001, 1029), True, 'import matplotlib.pyplot as plt\n'), ((1030, 1040), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1038, 1040), True, 'import matplotlib.pyplot as plt\n'), ((841, 895), 'matplotlib.pyplot.plot', 'plt.plot', (['record_len', 'ns_per_op', '"""-o"""'], {'label': 'benchmark'}), "(record_len, ns_per_op, '-o', label=benchmark)\n", (849, 895), True, 'import matplotlib.pyplot as plt\n')]
""" OpenVINO DL Workbench Class for exporting the whole project Copyright (c) 2021 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import json import os import tarfile import tempfile from contextlib import closing import yaml from sqlalchemy import desc from sqlalchemy.orm import Session from openvino.tools.pot.version import get_version as get_pot_version import openvino.tools.accuracy_checker.__init__ as accuracy_checker_info from wb.extensions_factories.database import get_db_session_for_celery from wb.main.accuracy_utils.accuracy_utils import construct_accuracy_tool_config from wb.main.calibration_abstractions.utils import construct_calibration_tool_config from wb.main.enumerates import JobTypesEnum, ModelSourceEnum, StatusEnum, AccuracyReportTypeEnum from wb.main.jobs.interfaces.ijob import IJob from wb.main.jobs.interfaces.job_observers import ExportProjectDBObserver from wb.main.models import (ExportProjectJobModel, JobsModel, SingleInferenceInfoModel, DownloadableArtifactsModel, ProjectsModel, WBInfoModel, AccuracyReportModel) from wb.main.shared.enumerates import TaskEnum class ExportProjectJob(IJob): job_type = JobTypesEnum.export_project_type _job_model_class = ExportProjectJobModel extension = '.tar.gz' calibration_config_name = 'calibration_config.json' accuracy_config_name = 'accuracy_config.yml' def __init__(self, job_id: int, **unused_kwargs): super().__init__(job_id=job_id) export_project_db_observer = ExportProjectDBObserver(job_id=self._job_id) 
self._job_state_subject.attach(export_project_db_observer) self._attach_default_db_and_socket_observers() def run(self): self._job_state_subject.update_state(status=StatusEnum.running, progress=0) session = get_db_session_for_celery() with closing(session): export_project_model: ExportProjectJobModel = self.get_job_model(session) include_model = export_project_model.include_model include_dataset = export_project_model.include_dataset include_accuracy_config = export_project_model.include_accuracy_config include_calibration_config = export_project_model.include_calibration_config components_paths = dict() project = export_project_model.project if include_model: components_paths['model'] = project.topology.path self._job_state_subject.update_state(status=StatusEnum.running, progress=10) with tempfile.TemporaryDirectory() as temp_directory: components_paths['description'] = self._generate_description(session, project, temp_directory) if include_dataset: components_paths['dataset'] = self._pack_dataset(project.dataset.path, temp_directory, project.dataset.name) self._job_state_subject.update_state(status=StatusEnum.running, progress=20) configs_folder = os.path.join(temp_directory, 'configs') os.mkdir(configs_folder) components_paths['configs'] = configs_folder if include_accuracy_config: accuracy_config_path = os.path.join(configs_folder, self.accuracy_config_name) self._generate_accuracy_config(accuracy_config_path, export_project_model) self._job_state_subject.update_state(status=StatusEnum.running, progress=30) if include_calibration_config: calibration_config_path = os.path.join(configs_folder, self.calibration_config_name) self._generate_calibration_config(calibration_config_path, project) self._job_state_subject.update_state(status=StatusEnum.running, progress=40) artifact = export_project_model.shared_artifact archive_path = artifact.build_full_artifact_path() self._pack_project(archive_path, components_paths) is_int8 = '_INT8' if 
project.topology.analysis_job.is_int8 else '' package_name = project.topology.name + is_int8 + '_' + project.dataset.name artifact.name = package_name artifact.update(archive_path) artifact.write_record(session) self._job_state_subject.update_state(status=StatusEnum.ready, progress=100) self._job_state_subject.detach_all_observers() @staticmethod def _generate_description(session: Session, project: ProjectsModel, directory: str): best_inference_job = session.query(SingleInferenceInfoModel).filter( JobsModel.job_type == JobTypesEnum.single_inference_type, JobsModel.project_id == project.id, JobsModel.progress == 100).order_by(desc(SingleInferenceInfoModel.throughput)).first() accuracy_report: AccuracyReportModel = ( session .query(AccuracyReportModel) .filter_by(project_id=project.id, report_type=AccuracyReportTypeEnum.dataset_annotations) .order_by(AccuracyReportModel.accuracy_result.desc()) .first() ) workbench_info = session.query(WBInfoModel).first() description = { 'Model': project.topology.name, 'Dataset': project.dataset.name, 'Device': ' | '.join((project.device.device_name, project.device.product_name)), 'Target': ' | '.join((project.target.target_type.value, project.target.host, project.target.name)), 'Optimized with INT8 Calibration': 'Yes' if project.topology.analysis_job.is_int8 else 'No', } if best_inference_job: description['Corresponding latency'] = best_inference_job.latency description['Best result FPS'] = best_inference_job.throughput description['Best result batch configuration'] = best_inference_job.batch description['Best result stream configuration'] = best_inference_job.nireq if accuracy_report: description['Accuracy'] = accuracy_report.accuracy_result description['DL Workbench version'] = workbench_info.get_version_from_file() description['Accuracy Checker version'] = accuracy_checker_info.__version__ description['Post-training Optimisation Tool version'] = get_pot_version() description_path = os.path.join(directory, 'Description.txt') with 
open(description_path, 'w') as description_file: for parameter in description: new_line = ': '.join((parameter, str(description[parameter]))) description_file.write(new_line + '\n') return description_path @staticmethod def _generate_accuracy_config(accuracy_config_path: str, export_project_model: ExportProjectJobModel): accuracy_config_dict = None project = export_project_model.project topology = export_project_model.project.topology if project.accuracy: accuracy_config = project.accuracy.raw_configuration accuracy_config_dict = json.loads(accuracy_config) elif topology.source == ModelSourceEnum.omz or topology.meta.task_type != TaskEnum.generic: accuracy_config = construct_accuracy_tool_config(topology, project.dataset, project.device) accuracy_config_dict = accuracy_config.to_dict() if accuracy_config_dict: with open(accuracy_config_path, 'w') as outfile: yaml.dump(accuracy_config_dict, outfile, sort_keys=False) @staticmethod def _pack_dataset(dataset_path: str, temp_directory: str, dataset_name: str) -> str: packed_dataset_folder = os.path.join(temp_directory, 'dataset') os.mkdir(packed_dataset_folder) dataset_full_name = f'{dataset_name}{ExportProjectJob.extension}' packed_dataset_path = os.path.join(packed_dataset_folder, dataset_full_name) with tarfile.open(packed_dataset_path, 'w:gz') as tar: for file in os.listdir(dataset_path): tar.add(os.path.join(dataset_path, file), arcname=file) return packed_dataset_folder @staticmethod def _generate_calibration_config(calibration_config_path: str, project: ProjectsModel): if project.topology.int8_job.calibration_config: calibration_config = json.loads(project.topology.int8_job.calibration_config) else: calibration_config = construct_calibration_tool_config(project.topology, project.topology.int8_job).json() with open(calibration_config_path, 'w') as out_file: json.dump(calibration_config, out_file, indent=3) def _pack_project(self, output_filename: str, components_paths: dict): with tarfile.open(output_filename, 
'w:gz') as tar: progress_step = int(50 / len(components_paths)) progress = 40 for component in components_paths: progress += progress_step if component == 'description': tar.add(components_paths[component], arcname=component + os.path.splitext(components_paths[component])[1]) continue if component == 'model': for file in os.listdir(components_paths[component]): if os.path.splitext(file)[1] in ('.xml', '.bin'): tar.add(os.path.join(components_paths[component], file), arcname=(os.path.join(component, file))) continue for file in os.listdir(components_paths[component]): tar.add(os.path.join(components_paths[component], file), arcname=(os.path.join(component, file))) self._job_state_subject.update_state(status=StatusEnum.running, progress=progress) def on_failure(self, exception: Exception): with closing(get_db_session_for_celery()) as session: export_project_model: ExportProjectJobModel = self.get_job_model(session) file_path = export_project_model.shared_artifact.build_full_artifact_path() if file_path and os.path.isfile(file_path): os.remove(file_path) super().on_failure(exception)
[ "os.mkdir", "os.remove", "wb.main.jobs.interfaces.job_observers.ExportProjectDBObserver", "yaml.dump", "os.path.isfile", "os.path.join", "wb.extensions_factories.database.get_db_session_for_celery", "tempfile.TemporaryDirectory", "json.loads", "tarfile.open", "contextlib.closing", "wb.main.mod...
[((2029, 2073), 'wb.main.jobs.interfaces.job_observers.ExportProjectDBObserver', 'ExportProjectDBObserver', ([], {'job_id': 'self._job_id'}), '(job_id=self._job_id)\n', (2052, 2073), False, 'from wb.main.jobs.interfaces.job_observers import ExportProjectDBObserver\n'), ((2318, 2345), 'wb.extensions_factories.database.get_db_session_for_celery', 'get_db_session_for_celery', ([], {}), '()\n', (2343, 2345), False, 'from wb.extensions_factories.database import get_db_session_for_celery\n'), ((7025, 7042), 'openvino.tools.pot.version.get_version', 'get_pot_version', ([], {}), '()\n', (7040, 7042), True, 'from openvino.tools.pot.version import get_version as get_pot_version\n'), ((7071, 7113), 'os.path.join', 'os.path.join', (['directory', '"""Description.txt"""'], {}), "(directory, 'Description.txt')\n", (7083, 7113), False, 'import os\n'), ((8385, 8424), 'os.path.join', 'os.path.join', (['temp_directory', '"""dataset"""'], {}), "(temp_directory, 'dataset')\n", (8397, 8424), False, 'import os\n'), ((8433, 8464), 'os.mkdir', 'os.mkdir', (['packed_dataset_folder'], {}), '(packed_dataset_folder)\n', (8441, 8464), False, 'import os\n'), ((8569, 8623), 'os.path.join', 'os.path.join', (['packed_dataset_folder', 'dataset_full_name'], {}), '(packed_dataset_folder, dataset_full_name)\n', (8581, 8623), False, 'import os\n'), ((2360, 2376), 'contextlib.closing', 'closing', (['session'], {}), '(session)\n', (2367, 2376), False, 'from contextlib import closing\n'), ((7783, 7810), 'json.loads', 'json.loads', (['accuracy_config'], {}), '(accuracy_config)\n', (7793, 7810), False, 'import json\n'), ((8638, 8679), 'tarfile.open', 'tarfile.open', (['packed_dataset_path', '"""w:gz"""'], {}), "(packed_dataset_path, 'w:gz')\n", (8650, 8679), False, 'import tarfile\n'), ((8712, 8736), 'os.listdir', 'os.listdir', (['dataset_path'], {}), '(dataset_path)\n', (8722, 8736), False, 'import os\n'), ((9048, 9104), 'json.loads', 'json.loads', (['project.topology.int8_job.calibration_config'], {}), 
'(project.topology.int8_job.calibration_config)\n', (9058, 9104), False, 'import json\n'), ((9311, 9360), 'json.dump', 'json.dump', (['calibration_config', 'out_file'], {'indent': '(3)'}), '(calibration_config, out_file, indent=3)\n', (9320, 9360), False, 'import json\n'), ((9450, 9487), 'tarfile.open', 'tarfile.open', (['output_filename', '"""w:gz"""'], {}), "(output_filename, 'w:gz')\n", (9462, 9487), False, 'import tarfile\n'), ((10884, 10909), 'os.path.isfile', 'os.path.isfile', (['file_path'], {}), '(file_path)\n', (10898, 10909), False, 'import os\n'), ((10923, 10943), 'os.remove', 'os.remove', (['file_path'], {}), '(file_path)\n', (10932, 10943), False, 'import os\n'), ((3064, 3093), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (3091, 3093), False, 'import tempfile\n'), ((3659, 3698), 'os.path.join', 'os.path.join', (['temp_directory', '"""configs"""'], {}), "(temp_directory, 'configs')\n", (3671, 3698), False, 'import os\n'), ((3715, 3739), 'os.mkdir', 'os.mkdir', (['configs_folder'], {}), '(configs_folder)\n', (3723, 3739), False, 'import os\n'), ((7941, 8014), 'wb.main.accuracy_utils.accuracy_utils.construct_accuracy_tool_config', 'construct_accuracy_tool_config', (['topology', 'project.dataset', 'project.device'], {}), '(topology, project.dataset, project.device)\n', (7971, 8014), False, 'from wb.main.accuracy_utils.accuracy_utils import construct_accuracy_tool_config\n'), ((8187, 8244), 'yaml.dump', 'yaml.dump', (['accuracy_config_dict', 'outfile'], {'sort_keys': '(False)'}), '(accuracy_config_dict, outfile, sort_keys=False)\n', (8196, 8244), False, 'import yaml\n'), ((10312, 10351), 'os.listdir', 'os.listdir', (['components_paths[component]'], {}), '(components_paths[component])\n', (10322, 10351), False, 'import os\n'), ((10644, 10671), 'wb.extensions_factories.database.get_db_session_for_celery', 'get_db_session_for_celery', ([], {}), '()\n', (10669, 10671), False, 'from wb.extensions_factories.database import 
get_db_session_for_celery\n'), ((3889, 3944), 'os.path.join', 'os.path.join', (['configs_folder', 'self.accuracy_config_name'], {}), '(configs_folder, self.accuracy_config_name)\n', (3901, 3944), False, 'import os\n'), ((4230, 4288), 'os.path.join', 'os.path.join', (['configs_folder', 'self.calibration_config_name'], {}), '(configs_folder, self.calibration_config_name)\n', (4242, 4288), False, 'import os\n'), ((5462, 5503), 'sqlalchemy.desc', 'desc', (['SingleInferenceInfoModel.throughput'], {}), '(SingleInferenceInfoModel.throughput)\n', (5466, 5503), False, 'from sqlalchemy import desc\n'), ((5759, 5801), 'wb.main.models.AccuracyReportModel.accuracy_result.desc', 'AccuracyReportModel.accuracy_result.desc', ([], {}), '()\n', (5799, 5801), False, 'from wb.main.models import ExportProjectJobModel, JobsModel, SingleInferenceInfoModel, DownloadableArtifactsModel, ProjectsModel, WBInfoModel, AccuracyReportModel\n'), ((8762, 8794), 'os.path.join', 'os.path.join', (['dataset_path', 'file'], {}), '(dataset_path, file)\n', (8774, 8794), False, 'import os\n'), ((9152, 9230), 'wb.main.calibration_abstractions.utils.construct_calibration_tool_config', 'construct_calibration_tool_config', (['project.topology', 'project.topology.int8_job'], {}), '(project.topology, project.topology.int8_job)\n', (9185, 9230), False, 'from wb.main.calibration_abstractions.utils import construct_calibration_tool_config\n'), ((9977, 10016), 'os.listdir', 'os.listdir', (['components_paths[component]'], {}), '(components_paths[component])\n', (9987, 10016), False, 'import os\n'), ((10381, 10428), 'os.path.join', 'os.path.join', (['components_paths[component]', 'file'], {}), '(components_paths[component], file)\n', (10393, 10428), False, 'import os\n'), ((10439, 10468), 'os.path.join', 'os.path.join', (['component', 'file'], {}), '(component, file)\n', (10451, 10468), False, 'import os\n'), ((10045, 10067), 'os.path.splitext', 'os.path.splitext', (['file'], {}), '(file)\n', (10061, 10067), False, 
'import os\n'), ((10128, 10175), 'os.path.join', 'os.path.join', (['components_paths[component]', 'file'], {}), '(components_paths[component], file)\n', (10140, 10175), False, 'import os\n'), ((9824, 9869), 'os.path.splitext', 'os.path.splitext', (['components_paths[component]'], {}), '(components_paths[component])\n', (9840, 9869), False, 'import os\n'), ((10222, 10251), 'os.path.join', 'os.path.join', (['component', 'file'], {}), '(component, file)\n', (10234, 10251), False, 'import os\n')]
from dataclasses import dataclass from typing import List, Union, Optional from pyaniml.utility.utils import SchemaBase, elements, attribute from pyaniml.core.series import SeriesSet from pyaniml.core.parameter import Category @dataclass class Result(SchemaBase): """Container for experiment results""" results: List[object] = elements( choices=( {"name": "SeriesSet", "type": SeriesSet}, {"name": "Category", "type": Category}, ), default=list, ) def add_result(self, result: Union[SeriesSet, Category]) -> None: """Adds a measurement result to the the container. Must be of any low-level AnIML type. Args: result (Union[SeriesSet, Category]): The quantitive measurement results. """ self.results.append(result)
[ "pyaniml.utility.utils.elements" ]
[((338, 456), 'pyaniml.utility.utils.elements', 'elements', ([], {'choices': "({'name': 'SeriesSet', 'type': SeriesSet}, {'name': 'Category', 'type':\n Category})", 'default': 'list'}), "(choices=({'name': 'SeriesSet', 'type': SeriesSet}, {'name':\n 'Category', 'type': Category}), default=list)\n", (346, 456), False, 'from pyaniml.utility.utils import SchemaBase, elements, attribute\n')]
import importlib.util import os.path import pytest import shutil import unittest from cookiecutter.main import cookiecutter def load_module_from_path(module_name, path): module_spec = importlib.util.spec_from_file_location(module_name, path) module = importlib.util.module_from_spec(module_spec) module_spec.loader.exec_module(module) return module class TestGenerator(unittest.TestCase): cookiecutter_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) def setUp(self): # Generate test project with Cookiecutter cookiecutter( self.cookiecutter_path, no_input=True, extra_context={ 'title': 'Test project', 'mailjet_apikey_public': 'public_key', 'mailjet_apikey_private': 'private_key', 'mailjet_contactslist_id': 'contactslist_id' } ) self.test_project_path = os.path.join(self.cookiecutter_path, 'test_project') def tearDown(self): # Remove test project files shutil.rmtree(self.test_project_path, ignore_errors=True) def test_project_generated(self): # Project directory self.assertTrue(os.path.exists(self.test_project_path)) # Flask app directory flask_app_path = os.path.join(self.test_project_path, 'test_project') self.assertTrue(os.path.exists(flask_app_path)) def test_config_values_copied(self): config = load_module_from_path('test_project.config', os.path.join(self.test_project_path, 'test_project', 'config.py')) self.assertEqual(config.Config.MJ_APIKEY_PUBLIC, 'public_key') self.assertEqual(config.Config.MJ_APIKEY_PRIVATE, 'private_key') self.assertEqual(config.Config.MJ_CONTACTSLIST_ID, 'contactslist_id') def test_app(self): # Run the tests generated in test project return_code = pytest.main([os.path.join(self.test_project_path, 'test_project')]) self.assertEqual(return_code, 0)
[ "shutil.rmtree", "cookiecutter.main.cookiecutter" ]
[((575, 805), 'cookiecutter.main.cookiecutter', 'cookiecutter', (['self.cookiecutter_path'], {'no_input': '(True)', 'extra_context': "{'title': 'Test project', 'mailjet_apikey_public': 'public_key',\n 'mailjet_apikey_private': 'private_key', 'mailjet_contactslist_id':\n 'contactslist_id'}"}), "(self.cookiecutter_path, no_input=True, extra_context={'title':\n 'Test project', 'mailjet_apikey_public': 'public_key',\n 'mailjet_apikey_private': 'private_key', 'mailjet_contactslist_id':\n 'contactslist_id'})\n", (587, 805), False, 'from cookiecutter.main import cookiecutter\n'), ((1073, 1130), 'shutil.rmtree', 'shutil.rmtree', (['self.test_project_path'], {'ignore_errors': '(True)'}), '(self.test_project_path, ignore_errors=True)\n', (1086, 1130), False, 'import shutil\n')]
import os from gcloud import datastore def selectAll(client, kind, order=None): query = client.query(kind=kind) if order: query.order = order resultlist = list(query.fetch()) return resultlist def deleteAll(client, kind): all = selectAll(client, kind) keylist = list(map(lambda x: x.key, all)) client.delete_multi(keylist) def fetchDataDefs(): import gspread from oauth2client.service_account import ServiceAccountCredentials scope = ['https://spreadsheets.google.com/feeds'] credentials = ServiceAccountCredentials.from_json_keyfile_name('socialmapkorea-credentials.json', scope) gc = gspread.authorize(credentials) wks = gc.open("socialmapkorea_data").sheet1 code_list = list(filter(lambda x: len(x)>0, wks.col_values(1))) baseyear_list = list(filter(lambda x: len(x)>0, wks.col_values(2))) name_list = list(filter(lambda x: len(x)>0, wks.col_values(3))) data_def_list = list(map(lambda x,y,z: [x,y,z], code_list, baseyear_list, name_list)) return data_def_list # ------------------------------------------------- def importDataDefs(): client = datastore.Client(os.environ['GCLOUD_PROJECT']) deleteAll(client, 'Data_Def') data_def_list = fetchDataDefs() with client.transaction(): incomplete_keys = client.key('Data_Def') for item in data_def_list: if item[0] != u'data_code': datadefEntity = datastore.Entity(key=incomplete_keys) datadefEntity.update({ 'code': item[0], 'baseyear': item[1], 'name': item[2]}) client.put(datadefEntity) def getAllDataDefs(): client = datastore.Client(os.environ['GCLOUD_PROJECT']) all = selectAll(client, kind='Data_Def', order='code') allc = list(map(lambda x: {'code': x[u'code'].encode("utf-8"), 'baseyear': x[u'baseyear'].encode("utf-8"), 'name': x[u'name'].encode("utf-8")}, all)) return allc
[ "oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name", "gspread.authorize", "gcloud.datastore.Entity", "gcloud.datastore.Client" ]
[((556, 651), 'oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name', 'ServiceAccountCredentials.from_json_keyfile_name', (['"""socialmapkorea-credentials.json"""', 'scope'], {}), "(\n 'socialmapkorea-credentials.json', scope)\n", (604, 651), False, 'from oauth2client.service_account import ServiceAccountCredentials\n'), ((656, 686), 'gspread.authorize', 'gspread.authorize', (['credentials'], {}), '(credentials)\n', (673, 686), False, 'import gspread\n'), ((1153, 1199), 'gcloud.datastore.Client', 'datastore.Client', (["os.environ['GCLOUD_PROJECT']"], {}), "(os.environ['GCLOUD_PROJECT'])\n", (1169, 1199), False, 'from gcloud import datastore\n'), ((1792, 1838), 'gcloud.datastore.Client', 'datastore.Client', (["os.environ['GCLOUD_PROJECT']"], {}), "(os.environ['GCLOUD_PROJECT'])\n", (1808, 1838), False, 'from gcloud import datastore\n'), ((1467, 1504), 'gcloud.datastore.Entity', 'datastore.Entity', ([], {'key': 'incomplete_keys'}), '(key=incomplete_keys)\n', (1483, 1504), False, 'from gcloud import datastore\n')]
from __future__ import absolute_import from __future__ import division from __future__ import print_function from pysc2.maps import lib from smac.env.starcraft2.maps import smac_maps map_param_registry = { "1o_10b_vs_1r": { "n_agents": 11, "n_enemies": 1, "limit": 50, "a_race": "Z", "b_race": "Z", "unit_type_bits": 2, "map_type": "overload_bane" }, "1o_2r_vs_4r": { "n_agents": 3, "n_enemies": 4, "limit": 50, "a_race": "Z", "b_race": "Z", "unit_type_bits": 2, "map_type": "overload_roach" }, "bane_vs_hM": { "n_agents": 3, "n_enemies": 2, "limit": 30, "a_race": "Z", "b_race": "T", "unit_type_bits": 2, "map_type": "bZ_hM" } } smac_maps.map_param_registry.update(map_param_registry) def get_map_params(map_name): map_param_registry = smac_maps.get_smac_map_registry() return map_param_registry[map_name] for name in map_param_registry.keys(): globals()[name] = type(name, (smac_maps.SMACMap,), dict(filename=name))
[ "smac.env.starcraft2.maps.smac_maps.get_smac_map_registry", "smac.env.starcraft2.maps.smac_maps.map_param_registry.update" ]
[((830, 885), 'smac.env.starcraft2.maps.smac_maps.map_param_registry.update', 'smac_maps.map_param_registry.update', (['map_param_registry'], {}), '(map_param_registry)\n', (865, 885), False, 'from smac.env.starcraft2.maps import smac_maps\n'), ((942, 975), 'smac.env.starcraft2.maps.smac_maps.get_smac_map_registry', 'smac_maps.get_smac_map_registry', ([], {}), '()\n', (973, 975), False, 'from smac.env.starcraft2.maps import smac_maps\n')]
import collections BOOL = {"BOOLAND", "BOOLOR", "BOOLNOT"} POW = {"POW"} MUL_DIV = {"MUL", "DIV"} ADD_SUB = {"ADD", "SUB"} SHIFT = {"LSHIFT", "RSHIFT"} BIN = {"AND", "OR", "XOR"} CMP = {"EQ", "NEQ", "LEQ", "GEQ", "LT", "GT", "CONTAINS", "CONTAINED"} NODES = { "UnaryExpr": "op val", "BinExpr": "op left right", "CmpExpr": "ops vals", "DotExpr": "val name", "CallExpr": "func args", "ColonCallExpr": "expr name args", "IdentExpr": "ident", "ListExpr": "vals", "NumLit": "val", "BoolLit": "val", "StrLit": "val", "ExprLine": "expr", "SetLine": "name expr", "IfLine": "cond_codes", "WhileLine": "cond line", "FuncLine": "name arg_names line", "ReturnLine": "val", "Suite": "lines", } for name, fields in NODES.items(): globals()[name] = collections.namedtuple(name, fields)
[ "collections.namedtuple" ]
[((783, 819), 'collections.namedtuple', 'collections.namedtuple', (['name', 'fields'], {}), '(name, fields)\n', (805, 819), False, 'import collections\n')]
"""Simple test for monochromatic character LCD on Raspberry Pi""" import time import board import digitalio import adafruit_character_lcd.character_lcd as characterlcd # Modify this if you have a different sized character LCD lcd_columns = 16 lcd_rows = 2 # Raspberry Pi Pin Config: lcd_rs = digitalio.DigitalInOut(board.D26) lcd_en = digitalio.DigitalInOut(board.D19) lcd_d7 = digitalio.DigitalInOut(board.D27) lcd_d6 = digitalio.DigitalInOut(board.D22) lcd_d5 = digitalio.DigitalInOut(board.D24) lcd_d4 = digitalio.DigitalInOut(board.D25) lcd_backlight = digitalio.DigitalInOut(board.D4) # Initialise the lcd class lcd = characterlcd.Character_LCD_Mono(lcd_rs, lcd_en, lcd_d4, lcd_d5, lcd_d6, lcd_d7, lcd_columns, lcd_rows, lcd_backlight) # Turn backlight on lcd.backlight = True # Print a two line message lcd.message = "Hello\nCircuitPython" # Wait 5s time.sleep(5) lcd.clear() # Print two line message right to left lcd.text_direction = lcd.RIGHT_TO_LEFT lcd.message = "Hello\nCircuitPython" # Wait 5s time.sleep(5) # Return text direction to left to right lcd.text_direction = lcd.LEFT_TO_RIGHT # Display cursor lcd.clear() lcd.cursor = True lcd.message = "Cursor! " # Wait 5s time.sleep(5) # Display blinking cursor lcd.clear() lcd.blink = True lcd.message = "Blinky Cursor!" # Wait 5s time.sleep(5) lcd.blink = False lcd.clear() # Create message to scroll scroll_msg = '<-- Scroll' lcd.message = scroll_msg # Scroll message to the left for i in range(len(scroll_msg)): time.sleep(0.5) lcd.move_left() lcd.clear() lcd.message = "Going to sleep\nCya later!" # Turn backlight off lcd.backlight = False time.sleep(2)
[ "digitalio.DigitalInOut", "adafruit_character_lcd.character_lcd.Character_LCD_Mono", "time.sleep" ]
[((294, 327), 'digitalio.DigitalInOut', 'digitalio.DigitalInOut', (['board.D26'], {}), '(board.D26)\n', (316, 327), False, 'import digitalio\n'), ((337, 370), 'digitalio.DigitalInOut', 'digitalio.DigitalInOut', (['board.D19'], {}), '(board.D19)\n', (359, 370), False, 'import digitalio\n'), ((380, 413), 'digitalio.DigitalInOut', 'digitalio.DigitalInOut', (['board.D27'], {}), '(board.D27)\n', (402, 413), False, 'import digitalio\n'), ((423, 456), 'digitalio.DigitalInOut', 'digitalio.DigitalInOut', (['board.D22'], {}), '(board.D22)\n', (445, 456), False, 'import digitalio\n'), ((466, 499), 'digitalio.DigitalInOut', 'digitalio.DigitalInOut', (['board.D24'], {}), '(board.D24)\n', (488, 499), False, 'import digitalio\n'), ((509, 542), 'digitalio.DigitalInOut', 'digitalio.DigitalInOut', (['board.D25'], {}), '(board.D25)\n', (531, 542), False, 'import digitalio\n'), ((559, 591), 'digitalio.DigitalInOut', 'digitalio.DigitalInOut', (['board.D4'], {}), '(board.D4)\n', (581, 591), False, 'import digitalio\n'), ((626, 747), 'adafruit_character_lcd.character_lcd.Character_LCD_Mono', 'characterlcd.Character_LCD_Mono', (['lcd_rs', 'lcd_en', 'lcd_d4', 'lcd_d5', 'lcd_d6', 'lcd_d7', 'lcd_columns', 'lcd_rows', 'lcd_backlight'], {}), '(lcd_rs, lcd_en, lcd_d4, lcd_d5, lcd_d6,\n lcd_d7, lcd_columns, lcd_rows, lcd_backlight)\n', (657, 747), True, 'import adafruit_character_lcd.character_lcd as characterlcd\n'), ((898, 911), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (908, 911), False, 'import time\n'), ((1049, 1062), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1059, 1062), False, 'import time\n'), ((1225, 1238), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1235, 1238), False, 'import time\n'), ((1335, 1348), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1345, 1348), False, 'import time\n'), ((1657, 1670), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1667, 1670), False, 'import time\n'), ((1523, 1538), 'time.sleep', 'time.sleep', (['(0.5)'], {}), 
'(0.5)\n', (1533, 1538), False, 'import time\n')]
#!/usr/bin/python3 import os import sys import re import argparse import subprocess import shlex import shutil import datetime from zoneutils import zonefile, zonefileformatter, nsupdate, utils SLUG_RGX = re.compile(r"[^a-zA-Z0-9_]") def parse_args(): """ Parse command line arguments """ parser = argparse.ArgumentParser(description='nsupdate-interactive') group = parser.add_mutually_exclusive_group(required=True) group.add_argument('--zone', type=str) group.add_argument('--get-zone-slug', type=str) parser.add_argument('--dnsserver', type=str, required=False) return parser.parse_args() def check_dependencies(editor: str): """ Check for binaries which are required for this script """ binaries = [ editor, 'nsupdate', 'dig', 'diff', 'colordiff', 'named-checkzone' ] binarymissing = False for binary in binaries: if shutil.which(binary) is None: binarymissing = True print("The program '"+binary+"' is required to use this script") if binarymissing: sys.exit(1) def domain_slugify(domain: str) -> str: idn = domain.encode('idna').decode('utf-8-sig') return SLUG_RGX.sub('_', idn).upper().strip() def press(what: str): input(f"Press ENTER to {what}, CTRL+C to abort.") def main(): """ Main function of the script""" # get editor editor = os.environ.get('EDITOR', 'nano') # check for dependend programs check_dependencies(editor) # parse arguments args = parse_args() # print domain slug if args.get_zone_slug: print(f"HMAC_{domain_slugify(args.get_zone_slug)}") sys.exit(0) # get hmac key zone_varname = f"HMAC_{domain_slugify(args.zone)}" hmackey = os.environ.get('HMAC', os.environ.get(zone_varname)) if hmackey is None: print("Environment variable 'HMAC' is required.") sys.exit(1) # find nameserver if no one is defined if not args.dnsserver: args.dnsserver = utils.dig_get_authoritative_server(args.zone) if args.dnsserver: print(f"Found dns server by SOA record: {args.dnsserver}") else: print("There was no '--dnsserver' option defined and we are unable") print("to find the authoritative 
name server by SOA record.") sys.exit(1) # base filename ts = datetime.datetime.now().strftime('%Y%m%dT%H%M%S')+'Z' filename = 'nsupdate_'+args.dnsserver+'_'+args.zone+'_'+ts+'.{0}.db' # get zone records by calling dig digstr = utils.dig_zonetransfer(args.dnsserver, hmackey, args.zone) if digstr[2] == utils.ZonetransferResult.KEYINVALID: print(digstr[1]) print("Invalid HMAC key provided or HMAC key was denied by DNS server.") sys.exit(1) elif digstr[2] == utils.ZonetransferResult.FAILED: print(digstr[1]) print("Transfer failed.") print("Maybe a typo in zone name or dns server address?") print("Or the HMAC doesn't have the permission to access the given dns zone.") sys.exit(1) elif digstr[0] == False: print("dig failed:") print(digstr[1]) sys.exit(1) records = zonefile.ZoneFile(digstr[1]) if len(records.records) < 1: print("Unable to find any records in the DNS zone.") print("There must be at least a SOA record.") print("Maybe a typo in the zone name or dns server address?") print("Or something wrong with your permissions?") sys.exit(1) # create zone files for diff and editing formatter = zonefileformatter.ZoneFileFormatter() for version in [ 'org', 'new' ]: formatter.save(filename.format(version), records) # edit and check syntax haserrors = True while haserrors: # open text editor subprocess.call([ editor, filename.format('new') ]) # check syntax checkresult = utils.checkzone(args.zone, filename.format('new')) if checkresult[0]: haserrors = False else: print("Found syntax errors in zone file:") print(checkresult[1]) press('correct the zone file') # show a diff between work copy and original diffresult = utils.diff(filename.format('org'), filename.format('new')) if diffresult[0] == False: print("No changes made. 
Exit.") os.remove(filename.format('org')) os.remove(filename.format('new')) sys.exit(0) # update soa serial originalsoa = zonefile.SoaRecord(next(filter(lambda x: x.dnsType=='SOA', records.records))) newrecords = zonefile.load(filename.format('new')) editedsoa = zonefile.SoaRecord(next(filter(lambda x: x.dnsType=='SOA', newrecords.records))) if originalsoa == editedsoa: # update serial with the classic date format editedsoa.apply_default_serialincrease() # write zone file and redo diff formatter.save(filename.format('new'), newrecords) diffresult = utils.diff(filename.format('org'), filename.format('new')) print(utils.colorize_diff(diffresult[1])[1]) # write diff into a patch file with open(filename.format('patch'), 'w+') as f: f.write(diffresult[1]) # ask befort continue with nsupdate press('send the changes to the nameserver') # create nsupdate batch file minidiff = utils.diff_minimal(filename.format('org'), filename.format('new'))[1] nsupdater = nsupdate.from_diff(minidiff) nsupdatestr = '\n'.join(list(nsupdater.get_nsupdate_batch(args.dnsserver, args.zone))) with open(filename.format('batch'), 'w+') as f: f.write(nsupdatestr) # execute nsupdate updateresult = utils.nsupdate(hmackey, filename.format('batch')) if updateresult[0] == False: print("nsupdate failed:") print(updateresult[1]) sys.exit(1) # start main function if __name__ == "__main__": try: main() except KeyboardInterrupt: # catch exception when script was canceled by CTRL+C pass
[ "argparse.ArgumentParser", "zoneutils.utils.colorize_diff", "zoneutils.utils.dig_zonetransfer", "shutil.which", "os.environ.get", "zoneutils.zonefile.ZoneFile", "zoneutils.zonefileformatter.ZoneFileFormatter", "sys.exit", "zoneutils.utils.dig_get_authoritative_server", "datetime.datetime.now", "...
[((208, 235), 're.compile', 're.compile', (['"""[^a-zA-Z0-9_]"""'], {}), "('[^a-zA-Z0-9_]')\n", (218, 235), False, 'import re\n'), ((312, 371), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""nsupdate-interactive"""'}), "(description='nsupdate-interactive')\n", (335, 371), False, 'import argparse\n'), ((1374, 1406), 'os.environ.get', 'os.environ.get', (['"""EDITOR"""', '"""nano"""'], {}), "('EDITOR', 'nano')\n", (1388, 1406), False, 'import os\n'), ((2541, 2599), 'zoneutils.utils.dig_zonetransfer', 'utils.dig_zonetransfer', (['args.dnsserver', 'hmackey', 'args.zone'], {}), '(args.dnsserver, hmackey, args.zone)\n', (2563, 2599), False, 'from zoneutils import zonefile, zonefileformatter, nsupdate, utils\n'), ((3189, 3217), 'zoneutils.zonefile.ZoneFile', 'zonefile.ZoneFile', (['digstr[1]'], {}), '(digstr[1])\n', (3206, 3217), False, 'from zoneutils import zonefile, zonefileformatter, nsupdate, utils\n'), ((3578, 3615), 'zoneutils.zonefileformatter.ZoneFileFormatter', 'zonefileformatter.ZoneFileFormatter', ([], {}), '()\n', (3613, 3615), False, 'from zoneutils import zonefile, zonefileformatter, nsupdate, utils\n'), ((5453, 5481), 'zoneutils.nsupdate.from_diff', 'nsupdate.from_diff', (['minidiff'], {}), '(minidiff)\n', (5471, 5481), False, 'from zoneutils import zonefile, zonefileformatter, nsupdate, utils\n'), ((1056, 1067), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1064, 1067), False, 'import sys\n'), ((1641, 1652), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1649, 1652), False, 'import sys\n'), ((1765, 1793), 'os.environ.get', 'os.environ.get', (['zone_varname'], {}), '(zone_varname)\n', (1779, 1793), False, 'import os\n'), ((1886, 1897), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1894, 1897), False, 'import sys\n'), ((1994, 2039), 'zoneutils.utils.dig_get_authoritative_server', 'utils.dig_get_authoritative_server', (['args.zone'], {}), '(args.zone)\n', (2028, 2039), False, 'from zoneutils import zonefile, 
zonefileformatter, nsupdate, utils\n'), ((2772, 2783), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2780, 2783), False, 'import sys\n'), ((3504, 3515), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3512, 3515), False, 'import sys\n'), ((4459, 4470), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (4467, 4470), False, 'import sys\n'), ((5855, 5866), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5863, 5866), False, 'import sys\n'), ((885, 905), 'shutil.which', 'shutil.which', (['binary'], {}), '(binary)\n', (897, 905), False, 'import shutil\n'), ((2320, 2331), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2328, 2331), False, 'import sys\n'), ((3059, 3070), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3067, 3070), False, 'import sys\n'), ((5071, 5105), 'zoneutils.utils.colorize_diff', 'utils.colorize_diff', (['diffresult[1]'], {}), '(diffresult[1])\n', (5090, 5105), False, 'from zoneutils import zonefile, zonefileformatter, nsupdate, utils\n'), ((2362, 2385), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2383, 2385), False, 'import datetime\n'), ((3162, 3173), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3170, 3173), False, 'import sys\n')]
import asyncio from types import TracebackType from typing import Optional, Mapping, Union, Type from .aio import simple_cache_client as aio from ._async_utils import wait_for_coroutine from .cache_operation_types import ( CacheGetResponse, CacheSetResponse, CacheDeleteResponse, CreateCacheResponse, CreateSigningKeyResponse, DeleteCacheResponse, ListCachesResponse, CacheMultiGetResponse, CacheMultiSetResponse, ListSigningKeysResponse, RevokeSigningKeyResponse, ) from ._utilities._data_validation import _validate_request_timeout class SimpleCacheClient: def __init__( self, auth_token: str, default_ttl_seconds: int, data_client_operation_timeout_ms: Optional[int], ): self._init_loop() self._momento_async_client = aio.SimpleCacheClient( auth_token=auth_token, default_ttl_seconds=default_ttl_seconds, data_client_operation_timeout_ms=data_client_operation_timeout_ms, ) def _init_loop(self) -> None: try: # If the synchronous client is used inside an async application, # use the event loop it's running within. loop: asyncio.AbstractEventLoop = asyncio.get_running_loop() except RuntimeError: # Currently, we rely on asyncio's module-wide event loop due to the # way the grpc stubs we've got are hiding the _loop parameter. # If a separate loop is required, e.g., so you can run Simple Cache # on a background thread, you'll want to open an issue. loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) self._loop = loop def __enter__(self) -> "SimpleCacheClient": wait_for_coroutine(self._loop, self._momento_async_client.__aenter__()) return self def __exit__( self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType], ) -> None: wait_for_coroutine( self._loop, self._momento_async_client.__aexit__(exc_type, exc_value, traceback), ) def create_cache(self, cache_name: str) -> CreateCacheResponse: """Creates a new cache in your Momento account. Args: cache_name: String used to create cache. 
Returns: CreateCacheResponse Raises: InvalidArgumentError: If provided cache_name is None. BadRequestError: If the cache name provided doesn't follow the naming conventions AlreadyExistsError: If cache with the given name already exists. AuthenticationError: If the provided Momento Auth Token is invalid. ClientSdkError: For any SDK checks that fail. """ coroutine = self._momento_async_client.create_cache(cache_name) return wait_for_coroutine(self._loop, coroutine) def delete_cache(self, cache_name: str) -> DeleteCacheResponse: """Deletes a cache and all the items within it. Args: cache_name: String cache name to delete. Returns: DeleteCacheResponse Raises: InvalidArgumentError: If provided cache_name is None. BadRequestError: If the cache name provided doesn't follow the naming conventions NotFoundError: If an attempt is made to delete a MomentoCache that doesn't exist. AuthenticationError: If the provided Momento Auth Token is invalid. ClientSdkError: For any SDK checks that fail. """ coroutine = self._momento_async_client.delete_cache(cache_name) return wait_for_coroutine(self._loop, coroutine) def list_caches(self, next_token: Optional[str] = None) -> ListCachesResponse: """Lists all caches. Args: next_token: Token to continue paginating through the list. It's used to handle large paginated lists. Returns: ListCachesResponse Raises: AuthenticationError: If the provided Momento Auth Token is invalid. """ coroutine = self._momento_async_client.list_caches(next_token) return wait_for_coroutine(self._loop, coroutine) def create_signing_key(self, ttl_minutes: int) -> CreateSigningKeyResponse: """Creates a Momento signing key Args: ttl_minutes: The key's time-to-live in minutes Returns: CreateSigningKeyResponse Raises: InvalidArgumentError: If provided ttl minutes is negative. BadRequestError: If the ttl provided is not accepted AuthenticationError: If the provided Momento Auth Token is invalid. ClientSdkError: For any SDK checks that fail. 
""" coroutine = self._momento_async_client.create_signing_key(ttl_minutes) return wait_for_coroutine(self._loop, coroutine) def revoke_signing_key(self, key_id: str) -> RevokeSigningKeyResponse: """Revokes a Momento signing key, all tokens signed by which will be invalid Args: key_id: The id of the Momento signing key to revoke Returns: RevokeSigningKeyResponse Raises: AuthenticationError: If the provided Momento Auth Token is invalid. ClientSdkError: For any SDK checks that fail. """ coroutine = self._momento_async_client.revoke_signing_key(key_id) return wait_for_coroutine(self._loop, coroutine) def list_signing_keys( self, next_token: Optional[str] = None ) -> ListSigningKeysResponse: """Lists all Momento signing keys for the provided auth token. Args: next_token: Token to continue paginating through the list. It's used to handle large paginated lists. Returns: ListSigningKeysResponse Raises: AuthenticationError: If the provided Momento Auth Token is invalid. ClientSdkError: For any SDK checks that fail. """ coroutine = self._momento_async_client.list_signing_keys(next_token) return wait_for_coroutine(self._loop, coroutine) def set( self, cache_name: str, key: str, value: Union[str, bytes], ttl_seconds: Optional[int] = None, ) -> CacheSetResponse: """Stores an item in cache Args: cache_name: Name of the cache to store the item in. key (string or bytes): The key to be used to store item. value (string or bytes): The value to be stored. ttl_seconds (Optional): Time to live in cache in seconds. If not provided, then default TTL for the cache client instance is used. Returns: CacheSetResponse Raises: InvalidArgumentError: If validation fails for the provided method arguments. BadRequestError: If the provided inputs are rejected by server because they are invalid NotFoundError: If the cache with the given name doesn't exist. AuthenticationError: If the provided Momento Auth Token is invalid. 
InternalServerError: If server encountered an unknown error while trying to store the item. """ coroutine = self._momento_async_client.set(cache_name, key, value, ttl_seconds) return wait_for_coroutine(self._loop, coroutine) def multi_set( self, cache_name: str, items: Union[Mapping[str, str], Mapping[bytes, bytes]], ttl_seconds: Optional[int] = None, ) -> CacheMultiSetResponse: """Store items in the cache. Args: cache_name: Name of the cache to store the item in. items: (Union[Mapping[str, str], Mapping[bytes, bytes]]): The items to store. ttl_seconds: (Optional[int]): The TTL to apply to each item. Defaults to None. Returns: CacheMultiSetResponse Raises: InvalidArgumentError: If validation fails for the provided method arguments. BadRequestError: If the provided inputs are rejected by server because they are invalid NotFoundError: If the cache with the given name doesn't exist. AuthenticationError: If the provided Momento Auth Token is invalid. InternalServerError: If server encountered an unknown error while trying to retrieve the item. """ coroutine = self._momento_async_client.multi_set(cache_name, items, ttl_seconds) return wait_for_coroutine(self._loop, coroutine) def get(self, cache_name: str, key: str) -> CacheGetResponse: """Retrieve an item from the cache Args: cache_name: Name of the cache to get the item from key (string or bytes): The key to be used to retrieve the item. Returns: CacheGetResponse Raises: InvalidArgumentError: If validation fails for the provided method arguments. BadRequestError: If the provided inputs are rejected by server because they are invalid NotFoundError: If the cache with the given name doesn't exist. AuthenticationError: If the provided Momento Auth Token is invalid. InternalServerError: If server encountered an unknown error while trying to retrieve the item. 
""" coroutine = self._momento_async_client.get(cache_name, key) return wait_for_coroutine(self._loop, coroutine) def multi_get( self, cache_name: str, *keys: Union[str, bytes] ) -> CacheMultiGetResponse: """Retrieve multiple items from the cache. Args: cache_name (str): Name of the cache to get the item from. keys: (Union[str, bytes]): The keys used to retrieve the items. Returns: CacheMultiGetResponse Raises: InvalidArgumentError: If validation fails for the provided method arguments. BadRequestError: If the provided inputs are rejected by server because they are invalid NotFoundError: If the cache with the given name doesn't exist. AuthenticationError: If the provided Momento Auth Token is invalid. InternalServerError: If server encountered an unknown error while trying to retrieve the item. """ coroutine = self._momento_async_client.multi_get(cache_name, *keys) return wait_for_coroutine(self._loop, coroutine) def delete(self, cache_name: str, key: str) -> CacheDeleteResponse: """Delete an item from the cache. Performs a no-op if the item is not in the cache. Args: cache_name: Name of the cache to delete the item from. key (string or bytes): The key to delete. Returns: CacheDeleteResponse Raises: InvalidArgumentError: If validation fails for provided method arguments. BadRequestError: If the provided inputs are rejected by server because they are invalid NotFoundError: If the cache with the given name doesn't exist. AuthenticationError: If the provided Momento Auth Token is invalid. InternalServerError: If server encountered an unknown error while trying to delete the item. 
""" coroutine = self._momento_async_client.delete(cache_name, key) return wait_for_coroutine(self._loop, coroutine) def init( auth_token: str, item_default_ttl_seconds: int, request_timeout_ms: Optional[int] = None, ) -> SimpleCacheClient: """Creates a SimpleCacheClient Args: auth_token: Momento Token to authenticate the requests with Simple Cache Service item_default_ttl_seconds: A default Time To Live in seconds for cache objects created by this client. It is possible to override this setting when calling the set method. request_timeout_ms: An optional timeout in milliseconds to allow for Get and Set operations to complete. Defaults to 5 seconds. The request will be terminated if it takes longer than this value and will result in TimeoutError. Returns: SimpleCacheClient Raises: IllegalArgumentError: If method arguments fail validations """ _validate_request_timeout(request_timeout_ms) return SimpleCacheClient(auth_token, item_default_ttl_seconds, request_timeout_ms)
[ "asyncio.set_event_loop", "asyncio.new_event_loop", "asyncio.get_running_loop" ]
[((1249, 1275), 'asyncio.get_running_loop', 'asyncio.get_running_loop', ([], {}), '()\n', (1273, 1275), False, 'import asyncio\n'), ((1627, 1651), 'asyncio.new_event_loop', 'asyncio.new_event_loop', ([], {}), '()\n', (1649, 1651), False, 'import asyncio\n'), ((1664, 1692), 'asyncio.set_event_loop', 'asyncio.set_event_loop', (['loop'], {}), '(loop)\n', (1686, 1692), False, 'import asyncio\n')]
#!/usr/bin/env python from gmt import app app.run(debug=app.config['DEBUG'], port=app.config['PORT'])
[ "gmt.app.run" ]
[((42, 101), 'gmt.app.run', 'app.run', ([], {'debug': "app.config['DEBUG']", 'port': "app.config['PORT']"}), "(debug=app.config['DEBUG'], port=app.config['PORT'])\n", (49, 101), False, 'from gmt import app\n')]
import io import json import logging import os import re from datetime import datetime from typing import Any, Dict, List, Optional, Tuple import httpx from jobs.base.job import Job from jobs.convert import Convert logger = logging.getLogger(__name__) class Register(Job): """ A job that registers a DOI with a registration agency. Currently, the only DOI supported in Crossref, although this could be expand to other agencies in the future. See https://www.crossref.org/education/member-setup/direct-deposit-xml/https-post/. """ name = "register" def __init__( self, server: Optional[str] = None, credentials: Optional[str] = None, ): super().__init__() self.server = server self.credentials = credentials def do(self, node: dict, doi: str, url: str, batch: str, *args, **kwargs) -> dict: # type: ignore assert node is not None assert "type" in node and node["type"] in ("Article", "Review") # Generate Crossref deposit XML json_str = json.dumps(node).encode("utf-8") xml = Convert().do( json_str, "-", {"from": "json", "to": "crossref", "doi": doi, "url": url}, # type: ignore ) if not xml: raise RuntimeError("Failed to convert node to Crossref XML") # Replace batch id and email xml = re.sub( r"<doi_batch_id>[^<]*</doi_batch_id>", f"<doi_batch_id>{batch}</doi_batch_id>", xml, ) xml = re.sub( r"<email_address>[^<]*</email_address>", r"<email_address><EMAIL></email_address>", xml, ) server = self.server or os.getenv("CROSSREF_DEPOSIT_SERVER") if not server: # If no server explicitly defined then use test server. # Do not fallback to production server to avoid inadvertent # use during testing. server = "https://test.crossref.org/servlet/deposit" credentials = self.credentials or os.getenv("CROSSREF_DEPOSIT_CREDENTIALS") if not credentials: # If no credentials were available for the registration agency # then log a warning and return an empty dictionary. 
# This allows testing during development without having to have # credentials logger.warning("Credentials for DOI registrar are not available") return dict() # Deposit XML username, password = credentials.split(":") deposited = datetime.utcnow().isoformat() response = httpx.post( server, data=dict(login_id=username, login_passwd=password), files=dict(fname=io.BytesIO(xml.encode())), ) # Crossref returns 200 response with an error message for bad login credentials # so we need to check for 'SUCCESS' in the response body deposit_success = response.status_code == 200 and "SUCCESS" in response.text if not deposit_success: logger.error("Unexpected response from Crossref") # Return details of this job return dict( deposited=deposited, deposit_request=dict(body=xml), deposit_response=dict( status=dict(code=response.status_code), headers=dict(response.headers), body=response.text, ), deposit_success=deposit_success, )
[ "json.dumps", "datetime.datetime.utcnow", "jobs.convert.Convert", "re.sub", "os.getenv", "logging.getLogger" ]
[((227, 254), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (244, 254), False, 'import logging\n'), ((1371, 1465), 're.sub', 're.sub', (['"""<doi_batch_id>[^<]*</doi_batch_id>"""', 'f"""<doi_batch_id>{batch}</doi_batch_id>"""', 'xml'], {}), "('<doi_batch_id>[^<]*</doi_batch_id>',\n f'<doi_batch_id>{batch}</doi_batch_id>', xml)\n", (1377, 1465), False, 'import re\n'), ((1524, 1621), 're.sub', 're.sub', (['"""<email_address>[^<]*</email_address>"""', '"""<email_address><EMAIL></email_address>"""', 'xml'], {}), "('<email_address>[^<]*</email_address>',\n '<email_address><EMAIL></email_address>', xml)\n", (1530, 1621), False, 'import re\n'), ((1700, 1736), 'os.getenv', 'os.getenv', (['"""CROSSREF_DEPOSIT_SERVER"""'], {}), "('CROSSREF_DEPOSIT_SERVER')\n", (1709, 1736), False, 'import os\n'), ((2042, 2083), 'os.getenv', 'os.getenv', (['"""CROSSREF_DEPOSIT_CREDENTIALS"""'], {}), "('CROSSREF_DEPOSIT_CREDENTIALS')\n", (2051, 2083), False, 'import os\n'), ((1052, 1068), 'json.dumps', 'json.dumps', (['node'], {}), '(node)\n', (1062, 1068), False, 'import json\n'), ((1099, 1108), 'jobs.convert.Convert', 'Convert', ([], {}), '()\n', (1106, 1108), False, 'from jobs.convert import Convert\n'), ((2553, 2570), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2568, 2570), False, 'from datetime import datetime\n')]
from __future__ import print_function from datetime import datetime from threading import Lock from ingenico.connect.sdk.log.python_communicator_logger import \ CommunicatorLogger class SysOutCommunicatorLogger(CommunicatorLogger): """ A communicator logger that prints its message to sys.stdout It includes a timestamp in yyyy-MM-ddTHH:mm:ss format in the system time zone. """ def __init__(self): CommunicatorLogger.__init__(self) _global_lock = Lock() _old_print = print @staticmethod def INSTANCE(): return _SYS_OUT_COMMUNICATOR_LOGGER_INSTANCE def __print(self, *a): with self._global_lock: self._old_print(*a) def log(self, message, thrown=None): # Make sure the same object is used for locking and printing self.__print(self.__get_date_prefix() + message) if thrown: self.__print(str(thrown)) def __get_date_prefix(self): return datetime.now().strftime("%Y-%m-%dT%H:%M:%S ") _SYS_OUT_COMMUNICATOR_LOGGER_INSTANCE = SysOutCommunicatorLogger()
[ "threading.Lock", "ingenico.connect.sdk.log.python_communicator_logger.CommunicatorLogger.__init__", "datetime.datetime.now" ]
[((490, 496), 'threading.Lock', 'Lock', ([], {}), '()\n', (494, 496), False, 'from threading import Lock\n'), ((436, 469), 'ingenico.connect.sdk.log.python_communicator_logger.CommunicatorLogger.__init__', 'CommunicatorLogger.__init__', (['self'], {}), '(self)\n', (463, 469), False, 'from ingenico.connect.sdk.log.python_communicator_logger import CommunicatorLogger\n'), ((978, 992), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (990, 992), False, 'from datetime import datetime\n')]
from mpl_toolkits.mplot3d import axes3d ax = plt.subplot(111, projection='3d') X, Y, Z = axes3d.get_test_data(0.1) ax.plot_wireframe(X, Y, Z, linewidth=0.1) plt.savefig('wire.pdf')
[ "mpl_toolkits.mplot3d.axes3d.get_test_data" ]
[((90, 115), 'mpl_toolkits.mplot3d.axes3d.get_test_data', 'axes3d.get_test_data', (['(0.1)'], {}), '(0.1)\n', (110, 115), False, 'from mpl_toolkits.mplot3d import axes3d\n')]
# Copyright: See the LICENSE file. """Helper to test circular factory dependencies.""" import factory class Bar: def __init__(self, foo, y): self.foo = foo self.y = y class BarFactory(factory.Factory): class Meta: model = Bar y = 13 foo = factory.SubFactory('cyclic.foo.FooFactory')
[ "factory.SubFactory" ]
[((286, 329), 'factory.SubFactory', 'factory.SubFactory', (['"""cyclic.foo.FooFactory"""'], {}), "('cyclic.foo.FooFactory')\n", (304, 329), False, 'import factory\n')]
# Generated by Django 2.0.4 on 2018-04-18 22:16 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('todos', '0002_auto_20180418_2213'), ] operations = [ migrations.AlterField( model_name='status', name='status', field=models.CharField(choices=[('NOT DONE', 'NOT DONE'), ('DONE', 'DONE')], default='NOT DONE', max_length=255), ), ]
[ "django.db.models.CharField" ]
[((334, 444), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('NOT DONE', 'NOT DONE'), ('DONE', 'DONE')]", 'default': '"""NOT DONE"""', 'max_length': '(255)'}), "(choices=[('NOT DONE', 'NOT DONE'), ('DONE', 'DONE')],\n default='NOT DONE', max_length=255)\n", (350, 444), False, 'from django.db import migrations, models\n')]
import os, sys sys.path.append(os.getcwd()) from PyQt5.QtWidgets import QApplication, QWidget from src.controller.lib.ssqt import SSQt SSQt.load_uifile(True, 'src/view/widgets/ui/field_lookup.ui') from src.view.widgets.ui.field_lookup import Ui_FieldLookup class FieldLookupView(QWidget, Ui_FieldLookup, SSQt): def __init__(self): super().__init__() self.setupUi(self) print('FieldLookup') if __name__ == "__main__": app = QApplication(sys.argv) w = FieldLookupView() w.show() sys.exit(app.exec_())
[ "src.controller.lib.ssqt.SSQt.load_uifile", "PyQt5.QtWidgets.QApplication", "os.getcwd" ]
[((138, 199), 'src.controller.lib.ssqt.SSQt.load_uifile', 'SSQt.load_uifile', (['(True)', '"""src/view/widgets/ui/field_lookup.ui"""'], {}), "(True, 'src/view/widgets/ui/field_lookup.ui')\n", (154, 199), False, 'from src.controller.lib.ssqt import SSQt\n'), ((31, 42), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (40, 42), False, 'import os, sys\n'), ((436, 458), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (448, 458), False, 'from PyQt5.QtWidgets import QApplication, QWidget\n')]
from kinematicEnv import KinematicEnv from QL import QL from upDDPG import DDPG as uDDPG import tensorflow as tf from bottomDDPG import DDPG as bDDPG import numpy as np env = KinematicEnv() s_dim = env.state_dim a_dim = env.action_dim a_bound = env.action_bound g1 = tf.Graph() isess1 = tf.Session(graph=g1) with g1.as_default(): isess1.run(tf.global_variables_initializer()) uddpg = uDDPG(a_dim, s_dim, a_bound) uddpg.restore() g2 = tf.Graph() isess2 = tf.Session(graph=g2) with g2.as_default(): isess2.run(tf.global_variables_initializer()) bddpg = bDDPG(a_dim, s_dim, a_bound) bddpg.restore() g3 = tf.Graph() isess3 = tf.Session(graph=g3) with g3.as_default(): isess3.run(tf.global_variables_initializer()) Q = QL(2, s_dim) def initial(): tt = np.random.randint(0, 3) if tt == 0: s = env.initialUp() elif tt == 1: s = env.initialDown() else: s = env.initialOn() return s def train(): step = 0 for i_episode in range(6000): s = initial() j = 0 for i in range(300): #env.render() a0 = Q.choose_action(s) if a0 == 0: k = uddpg.choose_action(s) s_, _, _ = env.stepUp(k) else: k = bddpg.choose_action(s) s_, _, _ = env.stepDown(k) #rewardReset label1, label2, label3 = s[0], s[8], s[9] - s[1] if -20.<label1<20. and -20.<label2<20.: if label3 < 150.: if a0 == 0: reward = 1 else: reward = -1 else: if a0 == 0: reward = -1 else: reward = 1 elif -20.<label1<20. and abs(label2) >= 20.: if a0 == 0: reward = 1 else: reward = -2 elif abs(label1) >= 20. and -20.<label2<20.: if a0 == 0: reward = -2 else: reward = 1 Q.store_transition(s, a0, reward, s_) if step > 300 and step % 50 == 0: Q.learn() step+=1 if reward == 1: j += 1 if reward == -2 or i == 299: print('Ep: %i | accuracy: %.2f | step: %i' % (i_episode, 1.*j/(i+1)*100, i)) break with g3.as_default(): Q.save() #多个计算图训练时,怎么分别存储模型 train()
[ "kinematicEnv.KinematicEnv", "QL.QL", "tensorflow.global_variables_initializer", "tensorflow.Session", "upDDPG.DDPG", "numpy.random.randint", "tensorflow.Graph", "bottomDDPG.DDPG" ]
[((175, 189), 'kinematicEnv.KinematicEnv', 'KinematicEnv', ([], {}), '()\n', (187, 189), False, 'from kinematicEnv import KinematicEnv\n'), ((268, 278), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (276, 278), True, 'import tensorflow as tf\n'), ((288, 308), 'tensorflow.Session', 'tf.Session', ([], {'graph': 'g1'}), '(graph=g1)\n', (298, 308), True, 'import tensorflow as tf\n'), ((447, 457), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (455, 457), True, 'import tensorflow as tf\n'), ((467, 487), 'tensorflow.Session', 'tf.Session', ([], {'graph': 'g2'}), '(graph=g2)\n', (477, 487), True, 'import tensorflow as tf\n'), ((626, 636), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (634, 636), True, 'import tensorflow as tf\n'), ((646, 666), 'tensorflow.Session', 'tf.Session', ([], {'graph': 'g3'}), '(graph=g3)\n', (656, 666), True, 'import tensorflow as tf\n'), ((393, 421), 'upDDPG.DDPG', 'uDDPG', (['a_dim', 's_dim', 'a_bound'], {}), '(a_dim, s_dim, a_bound)\n', (398, 421), True, 'from upDDPG import DDPG as uDDPG\n'), ((572, 600), 'bottomDDPG.DDPG', 'bDDPG', (['a_dim', 's_dim', 'a_bound'], {}), '(a_dim, s_dim, a_bound)\n', (577, 600), True, 'from bottomDDPG import DDPG as bDDPG\n'), ((747, 759), 'QL.QL', 'QL', (['(2)', 's_dim'], {}), '(2, s_dim)\n', (749, 759), False, 'from QL import QL\n'), ((786, 809), 'numpy.random.randint', 'np.random.randint', (['(0)', '(3)'], {}), '(0, 3)\n', (803, 809), True, 'import numpy as np\n'), ((346, 379), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (377, 379), True, 'import tensorflow as tf\n'), ((525, 558), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (556, 558), True, 'import tensorflow as tf\n'), ((704, 737), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (735, 737), True, 'import tensorflow as tf\n')]
import unittest import warnings import numpy as np # import safety_gym # from safety_gym.envs.engine import Engine import gym import gym.spaces as spaces from gym.envs.registration import register from edge.gym_wrappers import BoxWrapper, DiscreteWrapper, GymEnvironmentWrapper from edge.agent import RandomAgent class SpaceWrappers(unittest.TestCase): def test_box_wrapper(self): warnings.filterwarnings('ignore') gb = spaces.Box(0, 1, (2,2)) eb = BoxWrapper(gb, (10,10,10,10)) eelem = eb.sample() gelem = gb.sample() self.assertEqual(eelem.shape, (4,)) self.assertEqual(eb.to_gym((2,3,4,5)).shape, gb.shape) self.assertEqual(eb.from_gym(gelem).shape, (4,)) gb = spaces.Box(np.array([0,1]), np.array([2,3])) eb = BoxWrapper(gb, (10, 10)) eelem = eb.sample() gelem = gb.sample() self.assertEqual(eelem.shape, (2,)) self.assertEqual(eb.to_gym((2,3)).shape, gb.shape) self.assertEqual(eb.from_gym(gelem).shape, (2,)) gb = spaces.Box(-np.inf, np.inf, (1,)) eb = BoxWrapper(gb, (10, ), inf_ceiling=5) for t in range(100): eelem = eb.sample() self.assertTrue(np.abs(eelem)[0] <= 5) self.assertTrue(eelem in eb) def test_discrete_wrapper(self): gd = spaces.Discrete(10) ed = DiscreteWrapper(gd) g = gd.sample() e = ed.sample() self.assertEqual(ed.to_gym(e), int(e)) self.assertEqual(ed.from_gym(g), g) # class SafetyGymEnvironmentWrappers(unittest.TestCase): # def test_safety_gym_environment_creation(self): # senv = gym.make('Safexp-PointGoal1-v0') # env = GymEnvironmentWrapper(senv) # # config = { # 'robot_base': 'xmls/car.xml', # 'task': 'push', # 'observe_goal_lidar': True, # 'observe_box_lidar': True, # 'observe_hazards': True, # 'observe_vases': True, # 'constrain_hazards': True, # 'lidar_max_dist': 3, # 'lidar_num_bins': 16, # 'hazards_num': 4, # 'vases_num': 4 # } # # senv = Engine(config) # register(id='SafexpTestEnvironment-v0', # entry_point='safety_gym.envs.mujoco:Engine', # kwargs={'config': config}) # env = GymEnvironmentWrapper(senv, failure_critical=True) # # def 
test_safety_gym_random_agent(self): # senv = gym.make('Safexp-PointGoal1-v0') # env = GymEnvironmentWrapper(senv) # random_agent = RandomAgent(env) # # ep_ret, ep_cost = 0, 0 # for t in range(1000): # new_state, reward, failed = random_agent.step() # ep_ret += reward # ep_cost += env.info.get('cost', 0) # env.gym_env.render() # if env.done: # print('Episode Return: %.3f \t Episode Cost: %.3f' % (ep_ret, ep_cost)) # ep_ret, ep_cost = 0, 0 # random_agent.reset() class GymEnvironmentWrappers(unittest.TestCase): def test_gym_environment_creation(self): gymenv = gym.make('LunarLander-v2') env = GymEnvironmentWrapper(gymenv) env = GymEnvironmentWrapper(gymenv, failure_critical=True) self.assertTrue(True) def test_gym_random_agent(self): gymenv = gym.make('LunarLander-v2') env = GymEnvironmentWrapper(gymenv) random_agent = RandomAgent(env) ep_ret, ep_cost = 0, 0 for t in range(100): new_state, reward, failed, _ = random_agent.step() ep_ret += reward ep_cost += env.info.get('cost', 0) # env.gym_env.render() if env.done: print('Episode Return: %.3f \t Episode Cost: %.3f' % ( ep_ret, ep_cost)) ep_ret, ep_cost = 0, 0 random_agent.reset() def test_gym_control_frequency(self): gymenv = gym.make('CartPole-v1') env = GymEnvironmentWrapper(gymenv, control_frequency=2) random_agent = RandomAgent(env) ep_ret, ep_cost = 0, 0 for t in range(100): new_state, reward, failed, _ = random_agent.step() ep_ret += reward ep_cost += env.info.get('cost', 0) # env.gym_env.render() if env.done: print('Episode Return: %.3f \t Episode Cost: %.3f' % (ep_ret, ep_cost)) ep_ret, ep_cost = 0, 0 random_agent.reset() if __name__ == '__main__': unittest.main()
[ "unittest.main", "numpy.abs", "gym.make", "warnings.filterwarnings", "edge.gym_wrappers.DiscreteWrapper", "gym.spaces.Discrete", "edge.gym_wrappers.BoxWrapper", "gym.spaces.Box", "edge.gym_wrappers.GymEnvironmentWrapper", "numpy.array", "edge.agent.RandomAgent" ]
[((4610, 4625), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4623, 4625), False, 'import unittest\n'), ((396, 429), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (419, 429), False, 'import warnings\n'), ((443, 467), 'gym.spaces.Box', 'spaces.Box', (['(0)', '(1)', '(2, 2)'], {}), '(0, 1, (2, 2))\n', (453, 467), True, 'import gym.spaces as spaces\n'), ((480, 512), 'edge.gym_wrappers.BoxWrapper', 'BoxWrapper', (['gb', '(10, 10, 10, 10)'], {}), '(gb, (10, 10, 10, 10))\n', (490, 512), False, 'from edge.gym_wrappers import BoxWrapper, DiscreteWrapper, GymEnvironmentWrapper\n'), ((803, 827), 'edge.gym_wrappers.BoxWrapper', 'BoxWrapper', (['gb', '(10, 10)'], {}), '(gb, (10, 10))\n', (813, 827), False, 'from edge.gym_wrappers import BoxWrapper, DiscreteWrapper, GymEnvironmentWrapper\n'), ((1059, 1092), 'gym.spaces.Box', 'spaces.Box', (['(-np.inf)', 'np.inf', '(1,)'], {}), '(-np.inf, np.inf, (1,))\n', (1069, 1092), True, 'import gym.spaces as spaces\n'), ((1106, 1142), 'edge.gym_wrappers.BoxWrapper', 'BoxWrapper', (['gb', '(10,)'], {'inf_ceiling': '(5)'}), '(gb, (10,), inf_ceiling=5)\n', (1116, 1142), False, 'from edge.gym_wrappers import BoxWrapper, DiscreteWrapper, GymEnvironmentWrapper\n'), ((1348, 1367), 'gym.spaces.Discrete', 'spaces.Discrete', (['(10)'], {}), '(10)\n', (1363, 1367), True, 'import gym.spaces as spaces\n'), ((1381, 1400), 'edge.gym_wrappers.DiscreteWrapper', 'DiscreteWrapper', (['gd'], {}), '(gd)\n', (1396, 1400), False, 'from edge.gym_wrappers import BoxWrapper, DiscreteWrapper, GymEnvironmentWrapper\n'), ((3188, 3214), 'gym.make', 'gym.make', (['"""LunarLander-v2"""'], {}), "('LunarLander-v2')\n", (3196, 3214), False, 'import gym\n'), ((3229, 3258), 'edge.gym_wrappers.GymEnvironmentWrapper', 'GymEnvironmentWrapper', (['gymenv'], {}), '(gymenv)\n', (3250, 3258), False, 'from edge.gym_wrappers import BoxWrapper, DiscreteWrapper, GymEnvironmentWrapper\n'), ((3274, 3326), 
'edge.gym_wrappers.GymEnvironmentWrapper', 'GymEnvironmentWrapper', (['gymenv'], {'failure_critical': '(True)'}), '(gymenv, failure_critical=True)\n', (3295, 3326), False, 'from edge.gym_wrappers import BoxWrapper, DiscreteWrapper, GymEnvironmentWrapper\n'), ((3412, 3438), 'gym.make', 'gym.make', (['"""LunarLander-v2"""'], {}), "('LunarLander-v2')\n", (3420, 3438), False, 'import gym\n'), ((3453, 3482), 'edge.gym_wrappers.GymEnvironmentWrapper', 'GymEnvironmentWrapper', (['gymenv'], {}), '(gymenv)\n', (3474, 3482), False, 'from edge.gym_wrappers import BoxWrapper, DiscreteWrapper, GymEnvironmentWrapper\n'), ((3506, 3522), 'edge.agent.RandomAgent', 'RandomAgent', (['env'], {}), '(env)\n', (3517, 3522), False, 'from edge.agent import RandomAgent\n'), ((4024, 4047), 'gym.make', 'gym.make', (['"""CartPole-v1"""'], {}), "('CartPole-v1')\n", (4032, 4047), False, 'import gym\n'), ((4062, 4112), 'edge.gym_wrappers.GymEnvironmentWrapper', 'GymEnvironmentWrapper', (['gymenv'], {'control_frequency': '(2)'}), '(gymenv, control_frequency=2)\n', (4083, 4112), False, 'from edge.gym_wrappers import BoxWrapper, DiscreteWrapper, GymEnvironmentWrapper\n'), ((4136, 4152), 'edge.agent.RandomAgent', 'RandomAgent', (['env'], {}), '(env)\n', (4147, 4152), False, 'from edge.agent import RandomAgent\n'), ((756, 772), 'numpy.array', 'np.array', (['[0, 1]'], {}), '([0, 1])\n', (764, 772), True, 'import numpy as np\n'), ((773, 789), 'numpy.array', 'np.array', (['[2, 3]'], {}), '([2, 3])\n', (781, 789), True, 'import numpy as np\n'), ((1233, 1246), 'numpy.abs', 'np.abs', (['eelem'], {}), '(eelem)\n', (1239, 1246), True, 'import numpy as np\n')]
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib import messages
from django.http import JsonResponse, HttpResponse
from django.views import generic
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import get_object_or_404, redirect, render
from rest_framework.views import APIView
from rest_framework.response import Response
import stripe

from djvideomem.content.models import Pricing

User = get_user_model()

stripe.api_key = settings.STRIPE_SECRET_KEY


@csrf_exempt
def webhook(request):
    """Stripe webhook receiver.

    Verifies the event signature against ``STRIPE_WEBHOOK_SECRET`` and then
    mirrors the relevant subscription state changes onto the local user.
    For the webhook event catalogue see https://stripe.com/docs/webhooks.
    """
    webhook_secret = settings.STRIPE_WEBHOOK_SECRET
    payload = request.body
    # Signature verification needs the *raw* body plus the endpoint secret.
    signature = request.META["HTTP_STRIPE_SIGNATURE"]
    try:
        event = stripe.Webhook.construct_event(
            payload=payload, sig_header=signature, secret=webhook_secret)
        data = event['data']
    except Exception:
        # FIX: the original did ``return e`` — handing the exception object
        # back as the view's "response", which is not a valid Django
        # response.  Answer 400 instead so Stripe records the delivery as
        # failed and retries it.
        return HttpResponse(status=400)
    # Event type tells us which (if any) local state needs updating.
    event_type = event['type']
    data_object = data['object']

    if event_type == 'invoice.paid':
        # An invoice settled (e.g. after the trial ended).  Store the
        # subscription status/plan locally so access checks don't have to
        # hit the Stripe API (and its rate limits) on every request.
        webhook_object = data["object"]
        stripe_customer_id = webhook_object["customer"]
        stripe_sub = stripe.Subscription.retrieve(webhook_object["subscription"])
        stripe_price_id = stripe_sub["plan"]["id"]
        pricing = Pricing.objects.get(stripe_price_id=stripe_price_id)
        user = User.objects.get(stripe_customer_id=stripe_customer_id)

        user.subscription.status = stripe_sub["status"]
        user.subscription.stripe_subscription_id = webhook_object["subscription"]
        user.subscription.pricing = pricing
        user.subscription.save()

    if event_type == 'invoice.finalized':
        # Hook point for sending/storing invoices locally; currently only
        # printed for inspection.
        print(data)

    if event_type == 'customer.subscription.deleted':
        # Subscription was cancelled — either automatically via billing
        # settings or explicitly by the user.  Sync the status locally.
        webhook_object = data["object"]
        stripe_customer_id = webhook_object["customer"]
        stripe_sub = stripe.Subscription.retrieve(webhook_object["id"])
        user = User.objects.get(stripe_customer_id=stripe_customer_id)

        user.subscription.status = stripe_sub["status"]
        user.subscription.save()

    if event_type == 'customer.subscription.trial_will_end':
        # Hook point for a "your trial is ending soon" notification.
        print(data)

    if event_type == 'customer.subscription.updated':
        print(data)

    # 200 acknowledges receipt so Stripe stops re-delivering the event.
    return HttpResponse()


class EnrollView(generic.TemplateView):
    """Static page listing the available pricing tiers."""
    template_name = "payment/enroll.html"


def PaymentView(request, slug):
    """Render the checkout (or plan-change) page for the tier ``slug``.

    Redirects back to the enrol page with a message when the user is
    already actively subscribed to this exact tier.
    """
    subscription = request.user.subscription
    pricing = get_object_or_404(Pricing, slug=slug)

    if subscription.pricing == pricing and subscription.is_active:
        messages.info(request, "You are already enrolled for this package")
        return redirect("payment:enroll")

    context = {
        "pricing_tier": pricing,
        "STRIPE_PUBLIC_KEY": settings.STRIPE_PUBLIC_KEY
    }

    # An active *paid* subscriber switches plans; everyone else (including
    # free-trial users) goes through a fresh checkout.
    if subscription.is_active and subscription.pricing.stripe_price_id != "django-free-trial":
        return render(request, "payment/change.html", context)
    return render(request, "payment/checkout.html", context)


class CreateSubscriptionView(APIView):
    """Attach a payment method to the customer and start a subscription.

    Expects ``paymentMethodId`` and ``priceId`` in the request body and
    returns the created Stripe subscription (with the latest invoice's
    payment intent expanded so the client can confirm 3DS if required).
    """

    def post(self, request, *args, **kwargs):
        data = request.data
        customer_id = request.user.stripe_customer_id
        try:
            # Attach the payment method to the customer.
            stripe.PaymentMethod.attach(
                data['paymentMethodId'],
                customer=customer_id,
            )
            # Make it the customer's default for future invoices.
            stripe.Customer.modify(
                customer_id,
                invoice_settings={
                    'default_payment_method': data['paymentMethodId'],
                },
            )
            # Create the subscription on the selected price.
            subscription = stripe.Subscription.create(
                customer=customer_id,
                items=[{'price': data["priceId"]}],
                expand=['latest_invoice.payment_intent'],
            )
            data = {}
            data.update(subscription)
            return Response(data)
        except Exception as e:
            # Surface the Stripe error message to the client.
            return Response({
                "error": {'message': str(e)}
            })


class RetryInvoiceView(APIView):
    """Retry payment of a failed invoice with a (new) payment method.

    Expects ``paymentMethodId`` and ``invoiceId`` in the request body.
    """

    def post(self, request, *args, **kwargs):
        data = request.data
        customer_id = request.user.stripe_customer_id
        try:
            stripe.PaymentMethod.attach(
                data['paymentMethodId'],
                customer=customer_id,
            )
            # Make it the customer's default for future invoices.
            stripe.Customer.modify(
                customer_id,
                invoice_settings={
                    'default_payment_method': data['paymentMethodId'],
                },
            )
            invoice = stripe.Invoice.retrieve(
                data['invoiceId'],
                expand=['payment_intent'],
            )
            data = {}
            data.update(invoice)
            return Response(data)
        except Exception as e:
            # Surface the Stripe error message to the client.
            return Response({
                "error": {'message': str(e)}
            })


class ChangeSubscriptionView(APIView):
    """Switch the user's existing subscription to a different price.

    Expects ``priceId`` in the request body; prorates immediately via
    ``proration_behavior="always_invoice"``.
    """

    def post(self, request, *args, **kwargs):
        print(request.data)
        subscription_id = request.user.subscription.stripe_subscription_id
        subscription = stripe.Subscription.retrieve(subscription_id)

        try:
            updatedSubscription = stripe.Subscription.modify(
                subscription_id,
                cancel_at_period_end=False,
                items=[{
                    # Replace the single existing subscription item in place.
                    'id': subscription['items']['data'][0].id,
                    'price': request.data["priceId"],
                }],
                proration_behavior="always_invoice"
            )
            data = {}
            data.update(updatedSubscription)
            return Response(data)
        except Exception as e:
            # Surface the Stripe error message to the client.
            return Response({
                "error": {'message': str(e)}
            })
[ "djvideomem.content.models.Pricing.objects.get", "django.http.HttpResponse", "stripe.Webhook.construct_event", "stripe.Invoice.retrieve", "django.shortcuts.redirect", "stripe.Subscription.modify", "django.contrib.auth.get_user_model", "stripe.Subscription.retrieve", "django.shortcuts.get_object_or_4...
[((473, 489), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (487, 489), False, 'from django.contrib.auth import get_user_model\n'), ((3255, 3269), 'django.http.HttpResponse', 'HttpResponse', ([], {}), '()\n', (3267, 3269), False, 'from django.http import JsonResponse, HttpResponse\n'), ((3446, 3483), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Pricing'], {'slug': 'slug'}), '(Pricing, slug=slug)\n', (3463, 3483), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((3957, 4006), 'django.shortcuts.render', 'render', (['request', '"""payment/checkout.html"""', 'context'], {}), "(request, 'payment/checkout.html', context)\n", (3963, 4006), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((1015, 1111), 'stripe.Webhook.construct_event', 'stripe.Webhook.construct_event', ([], {'payload': 'payload', 'sig_header': 'signature', 'secret': 'webhook_secret'}), '(payload=payload, sig_header=signature,\n secret=webhook_secret)\n', (1045, 1111), False, 'import stripe\n'), ((1822, 1882), 'stripe.Subscription.retrieve', 'stripe.Subscription.retrieve', (["webhook_object['subscription']"], {}), "(webhook_object['subscription'])\n", (1850, 1882), False, 'import stripe\n'), ((1953, 2005), 'djvideomem.content.models.Pricing.objects.get', 'Pricing.objects.get', ([], {'stripe_price_id': 'stripe_price_id'}), '(stripe_price_id=stripe_price_id)\n', (1972, 2005), False, 'from djvideomem.content.models import Pricing\n'), ((2810, 2860), 'stripe.Subscription.retrieve', 'stripe.Subscription.retrieve', (["webhook_object['id']"], {}), "(webhook_object['id'])\n", (2838, 2860), False, 'import stripe\n'), ((3560, 3627), 'django.contrib.messages.info', 'messages.info', (['request', '"""You are already enrolled for this package"""'], {}), "(request, 'You are already enrolled for this package')\n", (3573, 3627), False, 'from django.contrib import messages\n'), ((3643, 3669), 'django.shortcuts.redirect', 
'redirect', (['"""payment:enroll"""'], {}), "('payment:enroll')\n", (3651, 3669), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((3893, 3940), 'django.shortcuts.render', 'render', (['request', '"""payment/change.html"""', 'context'], {}), "(request, 'payment/change.html', context)\n", (3899, 3940), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((6258, 6303), 'stripe.Subscription.retrieve', 'stripe.Subscription.retrieve', (['subscription_id'], {}), '(subscription_id)\n', (6286, 6303), False, 'import stripe\n'), ((4257, 4331), 'stripe.PaymentMethod.attach', 'stripe.PaymentMethod.attach', (["data['paymentMethodId']"], {'customer': 'customer_id'}), "(data['paymentMethodId'], customer=customer_id)\n", (4284, 4331), False, 'import stripe\n'), ((4452, 4562), 'stripe.Customer.modify', 'stripe.Customer.modify', (['customer_id'], {'invoice_settings': "{'default_payment_method': data['paymentMethodId']}"}), "(customer_id, invoice_settings={\n 'default_payment_method': data['paymentMethodId']})\n", (4474, 4562), False, 'import stripe\n'), ((4710, 4841), 'stripe.Subscription.create', 'stripe.Subscription.create', ([], {'customer': 'customer_id', 'items': "[{'price': data['priceId']}]", 'expand': "['latest_invoice.payment_intent']"}), "(customer=customer_id, items=[{'price': data[\n 'priceId']}], expand=['latest_invoice.payment_intent'])\n", (4736, 4841), False, 'import stripe\n'), ((4981, 4995), 'rest_framework.response.Response', 'Response', (['data'], {}), '(data)\n', (4989, 4995), False, 'from rest_framework.response import Response\n'), ((5306, 5380), 'stripe.PaymentMethod.attach', 'stripe.PaymentMethod.attach', (["data['paymentMethodId']"], {'customer': 'customer_id'}), "(data['paymentMethodId'], customer=customer_id)\n", (5333, 5380), False, 'import stripe\n'), ((5501, 5611), 'stripe.Customer.modify', 'stripe.Customer.modify', (['customer_id'], {'invoice_settings': "{'default_payment_method': 
data['paymentMethodId']}"}), "(customer_id, invoice_settings={\n 'default_payment_method': data['paymentMethodId']})\n", (5523, 5611), False, 'import stripe\n'), ((5716, 5785), 'stripe.Invoice.retrieve', 'stripe.Invoice.retrieve', (["data['invoiceId']"], {'expand': "['payment_intent']"}), "(data['invoiceId'], expand=['payment_intent'])\n", (5739, 5785), False, 'import stripe\n'), ((5908, 5922), 'rest_framework.response.Response', 'Response', (['data'], {}), '(data)\n', (5916, 5922), False, 'from rest_framework.response import Response\n'), ((6351, 6555), 'stripe.Subscription.modify', 'stripe.Subscription.modify', (['subscription_id'], {'cancel_at_period_end': '(False)', 'items': "[{'id': subscription['items']['data'][0].id, 'price': request.data['priceId']}]", 'proration_behavior': '"""always_invoice"""'}), "(subscription_id, cancel_at_period_end=False,\n items=[{'id': subscription['items']['data'][0].id, 'price': request.\n data['priceId']}], proration_behavior='always_invoice')\n", (6377, 6555), False, 'import stripe\n'), ((6771, 6785), 'rest_framework.response.Response', 'Response', (['data'], {}), '(data)\n', (6779, 6785), False, 'from rest_framework.response import Response\n')]
import os
from datetime import datetime
from typing import Optional

import click
from click import Context

from cellphonedb.src.api_endpoints.terminal_api.tools_terminal_api_endpoints.tools_terminal_commands import \
    generate_proteins, generate_complex, _set_paths, generate_interactions, filter_all, generate_genes
from cellphonedb.src.app.cellphonedb_app import output_dir
from cellphonedb.src.database.manager import DatabaseVersionManager
from cellphonedb.src.database.manager.DatabaseVersionManager import collect_database
from cellphonedb.utils.utils import set_paths


def _timestamp() -> str:
    """Current local time formatted for use in database file names."""
    return datetime.now().strftime("%Y-%m-%d-%H_%M")


def _user_db_name() -> str:
    """Default file name for a user-collected database.

    Shared by ``generate`` and ``collect_generated`` so the two commands
    stay consistent (the pattern was previously duplicated in both).
    """
    return 'cellphonedb_user_{}.db'.format(_timestamp())


@click.command("collect")
# NOTE(review): this default is evaluated once at import time, so a
# long-running process reuses the same timestamp — confirm intended.
@click.option('--database', default='cellphone_custom_{}.db'.format(_timestamp()),
              help='output file name [cellphone_custom_<current date_time>.db]')
@click.option('--result-path', default='', help='output folder for the collected database')
def collect(database, result_path):
    """Collect the csv input files into a database file under result-path."""
    output_path = set_paths(output_dir, result_path)
    DatabaseVersionManager.collect_database(database, output_path)


@click.command("download")
@click.option('--version', type=str, default='latest')
def download(version: str):
    """Download a released database version (defaults to the latest)."""
    DatabaseVersionManager.download_database(version)


@click.command("list_remote")
def list_remote():
    """Print the database versions available for download."""
    DatabaseVersionManager.list_remote_database_versions()


@click.command("list_local")
def list_local():
    """Print the database versions already present locally."""
    DatabaseVersionManager.list_local_database_versions()


@click.command("generate")
@click.option('--user-protein', type=click.Path(file_okay=True, exists=True, dir_okay=False))
@click.option('--user-gene', type=click.Path(file_okay=True, exists=True, dir_okay=False))
@click.option('--user-complex', type=click.Path(file_okay=True, exists=True, dir_okay=False))
@click.option('--user-interactions', type=click.Path(file_okay=True, exists=True, dir_okay=False))
@click.option('--user-interactions-only', is_flag=True)
@click.option('--fetch', is_flag=True)
@click.option('--result-path', type=str, default=None)
@click.option('--log-file', type=str, default='log.txt')
@click.option('--project-name', type=str, default=None)
@click.pass_context
def generate(ctx: Context,
             user_protein: Optional[str],
             user_gene: Optional[str],
             user_complex: Optional[str],
             user_interactions: Optional[str],
             user_interactions_only: Optional[str],
             fetch: bool,
             result_path: Optional[str],
             log_file: str,
             project_name: str
             ):
    """Run the full database-generation pipeline and collect the result.

    Generates proteins, genes, complexes and interactions (optionally
    merging user-provided files and remote fetches), filters them, and
    finally collects everything into a timestamped user database.
    """
    # Step 1: generate the protein table (optionally fetching UniProt).
    ctx.invoke(generate_proteins,
               user_protein=user_protein,
               fetch_uniprot=fetch,
               result_path=result_path,
               log_file=log_file,
               project_name=project_name
               )
    # Step 2: generate the gene table (UniProt + Ensembl).
    ctx.invoke(generate_genes,
               user_gene=user_gene,
               fetch_uniprot=fetch,
               fetch_ensembl=fetch,
               result_path=result_path,
               project_name=project_name
               )
    # Step 3: generate the complex table.
    ctx.invoke(generate_complex,
               user_complex=user_complex,
               result_path=result_path,
               log_file=log_file,
               project_name=project_name
               )

    # The previous steps wrote their outputs into this directory.
    output_path = _set_paths(result_path, project_name)

    proteins_file = os.path.join(output_path, 'protein_generated.csv')
    genes_file = os.path.join(output_path, 'gene_generated.csv')
    complex_file = os.path.join(output_path, 'complex_generated.csv')

    # Step 4: generate interactions from the generated tables (optionally
    # fetching IMEx/IUPhar and merging user-provided interactions).
    ctx.invoke(generate_interactions,
               proteins=proteins_file,
               genes=genes_file,
               complex=complex_file,
               user_interactions=user_interactions,
               user_interactions_only=user_interactions_only,
               result_path=result_path,
               fetch_imex=fetch,
               fetch_iuphar=fetch,
               project_name=project_name
               )

    # Step 5: filter everything down to the final *_input.csv files.
    ctx.invoke(filter_all, input_path=output_path, result_path=result_path)

    # Step 6: collect the filtered inputs into a timestamped database.
    collect_database(_user_db_name(), output_path,
                     protein_filename='protein_input.csv',
                     gene_filename='gene_input.csv',
                     complex_filename='complex_input.csv',
                     interaction_filename='interaction_input.csv',
                     data_path=output_path)


@click.command("collect_generated")
@click.argument('path', type=str)
@click.option('--result-path', type=str, default=None)
@click.option('--project-name', type=str, default=None)
def collect_generated(path: str, result_path: Optional[str], project_name: str):
    """Collect previously generated input csvs from PATH into a database."""
    output_path = _set_paths(result_path, project_name)
    collect_database(_user_db_name(), output_path,
                     protein_filename='{}/protein_input.csv'.format(path),
                     gene_filename='{}/gene_input.csv'.format(path),
                     complex_filename='{}/complex_input.csv'.format(path),
                     interaction_filename='{}/interaction_input.csv'.format(path),
                     data_path=output_path)
[ "cellphonedb.src.database.manager.DatabaseVersionManager.download_database", "click.argument", "click.option", "datetime.datetime.now", "cellphonedb.utils.utils.set_paths", "click.command", "cellphonedb.src.database.manager.DatabaseVersionManager.list_remote_database_versions", "cellphonedb.src.databa...
[((583, 607), 'click.command', 'click.command', (['"""collect"""'], {}), "('collect')\n", (596, 607), False, 'import click\n'), ((802, 897), 'click.option', 'click.option', (['"""--result-path"""'], {'default': '""""""', 'help': '"""output folder for the collected database"""'}), "('--result-path', default='', help=\n 'output folder for the collected database')\n", (814, 897), False, 'import click\n'), ((1053, 1078), 'click.command', 'click.command', (['"""download"""'], {}), "('download')\n", (1066, 1078), False, 'import click\n'), ((1080, 1133), 'click.option', 'click.option', (['"""--version"""'], {'type': 'str', 'default': '"""latest"""'}), "('--version', type=str, default='latest')\n", (1092, 1133), False, 'import click\n'), ((1219, 1247), 'click.command', 'click.command', (['"""list_remote"""'], {}), "('list_remote')\n", (1232, 1247), False, 'import click\n'), ((1329, 1356), 'click.command', 'click.command', (['"""list_local"""'], {}), "('list_local')\n", (1342, 1356), False, 'import click\n'), ((1436, 1461), 'click.command', 'click.command', (['"""generate"""'], {}), "('generate')\n", (1449, 1461), False, 'import click\n'), ((1841, 1895), 'click.option', 'click.option', (['"""--user-interactions-only"""'], {'is_flag': '(True)'}), "('--user-interactions-only', is_flag=True)\n", (1853, 1895), False, 'import click\n'), ((1897, 1934), 'click.option', 'click.option', (['"""--fetch"""'], {'is_flag': '(True)'}), "('--fetch', is_flag=True)\n", (1909, 1934), False, 'import click\n'), ((1936, 1989), 'click.option', 'click.option', (['"""--result-path"""'], {'type': 'str', 'default': 'None'}), "('--result-path', type=str, default=None)\n", (1948, 1989), False, 'import click\n'), ((1991, 2046), 'click.option', 'click.option', (['"""--log-file"""'], {'type': 'str', 'default': '"""log.txt"""'}), "('--log-file', type=str, default='log.txt')\n", (2003, 2046), False, 'import click\n'), ((2048, 2102), 'click.option', 'click.option', (['"""--project-name"""'], {'type': 'str', 
'default': 'None'}), "('--project-name', type=str, default=None)\n", (2060, 2102), False, 'import click\n'), ((4392, 4426), 'click.command', 'click.command', (['"""collect_generated"""'], {}), "('collect_generated')\n", (4405, 4426), False, 'import click\n'), ((4428, 4460), 'click.argument', 'click.argument', (['"""path"""'], {'type': 'str'}), "('path', type=str)\n", (4442, 4460), False, 'import click\n'), ((4462, 4515), 'click.option', 'click.option', (['"""--result-path"""'], {'type': 'str', 'default': 'None'}), "('--result-path', type=str, default=None)\n", (4474, 4515), False, 'import click\n'), ((4517, 4571), 'click.option', 'click.option', (['"""--project-name"""'], {'type': 'str', 'default': 'None'}), "('--project-name', type=str, default=None)\n", (4529, 4571), False, 'import click\n'), ((947, 981), 'cellphonedb.utils.utils.set_paths', 'set_paths', (['output_dir', 'result_path'], {}), '(output_dir, result_path)\n', (956, 981), False, 'from cellphonedb.utils.utils import set_paths\n'), ((987, 1049), 'cellphonedb.src.database.manager.DatabaseVersionManager.collect_database', 'DatabaseVersionManager.collect_database', (['database', 'output_path'], {}), '(database, output_path)\n', (1026, 1049), False, 'from cellphonedb.src.database.manager import DatabaseVersionManager\n'), ((1166, 1215), 'cellphonedb.src.database.manager.DatabaseVersionManager.download_database', 'DatabaseVersionManager.download_database', (['version'], {}), '(version)\n', (1206, 1215), False, 'from cellphonedb.src.database.manager import DatabaseVersionManager\n'), ((1271, 1325), 'cellphonedb.src.database.manager.DatabaseVersionManager.list_remote_database_versions', 'DatabaseVersionManager.list_remote_database_versions', ([], {}), '()\n', (1323, 1325), False, 'from cellphonedb.src.database.manager import DatabaseVersionManager\n'), ((1379, 1432), 'cellphonedb.src.database.manager.DatabaseVersionManager.list_local_database_versions', 'DatabaseVersionManager.list_local_database_versions', 
([], {}), '()\n', (1430, 1432), False, 'from cellphonedb.src.database.manager import DatabaseVersionManager\n'), ((3223, 3260), 'cellphonedb.src.api_endpoints.terminal_api.tools_terminal_api_endpoints.tools_terminal_commands._set_paths', '_set_paths', (['result_path', 'project_name'], {}), '(result_path, project_name)\n', (3233, 3260), False, 'from cellphonedb.src.api_endpoints.terminal_api.tools_terminal_api_endpoints.tools_terminal_commands import generate_proteins, generate_complex, _set_paths, generate_interactions, filter_all, generate_genes\n'), ((3282, 3332), 'os.path.join', 'os.path.join', (['output_path', '"""protein_generated.csv"""'], {}), "(output_path, 'protein_generated.csv')\n", (3294, 3332), False, 'import os\n'), ((3350, 3397), 'os.path.join', 'os.path.join', (['output_path', '"""gene_generated.csv"""'], {}), "(output_path, 'gene_generated.csv')\n", (3362, 3397), False, 'import os\n'), ((3417, 3467), 'os.path.join', 'os.path.join', (['output_path', '"""complex_generated.csv"""'], {}), "(output_path, 'complex_generated.csv')\n", (3429, 3467), False, 'import os\n'), ((4068, 4291), 'cellphonedb.src.database.manager.DatabaseVersionManager.collect_database', 'collect_database', (['db_name', 'output_path'], {'protein_filename': '"""protein_input.csv"""', 'gene_filename': '"""gene_input.csv"""', 'complex_filename': '"""complex_input.csv"""', 'interaction_filename': '"""interaction_input.csv"""', 'data_path': 'output_path'}), "(db_name, output_path, protein_filename='protein_input.csv',\n gene_filename='gene_input.csv', complex_filename='complex_input.csv',\n interaction_filename='interaction_input.csv', data_path=output_path)\n", (4084, 4291), False, 'from cellphonedb.src.database.manager.DatabaseVersionManager import collect_database\n'), ((4760, 4797), 'cellphonedb.src.api_endpoints.terminal_api.tools_terminal_api_endpoints.tools_terminal_commands._set_paths', '_set_paths', (['result_path', 'project_name'], {}), '(result_path, project_name)\n', (4770, 
4797), False, 'from cellphonedb.src.api_endpoints.terminal_api.tools_terminal_api_endpoints.tools_terminal_commands import generate_proteins, generate_complex, _set_paths, generate_interactions, filter_all, generate_genes\n'), ((1499, 1554), 'click.Path', 'click.Path', ([], {'file_okay': '(True)', 'exists': '(True)', 'dir_okay': '(False)'}), '(file_okay=True, exists=True, dir_okay=False)\n', (1509, 1554), False, 'import click\n'), ((1590, 1645), 'click.Path', 'click.Path', ([], {'file_okay': '(True)', 'exists': '(True)', 'dir_okay': '(False)'}), '(file_okay=True, exists=True, dir_okay=False)\n', (1600, 1645), False, 'import click\n'), ((1684, 1739), 'click.Path', 'click.Path', ([], {'file_okay': '(True)', 'exists': '(True)', 'dir_okay': '(False)'}), '(file_okay=True, exists=True, dir_okay=False)\n', (1694, 1739), False, 'import click\n'), ((1783, 1838), 'click.Path', 'click.Path', ([], {'file_okay': '(True)', 'exists': '(True)', 'dir_okay': '(False)'}), '(file_okay=True, exists=True, dir_okay=False)\n', (1793, 1838), False, 'import click\n'), ((4020, 4034), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4032, 4034), False, 'from datetime import datetime\n'), ((4699, 4713), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4711, 4713), False, 'from datetime import datetime\n'), ((676, 690), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (688, 690), False, 'from datetime import datetime\n')]
""" Implementation of the model. Parts of the code are inherited from the official CAAE implementation (https://arxiv.org/abs/1702.08423, https://github.com/ZZUTK/Face-Aging-CAAE). """ import os import sys import time from glob import glob import numpy as np import tensorflow as tf from scipy.io import loadmat, savemat from PK_Utils.PK_config import * from PK_Utils.PK_image_ops import * from PK_Utils.PK_subnetworks import encoder, generator, d_img, d_prior, d_em from PK_Utils.PK_vgg_face import face_embedding from metrics import concordance_cc class Model(object): """ Implementation of the model used. """ def __init__(self, session, useEmotion=False): self.useEmotion = useEmotion self.session = session self.vgg_weights = loadmat(vggMat) # -- INPUT PLACEHOLDERS ----------------------------------------------------------- # --------------------------------------------------------------------------------- self.input_image = tf.compat.v1.placeholder( tf.float32, [size_batch, size_image, size_image, 3], name='input_images' ) self.valence = tf.compat.v1.placeholder( tf.float32, [size_batch, 1], name='valence_labels' ) self.arousal = tf.compat.v1.placeholder( tf.float32, [size_batch, 1], name='arousal_labels' ) self.z_prior = tf.compat.v1.placeholder( tf.float32, [size_batch, num_z_channels], name='z_prior' ) # -- GRAPH ------------------------------------------------------------------------ # --------------------------------------------------------------------------------- print ('\n\t SETTING UP THE GRAPH') with tf.compat.v1.variable_scope(tf.compat.v1.get_variable_scope()): # with tf.device('/device:GPU:0'): with tf.device(device): # -- NETWORKS ------------------------------------------------------------- # ------------------------------------------------------------------------- # encoder: self.z = encoder(self.input_image) # generator: z + arousal + valence --> generated image self.G = generator(self.z, valence=self.valence, arousal=self.arousal) # Discriminator Z 
self.Dz, self.Dz_logits = d_prior(self.z) # Discriminator_Z on encoded image self.Dz_prior, self.Dz_prior_logits = d_prior(self.z_prior, reuse_variables=True) # Discriminator_Z on prior image #Discriminator Image self.Dimg_G, self.Dimg_G_logits = d_img(self.G, valence=self.valence, arousal=self.arousal) # discriminator on Generated # discriminator on input image self.Dimg_Original, self.Dimg_Original_logits = d_img(self.input_image, valence=self.valence, arousal=self.arousal, reuse_variables=True) # discriminator on original image # # discriminator on arousal/valence # # if self.useEmotion: self.D_emArousal, self.D_emValence, self.D_em_arousal_logits, self.D_em_valence_logits = d_em(self.z, reuse_variables=True) # self.D_emArousal_G, self.D_emValence_G, self.D_em_arousal_logits_G, self.D_em_valence_logits_G = d_em(self.G, reuse_variables=True) # -- LOSSES --------------------------------------------------------------- # ------------------------------------------------------------------------- # ---- VGG LOSS --------------------------------------------------------- # The computation of this loss is inherited from the official ExprGan implementation (https://arxiv.org/abs/1709.03842, https://github.com/HuiDingUMD/ExprGAN). real_conv1_2, real_conv2_2, real_conv3_2, real_conv4_2, real_conv5_2 = face_embedding(self.vgg_weights, self.input_image) fake_conv1_2, fake_conv2_2, fake_conv3_2, fake_conv4_2, fake_conv5_2 = face_embedding(self.vgg_weights, self.G) conv1_2_loss = tf.reduce_mean(tf.abs(real_conv1_2 - fake_conv1_2)) / 224. / 224. conv2_2_loss = tf.reduce_mean(tf.abs(real_conv2_2 - fake_conv2_2)) / 112. / 112. conv3_2_loss = tf.reduce_mean(tf.abs(real_conv3_2 - fake_conv3_2)) / 56. / 56. conv4_2_loss = tf.reduce_mean(tf.abs(real_conv4_2 - fake_conv4_2)) / 28. / 28. conv5_2_loss = tf.reduce_mean(tf.abs(real_conv5_2 - fake_conv5_2)) / 14. / 14. 
# ----------------------------------------------------------------------- # loss function of discriminator on z self.D_z_loss_z = tf.reduce_mean( tf.nn.sigmoid_cross_entropy_with_logits(logits= self.Dz_logits, labels=tf.ones_like(self.Dz_logits)) ) self.D_z_loss_prior = tf.reduce_mean( tf.nn.sigmoid_cross_entropy_with_logits(logits=1 - self.Dz_prior_logits, labels=tf.zeros_like(self.Dz_prior_logits)) ) # self.E_z_loss = tf.reduce_mean( # tf.nn.sigmoid_cross_entropy_with_logits(logits=self.D_z_logits, labels=tf.ones_like(self.D_z_logits)) # ) # loss function of discriminator on image self.D_img_loss_input = tf.reduce_mean( tf.nn.sigmoid_cross_entropy_with_logits(logits=self.Dimg_Original_logits, labels=tf.ones_like(self.Dimg_Original_logits)) ) self.D_img_loss_G = tf.reduce_mean( tf.nn.sigmoid_cross_entropy_with_logits(logits=1 - self.Dimg_G_logits, labels=tf.zeros_like(self.Dimg_G_logits)) ) if self.useEmotion: # loss function of discriminator on emotion self.D_em_arousal_loss = tf.compat.v1.losses.mean_squared_error(predictions=self.D_em_arousal_logits, labels=self.arousal) self.D_em_valence_loss = tf.compat.v1.losses.mean_squared_error(predictions=self.D_em_valence_logits, labels=self.valence) # self.G_img_loss = tf.reduce_mean( # tf.nn.sigmoid_cross_entropy_with_logits(logits=self.Dimg_G_logits, labels=tf.ones_like(self.Dimg_G_logits)) # ) # # loss function of d_em on arousal and valence values # self.D_em_loss = tf.compat.v1.losses.mean_squared_error(predictions=self.D_em_valence_logits, labels=self.valence) + tf.compat.v1.losses.mean_squared_error(self.D_em_arousal_logits, self.arousal) # # # #CCC for arousal and valence # self.D_em_ccc_arousal = concordance_cc(self.D_em_arousal_logits, self.arousal) # self.D_em_ccc_valence = concordance_cc(self.D_em_valence_logits, self.valence) # --------------------------------------------------------------------------------- # Losses # --------------------------------------------------------------------------------- # 
reconstruction loss of encoder+generator # self.loss_rec = tf.reduce_mean(tf.abs(self.input_image - self.G)) # L1 loss self.loss_rec = tf.reduce_mean(tf.abs(self.input_image - self.G)) # L1 loss self.loss_Iden = conv1_2_loss + conv2_2_loss + conv3_2_loss + conv4_2_loss + conv5_2_loss self.loss_Lz = self.D_z_loss_prior + self.D_z_loss_z self.loss_Di = self.D_img_loss_input + self.D_img_loss_G if self.useEmotion: self.loss_Dem = self.D_em_arousal_loss + self.D_em_valence_loss self.loss_Total = self.loss_rec + self.loss_Iden * 0.3 + self.loss_Lz * 0.01 + self.loss_Di * 0.01 + self.loss_Dem*0.001 else: self.loss_Total = self.loss_rec + self.loss_Iden * 0.3 + self.loss_Lz * 0.01 + self.loss_Di * 0.01 # self.loss_EG = self.EG_loss + self.D_em_loss * 0.02 + self.vgg_loss * 0.3 + 0.01 * self.E_z_loss + 0.01 * self.G_img_loss # -- TRAINABLE VARIABLES ---------------------------------------------------------- # --------------------------------------------------------------------------------- trainable_variables =tf.compat.v1.trainable_variables() # variables of encoder self.E_variables = [var for var in trainable_variables if 'E_' in var.name] # variables of generator self.G_variables = [var for var in trainable_variables if 'G_' in var.name] # variables of discriminator on prior self.D_z_variables = [var for var in trainable_variables if 'D_prior_' in var.name] # variables of discriminator on realImage self.D_img_variables = [var for var in trainable_variables if 'D_img_' in var.name] # # variables of discriminator on emotions # self.D_em_variables = [var for var in trainable_variables if 'D_em_' in var.name] # -- SUMMARY ---------------------------------------------------------------------- # --------------------------------------------------------------------------------- # with tf.device('/device:CPU:0'): # self.z_summary = tf.compat.v1.summary.histogram('z', self.z) # self.z_prior_summary = tf.compat.v1.summary.histogram('z_prior', self.z_prior) # self.EG_loss_summary = 
tf.summary.scalar('EG_loss', self.EG_loss) # self.D_z_loss_z_summary = tf.summary.scalar('D_z_loss_z', self.D_z_loss_z) # self.D_z_loss_prior_summary = tf.summary.scalar('D_z_loss_prior', self.D_z_loss_prior) # self.E_z_loss_summary = tf.summary.scalar('E_z_loss', self.E_z_loss) # self.D_z_logits_summary = tf.compat.v1.summary.histogram('D_z_logits', self.D_z_logits) # self.D_z_prior_logits_summary = tf.compat.v1.summary.histogram('D_z_prior_logits', self.D_z_prior_logits) # self.D_img_loss_input_summary = tf.summary.scalar('D_img_loss_input', self.D_img_loss_input) # self.D_img_loss_G_summary = tf.summary.scalar('D_img_loss_G', self.D_img_loss_G) # self.G_img_loss_summary = tf.summary.scalar('G_img_loss', self.G_img_loss) # self.D_G_logits_summary = tf.compat.v1.summary.histogram('D_G_logits', self.D_G_logits) # self.D_input_logits_summary = tf.compat.v1.summary.histogram('D_input_logits', self.D_input_logits) # self.D_em_arousal_logits_summary = tf.compat.v1.summary.histogram('D_em_arousal_logits', self.D_em_arousal_logits) # self.D_em_valence_logits_summary = tf.compat.v1.summary.histogram('D_em_valence_logits', # self.D_em_valence_logits) # self.D_em_loss_summary = tf.compat.v1.summary.histogram('D_em_loss', self.D_em_loss) # self.D_em_ccc_arousal_summary = tf.compat.v1.summary.histogram('D_em_ccc_arousal', self.D_em_ccc_arousal) # self.D_em_ccc_valence_summary = tf.compat.v1.summary.histogram('D_em_ccc_valence', self.D_em_ccc_valence) # self.vgg_loss_summary = tf.summary.scalar('VGG_loss', self.vgg_loss) # # for saving the graph and variables self.saver = tf.compat.v1.train.Saver(max_to_keep=10) def train(self, num_epochs=2, # number of epochs learning_rate=0.0002, # learning rate of optimizer beta1=0.5, # parameter for Adam optimizer decay_rate=1.0, # learning rate decay (0, 1], 1 means no decay use_trained_model=False, # used the saved checkpoint to initialize the model ): enable_shuffle = True # set learning rate decay with 
tf.compat.v1.variable_scope(tf.compat.v1.get_variable_scope()): with tf.device('/device:CPU:0'): self.EG_global_step = tf.Variable(0, trainable=False, name='global_step') # -- LOAD FILE NAMES -------------------------------------------------------------- # --------------------------------------------------------------------------------- # ---- TRAINING DATA file_names = [data_path + x for x in os.listdir(data_path)] file_names = self.fill_up_equally(file_names) size_data = len(file_names) np.random.shuffle(file_names) # ---- VALIDATION DATA self.validation_files = [validation_path + v for v in os.listdir(validation_path)] # -- OPTIMIZERS ------------------------------------------------------------------- # --------------------------------------------------------------------------------- with tf.device(device): # with tf.device('/device:GPU:0'): EG_learning_rate = tf.compat.v1.train.exponential_decay( learning_rate=learning_rate, global_step=self.EG_global_step, decay_steps=size_data / size_batch * 2, decay_rate=decay_rate, staircase=True ) # optimizer for encoder + generator self.EG_optimizer = tf.compat.v1.train.AdamOptimizer( learning_rate=EG_learning_rate, beta1=beta1 ).minimize( loss=self.loss_Total, global_step=self.EG_global_step, var_list=self.E_variables + self.G_variables ) # # optimizer for discriminator on z # self.D_z_optimizer = tf.compat.v1.train.AdamOptimizer( # learning_rate=EG_learning_rate, # beta1=beta1 # ).minimize( # loss=self.loss_Lz, # var_list=self.D_z_variables # ) # # # optimizer for discriminator on image # self.D_img_optimizer = tf.compat.v1.train.AdamOptimizer( # learning_rate=EG_learning_rate, # beta1=beta1 # ).minimize( # loss=self.loss_Di, # var_list=self.D_img_variables # ) # # optimizer for emotion # self.D_em_optimizer = tf.compat.v1.train.AdamOptimizer( # learning_rate=EG_learning_rate, # beta1=beta1 # ).minimize( # loss=self.D_em_loss, # var_list=self.D_em_variables # ) # # -- TENSORBOARD WRITER 
---------------------------------------------------------- # # --------------------------------------------------------------------------------- # self.writer = tf.summary.create_file_writer(save_dir) # -- TENSORBOARD SUMMARY ---------------------------------------------------------- # --------------------------------------------------------------------------------- # with tf.device('/device:CPU:0'): # self.EG_learning_rate_summary = tf.summary.scalar('EG_learning_rate', EG_learning_rate) # self.summary = tf.compat.v1.summary.merge([ # self.z_summary, self.z_prior_summary, # self.D_z_loss_z_summary, self.D_z_loss_prior_summary, # self.D_z_logits_summary, self.D_z_prior_logits_summary, # self.EG_loss_summary, self.E_z_loss_summary, # self.D_img_loss_input_summary, self.D_img_loss_G_summary, # self.G_img_loss_summary, self.EG_learning_rate_summary, # self.D_G_logits_summary, self.D_input_logits_summary, # self.vgg_loss_summary, self.D_em_arousal_logits_summary, self.D_em_valence_logits_summary, self.D_em_loss_summary, self.D_em_ccc_arousal_summary, self.D_em_ccc_valence_summary # ]) # self.writer = tf.summary.FileWriter(os.path.join(save_dir, 'summary'), self.session.graph) # ************* get some random samples as testing data to visualize the learning process ********************* sample_files = file_names[0:size_batch] file_names[0:size_batch] = [] sample = [load_image( image_path=sample_file, image_size=size_image, image_value_range=image_value_range, is_gray=False, ) for sample_file in sample_files] sample_images = np.array(sample).astype(np.float32) sample_label_arousal = np.asarray([[float(x.split('__')[2])] for x in sample_files]) sample_label_valence = np.asarray([[float(x.split('__')[3][0:-4])] for x in sample_files]) # ******************************************* training ******************************************************* print('\n\tPreparing for training ...') # initialize the graph tf.compat.v1.global_variables_initializer().run() # load check point 
if use_trained_model: if self.load_checkpoint(): print("\tSUCCESS ^_^") else: print("\tFAILED >_<!") # epoch iteration num_batches = len(file_names) // size_batch for epoch in range(num_epochs): if enable_shuffle: np.random.shuffle(file_names) for ind_batch in range(num_batches): start_time = time.time() # read batch images and labels batch_files = file_names[ind_batch*size_batch:(ind_batch+1)*size_batch] batch = [load_image( image_path=batch_file, image_size=size_image, image_value_range=image_value_range, is_gray=False, ) for batch_file in batch_files] batch_images = np.array(batch).astype(np.float32) batch_label_valence = np.asarray([[float(x.split('__')[2])] for x in batch_files]) batch_label_arousal = np.asarray([[float(x.split('__')[3][0:-4])] for x in batch_files]) # prior distribution on the prior of z batch_z_prior = np.random.uniform( image_value_range[0], image_value_range[-1], [size_batch, num_z_channels] ).astype(np.float32) # # update # _, _, _, EG_err, Ez_err, Dz_err, Dzp_err, Gi_err, DiG_err, Di_err, vgg, em, arousalCCC, valenceCCC = self.session.run( # fetches = [ # self.EG_optimizer, # self.D_z_optimizer, # self.D_img_optimizer, # self.EG_loss, # self.E_z_loss, # self.D_z_loss_z, # self.D_z_loss_prior, # self.G_img_loss, # self.D_img_loss_G, # self.D_img_loss_input, # # self.tv_loss, # self.vgg_loss, # self.D_em_loss, # self.D_em_ccc_arousal, # self.D_em_ccc_valence # ], # feed_dict={ # self.input_image: batch_images, # self.valence: batch_label_valence, # self.arousal: batch_label_arousal, # self.z_prior: batch_z_prior # } # ) # update # _, _, _, EG_err, Ez_err, Dz_err, Dzp_err, Gi_err, DiG_err, Di_err, vgg = self.session.run( # fetches=[ # self.EG_optimizer, # self.D_z_optimizer, # self.D_img_optimizer, # self.loss_rec, # self.E_z_loss, # self.D_z_loss_z, # self.D_z_loss_prior, # self.G_img_loss, # self.D_img_loss_G, # self.D_img_loss_input, # # self.tv_loss, # self.loss_Iden # ], # feed_dict={ # self.input_image: batch_images, # self.valence: 
batch_label_valence, # self.arousal: batch_label_arousal, # self.z_prior: batch_z_prior # } # ) # print("\nEpoch: [%3d/%3d] Batch: [%3d/%3d]\n\tEG_err=%.4f\tVGG=%.4f" % # (epoch + 1, num_epochs, ind_batch + 1, num_batches, EG_err, vgg)) # print("\tEz=%.4f\tDz=%.4f\tDzp=%.4f" % (Ez_err, Dz_err, Dzp_err)) # print("\tGi=%.4f\tDi=%.4f\tDiG=%.4f" % (Gi_err, Di_err, DiG_err)) # # # update if self.useEmotion: _, lossTotal, lossRec, lossIden, lossLz, lossLzPrior, lossLzOriginal, lossDimg, lossDimgInput, lossDimgGenerated, lossDem, lossDemArousal, lossDemValence = self.session.run( fetches=[ self.EG_optimizer, self.loss_Total, self.loss_rec, self.loss_Iden, self.loss_Lz, self.D_z_loss_prior, self.D_z_loss_z, self.loss_Di, self.D_img_loss_input, self.D_img_loss_G, self.loss_Dem, self.D_em_arousal_loss, self.D_em_valence_loss ], feed_dict={ self.input_image: batch_images, self.valence: batch_label_valence, self.arousal: batch_label_arousal, self.z_prior: batch_z_prior } ) print("\nEpoch: [%3d/%3d] Batch: [%3d/%3d]\n\tLoss_Total=%.4f" % (epoch + 1, num_epochs, ind_batch + 1, num_batches, lossTotal), file=open(save_dir+"Log.txt", "a")) print("\tL_rec=%.4f\tL_Iden=%.4f\tL_Z=%.4f\tL_Img=%.4f\tL_em=%.4f" % (lossRec, lossIden, lossLz, lossDimg,lossDem), file=open(save_dir+"Log.txt", "a")) print("\tL_Z_Prior=%.4f\tL_Z_original=%.4f" % (lossLzPrior, lossLzOriginal), file=open(save_dir+"Log.txt", "a")) print("\tL_Img_Input=%.4f\tL_Img_Generated=%.4f" % (lossDimgInput, lossDimgGenerated), file=open(save_dir+"Log.txt", "a")) print("\tL_Dem_Arousal=%.4f\tL_Dem_Valence=%.4f" % (lossDemArousal, lossDemValence), file=open(save_dir+"Log.txt", "a")) else: _, lossTotal, lossRec, lossIden, lossLz, lossLzPrior, lossLzOriginal, lossDimg, lossDimgInput, lossDimgGenerated = self.session.run( fetches=[ self.EG_optimizer, self.loss_Total, self.loss_rec, self.loss_Iden, self.loss_Lz, self.D_z_loss_prior, self.D_z_loss_z, self.loss_Di, self.D_img_loss_input, self.D_img_loss_G, ], feed_dict={ 
self.input_image: batch_images, self.valence: batch_label_valence, self.arousal: batch_label_arousal, self.z_prior: batch_z_prior } ) print("\nEpoch: [%3d/%3d] Batch: [%3d/%3d]\n\tLoss_Total=%.4f"% (epoch + 1, num_epochs, ind_batch + 1, num_batches, lossTotal), file=open(save_dir+"Log.txt", "a")) print("\tL_rec=%.4f\tL_Iden=%.4f\tL_Z=%.4f\tL_Img=%.4f" % (lossRec, lossIden, lossLz,lossDimg), file=open(save_dir+"Log.txt", "a")) print("\tL_Z_Prior=%.4f\tL_Z_original=%.4f" % (lossLzPrior, lossLzOriginal), file=open(save_dir+"Log.txt", "a")) print("\tL_Img_Input=%.4f\tL_Img_Generated=%.4f" % (lossDimgInput, lossDimgGenerated), file=open(save_dir+"Log.txt", "a")) # print("\nEpoch: [%3d/%3d] Batch: [%3d/%3d]\n\tEG_err=%.4f\tVGG=%.4f\tEm=%.4f" % # (epoch+1, num_epochs, ind_batch+1, num_batches, EG_err, vgg, em)) # print("\tArousalCCC=%.4f\tValenceCCC=%.4f" % (arousalCCC, valenceCCC)) # estimate left run time elapse = time.time() - start_time time_left = ((num_epochs - epoch - 1) * num_batches + (num_batches - ind_batch - 1)) * elapse print("\tTime left: %02d:%02d:%02d" % (int(time_left / 3600), int(time_left % 3600 / 60), time_left % 60)) # # add to summary # summary = self.summary.eval( # feed_dict={ # self.input_image: batch_images, # self.valence: batch_label_valence, # self.arousal: batch_label_arousal, # self.z_prior: batch_z_prior # } # ) # self.writer.add_summary(summary, self.EG_global_step.eval()) if ind_batch%500 == 0: # save sample images for each epoch name = '{:02d}_{:02d}'.format(epoch+1, ind_batch) self.sample(sample_images, sample_label_valence, sample_label_arousal, name+'.png') # TEST test_dir = os.path.join(save_dir, 'test') if not os.path.exists(test_dir): os.makedirs(test_dir) self.test(sample_images, test_dir, name+'.png') # save checkpoint for each epoch # VALIDATE name = '{:02d}_model'.format(epoch+1) self.validate(name) self.save_checkpoint(name=name) def save_checkpoint(self, name=''): checkpoint_dir = os.path.join(save_dir, 'checkpoint') if not 
os.path.exists(checkpoint_dir): os.makedirs(checkpoint_dir) self.saver.save( sess=self.session, save_path=os.path.join(checkpoint_dir, name) ) def load_checkpoint(self): print("\n\tLoading pre-trained model ...") checkpoint_dir = os.path.join(save_dir, 'checkpoint') checkpoints = tf.train.get_checkpoint_state(checkpoint_dir) if checkpoints and checkpoints.model_checkpoint_path: checkpoints_name = os.path.basename(checkpoints.model_checkpoint_path) self.saver.restore(self.session, os.path.join(checkpoint_dir, checkpoints_name)) return True else: return False def sample(self, images, valence, arousal, name): sample_dir = os.path.join(save_dir, 'samples') if not os.path.exists(sample_dir): os.makedirs(sample_dir) z, G = self.session.run( [self.z, self.G], feed_dict={ self.input_image: images, self.valence: valence, self.arousal: arousal } ) size_frame = int(np.sqrt(size_batch))+1 save_batch_images( batch_images=G, save_path=os.path.join(sample_dir, name), image_value_range=image_value_range, size_frame=[size_frame, size_frame] ) save_batch_images( batch_images=images, save_path=os.path.join(sample_dir, "input.png"), image_value_range=image_value_range, size_frame=[size_frame, size_frame] ) def validate(self, name): # Create Validation Directory if needed val_dir = os.path.join(save_dir, 'validation') if not os.path.exists(val_dir): os.makedirs(val_dir) # Create Name Directory if needed name_dir = os.path.join(val_dir, name) if not os.path.exists(name_dir): os.makedirs(name_dir) # validate testFile = self.validation_files[0:10] for image_path in testFile: n = image_path.split("/")[-1]+".png" self.test(np.array([load_image(image_path, image_size=96)]), name_dir, n) def test(self, images, test_dir, name): images = images[:1, :, :, :] # valence if size_batch == 25: valence = np.arange(0.75, -0.751, -0.375) valence = np.repeat(valence, 5).reshape((25, 1)) # valence = np.repeat(valence, 7, axis=0) # arousal arousal = [np.arange(0.75, -0.751, -0.375)] arousal = 
np.repeat(arousal, 5).reshape((25, 1)) arousal = np.asarray([item for sublist in arousal for item in sublist]).reshape((25, 1)) query_images = np.tile(images, (25, 1, 1, 1)) size_frame = (6,7) elif size_batch == 49: valence = np.arange(0.75, -0.751, -0.25) valence = np.repeat(valence, 7).reshape((49, 1)) # valence = np.repeat(valence, 7, axis=0) # arousal arousal = [np.arange(0.75, -0.751, -0.25)] arousal = np.repeat(arousal, 7).reshape((49, 1)) arousal = np.asarray([item for sublist in arousal for item in sublist]).reshape((49, 1)) query_images = np.tile(images, (49, 1, 1, 1)) size_frame = (8, 9) z, G = self.session.run( [self.z, self.G], feed_dict={ self.input_image: query_images, self.valence: valence, self.arousal: arousal } ) save_output( input_image=images, output=G, path=os.path.join(test_dir, name), image_value_range = image_value_range, size_frame = size_frame ) def fill_up_equally(self, X): # print ("Value:", X[0]) # print ("Value:", X[0].split("s")) # input("here") sorted_samples = [[x for x in X if int(x.split('__')[1]) == r] for r in range(8)] amounts = [len(x) for x in sorted_samples] differences = [max(amounts) - a for a in amounts] for i, d in enumerate(differences): samples = sorted_samples[i] added = [samples[x] for x in np.random.choice(range(len(samples)), d)] sorted_samples[i] = sorted_samples[i] + added sorted_samples_flat = [item for sublist in sorted_samples for item in sublist] np.random.seed = 1234567 np.random.shuffle(sorted_samples_flat) return sorted_samples_flat class Logger(object): def __init__(self, output_file): self.terminal = sys.stdout self.log = open(output_file, "a") def write(self, message): self.terminal.write(message) if not self.log.closed: self.log.write(message) def close(self): self.log.close() def flush(self): self.close() # needed for python 3 compatibility pass
[ "tensorflow.compat.v1.losses.mean_squared_error", "scipy.io.loadmat", "tensorflow.zeros_like", "tensorflow.compat.v1.get_variable_scope", "PK_Utils.PK_subnetworks.generator", "tensorflow.compat.v1.train.exponential_decay", "PK_Utils.PK_vgg_face.face_embedding", "numpy.arange", "numpy.tile", "tenso...
[((779, 794), 'scipy.io.loadmat', 'loadmat', (['vggMat'], {}), '(vggMat)\n', (786, 794), False, 'from scipy.io import loadmat, savemat\n'), ((1015, 1118), 'tensorflow.compat.v1.placeholder', 'tf.compat.v1.placeholder', (['tf.float32', '[size_batch, size_image, size_image, 3]'], {'name': '"""input_images"""'}), "(tf.float32, [size_batch, size_image, size_image, 3\n ], name='input_images')\n", (1039, 1118), True, 'import tensorflow as tf\n'), ((1184, 1260), 'tensorflow.compat.v1.placeholder', 'tf.compat.v1.placeholder', (['tf.float32', '[size_batch, 1]'], {'name': '"""valence_labels"""'}), "(tf.float32, [size_batch, 1], name='valence_labels')\n", (1208, 1260), True, 'import tensorflow as tf\n'), ((1339, 1415), 'tensorflow.compat.v1.placeholder', 'tf.compat.v1.placeholder', (['tf.float32', '[size_batch, 1]'], {'name': '"""arousal_labels"""'}), "(tf.float32, [size_batch, 1], name='arousal_labels')\n", (1363, 1415), True, 'import tensorflow as tf\n'), ((1486, 1573), 'tensorflow.compat.v1.placeholder', 'tf.compat.v1.placeholder', (['tf.float32', '[size_batch, num_z_channels]'], {'name': '"""z_prior"""'}), "(tf.float32, [size_batch, num_z_channels], name=\n 'z_prior')\n", (1510, 1573), True, 'import tensorflow as tf\n'), ((13121, 13150), 'numpy.random.shuffle', 'np.random.shuffle', (['file_names'], {}), '(file_names)\n', (13138, 13150), True, 'import numpy as np\n'), ((27272, 27308), 'os.path.join', 'os.path.join', (['save_dir', '"""checkpoint"""'], {}), "(save_dir, 'checkpoint')\n", (27284, 27308), False, 'import os\n'), ((27627, 27663), 'os.path.join', 'os.path.join', (['save_dir', '"""checkpoint"""'], {}), "(save_dir, 'checkpoint')\n", (27639, 27663), False, 'import os\n'), ((27686, 27731), 'tensorflow.train.get_checkpoint_state', 'tf.train.get_checkpoint_state', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (27715, 27731), True, 'import tensorflow as tf\n'), ((28109, 28142), 'os.path.join', 'os.path.join', (['save_dir', '"""samples"""'], {}), "(save_dir, 
'samples')\n", (28121, 28142), False, 'import os\n'), ((29044, 29080), 'os.path.join', 'os.path.join', (['save_dir', '"""validation"""'], {}), "(save_dir, 'validation')\n", (29056, 29080), False, 'import os\n'), ((29215, 29242), 'os.path.join', 'os.path.join', (['val_dir', 'name'], {}), '(val_dir, name)\n', (29227, 29242), False, 'import os\n'), ((31849, 31887), 'numpy.random.shuffle', 'np.random.shuffle', (['sorted_samples_flat'], {}), '(sorted_samples_flat)\n', (31866, 31887), True, 'import numpy as np\n'), ((8953, 8987), 'tensorflow.compat.v1.trainable_variables', 'tf.compat.v1.trainable_variables', ([], {}), '()\n', (8985, 8987), True, 'import tensorflow as tf\n'), ((12022, 12062), 'tensorflow.compat.v1.train.Saver', 'tf.compat.v1.train.Saver', ([], {'max_to_keep': '(10)'}), '(max_to_keep=10)\n', (12046, 12062), True, 'import tensorflow as tf\n'), ((13473, 13490), 'tensorflow.device', 'tf.device', (['device'], {}), '(device)\n', (13482, 13490), True, 'import tensorflow as tf\n'), ((13583, 13768), 'tensorflow.compat.v1.train.exponential_decay', 'tf.compat.v1.train.exponential_decay', ([], {'learning_rate': 'learning_rate', 'global_step': 'self.EG_global_step', 'decay_steps': '(size_data / size_batch * 2)', 'decay_rate': 'decay_rate', 'staircase': '(True)'}), '(learning_rate=learning_rate,\n global_step=self.EG_global_step, decay_steps=size_data / size_batch * 2,\n decay_rate=decay_rate, staircase=True)\n', (13619, 13768), True, 'import tensorflow as tf\n'), ((27324, 27354), 'os.path.exists', 'os.path.exists', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (27338, 27354), False, 'import os\n'), ((27368, 27395), 'os.makedirs', 'os.makedirs', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (27379, 27395), False, 'import os\n'), ((27825, 27876), 'os.path.basename', 'os.path.basename', (['checkpoints.model_checkpoint_path'], {}), '(checkpoints.model_checkpoint_path)\n', (27841, 27876), False, 'import os\n'), ((28158, 28184), 'os.path.exists', 'os.path.exists', 
(['sample_dir'], {}), '(sample_dir)\n', (28172, 28184), False, 'import os\n'), ((28198, 28221), 'os.makedirs', 'os.makedirs', (['sample_dir'], {}), '(sample_dir)\n', (28209, 28221), False, 'import os\n'), ((29096, 29119), 'os.path.exists', 'os.path.exists', (['val_dir'], {}), '(val_dir)\n', (29110, 29119), False, 'import os\n'), ((29133, 29153), 'os.makedirs', 'os.makedirs', (['val_dir'], {}), '(val_dir)\n', (29144, 29153), False, 'import os\n'), ((29258, 29282), 'os.path.exists', 'os.path.exists', (['name_dir'], {}), '(name_dir)\n', (29272, 29282), False, 'import os\n'), ((29296, 29317), 'os.makedirs', 'os.makedirs', (['name_dir'], {}), '(name_dir)\n', (29307, 29317), False, 'import os\n'), ((29708, 29739), 'numpy.arange', 'np.arange', (['(0.75)', '(-0.751)', '(-0.375)'], {}), '(0.75, -0.751, -0.375)\n', (29717, 29739), True, 'import numpy as np\n'), ((30122, 30152), 'numpy.tile', 'np.tile', (['images', '(25, 1, 1, 1)'], {}), '(images, (25, 1, 1, 1))\n', (30129, 30152), True, 'import numpy as np\n'), ((1887, 1920), 'tensorflow.compat.v1.get_variable_scope', 'tf.compat.v1.get_variable_scope', ([], {}), '()\n', (1918, 1920), True, 'import tensorflow as tf\n'), ((1987, 2004), 'tensorflow.device', 'tf.device', (['device'], {}), '(device)\n', (1996, 2004), True, 'import tensorflow as tf\n'), ((2244, 2269), 'PK_Utils.PK_subnetworks.encoder', 'encoder', (['self.input_image'], {}), '(self.input_image)\n', (2251, 2269), False, 'from PK_Utils.PK_subnetworks import encoder, generator, d_img, d_prior, d_em\n'), ((2367, 2428), 'PK_Utils.PK_subnetworks.generator', 'generator', (['self.z'], {'valence': 'self.valence', 'arousal': 'self.arousal'}), '(self.z, valence=self.valence, arousal=self.arousal)\n', (2376, 2428), False, 'from PK_Utils.PK_subnetworks import encoder, generator, d_img, d_prior, d_em\n'), ((2576, 2591), 'PK_Utils.PK_subnetworks.d_prior', 'd_prior', (['self.z'], {}), '(self.z)\n', (2583, 2591), False, 'from PK_Utils.PK_subnetworks import encoder, generator, 
d_img, d_prior, d_em\n'), ((2681, 2724), 'PK_Utils.PK_subnetworks.d_prior', 'd_prior', (['self.z_prior'], {'reuse_variables': '(True)'}), '(self.z_prior, reuse_variables=True)\n', (2688, 2724), False, 'from PK_Utils.PK_subnetworks import encoder, generator, d_img, d_prior, d_em\n'), ((2908, 2965), 'PK_Utils.PK_subnetworks.d_img', 'd_img', (['self.G'], {'valence': 'self.valence', 'arousal': 'self.arousal'}), '(self.G, valence=self.valence, arousal=self.arousal)\n', (2913, 2965), False, 'from PK_Utils.PK_subnetworks import encoder, generator, d_img, d_prior, d_em\n'), ((3221, 3314), 'PK_Utils.PK_subnetworks.d_img', 'd_img', (['self.input_image'], {'valence': 'self.valence', 'arousal': 'self.arousal', 'reuse_variables': '(True)'}), '(self.input_image, valence=self.valence, arousal=self.arousal,\n reuse_variables=True)\n', (3226, 3314), False, 'from PK_Utils.PK_subnetworks import encoder, generator, d_img, d_prior, d_em\n'), ((4503, 4553), 'PK_Utils.PK_vgg_face.face_embedding', 'face_embedding', (['self.vgg_weights', 'self.input_image'], {}), '(self.vgg_weights, self.input_image)\n', (4517, 4553), False, 'from PK_Utils.PK_vgg_face import face_embedding\n'), ((4641, 4681), 'PK_Utils.PK_vgg_face.face_embedding', 'face_embedding', (['self.vgg_weights', 'self.G'], {}), '(self.vgg_weights, self.G)\n', (4655, 4681), False, 'from PK_Utils.PK_vgg_face import face_embedding\n'), ((6482, 6583), 'tensorflow.compat.v1.losses.mean_squared_error', 'tf.compat.v1.losses.mean_squared_error', ([], {'predictions': 'self.D_em_arousal_logits', 'labels': 'self.arousal'}), '(predictions=self.D_em_arousal_logits,\n labels=self.arousal)\n', (6520, 6583), True, 'import tensorflow as tf\n'), ((6623, 6724), 'tensorflow.compat.v1.losses.mean_squared_error', 'tf.compat.v1.losses.mean_squared_error', ([], {'predictions': 'self.D_em_valence_logits', 'labels': 'self.valence'}), '(predictions=self.D_em_valence_logits,\n labels=self.valence)\n', (6661, 6724), True, 'import tensorflow as tf\n'), ((7920, 
7953), 'tensorflow.abs', 'tf.abs', (['(self.input_image - self.G)'], {}), '(self.input_image - self.G)\n', (7926, 7953), True, 'import tensorflow as tf\n'), ((12549, 12582), 'tensorflow.compat.v1.get_variable_scope', 'tf.compat.v1.get_variable_scope', ([], {}), '()\n', (12580, 12582), True, 'import tensorflow as tf\n'), ((12602, 12628), 'tensorflow.device', 'tf.device', (['"""/device:CPU:0"""'], {}), "('/device:CPU:0')\n", (12611, 12628), True, 'import tensorflow as tf\n'), ((12668, 12719), 'tensorflow.Variable', 'tf.Variable', (['(0)'], {'trainable': '(False)', 'name': '"""global_step"""'}), "(0, trainable=False, name='global_step')\n", (12679, 12719), True, 'import tensorflow as tf\n'), ((13000, 13021), 'os.listdir', 'os.listdir', (['data_path'], {}), '(data_path)\n', (13010, 13021), False, 'import os\n'), ((13245, 13272), 'os.listdir', 'os.listdir', (['validation_path'], {}), '(validation_path)\n', (13255, 13272), False, 'import os\n'), ((17117, 17133), 'numpy.array', 'np.array', (['sample'], {}), '(sample)\n', (17125, 17133), True, 'import numpy as np\n'), ((17555, 17598), 'tensorflow.compat.v1.global_variables_initializer', 'tf.compat.v1.global_variables_initializer', ([], {}), '()\n', (17596, 17598), True, 'import tensorflow as tf\n'), ((17964, 17993), 'numpy.random.shuffle', 'np.random.shuffle', (['file_names'], {}), '(file_names)\n', (17981, 17993), True, 'import numpy as np\n'), ((18072, 18083), 'time.time', 'time.time', ([], {}), '()\n', (18081, 18083), False, 'import time\n'), ((27474, 27508), 'os.path.join', 'os.path.join', (['checkpoint_dir', 'name'], {}), '(checkpoint_dir, name)\n', (27486, 27508), False, 'import os\n'), ((27922, 27968), 'os.path.join', 'os.path.join', (['checkpoint_dir', 'checkpoints_name'], {}), '(checkpoint_dir, checkpoints_name)\n', (27934, 27968), False, 'import os\n'), ((28479, 28498), 'numpy.sqrt', 'np.sqrt', (['size_batch'], {}), '(size_batch)\n', (28486, 28498), True, 'import numpy as np\n'), ((28579, 28609), 'os.path.join', 
'os.path.join', (['sample_dir', 'name'], {}), '(sample_dir, name)\n', (28591, 28609), False, 'import os\n'), ((28801, 28838), 'os.path.join', 'os.path.join', (['sample_dir', '"""input.png"""'], {}), "(sample_dir, 'input.png')\n", (28813, 28838), False, 'import os\n'), ((29900, 29931), 'numpy.arange', 'np.arange', (['(0.75)', '(-0.751)', '(-0.375)'], {}), '(0.75, -0.751, -0.375)\n', (29909, 29931), True, 'import numpy as np\n'), ((30237, 30267), 'numpy.arange', 'np.arange', (['(0.75)', '(-0.751)', '(-0.25)'], {}), '(0.75, -0.751, -0.25)\n', (30246, 30267), True, 'import numpy as np\n'), ((30649, 30679), 'numpy.tile', 'np.tile', (['images', '(49, 1, 1, 1)'], {}), '(images, (49, 1, 1, 1))\n', (30656, 30679), True, 'import numpy as np\n'), ((31041, 31069), 'os.path.join', 'os.path.join', (['test_dir', 'name'], {}), '(test_dir, name)\n', (31053, 31069), False, 'import os\n'), ((3776, 3810), 'PK_Utils.PK_subnetworks.d_em', 'd_em', (['self.z'], {'reuse_variables': '(True)'}), '(self.z, reuse_variables=True)\n', (3780, 3810), False, 'from PK_Utils.PK_subnetworks import encoder, generator, d_img, d_prior, d_em\n'), ((13936, 14013), 'tensorflow.compat.v1.train.AdamOptimizer', 'tf.compat.v1.train.AdamOptimizer', ([], {'learning_rate': 'EG_learning_rate', 'beta1': 'beta1'}), '(learning_rate=EG_learning_rate, beta1=beta1)\n', (13968, 14013), True, 'import tensorflow as tf\n'), ((25690, 25701), 'time.time', 'time.time', ([], {}), '()\n', (25699, 25701), False, 'import time\n'), ((26772, 26802), 'os.path.join', 'os.path.join', (['save_dir', '"""test"""'], {}), "(save_dir, 'test')\n", (26784, 26802), False, 'import os\n'), ((29762, 29783), 'numpy.repeat', 'np.repeat', (['valence', '(5)'], {}), '(valence, 5)\n', (29771, 29783), True, 'import numpy as np\n'), ((29955, 29976), 'numpy.repeat', 'np.repeat', (['arousal', '(5)'], {}), '(arousal, 5)\n', (29964, 29976), True, 'import numpy as np\n'), ((30016, 30077), 'numpy.asarray', 'np.asarray', (['[item for sublist in arousal for item 
in sublist]'], {}), '([item for sublist in arousal for item in sublist])\n', (30026, 30077), True, 'import numpy as np\n'), ((30428, 30458), 'numpy.arange', 'np.arange', (['(0.75)', '(-0.751)', '(-0.25)'], {}), '(0.75, -0.751, -0.25)\n', (30437, 30458), True, 'import numpy as np\n'), ((4725, 4760), 'tensorflow.abs', 'tf.abs', (['(real_conv1_2 - fake_conv1_2)'], {}), '(real_conv1_2 - fake_conv1_2)\n', (4731, 4760), True, 'import tensorflow as tf\n'), ((4818, 4853), 'tensorflow.abs', 'tf.abs', (['(real_conv2_2 - fake_conv2_2)'], {}), '(real_conv2_2 - fake_conv2_2)\n', (4824, 4853), True, 'import tensorflow as tf\n'), ((4911, 4946), 'tensorflow.abs', 'tf.abs', (['(real_conv3_2 - fake_conv3_2)'], {}), '(real_conv3_2 - fake_conv3_2)\n', (4917, 4946), True, 'import tensorflow as tf\n'), ((5002, 5037), 'tensorflow.abs', 'tf.abs', (['(real_conv4_2 - fake_conv4_2)'], {}), '(real_conv4_2 - fake_conv4_2)\n', (5008, 5037), True, 'import tensorflow as tf\n'), ((5093, 5128), 'tensorflow.abs', 'tf.abs', (['(real_conv5_2 - fake_conv5_2)'], {}), '(real_conv5_2 - fake_conv5_2)\n', (5099, 5128), True, 'import tensorflow as tf\n'), ((5415, 5443), 'tensorflow.ones_like', 'tf.ones_like', (['self.Dz_logits'], {}), '(self.Dz_logits)\n', (5427, 5443), True, 'import tensorflow as tf\n'), ((5662, 5697), 'tensorflow.zeros_like', 'tf.zeros_like', (['self.Dz_prior_logits'], {}), '(self.Dz_prior_logits)\n', (5675, 5697), True, 'import tensorflow as tf\n'), ((6101, 6140), 'tensorflow.ones_like', 'tf.ones_like', (['self.Dimg_Original_logits'], {}), '(self.Dimg_Original_logits)\n', (6113, 6140), True, 'import tensorflow as tf\n'), ((6298, 6331), 'tensorflow.zeros_like', 'tf.zeros_like', (['self.Dimg_G_logits'], {}), '(self.Dimg_G_logits)\n', (6311, 6331), True, 'import tensorflow as tf\n'), ((18515, 18530), 'numpy.array', 'np.array', (['batch'], {}), '(batch)\n', (18523, 18530), True, 'import numpy as np\n'), ((18843, 18939), 'numpy.random.uniform', 'np.random.uniform', (['image_value_range[0]', 
'image_value_range[-1]', '[size_batch, num_z_channels]'], {}), '(image_value_range[0], image_value_range[-1], [size_batch,\n num_z_channels])\n', (18860, 18939), True, 'import numpy as np\n'), ((26830, 26854), 'os.path.exists', 'os.path.exists', (['test_dir'], {}), '(test_dir)\n', (26844, 26854), False, 'import os\n'), ((26880, 26901), 'os.makedirs', 'os.makedirs', (['test_dir'], {}), '(test_dir)\n', (26891, 26901), False, 'import os\n'), ((30290, 30311), 'numpy.repeat', 'np.repeat', (['valence', '(7)'], {}), '(valence, 7)\n', (30299, 30311), True, 'import numpy as np\n'), ((30482, 30503), 'numpy.repeat', 'np.repeat', (['arousal', '(7)'], {}), '(arousal, 7)\n', (30491, 30503), True, 'import numpy as np\n'), ((30543, 30604), 'numpy.asarray', 'np.asarray', (['[item for sublist in arousal for item in sublist]'], {}), '([item for sublist in arousal for item in sublist])\n', (30553, 30604), True, 'import numpy as np\n')]
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This module contains functions used to fix an OSM file.

Attributes:
    postcode_re (:re obj:): Regex compiled to match USA postal codes.
"""
# BUG FIX: xml.etree.cElementTree was deprecated in Python 3.3 and removed in
# 3.9; xml.etree.ElementTree provides the same C-accelerated implementation.
import xml.etree.ElementTree as ET
import pprint
import re

# postcode regex https://stackoverflow.com/questions/7425860/regular-expression-get-us-zip-code
postcode_re = re.compile(r'.*(\d{5}(\-\d{4})?)$', re.IGNORECASE)


# Since only 'US' exists in WAY tags, 'US' was defined as the standard.
def fix_country(tag):
    """Normalize the country value of a ``tag`` element to 'US'.

    Args:
        tag (:obj:): ElementTree element whose ``v`` attribute holds a country.
    """
    fix_dict = {'USA': 'US'}
    if tag.get('v') in fix_dict:
        tag.attrib['v'] = fix_dict[tag.get('v')]


# The standard is 'Florida' (spelled out) or the 'FL' abbreviation.
def fix_state(tag):
    """Normalize the state value of a ``tag`` element to 'Florida'.

    Args:
        tag (:obj:): ElementTree element whose ``v`` attribute holds a state.
    """
    fix_dict = {'florida': 'Florida',
                'F': 'Florida',
                'fl': 'Florida',
                'Fl': 'Florida',
                'FL.': 'Florida'}
    if tag.get('v') in fix_dict:
        tag.attrib['v'] = fix_dict[tag.get('v')]


# http://mentalfloss.com/article/53384/what%E2%80%99s-deal-those-last-4-digits-zip-codes
def fix_postcode(tag):
    """Keep only the trailing ZIP (or ZIP+4) part of the postcode value.

    If the value does not end in a US postal code, the whole ``tag`` element
    is cleared (attributes and children removed), discarding the bad data.

    Args:
        tag (:obj:): ElementTree element whose ``v`` attribute holds a postcode.
    """
    postal_code = postcode_re.search(tag.get('v'))
    if postal_code:
        # groups()[0] is the 5-digit ZIP plus the optional '-NNNN' extension.
        tag.attrib['v'] = postal_code.groups()[0]
    else:
        tag.clear()


def fix_node(element):
    """Fix the address sub-tags of a NODE element in place.

    Args:
        element (:obj:): cElementTree object. In this case, a NODE tag.
    """
    for tag in element.iter("tag"):
        if tag.get('k') == 'addr:country':
            fix_country(tag)
        elif tag.get('k') == 'addr:state':
            fix_state(tag)
        elif tag.get('k') == 'addr:postcode':
            fix_postcode(tag)


def fix_way(element):
    """Fix the address sub-tags of a WAY element in place.

    Args:
        element (:obj:): cElementTree object. In this case, a WAY tag.
    """
    for tag in element.iter("tag"):
        if tag.get('k') == 'addr:country':
            fix_country(tag)
        elif tag.get('k') == 'addr:state':
            fix_state(tag)
        elif tag.get('k') == 'addr:postcode':
            fix_postcode(tag)


def fix_data(osm):
    """Main function used to fix the OSM data.

    This function calls all the specific fix functions on every NODE and WAY
    element of the file and saves the result to 'miami_florida_v1.osm'.

    Args:
        osm (string): Path to the osm file.
    """
    NODE_TAG = 'node'
    WAY_TAG = 'way'
    # BUG FIX: with events=("start",) an element's children have not been
    # parsed yet when the event fires, so elem.iter("tag") found nothing and
    # no fix was ever applied. The "end" event fires once the element and all
    # of its children are complete.
    context = ET.iterparse(osm, events=("end",))
    for event, elem in context:
        if elem.tag == NODE_TAG:
            fix_node(elem)
        if elem.tag == WAY_TAG:
            fix_way(elem)
    # iterparse exposes the document root once parsing has finished.
    ET.ElementTree(context.root).write('miami_florida_v1.osm')
[ "xml.etree.cElementTree.iterparse", "xml.etree.cElementTree.ElementTree", "re.compile" ]
[((417, 469), 're.compile', 're.compile', (['""".*(\\\\d{5}(\\\\-\\\\d{4})?)$"""', 're.IGNORECASE'], {}), "('.*(\\\\d{5}(\\\\-\\\\d{4})?)$', re.IGNORECASE)\n", (427, 469), False, 'import re\n'), ((2351, 2387), 'xml.etree.cElementTree.iterparse', 'ET.iterparse', (['osm'], {'events': "('start',)"}), "(osm, events=('start',))\n", (2363, 2387), True, 'import xml.etree.cElementTree as ET\n'), ((2568, 2596), 'xml.etree.cElementTree.ElementTree', 'ET.ElementTree', (['context.root'], {}), '(context.root)\n', (2582, 2596), True, 'import xml.etree.cElementTree as ET\n')]
""" ================================== Plotting two simple sine functions ================================== A simple example plotting a fit of two sine functions. """ import numpy import matplotlib.pyplot as plt from pyearth import Earth # Create some fake data numpy.random.seed(2) m = 10000 n = 10 X = 80 * numpy.random.uniform(size=(m, n)) - 40 y1 = 100 * \ numpy.abs(numpy.sin((X[:, 6]) / 10) - 4.0) + \ 10 * numpy.random.normal(size=m) y2 = 100 * \ numpy.abs(numpy.sin((X[:, 6]) / 2) - 8.0) + \ 5 * numpy.random.normal(size=m) # Fit an Earth model model = Earth(max_degree=3, minspan_alpha=.5) y_mix = numpy.concatenate((y1[:, numpy.newaxis], y2[:, numpy.newaxis]), axis=1) model.fit(X, y_mix) # Print the model print(model.trace()) print(model.summary()) # Plot the model y_hat = model.predict(X) fig = plt.figure() ax = fig.add_subplot(1, 2, 1) ax.plot(X[:, 6], y_mix[:, 0], 'r.') ax.plot(X[:, 6], model.predict(X)[:, 0], 'b.') ax = fig.add_subplot(1, 2, 2) ax.plot(X[:, 6], y_mix[:, 1], 'r.') ax.plot(X[:, 6], model.predict(X)[:, 1], 'b.') plt.show()
[ "numpy.random.uniform", "numpy.random.seed", "matplotlib.pyplot.show", "pyearth.Earth", "matplotlib.pyplot.figure", "numpy.sin", "numpy.random.normal", "numpy.concatenate" ]
[((266, 286), 'numpy.random.seed', 'numpy.random.seed', (['(2)'], {}), '(2)\n', (283, 286), False, 'import numpy\n'), ((583, 621), 'pyearth.Earth', 'Earth', ([], {'max_degree': '(3)', 'minspan_alpha': '(0.5)'}), '(max_degree=3, minspan_alpha=0.5)\n', (588, 621), False, 'from pyearth import Earth\n'), ((629, 700), 'numpy.concatenate', 'numpy.concatenate', (['(y1[:, numpy.newaxis], y2[:, numpy.newaxis])'], {'axis': '(1)'}), '((y1[:, numpy.newaxis], y2[:, numpy.newaxis]), axis=1)\n', (646, 700), False, 'import numpy\n'), ((834, 846), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (844, 846), True, 'import matplotlib.pyplot as plt\n'), ((1075, 1085), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1083, 1085), True, 'import matplotlib.pyplot as plt\n'), ((313, 346), 'numpy.random.uniform', 'numpy.random.uniform', ([], {'size': '(m, n)'}), '(size=(m, n))\n', (333, 346), False, 'import numpy\n'), ((425, 452), 'numpy.random.normal', 'numpy.random.normal', ([], {'size': 'm'}), '(size=m)\n', (444, 452), False, 'import numpy\n'), ((525, 552), 'numpy.random.normal', 'numpy.random.normal', ([], {'size': 'm'}), '(size=m)\n', (544, 552), False, 'import numpy\n'), ((379, 402), 'numpy.sin', 'numpy.sin', (['(X[:, 6] / 10)'], {}), '(X[:, 6] / 10)\n', (388, 402), False, 'import numpy\n'), ((481, 503), 'numpy.sin', 'numpy.sin', (['(X[:, 6] / 2)'], {}), '(X[:, 6] / 2)\n', (490, 503), False, 'import numpy\n')]
#!/usr/bin/env python
"""Setuptools install script for px.

Generates px/version.py from `git describe` before calling setup().
"""

import os
import re
import shutil
import filecmp
import tempfile
import subprocess

from setuptools import setup

# File that receives the auto-generated VERSION constant.
VERSIONFILE = "px/version.py"

# Version string straight from git, e.g. "1.2.3" or "1.2.3-4-gabcdef-dirty".
git_version = (
    subprocess.check_output(["git", "describe", "--dirty"]).decode("utf-8").strip()
)

# Write the candidate version file to a temp file first, so VERSIONFILE is
# only touched (and its mtime bumped) when its contents actually change.
with tempfile.NamedTemporaryFile(suffix=".py", delete=False) as tmp:
    tmp.write(b"# NOTE: Auto generated by setup.py, no touchie!\n")
    tmp.write(b'VERSION = "%s"\n' % bytearray(git_version, "utf_8"))

    # Flushing is required for filecmp.cmp() to work (below)
    tmp.flush()

    if not os.path.isfile(VERSIONFILE):
        # No version file found
        shutil.move(tmp.name, VERSIONFILE)
    elif not filecmp.cmp(tmp.name, VERSIONFILE):
        # Version file needs updating
        shutil.move(tmp.name, VERSIONFILE)
    else:
        # VERSIONFILE was already up to date. If we touch it in this
        # case, it will have its file timestamp updated, which will
        # force the slow px_integration_test.py tests to get rerun.
        #
        # Just clean up our tempfile and be merry.
        os.remove(tmp.name)

# Runtime dependencies, one requirement per line.
requirements = None
with open("requirements.txt") as reqsfile:
    requirements = reqsfile.readlines()

# Long description for PyPI comes from the README.
with open(os.path.join(os.path.dirname(__file__), "README.rst")) as fp:
    LONG_DESCRIPTION = fp.read()

if not re.match(r"^[0-9]+\.[0-9]+\.[0-9]+$", git_version):
    # Setuptools wants nice version numbers
    git_version = "0.0.0"

setup(
    name="pxpx",
    version=git_version,
    description="ps and top for Human Beings",
    long_description=LONG_DESCRIPTION,
    author="<NAME>",
    author_email="<EMAIL>",
    url="https://github.com/walles/px",
    license="MIT",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Environment :: Console",
        "Intended Audience :: System Administrators",
        "License :: OSI Approved :: MIT License",
        "Operating System :: MacOS",
        "Operating System :: POSIX :: Linux",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 3",
        "Topic :: System :: Monitoring",
        "Topic :: System :: Systems Administration",
        "Topic :: Utilities",
    ],
    packages=["px"],
    install_requires=requirements,
    # See: http://setuptools.readthedocs.io/en/latest/setuptools.html#setting-the-zip-safe-flag
    zip_safe=True,
    setup_requires=[
        "pytest-runner",
    ],
    tests_require=[
        "pytest",
    ],
    entry_points={
        "console_scripts": ["px = px.px:main", "ptop = px.px:main"],
    }
    # Note that we're by design *not* installing man pages here.
    # Using "data_files=" only puts the man pages in the egg file,
    # and installing that egg doesn't put them on the destination
    # system.
    #
    # After trying to figure this out for a bit, my conclusion is
    # that "pip install" simply isn't meant for installing any man
    # pages.
    #
    # /<EMAIL> 2018aug27
)
[ "tempfile.NamedTemporaryFile", "os.remove", "setuptools.setup", "os.path.dirname", "subprocess.check_output", "re.match", "os.path.isfile", "shutil.move", "filecmp.cmp" ]
[((1445, 2328), 'setuptools.setup', 'setup', ([], {'name': '"""pxpx"""', 'version': 'git_version', 'description': '"""ps and top for Human Beings"""', 'long_description': 'LONG_DESCRIPTION', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'url': '"""https://github.com/walles/px"""', 'license': '"""MIT"""', 'classifiers': "['Development Status :: 5 - Production/Stable', 'Environment :: Console',\n 'Intended Audience :: System Administrators',\n 'License :: OSI Approved :: MIT License', 'Operating System :: MacOS',\n 'Operating System :: POSIX :: Linux',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 3', 'Topic :: System :: Monitoring',\n 'Topic :: System :: Systems Administration', 'Topic :: Utilities']", 'packages': "['px']", 'install_requires': 'requirements', 'zip_safe': '(True)', 'setup_requires': "['pytest-runner']", 'tests_require': "['pytest']", 'entry_points': "{'console_scripts': ['px = px.px:main', 'ptop = px.px:main']}"}), "(name='pxpx', version=git_version, description=\n 'ps and top for Human Beings', long_description=LONG_DESCRIPTION,\n author='<NAME>', author_email='<EMAIL>', url=\n 'https://github.com/walles/px', license='MIT', classifiers=[\n 'Development Status :: 5 - Production/Stable', 'Environment :: Console',\n 'Intended Audience :: System Administrators',\n 'License :: OSI Approved :: MIT License', 'Operating System :: MacOS',\n 'Operating System :: POSIX :: Linux',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 3', 'Topic :: System :: Monitoring',\n 'Topic :: System :: Systems Administration', 'Topic :: Utilities'],\n packages=['px'], install_requires=requirements, zip_safe=True,\n setup_requires=['pytest-runner'], tests_require=['pytest'],\n entry_points={'console_scripts': ['px = px.px:main', 'ptop = px.px:main']})\n", (1450, 2328), False, 'from setuptools import setup\n'), ((275, 330), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'suffix': 
'""".py"""', 'delete': '(False)'}), "(suffix='.py', delete=False)\n", (302, 330), False, 'import tempfile\n'), ((1322, 1373), 're.match', 're.match', (['"""^[0-9]+\\\\.[0-9]+\\\\.[0-9]+$"""', 'git_version'], {}), "('^[0-9]+\\\\.[0-9]+\\\\.[0-9]+$', git_version)\n", (1330, 1373), False, 'import re\n'), ((566, 593), 'os.path.isfile', 'os.path.isfile', (['VERSIONFILE'], {}), '(VERSIONFILE)\n', (580, 593), False, 'import os\n'), ((635, 669), 'shutil.move', 'shutil.move', (['tmp.name', 'VERSIONFILE'], {}), '(tmp.name, VERSIONFILE)\n', (646, 669), False, 'import shutil\n'), ((683, 717), 'filecmp.cmp', 'filecmp.cmp', (['tmp.name', 'VERSIONFILE'], {}), '(tmp.name, VERSIONFILE)\n', (694, 717), False, 'import filecmp\n'), ((765, 799), 'shutil.move', 'shutil.move', (['tmp.name', 'VERSIONFILE'], {}), '(tmp.name, VERSIONFILE)\n', (776, 799), False, 'import shutil\n'), ((1084, 1103), 'os.remove', 'os.remove', (['tmp.name'], {}), '(tmp.name)\n', (1093, 1103), False, 'import os\n'), ((1232, 1257), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1247, 1257), False, 'import os\n'), ((188, 243), 'subprocess.check_output', 'subprocess.check_output', (["['git', 'describe', '--dirty']"], {}), "(['git', 'describe', '--dirty'])\n", (211, 243), False, 'import subprocess\n')]
"""Process orchestration helpers: ProcessList and the high-level Context."""
import atexit
import multiprocessing
import pprint
import signal
import time
from contextlib import suppress
from typing import Callable, Union, Any, List, Mapping, Sequence, Tuple, cast

from . import util
from .consts import DEFAULT_NAMESPACE
from .process import Process
from .server import tools
from .state.state import State
from .task.map_plus import map_plus
from .task.swarm import Swarm


class ProcessList(list):
    """A plain ``list`` of :py:class:`Process` objects with bulk helpers."""

    def __str__(self):
        return ProcessList.__qualname__ + ": " + pprint.pformat(list(self))

    def __repr__(self):
        return "<" + self.__str__() + ">"

    @staticmethod
    def _wait_or_catch_exc(
        process: Process, timeout: Union[int, float] = None
    ) -> Union[Exception, Any]:
        # "Safe" variant of Process.wait(): never raises, returns the
        # exception object instead so the caller's loop keeps going.
        try:
            return process.wait(timeout)
        except Exception as e:
            return e

    def wait(
        self, timeout: Union[int, float] = None, safe: bool = False
    ) -> List[Union[Any, Exception]]:
        """
        Call :py:meth:`~Process.wait()` on all the Processes in this list.

        :param timeout:
            Same as :py:meth:`~Process.wait()`.

            This parameter controls the timeout for all the Processes combined,
            not a single :py:meth:`~Process.wait()` call.
        :param safe:
            Suppress any errors that occur while waiting for a Process.

            The return value of failed :py:meth:`~Process.wait()` calls are
            substituted with the ``Exception`` that occurred.
        :return:
            A ``list`` containing the values returned by child Processes of
            this Context.
        """
        if safe:
            _wait = self._wait_or_catch_exc
        else:
            _wait = Process.wait

        if timeout is None:
            return [_wait(process) for process in self]
        else:
            # Shared deadline: each successive wait() gets whatever time
            # remains of the combined timeout.
            final = time.time() + timeout
            return [_wait(process, final - time.time()) for process in self]

    def start(self):
        """
        Call :py:meth:`~Process.start()` on all the child processes of this Context

        Ignores if a Process is already started, unlike
        :py:meth:`~Process.start()`, which throws an ``AssertionError``.
        """
        with suppress(AssertionError):
            for process in self:
                process.start()

    def stop(self):
        """
        Call :py:meth:`~Process.stop()` on all the Processes in this list.

        Retains the same order as ``Context.process_list``.

        :return:
            A ``list`` containing the exitcodes of the child Processes of
            this Context.
        """
        return [proc.stop() for proc in self]


class Context:
    #: The :py:class:`multiprocessing.Process` object for the server.
    server_process: multiprocessing.Process

    def __init__(
        self,
        server_address: str = None,
        *,
        start_server: bool = True,
        backend: Callable = multiprocessing.Process,
        wait: bool = False,
        cleanup: bool = True,
        namespace: str = DEFAULT_NAMESPACE,
        **process_kwargs
    ) -> None:
        r"""
        Provides a high level interface to :py:class:`State` and :py:class:`Process`.

        Primarily used to manage and launch processes.

        All processes launched using a Context, share the same state.

        Don't share a Context object between Processes / Threads.
        A Context object is not thread-safe.

        :param server_address:
            The address of the server.

            If this is set to ``None``, a random address will be generated.
        :param start_server:
            Whether to start the ZProc server.
            It is started automatically by default.

            If this is set to ``None``, then you must either -

            - Start a server using a different Context object.
            - Start one manually, using :py:func:`start_server`.

            In both cases, it the user's responsibility to make sure that the
            ``server_address`` argument is satisfied.

            .. note::
                If the server is not started before-hand,
                the Context object will block infinitely,
                waiting for the server to respond.

                In case you want to play around,
                the :py:func:`ping` function is handy,
                since it let's you *detect* the presence of a server at a
                given address.
        :param backend:
            .. include:: /api/snippets/backend.rst
        :param wait:
            Wait for all running process to finish their work before exiting.

            Alternative to manually calling :py:meth:`~Context.wait` at exit.
        :param cleanup:
            Whether to cleanup the process tree before exiting.

            Registers a signal handler for ``SIGTERM``, and an ``atexit``
            handler.
        :param \*\*process_kwargs:
            Keyword arguments that :py:class:`~Process` takes, except
            ``server_address`` and ``target``.

            If provided, these will be used while creating processes using
            this Context.
        """
        #: A :py:class:`ProcessList` object containing all Processes created under this Context.
        self.process_list = ProcessList()
        #: Passed on from the constructor. This is read-only.
        self.backend = backend
        #: Passed on from the constructor. This is read-only.
        self.namespace = namespace
        #: Passed on from the constructor.
        self.process_kwargs = process_kwargs

        self.process_kwargs.setdefault("namespace", self.namespace)
        self.process_kwargs.setdefault("backend", self.backend)

        self.server_address = cast(str, server_address)
        """The server's address.

        This holds the address this Context is connected to,
        not necessarily the value provided in the constructor.

        This is read-only."""

        if start_server:
            self.start_server()

        assert self.server_address is not None, (
            "Couldn't determine the server address. "
            "Hint: Either provide the `server_address` parameter, "
            "or pass `start_server=True`."
        )

        # register cleanup before wait, so that wait runs before cleanup.
        # (order of execution is reversed)
        if cleanup:
            atexit.register(util.clean_process_tree)
            if util.is_main_thread():
                signal.signal(signal.SIGTERM, util.clean_process_tree)
        if wait:
            atexit.register(self.wait)

    def __str__(self):
        return "%s - server: %r at %#x" % (
            self.__class__.__qualname__,
            self.server_address,
            id(self),
        )

    def __repr__(self):
        return util.enclose_in_brackets(self.__str__())

    def create_state(self, value: dict = None, *, namespace: str = None):
        """
        Creates a new :py:class:`State` object, sharing the same zproc server
        as this Context.

        :param value:
            If provided, call ``state.update(value)``.
        :param namespace:
            Use this as the namespace for the :py:class:`State` object,
            instead of this :py:class:`Context`\ 's namespace.
        :return:
            A :py:class:`State` object.
        """
        if namespace is None:
            namespace = self.namespace
        state = State(self.server_address, namespace=namespace)
        if value is not None:
            state.update(value)
        return state

    def create_swarm(self, count: int = None):
        """Create a :py:class:`Swarm` bound to this Context's server and
        namespace, and start ``count`` workers on it."""
        swarm = Swarm(self.server_address, namespace=self.namespace)
        swarm.start(count)
        return swarm

    def start_server(self) -> Tuple[multiprocessing.Process, str]:
        """Start a zproc server and record its process and address on this
        Context. Returns the ``(server_process, server_address)`` pair."""
        out = tools.start_server(self.server_address, backend=self.backend)
        self.server_process, self.server_address = out
        return out

    def _process(
        self, target: Callable = None, **process_kwargs
    ) -> Union[Process, Callable]:
        r"""
        Produce a child process bound to this context.

        Can be used both as a function and decorator:

        .. code-block:: python
            :caption: Usage

            @zproc.process(pass_context=True)  # you may pass some arguments here
            def p1(ctx):
                print('hello', ctx)

            @zproc.process  # or not...
            def p2(state):
                print('hello', state)

            def p3(state):
                print('hello', state)

            zproc.process(p3)  # or just use as a good ol' function
        :param target:
            Passed on to the :py:class:`Process` constructor.

            *Must be omitted when using this as a decorator.*
        :param \*\*process_kwargs:
            .. include:: /api/context/params/process_kwargs.rst
        :return:
            The :py:class:`Process` instance produced.
        """
        # Per-call kwargs override the Context-level defaults.
        process = Process(
            self.server_address, target, **{**self.process_kwargs, **process_kwargs}
        )
        self.process_list.append(process)
        return process

    def spawn(self, *targets: Callable, count: int = 1, **process_kwargs):
        r"""
        Produce one or many child process(s) bound to this context.

        :param \*targets:
            Passed on to the :py:class:`Process` constructor, one at a time.

        :param count:
            The number of processes to spawn for each item in ``targets``.

        :param \*\*process_kwargs:
            .. include:: /api/context/params/process_kwargs.rst

        :return:
            A ``ProcessList`` of the :py:class:`Process` instance(s) produced.
        """
        if not targets:
            # Decorator form: spawn() was called with only keyword args.
            def wrapper(target: Callable):
                return self.spawn(target, count=count, **process_kwargs)

            return wrapper

        if len(targets) * count == 1:
            # Single process: return it directly, not wrapped in a list.
            return self._process(targets[0], **process_kwargs)

        return ProcessList(
            self._process(target, **process_kwargs)
            for _ in range(count)
            for target in targets
        )

    def spawn_map(
        self,
        target: Callable,
        map_iter: Sequence[Any] = None,
        *,
        map_args: Sequence[Sequence[Any]] = None,
        args: Sequence = None,
        map_kwargs: Sequence[Mapping[str, Any]] = None,
        kwargs: Mapping = None,
        **process_kwargs
    ):
        # Spawn one process per mapped input; map_plus handles combining
        # the per-item and common args/kwargs.
        return ProcessList(
            map_plus(
                lambda *args, **kwargs: self._process(
                    target, args=args, kwargs=kwargs, **process_kwargs
                ),
                map_iter,
                map_args,
                args,
                map_kwargs,
                kwargs,
            )
        )

    def wait(
        self, timeout: Union[int, float] = None, safe: bool = False
    ) -> List[Union[Any, Exception]]:
        """
        alias for :py:meth:`ProcessList.wait()`
        """
        return self.process_list.wait(timeout, safe)

    def start_all(self):
        """
        alias for :py:meth:`ProcessList.start()`
        """
        return self.process_list.start()

    def stop_all(self):
        """
        alias for :py:meth:`ProcessList.stop()`
        """
        return self.process_list.stop()

    def ping(self, **kwargs):
        r"""
        Ping the zproc server.

        :param \*\*kwargs:
            Keyword arguments that :py:func:`ping` takes, except
            ``server_address``.
        :return:
            Same as :py:func:`ping`
        """
        return tools.ping(self.server_address, **kwargs)
[ "atexit.register", "typing.cast", "contextlib.suppress", "time.time", "signal.signal" ]
[((5651, 5676), 'typing.cast', 'cast', (['str', 'server_address'], {}), '(str, server_address)\n', (5655, 5676), False, 'from typing import Callable, Union, Any, List, Mapping, Sequence, Tuple, cast\n'), ((2193, 2217), 'contextlib.suppress', 'suppress', (['AssertionError'], {}), '(AssertionError)\n', (2201, 2217), False, 'from contextlib import suppress\n'), ((6320, 6360), 'atexit.register', 'atexit.register', (['util.clean_process_tree'], {}), '(util.clean_process_tree)\n', (6335, 6360), False, 'import atexit\n'), ((6499, 6525), 'atexit.register', 'atexit.register', (['self.wait'], {}), '(self.wait)\n', (6514, 6525), False, 'import atexit\n'), ((1821, 1832), 'time.time', 'time.time', ([], {}), '()\n', (1830, 1832), False, 'import time\n'), ((6415, 6469), 'signal.signal', 'signal.signal', (['signal.SIGTERM', 'util.clean_process_tree'], {}), '(signal.SIGTERM, util.clean_process_tree)\n', (6428, 6469), False, 'import signal\n'), ((1886, 1897), 'time.time', 'time.time', ([], {}), '()\n', (1895, 1897), False, 'import time\n')]
import re

from hotsos.core.host_helpers import CLIHelper
from hotsos.core.plugins.storage.bcache import BcacheChecksBase


class BcacheSummary(BcacheChecksBase):
    """Summary provider for bcache storage information.

    NOTE(review): the name-mangled ``__summary_*`` methods are presumably
    discovered by the summary plugin machinery — confirm against
    BcacheChecksBase before renaming anything here.
    """

    def __summary_cachesets(self):
        """Return sysfs cacheset information, or None when none exist
        (an implicit None omits the entry from the summary)."""
        csets = self.get_sysfs_cachesets()
        if csets:
            return csets

    def __summary_devices(self):
        """Return {dev_type: {devname: {'dname': <name-or-placeholder>}}}
        for bcache and nvme devices listed under /sys/block."""
        # Both patterns are loop-invariant; compile them once instead of
        # once per output line (the original re.compile'd inside the loops).
        dname_expr = re.compile(r'.+\s+disk/by-dname/(.+)')
        devs = {}
        for dev_type in ['bcache', 'nvme']:
            dev_expr = re.compile(
                r".+[0-9:]+\s+({}[0-9a-z]+)\s+.+".format(dev_type))
            for line in CLIHelper().ls_lanR_sys_block():
                ret = dev_expr.match(line)
                if ret:
                    if dev_type not in devs:
                        devs[dev_type] = {}
                    devname = ret[1]
                    devs[dev_type][devname] = {}
                    # Resolve the device's dname from udev info; fall back
                    # to a placeholder if no by-dname symlink is reported.
                    for line in CLIHelper().udevadm_info_dev(device=devname):
                        ret = dname_expr.match(line)
                        if ret:
                            devs[dev_type][devname]['dname'] = ret[1]
                        elif 'dname' not in devs[dev_type][devname]:
                            devs[dev_type][devname]['dname'] = \
                                '<notfound>'

        if devs:
            return devs
[ "hotsos.core.host_helpers.CLIHelper", "re.compile" ]
[((405, 416), 'hotsos.core.host_helpers.CLIHelper', 'CLIHelper', ([], {}), '()\n', (414, 416), False, 'from hotsos.core.host_helpers import CLIHelper\n'), ((534, 550), 're.compile', 're.compile', (['expr'], {}), '(expr)\n', (544, 550), False, 'import re\n'), ((795, 806), 'hotsos.core.host_helpers.CLIHelper', 'CLIHelper', ([], {}), '()\n', (804, 806), False, 'from hotsos.core.host_helpers import CLIHelper\n'), ((929, 945), 're.compile', 're.compile', (['expr'], {}), '(expr)\n', (939, 945), False, 'import re\n')]
# -*- coding: utf-8 -*-
"""Qzone/QQ URL builders and request helpers for the crawler spiders.

Python 2 module: it relies on ``urllib.quote`` and ``unicode``, neither of
which exists in Python 3.
"""
import random
import urllib

from scrapy.http.headers import Headers

from crawler.misc.spider import CommonSpider
from crawler.misc import agents


class BaseHelper(object):
    # URL templates. The UPPERCASE placeholders (USER_QQ_NUMBER, GTK,
    # ACCESS_TOKEN, OPENID, ALBUMID, PAGECOUNT, PS, PN, CODE) are meant to
    # be substituted by the builder methods below.
    PROFILE_URL = "https://mobile.qzone.qq.com/profile?hostuin=USER_QQ_NUMBER"

    SHUOSHU_URL = "https://mobile.qzone.qq.com/list?g_tk=GTK&format=json&list_type=shuoshuo&action=0&res_uin=USER_QQ_NUMBER&count=PAGECOUNT"

    CODE_URL = "https://graph.qq.com/oauth2.0/token?grant_type=authorization_code&client_id=101347930&client_secret=68270da4c08fddb26486283c1fab1b0a&code=CODE&redirect_uri=http%3a%2f%2f29060abb.nat123.net%2fPBMSWEBOOK%2fqqlogin&state=203"

    OPENID_URL = "https://graph.qq.com/oauth2.0/me?access_token=ACCESS_TOKEN"

    ALBUMLIST_URL = "https://graph.qq.com/photo/list_album?access_token=ACCESS_TOKEN&oauth_consumer_key=101347930&openid=OPENID&format=json"

    NICKNAME_URL = "https://graph.qq.com/user/get_user_info?access_token=ACCESS_TOKEN&oauth_consumer_key=101347930&openid=OPENID"

    POTOLIST_URL = "https://graph.qq.com/photo/list_photo?access_token=ACCESS_TOKEN&oauth_consumer_key=101347930&openid=OPENID&format=json&albumid=ALBUMID"

    ALBUM_URL = "https://mobile.qzone.qq.com/list?g_tk=GTK&format=json&list_type=album&action=0&res_uin=USER_QQ_NUMBER"

    PHOTO_URL = "http://h5.qzone.qq.com/webapp/json/mqzone_photo/getPhotoList2?g_tk=GTK&uin=USER_QQ_NUMBER&albumid=ALBUMID&ps=PS&pn=PN"

    # Page size used for shuoshuo list requests.
    PAGE_COUNT = '40'

    @classmethod
    def get_headers(cls):
        """Build the default request headers (JSON responses expected)."""
        return Headers({
            # 'User-Agent': self._get_user_agent(),
            # 'Content-Type': 'application/json',
            # "Connection": "keep-alive",
            'Accept': 'application/json',
            # 'Host': cls.BASE_URL,
        })

    @classmethod
    def get_profile_url(cls, uid):
        """Profile page URL for the given QQ number."""
        return cls.PROFILE_URL.replace("USER_QQ_NUMBER", uid)

    @classmethod
    def get_shuoshuo_url(cls, uid, last_attach=None):
        """Shuoshuo (status feed) list URL for the given QQ number,
        optionally appending the pagination token from the last response."""
        url = cls.SHUOSHU_URL.replace("USER_QQ_NUMBER", uid) \
            .replace("PAGECOUNT", cls.PAGE_COUNT)
        return url if last_attach is None \
            else url + "&res_attach=" + cls._quote_url(last_attach)

    def get_code_url(self, uid):
        # NOTE(review): returns SHUOSHU_URL even though the name (and the
        # otherwise-unused CODE_URL constant above) suggests this should
        # build CODE_URL — looks like a copy-paste bug; confirm intent.
        return self.SHUOSHU_URL.replace("USER_QQ_NUMBER", uid)

    def get_openid_url(self, uid):
        # NOTE(review): same apparent copy-paste — OPENID_URL (which takes
        # an ACCESS_TOKEN placeholder) is never used. Confirm intent.
        return self.SHUOSHU_URL.replace("USER_QQ_NUMBER", uid)

    def get_album_list_url(self, uid):
        # NOTE(review): same apparent copy-paste — ALBUMLIST_URL is never
        # used. Confirm intent.
        return self.SHUOSHU_URL.replace("USER_QQ_NUMBER", uid)

    def get_photo_list_url(self, uid):
        # NOTE(review): same apparent copy-paste — POTOLIST_URL is never
        # used. Confirm intent.
        return self.SHUOSHU_URL.replace("USER_QQ_NUMBER", uid)

    @classmethod
    def get_album_url(cls, uid, last_attach=None):
        """Album list URL for the given QQ number, optionally appending the
        pagination token from the last response."""
        url = cls.ALBUM_URL.replace("USER_QQ_NUMBER", uid)
        return url if last_attach is None \
            else url + "&res_attach=" + cls._quote_url(last_attach)

    @classmethod
    def get_photo_url(cls, uid, album_id, ps, pn, last_attach=None):
        """Photo list URL for one album of the given QQ number; ``ps`` and
        ``pn`` fill the page-size and page-number placeholders."""
        url = cls.PHOTO_URL.replace("USER_QQ_NUMBER", uid) \
            .replace("ALBUMID", album_id) \
            .replace("PS", ps) \
            .replace("PN", pn)
        return url if last_attach is None \
            else url + "&res_attach=" + cls._quote_url(last_attach)

    @staticmethod
    def get_cookie_key_prefix(spider):
        """Cookie-store key prefix derived from the spider name.

        Assumes spider names look like ``<prefix>_<rest>``; ``str.index``
        raises ValueError when no underscore is present, and the assert
        rejects a leading underscore.
        """
        sep = "_"
        assert spider.name.index(sep) > 0
        return "{}:Cookies".format(spider.name.split(sep)[0])

    @staticmethod
    def _quote_url(url):
        # Percent-encode the pagination token for use in a query string.
        # Python 2 only: urllib.quote / unicode were removed in Python 3.
        return urllib.quote(unicode(str(url), "UTF-8"))
[ "scrapy.http.headers.Headers" ]
[((1503, 1542), 'scrapy.http.headers.Headers', 'Headers', (["{'Accept': 'application/json'}"], {}), "({'Accept': 'application/json'})\n", (1510, 1542), False, 'from scrapy.http.headers import Headers\n')]