@libp2p_log_on_failure_all
class TestP2PLibp2pConnectionIntegrationTest:
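    """Integration tests for direct, relayed, delegate-client and mailbox libp2p connections."""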
BASE_PORT_NUM: int = DEFAULT_PORT
    @classmethod
    def get_port(cls) -> int:
cls.BASE_PORT_NUM += 1
return cls.BASE_PORT_NUM
    @classmethod
    def make_connection(cls, name, **kwargs):
if (name in cls.multiplexers_dict):
raise ValueError(f'Connection with name `{name}` already added')
temp_dir = os.path.join(cls.t, name)
os.mkdir(temp_dir)
conn_options = copy(kwargs)
conn_options['port'] = conn_options.get('port', cls.get_port())
conn_options['data_dir'] = conn_options.get('data_dir', temp_dir)
conn = _make_libp2p_connection(**conn_options)
multiplexer = Multiplexer([conn], protocols=[MockDefaultMessageProtocol])
cls.log_files.append(conn.node.log_file)
multiplexer.connect()
cls.multiplexers_dict[name] = multiplexer
cls.connections_dict[name] = conn
return conn
    @classmethod
    def make_client_connection(cls, name, **kwargs):
if (name in cls.multiplexers_dict):
raise ValueError(f'Connection with name `{name}` already added')
temp_dir = os.path.join(cls.t, name)
os.mkdir(temp_dir)
conn_options = copy(kwargs)
conn_options['data_dir'] = conn_options.get('data_dir', temp_dir)
conn = _make_libp2p_client_connection(**conn_options)
multiplexer = Multiplexer([conn], protocols=[MockDefaultMessageProtocol])
multiplexer.connect()
cls.multiplexers_dict[name] = multiplexer
cls.connections_dict[name] = conn
return conn
    @classmethod
    def make_mailbox_connection(cls, name, **kwargs):
if (name in cls.multiplexers_dict):
raise ValueError(f'Connection with name `{name}` already added')
temp_dir = os.path.join(cls.t, name)
os.mkdir(temp_dir)
conn_options = copy(kwargs)
conn_options['data_dir'] = conn_options.get('data_dir', temp_dir)
conn = _make_libp2p_mailbox_connection(**conn_options)
multiplexer = Multiplexer([conn], protocols=[MockDefaultMessageProtocol])
multiplexer.connect()
cls.multiplexers_dict[name] = multiplexer
cls.connections_dict[name] = conn
return conn
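    # setup_class below builds a small network: a chain of two relays, two
    # relay nodes that also serve delegate and mailbox ports, two plain agent
    # connections, and one delegate client plus one mailbox client attached
    # to each delegate node.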
    @classmethod
    @libp2p_log_on_failure
    def setup_class(cls):
cls.cwd = os.getcwd()
cls.t = tempfile.mkdtemp()
os.chdir(cls.t)
cls.log_files = []
cls.multiplexers_dict = {}
cls.connections_dict = {}
cls.multiplexers = []
try:
cls.main_relay = cls.make_connection('main_relay', relay=True)
main_relay = cls.main_relay.node.multiaddrs[0]
cls.relay_2 = cls.make_connection('relay_2', entry_peers=[main_relay], relay=True)
relay_peer_2 = cls.relay_2.node.multiaddrs[0]
cls.delegate_1 = cls.make_connection('delegate_1', entry_peers=[main_relay], relay=True, delegate=True, delegate_port=cls.get_port(), mailbox_port=cls.get_port(), mailbox=True)
cls.delegate_2 = cls.make_connection('delegate_2', entry_peers=[relay_peer_2], relay=True, delegate=True, delegate_port=cls.get_port(), mailbox_port=cls.get_port(), mailbox=True)
cls.agent_connection_1 = cls.make_connection('agent_connection_1', entry_peers=[main_relay], relay=False, delegate=False)
cls.agent_connection_2 = cls.make_connection('agent_connection_2', entry_peers=[relay_peer_2], relay=False, delegate=False)
cls.client_connection_1 = cls.make_client_connection('client_1', peer_public_key=cls.delegate_1.node.pub, **cls.get_delegate_host_port(cls.delegate_1.node.delegate_uri))
cls.client_connection_2 = cls.make_client_connection('client_2', peer_public_key=cls.delegate_2.node.pub, **cls.get_delegate_host_port(cls.delegate_2.node.delegate_uri))
cls.mailbox_connection_1 = cls.make_mailbox_connection('mailbox_1', peer_public_key=cls.delegate_1.node.pub, **cls.get_delegate_host_port(Uri(cls.delegate_1.node.mailbox_uri)))
cls.mailbox_connection_2 = cls.make_mailbox_connection('mailbox_2', peer_public_key=cls.delegate_2.node.pub, **cls.get_delegate_host_port(Uri(cls.delegate_2.node.mailbox_uri)))
except Exception:
cls.teardown_class()
raise
    @classmethod
    def get_delegate_host_port(cls, delegate_uri: Uri) -> dict:
return {'node_port': delegate_uri.port, 'node_host': delegate_uri.host}
def test_connection_is_established(self):
for conn in self.connections_dict.values():
assert (conn.is_connected is True)
def send_message(self, from_name: str, to_name: str) -> None:
from_addr = self.connections_dict[from_name].address
to_addr = self.connections_dict[to_name].address
from_multiplexer = self.multiplexers_dict[from_name]
to_multiplexer = self.multiplexers_dict[to_name]
msg = DefaultMessage(dialogue_reference=('', ''), message_id=1, target=0, performative=DefaultMessage.Performative.BYTES, content=b'hello')
envelope = Envelope(to=to_addr, sender=from_addr, message=msg)
from_multiplexer.put(envelope)
delivered_envelope = to_multiplexer.get(block=True, timeout=10)
assert (delivered_envelope is not None)
assert (delivered_envelope.to == envelope.to)
assert (delivered_envelope.sender == envelope.sender)
assert (delivered_envelope.protocol_specification_id == envelope.protocol_specification_id)
assert (delivered_envelope.message != envelope.message)
msg = DefaultMessage.serializer.decode(delivered_envelope.message)
msg.sender = delivered_envelope.sender
msg.to = delivered_envelope.to
assert (envelope.message == msg)
def test_send_and_receive(self):
for (from_name, to_name) in itertools.permutations(['client_1', 'client_2', 'agent_connection_1', 'agent_connection_2', 'mailbox_1', 'mailbox_2'], 2):
self.send_message(from_name, to_name)
    @classmethod
    def teardown_class(cls):
for mux in cls.multiplexers:
mux.disconnect()
for mux in cls.multiplexers_dict.values():
mux.disconnect()
os.chdir(cls.cwd)
try:
shutil.rmtree(cls.t)
except (OSError, IOError):
            pass
class Query(object):
    @staticmethod
    def Allowance(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
return grpc.experimental.unary_unary(request, target, '/cosmos.feegrant.v1beta1.Query/Allowance', cosmos_dot_feegrant_dot_v1beta1_dot_query__pb2.QueryAllowanceRequest.SerializeToString, cosmos_dot_feegrant_dot_v1beta1_dot_query__pb2.QueryAllowanceResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def Allowances(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        return grpc.experimental.unary_unary(request, target, '/cosmos.feegrant.v1beta1.Query/Allowances', cosmos_dot_feegrant_dot_v1beta1_dot_query__pb2.QueryAllowancesRequest.SerializeToString, cosmos_dot_feegrant_dot_v1beta1_dot_query__pb2.QueryAllowancesResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
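# A minimal usage sketch for the experimental stubs above (hypothetical
# target and request fields; assumes a reachable Cosmos gRPC endpoint):
#
#     request = cosmos_dot_feegrant_dot_v1beta1_dot_query__pb2.QueryAllowanceRequest(
#         granter='cosmos1...', grantee='cosmos1...')
#     response = Query.Allowance(request, target='localhost:9090', insecure=True)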
def twitter_recon(user_name):
if (not user_name):
return None
try:
        # NOTE: the URL literal was truncated in the source; the standard
        # Twitter profile URL is assumed here.
        url = f'https://twitter.com/{user_name}'
soup = social_soup(url)
name = soup.find('h1').contents[1].text
try:
location = soup.find('span', class_='ProfileHeaderCard-locationText u-dir').contents[1].text
        except Exception:
location = None
description = soup.find('div', class_='ProfileHeaderCard').contents[5].text
created_at = soup.find('div', class_='ProfileHeaderCard-joinDate').contents[3].text
avatar = soup.find('div', class_='ProfileAvatar').find('img', class_='ProfileAvatar-image').get('src')
try:
recent_tweet = soup.find('div', class_='content').find('p', class_='TweetTextSize TweetTextSize--normal js-tweet-text tweet-text').text
        except Exception:
recent_tweet = None
    except Exception:
return None
    return {'site': 'Twitter', 'name': name, 'location': location, 'description': description, 'created_at': created_at, 'avatar': avatar, 'recent_tweet': recent_tweet, 'url': url}
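# twitter_recon above scrapes the legacy twitter.com profile markup with
# BeautifulSoup; it returns None if the profile lookup or core fields fail,
# while location and recent_tweet individually fall back to None.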
def _create_gitfiles(project_path: Path) -> None:
gitignore = project_path.joinpath('.gitignore')
if (not gitignore.exists()):
with gitignore.open('w') as fp:
fp.write(GITIGNORE)
gitattributes = project_path.joinpath('.gitattributes')
if (not gitattributes.exists()):
with gitattributes.open('w') as fp:
            fp.write(GITATTRIBUTES)
class Stream(StreamT[T_co], Service):
logger = logger
mundane_level = 'debug'
events_total: int = 0
_processors: MutableSequence[Processor]
_anext_started = False
_passive = False
_finalized = False
_passive_started: asyncio.Event
    def __init__(self,
                 channel: AsyncIterator[T_co],
                 *,
                 app: AppT,
                 processors: Iterable[Processor[T]] = None,
                 combined: List[JoinableT] = None,
                 on_start: Optional[Callable] = None,
                 join_strategy: Optional[JoinT] = None,
                 beacon: Optional[NodeT] = None,
                 concurrency_index: Optional[int] = None,
                 prev: Optional[StreamT] = None,
                 active_partitions: Optional[Set[TP]] = None,
                 enable_acks: bool = True,
                 prefix: str = '',
                 loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
Service.__init__(self, loop=loop, beacon=beacon)
self.app = app
self.channel = channel
self.outbox = self.app.FlowControlQueue(maxsize=self.app.conf.stream_buffer_maxsize, clear_on_resume=True)
self._passive_started = asyncio.Event()
self.join_strategy = join_strategy
self.combined = (combined if (combined is not None) else [])
self.concurrency_index = concurrency_index
self._prev = prev
self.active_partitions = active_partitions
self.enable_acks = enable_acks
self.prefix = prefix
self._processors = (list(processors) if processors else [])
self._on_start = on_start
task = current_task(loop=self.loop)
if (task is not None):
self.task_owner = task
self._on_stream_event_in = self.app.sensors.on_stream_event_in
self._on_stream_event_out = self.app.sensors.on_stream_event_out
self._on_message_in = self.app.sensors.on_message_in
self._on_message_out = self.app.sensors.on_message_out
self._skipped_value = object()
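    # Streams form a linked list: `_prev` is set on construction and `_next`
    # when a stream is chained (see _chain); the helpers below walk that list.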
def get_active_stream(self) -> StreamT:
return list(self._iter_ll_forwards())[(- 1)]
def get_root_stream(self) -> StreamT:
return list(self._iter_ll_backwards())[(- 1)]
def _iter_ll_forwards(self) -> Iterator[StreamT]:
return self._iter_ll(_LinkedListDirectionFwd)
def _iter_ll_backwards(self) -> Iterator[StreamT]:
return self._iter_ll(_LinkedListDirectionBwd)
def _iter_ll(self, dir_: _LinkedListDirection) -> Iterator[StreamT]:
node: Optional[StreamT] = self
seen: Set[StreamT] = set()
while node:
if (node in seen):
raise RuntimeError(f'Loop in Stream.{dir_.attr}: Call support!')
seen.add(node)
(yield node)
node = dir_.getter(node)
def add_processor(self, processor: Processor[T]) -> None:
self._processors.append(processor)
def info(self) -> Mapping[(str, Any)]:
return {'app': self.app, 'channel': self.channel, 'processors': self._processors, 'on_start': self._on_start, 'loop': self.loop, 'combined': self.combined, 'beacon': self.beacon, 'concurrency_index': self.concurrency_index, 'prev': self._prev, 'active_partitions': self.active_partitions}
def clone(self, **kwargs: Any) -> StreamT:
return self.__class__(**{**self.info(), **kwargs})
def _chain(self, **kwargs: Any) -> StreamT:
assert (not self._finalized)
self._next = new_stream = self.clone(on_start=self.maybe_start, prev=self, processors=list(self._processors), **kwargs)
self._processors.clear()
return new_stream
def noack(self) -> 'StreamT':
self._next = new_stream = self.clone(enable_acks=False)
return new_stream
async def items(self) -> AsyncIterator[Tuple[(K, T_co)]]:
async for event in self.events():
(yield (event.key, cast(T_co, event.value)))
async def events(self) -> AsyncIterable[EventT]:
async for _ in self:
if (self.current_event is not None):
(yield self.current_event)
async def take(self, max_: int, within: Seconds) -> AsyncIterable[Sequence[T_co]]:
buffer: List[T_co] = []
events: List[EventT] = []
buffer_add = buffer.append
event_add = events.append
buffer_size = buffer.__len__
buffer_full = asyncio.Event()
buffer_consumed = asyncio.Event()
timeout = (want_seconds(within) if within else None)
stream_enable_acks: bool = self.enable_acks
buffer_consuming: Optional[asyncio.Future] = None
channel_it = aiter(self.channel)
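        # add_to_buffer runs as a stream processor: it buffers each value and
        # its event, and once `max_` items have accumulated it sets
        # `buffer_full` and waits until the loop below has yielded the batch
        # and signalled `buffer_consumed`.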
async def add_to_buffer(value: T) -> T:
try:
nonlocal buffer_consuming
if (buffer_consuming is not None):
try:
(await buffer_consuming)
finally:
buffer_consuming = None
buffer_add(cast(T_co, value))
event = self.current_event
if (event is None):
raise RuntimeError('Take buffer found current_event is None')
event_add(event)
if (buffer_size() >= max_):
buffer_full.set()
buffer_consumed.clear()
(await self.wait(buffer_consumed))
except CancelledError:
raise
except Exception as exc:
self.log.exception('Error adding to take buffer: %r', exc)
(await self.crash(exc))
return value
self.enable_acks = False
self.add_processor(add_to_buffer)
self._enable_passive(cast(ChannelT, channel_it))
try:
while (not self.should_stop):
(await self.wait_for_stopped(buffer_full, timeout=timeout))
if buffer:
buffer_consuming = self.loop.create_future()
try:
(yield list(buffer))
finally:
buffer.clear()
for event in events:
(await self.ack(event))
events.clear()
notify(buffer_consuming)
buffer_full.clear()
buffer_consumed.set()
else:
pass
else:
pass
finally:
self.enable_acks = stream_enable_acks
self._processors.remove(add_to_buffer)
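    # NOTE: take_events(), take_with_timestamp() and noack_take() below repeat
    # the same buffering pattern, differing only in what is buffered/yielded
    # and in whether the buffered events are acked.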
async def take_events(self, max_: int, within: Seconds) -> AsyncIterable[Sequence[EventT]]:
buffer: List[T_co] = []
events: List[EventT] = []
buffer_add = buffer.append
event_add = events.append
buffer_size = buffer.__len__
buffer_full = asyncio.Event()
buffer_consumed = asyncio.Event()
timeout = (want_seconds(within) if within else None)
stream_enable_acks: bool = self.enable_acks
buffer_consuming: Optional[asyncio.Future] = None
channel_it = aiter(self.channel)
async def add_to_buffer(value: T) -> T:
try:
nonlocal buffer_consuming
if (buffer_consuming is not None):
try:
(await buffer_consuming)
finally:
buffer_consuming = None
buffer_add(cast(T_co, value))
event = self.current_event
if (event is None):
raise RuntimeError('Take buffer found current_event is None')
event_add(event)
if (buffer_size() >= max_):
buffer_full.set()
buffer_consumed.clear()
(await self.wait(buffer_consumed))
except CancelledError:
raise
except Exception as exc:
self.log.exception('Error adding to take buffer: %r', exc)
(await self.crash(exc))
return value
self.enable_acks = False
self.add_processor(add_to_buffer)
self._enable_passive(cast(ChannelT, channel_it))
try:
while (not self.should_stop):
(await self.wait_for_stopped(buffer_full, timeout=timeout))
if buffer:
buffer_consuming = self.loop.create_future()
try:
(yield list(events))
finally:
buffer.clear()
for event in events:
(await self.ack(event))
events.clear()
notify(buffer_consuming)
buffer_full.clear()
buffer_consumed.set()
else:
pass
else:
pass
finally:
self.enable_acks = stream_enable_acks
self._processors.remove(add_to_buffer)
async def take_with_timestamp(self, max_: int, within: Seconds, timestamp_field_name: str) -> AsyncIterable[Sequence[T_co]]:
buffer: List[T_co] = []
events: List[EventT] = []
buffer_add = buffer.append
event_add = events.append
buffer_size = buffer.__len__
buffer_full = asyncio.Event()
buffer_consumed = asyncio.Event()
timeout = (want_seconds(within) if within else None)
stream_enable_acks: bool = self.enable_acks
buffer_consuming: Optional[asyncio.Future] = None
channel_it = aiter(self.channel)
async def add_to_buffer(value: T) -> T:
try:
nonlocal buffer_consuming
if (buffer_consuming is not None):
try:
(await buffer_consuming)
finally:
buffer_consuming = None
event = self.current_event
if (isinstance(value, dict) and timestamp_field_name):
value[timestamp_field_name] = event.message.timestamp
buffer_add(value)
if (event is None):
raise RuntimeError('Take buffer found current_event is None')
event_add(event)
if (buffer_size() >= max_):
buffer_full.set()
buffer_consumed.clear()
(await self.wait(buffer_consumed))
except CancelledError:
raise
except Exception as exc:
self.log.exception('Error adding to take buffer: %r', exc)
(await self.crash(exc))
return value
self.enable_acks = False
self.add_processor(add_to_buffer)
self._enable_passive(cast(ChannelT, channel_it))
try:
while (not self.should_stop):
(await self.wait_for_stopped(buffer_full, timeout=timeout))
if buffer:
buffer_consuming = self.loop.create_future()
try:
(yield list(buffer))
finally:
buffer.clear()
for event in events:
(await self.ack(event))
events.clear()
notify(buffer_consuming)
buffer_full.clear()
buffer_consumed.set()
else:
pass
else:
pass
finally:
self.enable_acks = stream_enable_acks
self._processors.remove(add_to_buffer)
def enumerate(self, start: int=0) -> AsyncIterable[Tuple[(int, T_co)]]:
return aenumerate(self, start)
async def noack_take(self, max_: int, within: Seconds) -> AsyncIterable[Sequence[T_co]]:
buffer: List[T_co] = []
events: List[EventT] = []
buffer_add = buffer.append
event_add = events.append
buffer_size = buffer.__len__
buffer_full = asyncio.Event()
buffer_consumed = asyncio.Event()
timeout = (want_seconds(within) if within else None)
stream_enable_acks: bool = self.enable_acks
buffer_consuming: Optional[asyncio.Future] = None
channel_it = aiter(self.channel)
async def add_to_buffer(value: T) -> T:
try:
nonlocal buffer_consuming
if (buffer_consuming is not None):
try:
(await buffer_consuming)
finally:
buffer_consuming = None
event = self.current_event
buffer_add(cast(T_co, event))
if (event is None):
raise RuntimeError('Take buffer found current_event is None')
event_add(event)
if (buffer_size() >= max_):
buffer_full.set()
buffer_consumed.clear()
(await self.wait(buffer_consumed))
except CancelledError:
raise
except Exception as exc:
self.log.exception('Error adding to take buffer: %r', exc)
(await self.crash(exc))
return value
self.enable_acks = False
self.add_processor(add_to_buffer)
self._enable_passive(cast(ChannelT, channel_it))
try:
while (not self.should_stop):
(await self.wait_for_stopped(buffer_full, timeout=timeout))
if buffer:
buffer_consuming = self.loop.create_future()
try:
(yield list(buffer))
finally:
buffer.clear()
events.clear()
notify(buffer_consuming)
buffer_full.clear()
buffer_consumed.set()
else:
pass
else:
pass
finally:
self.enable_acks = stream_enable_acks
self._processors.remove(add_to_buffer)
def through(self, channel: Union[(str, ChannelT)]) -> StreamT:
if self._finalized:
return self
if (self.concurrency_index is not None):
raise ImproperlyConfigured('Agent with concurrency>1 cannot use stream.through!')
if isinstance(channel, str):
channelchannel = cast(ChannelT, self.derive_topic(channel))
else:
channelchannel = channel
channel_it = aiter(channelchannel)
if (self._next is not None):
raise ImproperlyConfigured('Stream is already using group_by/through')
through = self._chain(channel=channel_it)
async def forward(value: T) -> T:
event = self.current_event
return (await maybe_forward(event, channelchannel))
self.add_processor(forward)
self._enable_passive(cast(ChannelT, channel_it), declare=True)
return through
def _enable_passive(self, channel: ChannelT, *, declare: bool=False) -> None:
if (not self._passive):
self._passive = True
self.add_future(self._passive_drainer(channel, declare))
async def _passive_drainer(self, channel: ChannelT, declare: bool=False) -> None:
try:
if declare:
(await channel.maybe_declare())
self._passive_started.set()
try:
async for item in self:
...
except BaseException as exc:
(await channel.throw(exc))
finally:
self._channel_stop_iteration(channel)
self._passive = False
def _channel_stop_iteration(self, channel: Any) -> None:
try:
on_stop_iteration = channel.on_stop_iteration
except AttributeError:
pass
else:
on_stop_iteration()
def echo(self, *channels: Union[(str, ChannelT)]) -> StreamT:
_channels = [(self.derive_topic(c) if isinstance(c, str) else c) for c in channels]
async def echoing(value: T) -> T:
(await asyncio.wait([asyncio.ensure_future(maybe_forward(value, channel)) for channel in _channels], return_when=asyncio.ALL_COMPLETED))
return value
self.add_processor(echoing)
return self
def group_by(self, key: GroupByKeyArg, *, name: Optional[str]=None, topic: Optional[TopicT]=None, partitions: Optional[int]=None) -> StreamT:
if self._finalized:
return self
channel: ChannelT
if (self.concurrency_index is not None):
raise ImproperlyConfigured('Agent with concurrency>1 cannot use stream.group_by!')
if (not name):
if isinstance(key, FieldDescriptorT):
name = key.ident
else:
raise TypeError('group_by with callback must set name=topic_suffix')
if (topic is not None):
channel = topic
else:
prefix = ''
if (self.prefix and (not cast(TopicT, self.channel).has_prefix)):
prefix = (self.prefix + '-')
suffix = f'-{name}-repartition'
p = (partitions if partitions else self.app.conf.topic_partitions)
channel = cast(ChannelT, self.channel).derive(prefix=prefix, suffix=suffix, partitions=p, internal=True)
format_key = self._format_key
channel_it = aiter(channel)
if (self._next is not None):
raise ImproperlyConfigured('Stream already uses group_by/through')
grouped = self._chain(channel=channel_it)
async def repartition(value: T) -> T:
event = self.current_event
if (event is None):
raise RuntimeError('Cannot repartition stream with non-topic channel')
new_key = (await format_key(key, value))
(await event.forward(channel, key=new_key))
return value
self.add_processor(repartition)
self._enable_passive(cast(ChannelT, channel_it), declare=True)
return grouped
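    # A minimal group_by() usage sketch (the app, topic and model names here
    # are assumptions, not part of this module):
    #
    #     s = app.topic('withdrawals', value_type=Withdrawal).stream()
    #     by_country = s.group_by(Withdrawal.country)
    #
    # Each event is forwarded to the derived `...-country-repartition` topic,
    # re-keyed by the formatted key.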
def filter(self, fun: Processor[T]) -> StreamT:
async def on_value(value: T) -> T:
if (not (await maybe_async(fun(value)))):
raise Skip()
else:
return value
self.add_processor(on_value)
return self
async def _format_key(self, key: GroupByKeyArg, value: T_contra) -> str:
try:
if isinstance(key, FieldDescriptorT):
return key.getattr(cast(ModelT, value))
return (await maybe_async(cast(Callable, key)(value)))
except BaseException as exc:
self.log.exception('Error in grouping key : %r', exc)
raise Skip() from exc
def derive_topic(self, name: str, *, schema: Optional[SchemaT]=None, key_type: ModelArg=None, value_type: ModelArg=None, prefix: str='', suffix: str='') -> TopicT:
if isinstance(self.channel, TopicT):
return cast(TopicT, self.channel).derive_topic(topics=[name], schema=schema, key_type=key_type, value_type=value_type, prefix=prefix, suffix=suffix)
raise ValueError('Cannot derive topic from non-topic channel.')
async def throw(self, exc: BaseException) -> None:
(await cast(ChannelT, self.channel).throw(exc))
def combine(self, *nodes: JoinableT, **kwargs: Any) -> StreamT:
if self._finalized:
return self
stream = self._chain(combined=(self.combined + list(nodes)))
for node in stream.combined:
node.contribute_to_stream(stream)
return stream
def contribute_to_stream(self, active: StreamT) -> None:
self.outbox = active.outbox
async def remove_from_stream(self, stream: StreamT) -> None:
(await self.stop())
def join(self, *fields: FieldDescriptorT) -> StreamT:
return self._join(joins.RightJoin(stream=self, fields=fields))
def left_join(self, *fields: FieldDescriptorT) -> StreamT:
return self._join(joins.LeftJoin(stream=self, fields=fields))
def inner_join(self, *fields: FieldDescriptorT) -> StreamT:
return self._join(joins.InnerJoin(stream=self, fields=fields))
def outer_join(self, *fields: FieldDescriptorT) -> StreamT:
return self._join(joins.OuterJoin(stream=self, fields=fields))
def _join(self, join_strategy: JoinT) -> StreamT:
return self.clone(join_strategy=join_strategy)
async def on_merge(self, value: T=None) -> Optional[T]:
join_strategy = self.join_strategy
if join_strategy:
value = (await join_strategy.process(value))
return value
async def on_start(self) -> None:
if self._on_start:
(await self._on_start())
if self._passive:
(await self._passive_started.wait())
async def stop(self) -> None:
for s in cast(Stream, self.get_root_stream())._iter_ll_forwards():
(await Service.stop(cast(Service, s)))
async def on_stop(self) -> None:
self._passive = False
self._passive_started.clear()
for table_or_stream in self.combined:
(await table_or_stream.remove_from_stream(self))
def __iter__(self) -> Any:
return self
def __next__(self) -> T:
raise NotImplementedError('Streams are asynchronous: use `async for`')
def __aiter__(self) -> AsyncIterator[T_co]:
if (_CStreamIterator is not None):
return self._c_aiter()
else:
return self._py_aiter()
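    # _c_aiter (below) drives iteration through the optional Cython extension
    # (_CStreamIterator); _py_aiter is the equivalent pure-Python loop.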
async def _c_aiter(self) -> AsyncIterator[T_co]:
self.log.dev('Using Cython optimized __aiter__')
skipped_value = self._skipped_value
self._finalized = True
started_by_aiter = (await self.maybe_start())
it = _CStreamIterator(self)
try:
while (not self.should_stop):
do_ack = self.enable_acks
(value, sensor_state) = (await it.next())
try:
if (value is not skipped_value):
self.events_total += 1
(yield value)
finally:
(event, self.current_event) = (self.current_event, None)
it.after(event, (do_ack or (value is skipped_value)), sensor_state)
except StopAsyncIteration:
return
finally:
self._channel_stop_iteration(self.channel)
if started_by_aiter:
(await self.stop())
self.service_reset()
def _set_current_event(self, event: Optional[EventT]=None) -> None:
if (event is None):
_current_event.set(None)
else:
_current_event.set(weakref.ref(event))
self.current_event = event
async def _py_aiter(self) -> AsyncIterator[T_co]:
self._finalized = True
started_by_aiter = (await self.maybe_start())
on_merge = self.on_merge
on_stream_event_out = self._on_stream_event_out
on_message_out = self._on_message_out
channel = self.channel
if isinstance(channel, ChannelT):
chan_is_channel = True
chan = cast(ChannelT, self.channel)
chan_queue = chan.queue
chan_queue_empty = chan_queue.empty
chan_errors = chan_queue._errors
chan_quick_get = chan_queue.get_nowait
else:
chan_is_channel = False
chan_queue = cast(ThrowableQueue, None)
chan_queue_empty = cast(Callable, None)
chan_errors = cast(Deque, None)
chan_quick_get = cast(Callable, None)
chan_slow_get = channel.__anext__
processors = self._processors
on_stream_event_in = self._on_stream_event_in
create_ref = weakref.ref
_maybe_async = maybe_async
event_cls = EventT
_current_event_contextvar = _current_event
consumer: ConsumerT = self.app.consumer
unacked: Set[Message] = consumer.unacked
add_unacked: Callable[([Message], None)] = unacked.add
acking_topics: Set[str] = self.app.topics.acking_topics
on_message_in = self._on_message_in
sleep = asyncio.sleep
trace = self.app.trace
_shortlabel = shortlabel
sensor_state: Optional[Dict] = None
skipped_value = self._skipped_value
try:
while (not self.should_stop):
event = None
do_ack = self.enable_acks
value: Any = None
while ((value is None) and (event is None)):
(await sleep(0))
channel_value: Any
if chan_is_channel:
if chan_errors:
raise chan_errors.popleft()
if chan_queue_empty():
channel_value = (await chan_slow_get())
else:
channel_value = chan_quick_get()
else:
channel_value = (await chan_slow_get())
if isinstance(channel_value, event_cls):
event = channel_value
message = event.message
topic = message.topic
tp = message.tp
offset = message.offset
if ((not self.app.flow_control.is_active()) or (message.generation_id != self.app.consumer_generation_id)):
value = skipped_value
self.log.dev('Skipping message %r with generation_id %r because app generation_id is %r flow_control.is_active %r', message, message.generation_id, self.app.consumer_generation_id, self.app.flow_control.is_active())
break
if ((topic in acking_topics) and (not message.tracked)):
message.tracked = True
add_unacked(message)
on_message_in(message.tp, message.offset, message)
sensor_state = on_stream_event_in(tp, offset, self, event)
_current_event_contextvar.set(create_ref(event))
self.current_event = event
value = event.value
else:
value = channel_value
self.current_event = None
sensor_state = None
try:
for processor in processors:
with trace(f'processor-{_shortlabel(processor)}'):
value = (await _maybe_async(processor(value)))
value = (await on_merge(value))
except Skip:
value = skipped_value
try:
if (value is not skipped_value):
self.events_total += 1
(yield value)
finally:
self.current_event = None
if ((event is not None) and (do_ack or (value is skipped_value))):
last_stream_to_ack = event.ack()
message = event.message
tp = event.message.tp
offset = event.message.offset
on_stream_event_out(tp, offset, self, event, sensor_state)
if last_stream_to_ack:
on_message_out(tp, offset, message)
except StopAsyncIteration:
return
finally:
self._channel_stop_iteration(channel)
if started_by_aiter:
(await self.stop())
self.service_reset()
async def __anext__(self) -> T:
...
async def ack(self, event: EventT) -> bool:
last_stream_to_ack = event.ack()
message = event.message
tp = message.tp
offset = message.offset
self._on_stream_event_out(tp, offset, self, event)
if last_stream_to_ack:
self._on_message_out(tp, offset, message)
return last_stream_to_ack
def __and__(self, other: Any) -> Any:
return self.combine(self, other)
def __copy__(self) -> Any:
return self.clone()
def _repr_info(self) -> str:
if self.combined:
return reprlib.repr(self.combined)
return reprlib.repr(self.channel)
def label(self) -> str:
return f'{type(self).__name__}: {self._repr_channel()}'
def _repr_channel(self) -> str:
return reprlib.repr(self.channel)
def shortlabel(self) -> str:
return f'Stream: {self._human_channel()}'
def _human_channel(self) -> str:
if self.combined:
return '&'.join((s._human_channel() for s in self.combined))
        return f'{type(self.channel).__name__}: {self.channel}'
class Harmony(metaclass=ABCMeta):
    @abstractmethod
    def harmonize(self, color: Color, space: str) -> list[Color]:
        """Return the harmony colors for the given color and space."""
def get_cylinder(self, color: Color, space: str) -> Color:
color = color.convert(space, norm=False).normalize()
if isinstance(color._space, Cylindrical):
return color
if isinstance(color._space, Labish):
cs = color._space
name = color.space()
class HarmonyLCh(_HarmonyLCh):
NAME = '-harmony-cylinder'
                SERIALIZE = ('---harmony-cylinder',)
BASE = name
WHITE = cs.WHITE
                DYNAMIC_RANGE = cs.DYNAMIC_RANGE
INDEXES = cs.indexes()
ORIG_SPACE = cs
def is_achromatic(self, coords: Vector) -> (bool | None):
return self.ORIG_SPACE.is_achromatic(self.to_base(coords))
            class ColorCyl(type(color)):
                ...
ColorCyl.register(HarmonyLCh())
return ColorCyl(color).convert('-harmony-cylinder')
if isinstance(color._space, Regular):
cs = color._space
name = color.space()
class HarmonyHSL(_HarmonyHSL, HSL):
NAME = '-harmony-cylinder'
                SERIALIZE = ('---harmony-cylinder',)
BASE = name
GAMUT_CHECK = name
CLIP_SPACE = None
WHITE = cs.WHITE
                DYNAMIC_RANGE = cs.DYNAMIC_RANGE
INDEXES = (cs.indexes() if hasattr(cs, 'indexes') else [0, 1, 2])
ORIG_SPACE = cs
def is_achromatic(self, coords: Vector) -> (bool | None):
return self.ORIG_SPACE.is_achromatic(self.to_base(coords))
            class ColorCyl(type(color)):
                ...
ColorCyl.register(HarmonyHSL())
return ColorCyl(color).convert('-harmony-cylinder')
        raise ValueError('Unsupported color space type {}'.format(color.space()))
class IntegrationTestCase(unittest.TestCase):
mock_response = None
def __init__(self, *args, **kwargs):
super(IntegrationTestCase, self).__init__(*args, **kwargs)
FacebookAdsApi.init(access_token='access_token', crash_log=False)
def setUp(self):
self.patcher = patch('requests.Session.request')
self.mock_request = self.patcher.start()
self.mock_response = Response()
warnings.filterwarnings(action='ignore', category=DeprecationWarning)
def tearDown(self):
        self.mock_response = None
        self.patcher.stop()
def test_update_action_is_unsupported(db: Session, policy: Policy) -> None:
with pytest.raises(RuleValidationError) as exc:
Rule.create(db=db, data={'action_type': ActionType.update.value, 'client_id': policy.client_id, 'name': 'Invalid Rule', 'policy_id': policy.id, 'storage_destination_id': policy.rules[0].storage_destination.id})
    assert (exc.value.args[0] == 'update Rules are not supported at this time.')
class Node():
def __init__(self, position):
self.location = position
self.prev = None
self.g_cost = 0
self.h_cost = 0
self.f_cost = 0
self.Occupied = False
def update_cost(self, g, h):
self.g_cost = g
self.h_cost = h
self.f_cost = (g + h)
def __eq__(self, node):
        return (self.location == node.location)
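# Usage sketch for the A*-style Node above (grid and heuristic values are
# hypothetical):
#
#     start = Node((0, 0))
#     neighbour = Node((0, 1))
#     neighbour.prev = start
#     neighbour.update_cost(g=start.g_cost + 1, h=9)  # f_cost = 1 + 9 = 10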
def test_receipt_processing_with_warn_flag(indexed_event_contract, dup_txn_receipt):
event_instance = indexed_event_contract.events.LogSingleWithIndex()
with pytest.warns(UserWarning, match='Expected 1 log topics. Got 0'):
returned_logs = event_instance.process_receipt(dup_txn_receipt, errors=WARN)
    assert (len(returned_logs) == 0)
def gen_function(func_attrs: Dict[(str, Any)], shape_eval_template: jinja2.Template, exec_template: jinja2.Template, extra_header_template: jinja2.Template, get_func_signature: Any) -> str:
rank = func_attrs['inputs'][0]._rank()
eps = func_attrs.get('eps', '1e-5')
input_accessor = func_attrs['input_accessors'][0]
output_accessor = func_attrs['output_accessors'][0]
input_strides = []
output_strides = []
for (i, _) in enumerate(input_accessor.original_shapes):
input_strides.append(input_accessor.stride(i))
output_strides.append(output_accessor.stride(i))
input_offset = input_accessor.offset
output_offset = output_accessor.offset
exec_path = func_attrs['exec_path']
op_instance = func_attrs['op_instance']
inst_def_flag = set()
instances = {}
instance_decl = ''
for exec_item in exec_path.values():
fname = ('f' + sha1(exec_item.exec_cond.encode()).hexdigest())
algo = exec_item.algo
if (algo not in inst_def_flag):
config = norm_common.emit_instance(op_instance[algo])
inst_def_flag.add(algo)
else:
config = ''
inst = norm_common.INSTANCE_TEMPLATE.render(config=config, name=fname, config_name=norm_common.extract_config_name(config))
instances[exec_item.exec_cond] = inst
instance_decl += inst
shape_eval = (shape_eval_template.render(rank=rank) if shape_eval_template else '')
exec_cond_template = func_attrs['exec_cond_template']
exec_paths = ''
for (key, _) in instances.items():
fname = ('f' + sha1(key.encode()).hexdigest())
program = exec_template.render(instance=fname, dtype='void', reduce_dims=(rank - 1), eps=eps, input_strides=input_strides, output_strides=output_strides, input_offset=input_offset, output_offset=output_offset)
exec_inst = exec_cond_template.render(indent=' ', cond=key, program=program)
exec_paths += exec_inst
    return norm_common.FUNC_TEMPLATE.render(instances_decl=instance_decl, func_signature=get_func_signature(func_attrs), shape_eval=shape_eval, exec_paths=exec_paths, extra_headers=extra_header_template.render())
class TestMultilabelAccuracy(testslide.TestCase):
def test_top_k(self) -> None:
metric = MultilabelAccuracy(top_k=2)
self.assertTrue(torch.equal(metric.compute(), torch.tensor(0.0)))
preds = torch.tensor([[1.0, (- 1.0), 2.0]])
target = torch.tensor([[0, 1.0, 0]])
metric.update(preds, target)
self.assertTrue(torch.equal(metric.compute(), torch.tensor(0.0)))
preds = torch.tensor([[1.0, (- 0.5), 2.0]])
target = torch.tensor([[1, 1, 0]])
metric.update(preds, target)
self.assertTrue(torch.equal(metric.compute(), torch.tensor(0.5)))
def test_invalid_top_k(self) -> None:
metric = MultilabelAccuracy(top_k=10)
preds = torch.tensor([[1.0]])
target = torch.tensor([[0]])
with self.assertRaises(AssertionError):
            metric.update(preds, target)
def build_zips(series_name):
for folder in os.scandir(series_name):
folder = folder.path
zip_name = folder.replace('\\', '/').split('/')[1]
with ZipFile(f'Builds/{zip_name}.zip', 'w') as zf:
for file in glob(f'{folder}/**/*.*', recursive=True):
if (not file.endswith('.pak')):
new_name = file.replace('\\', '/').split('/', 2)[2]
                    zf.write(file, new_name)
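# build_zips above expects a `<series_name>/<build_name>/...` layout and an
# existing Builds/ directory: each build folder is zipped to
# Builds/<build_name>.zip, skipping *.pak files and stripping the first two
# path components from archive names.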
class IBCCoreChannelRestClient(IBCCoreChannel):
API_URL = '/ibc/core/channel/v1beta1'
def __init__(self, rest_api: RestClient) -> None:
self._rest_api = rest_api
def Channel(self, request: QueryChannelRequest) -> QueryChannelResponse:
json_response = self._rest_api.get(f'{self.API_URL}/channels/{request.channel_id}/ports/{request.port_id}')
return Parse(json_response, QueryChannelResponse())
def Channels(self, request: QueryChannelsRequest) -> QueryChannelsResponse:
json_response = self._rest_api.get(f'{self.API_URL}/channels', request)
return Parse(json_response, QueryChannelsResponse())
def ConnectionChannels(self, request: QueryConnectionChannelsRequest) -> QueryConnectionChannelsResponse:
json_response = self._rest_api.get(f'{self.API_URL}/connections/{request.connection}/channels', request)
return Parse(json_response, QueryConnectionChannelsResponse())
def ChannelClientState(self, request: QueryChannelClientStateRequest) -> QueryChannelClientStateResponse:
json_response = self._rest_api.get(f'{self.API_URL}/channels/{request.channel_id}/ports/{request.port_id}/client_state')
return Parse(json_response, QueryChannelClientStateResponse())
def ChannelConsensusState(self, request: QueryChannelConsensusStateRequest) -> QueryChannelConsensusStateResponse:
json_response = self._rest_api.get(f'{self.API_URL}/channels/{request.channel_id}/ports/{request.port_id}/consensus_state/revision/{request.revision_number}/height/{request.revision_height}')
return Parse(json_response, QueryChannelConsensusStateResponse())
def PacketCommitment(self, request: QueryPacketCommitmentRequest) -> QueryPacketCommitmentResponse:
json_response = self._rest_api.get(f'{self.API_URL}/channels/{request.channel_id}/ports/{request.port_id}/packet_commitments/{request.sequence}')
return Parse(json_response, QueryPacketCommitmentResponse())
def PacketCommitments(self, request: QueryPacketCommitmentsRequest) -> QueryPacketCommitmentsResponse:
json_response = self._rest_api.get(f'{self.API_URL}/channels/{request.channel_id}/ports/{request.port_id}/packet_commitments', request)
return Parse(json_response, QueryPacketCommitmentsResponse())
def PacketReceipt(self, request: QueryPacketReceiptRequest) -> QueryPacketReceiptResponse:
json_response = self._rest_api.get(f'{self.API_URL}/channels/{request.channel_id}/ports/{request.port_id}/packet_receipts/{request.sequence}')
return Parse(json_response, QueryPacketReceiptResponse())
def PacketAcknowledgement(self, request: QueryPacketAcknowledgementRequest) -> QueryPacketAcknowledgementResponse:
json_response = self._rest_api.get(f'{self.API_URL}/channels/{request.channel_id}/ports/{request.port_id}/packet_acks/{request.sequence}')
return Parse(json_response, QueryPacketAcknowledgementResponse())
def PacketAcknowledgements(self, request: QueryPacketAcknowledgementsRequest) -> QueryPacketAcknowledgementsResponse:
json_response = self._rest_api.get(f'{self.API_URL}/channels/{request.channel_id}/ports/{request.port_id}/packet_acknowledgements', request)
return Parse(json_response, QueryPacketAcknowledgementsResponse())
def UnreceivedPackets(self, request: QueryUnreceivedPacketsRequest) -> QueryUnreceivedPacketsResponse:
json_response = self._rest_api.get(f"{self.API_URL}/channels/{request.channel_id}/ports/{request.port_id}/packet_commitments/{','.join(map(str, request.packet_commitment_sequences))}/unreceived_packets", request)
return Parse(json_response, QueryUnreceivedPacketsResponse())
def UnreceivedAcks(self, request: QueryUnreceivedAcksRequest) -> QueryUnreceivedAcksResponse:
json_response = self._rest_api.get(f"{self.API_URL}/channels/{request.channel_id}/ports/{request.port_id}/packet_commitments/{','.join(map(str, request.packet_ack_sequences))}/unreceived_acks", request)
return Parse(json_response, QueryUnreceivedAcksResponse())
def NextSequenceReceive(self, request: QueryNextSequenceReceiveRequest) -> QueryNextSequenceReceiveResponse:
json_response = self._rest_api.get(f'{self.API_URL}/channels/{request.channel_id}/ports/{request.port_id}/next_sequence')
        return Parse(json_response, QueryNextSequenceReceiveResponse())
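# Each method of IBCCoreChannelRestClient maps a query protobuf onto the
# corresponding REST path and parses the JSON response back into the matching
# response message with Parse.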
class GetShareTokenResponse(DatClass, str):
share_token: str = None
expire_time: str = None
expires_in: int = None
share_id: str = None
share_pwd: str = None
def __new__(cls, *args, **kwargs):
share_token = kwargs.get('share_token')
if (share_token is None):
share_token = args[0]
DatClass.__new__(cls, *args, **kwargs)
return str.__new__(cls, share_token)
def __str__(self):
return self.share_token
def __repr__(self):
        return f'{self.share_token!r}'
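# GetShareTokenResponse subclasses str, so instances behave as the raw
# share_token string wherever a plain token is expected, while still exposing
# the other share fields as attributes.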
def extended_lvis_load(json_file, image_root, dataset_name=None):
from lvis import LVIS
json_file = _cache_json_file(json_file)
timer = Timer()
lvis_api = LVIS(json_file)
if (timer.seconds() > 1):
logger.info('Loading {} takes {:.2f} seconds.'.format(json_file, timer.seconds()))
img_ids = sorted(list(lvis_api.imgs.keys()))
imgs = lvis_api.load_imgs(img_ids)
anns = [lvis_api.img_ann_map[img_id] for img_id in img_ids]
ann_ids = [ann['id'] for anns_per_image in anns for ann in anns_per_image]
assert (len(set(ann_ids)) == len(ann_ids)), "Annotation ids in '{}' are not unique".format(json_file)
imgs_anns = list(zip(imgs, anns))
logger.info('Loaded {} images in the LVIS format from {}'.format(len(imgs_anns), json_file))
dataset_dicts = []
count_ignore_image_root_warning = 0
for (img_dict, anno_dict_list) in imgs_anns:
record = {}
if ('://' not in img_dict['file_name']):
file_name = img_dict['file_name']
if img_dict['file_name'].startswith('COCO'):
file_name = file_name[(- 16):]
record['file_name'] = os.path.join(image_root, file_name)
else:
if (image_root is not None):
count_ignore_image_root_warning += 1
if (count_ignore_image_root_warning == 1):
logger.warning("Found '://' in file_name: {}, ignore image_root: {}(logged once per dataset).".format(img_dict['file_name'], image_root))
record['file_name'] = img_dict['file_name']
record['height'] = img_dict['height']
record['width'] = img_dict['width']
record['not_exhaustive_category_ids'] = img_dict.get('not_exhaustive_category_ids', [])
record['neg_category_ids'] = img_dict.get('neg_category_ids', [])
image_id = record['image_id'] = img_dict['id']
objs = []
for anno in anno_dict_list:
assert (anno['image_id'] == image_id)
obj = {'bbox': anno['bbox'], 'bbox_mode': BoxMode.XYWH_ABS}
obj['category_id'] = (anno['category_id'] - 1)
segm = anno['segmentation']
valid_segm = [poly for poly in segm if (((len(poly) % 2) == 0) and (len(poly) >= 6))]
assert (len(segm) == len(valid_segm)), 'Annotation contains an invalid polygon with < 3 points'
assert (len(segm) > 0)
obj['segmentation'] = segm
objs.append(obj)
record['annotations'] = objs
dataset_dicts.append(record)
if dataset_name:
meta = MetadataCatalog.get(dataset_name)
meta.thing_classes = get_extended_lvis_instances_meta(lvis_api)['thing_classes']
    return dataset_dicts
class _IamProjectsRolesRepository(repository_mixins.ListQueryMixin, _base_repository.GCPRepository):
def __init__(self, **kwargs):
super(_IamProjectsRolesRepository, self).__init__(key_field='parent', max_results_field='pageSize', component='projects.roles', **kwargs)
def get_name(project_id):
if (project_id and (not project_id.startswith('projects/'))):
project_id = 'projects/{}'.format(project_id)
    return project_id
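# e.g. get_name('my-project') -> 'projects/my-project'; ids that are falsy or
# already prefixed with 'projects/' pass through unchanged.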
def test_mine_multiple_timestamp(devnetwork, chain):
chain.mine(5, timestamp=(chain.time() + 123))
timestamps = [i.timestamp for i in list(chain)[(- 5):]]
assert ((chain.time() - timestamps[(- 1)]) < 3)
for i in range(1, 5):
assert (timestamps[i] > timestamps[(i - 1)])
    assert ((timestamps[0] + 123) == timestamps[(- 1)])
def get_ao_document_query(q, **kwargs):
categories = {'F': 'Final Opinion', 'V': 'Votes', 'D': 'Draft Documents', 'R': 'AO Request, Supplemental Material, and Extensions of Time', 'W': 'Withdrawal of Request', 'C': 'Comments and Ex parte Communications', 'S': 'Commissioner Statements'}
if kwargs.get('ao_category'):
ao_category = [categories[c] for c in kwargs.get('ao_category')]
combined_query = [Q('terms', documents__category=ao_category)]
else:
combined_query = []
if q:
combined_query.append(Q('query_string', query=q, fields=['documents.text']))
    return Q('nested', path='documents', inner_hits=INNER_HITS, query=Q('bool', must=combined_query))
class FakerApi(ProviderInterface, AudioInterface):
provider_name = 'faker'
def __init__(self, api_keys: Dict={}) -> None:
super().__init__()
def audio__speech_to_text_async__launch_job(self, file: str, language: str, speakers: int, profanity_filter: bool, vocabulary: list, audio_attributes: tuple, file_url: str='', provider_params: dict=dict()) -> AsyncLaunchJobResponseType:
sleep(randint(1, 3))
return AsyncLaunchJobResponseType(provider_job_id='SomeFakeID')
def audio__speech_to_text_async__get_job_result(self, provider_job_id: str) -> AsyncBaseResponseType[SpeechToTextAsyncDataClass]:
sleep(randint(1, 3))
standardized_response = SpeechToTextAsyncDataClass(text='empty', diarization=SpeechDiarization(total_speakers=1))
provider_correct_response = AsyncResponseType[SpeechToTextAsyncDataClass](original_response={}, standardized_response=standardized_response, provider_job_id=provider_job_id)
chance_to_stop = randint(2, 1000)
if (provider_job_id == 'FINISHED'):
return provider_correct_response
if (provider_job_id == 'ERROR'):
raise Exception('error')
if (provider_job_id == 'pending'):
return AsyncPendingResponseType[SpeechToTextAsyncDataClass](provider_job_id=provider_job_id)
if (chance_to_stop < 250):
return provider_correct_response
if (chance_to_stop > 994):
raise Exception('error')
        return AsyncPendingResponseType[SpeechToTextAsyncDataClass](provider_job_id=provider_job_id)
@pytest.fixture(scope='function')
def create_test_data(create_maya_test_db, create_pymel, create_maya_env):
logger.debug('creating user1')
data = dict()
data['temp_repo_path'] = create_maya_test_db
pm = create_pymel
maya_env = create_maya_env
from anima.dcc.mayaEnv import auxiliary
data['user1'] = User(name='User 1', login='user1', email='', password='12345')
logger.debug('creating repo1')
data['repo1'] = Repository(name='Test Project Repository', code='TPR', linux_path=data['temp_repo_path'], windows_path=data['temp_repo_path'], osx_path=data['temp_repo_path'])
logger.debug('committing repo1')
DBSession.add(data['repo1'])
DBSession.commit()
logger.debug('commit repo1 done!')
logger.debug('creating statuses')
data['status_new'] = Status.query.filter_by(code='NEW').first()
data['status_wip'] = Status.query.filter_by(code='WIP').first()
data['status_comp'] = Status.query.filter_by(code='CMPL').first()
logger.debug('creating statuses done')
logger.debug('creating filename template')
data['task_template'] = FilenameTemplate(name='Task Template', target_entity_type='Task', path='$REPO{{project.repository.code}}/{{project.code}}/{%- for parent_task in parent_tasks -%}{{parent_task.nice_name}}/{%- endfor -%}', filename='{{version.nice_name}}_v{{"%03d"|format(version.version_number)}}')
logger.debug('creating filename template done')
logger.debug('creating asset template')
data['asset_template'] = FilenameTemplate(name='Asset Template', target_entity_type='Asset', path='$REPO{{project.repository.code}}/{{project.code}}/{%- for parent_task in parent_tasks -%}{{parent_task.nice_name}}/{%- endfor -%}', filename='{{version.nice_name}}_v{{"%03d"|format(version.version_number)}}')
logger.debug('creating asset template done')
logger.debug('creating shot template')
data['shot_template'] = FilenameTemplate(name='Shot Template', target_entity_type='Shot', path='$REPO{{project.repository.code}}/{{project.code}}/{%- for parent_task in parent_tasks -%}{{parent_task.nice_name}}/{%- endfor -%}', filename='{{version.nice_name}}_v{{"%03d"|format(version.version_number)}}')
logger.debug('creating shot template done')
data['sequence_template'] = FilenameTemplate(name='Sequence Template', target_entity_type='Sequence', path='$REPO{{project.repository.code}}/{{project.code}}/{%- for parent_task in parent_tasks -%}{{parent_task.nice_name}}/{%- endfor -%}', filename='{{version.nice_name}}_v{{"%03d"|format(version.version_number)}}')
data['structure'] = Structure(name='Project Struture', templates=[data['task_template'], data['asset_template'], data['shot_template'], data['sequence_template']])
data['project_status_list'] = StatusList.query.filter_by(target_entity_type='Project').first()
data['image_format'] = ImageFormat(name='HD 1080', width=1920, height=1080, pixel_aspect=1.0)
data['project'] = Project(name='Test Project', code='TP', repositories=[data['repo1']], status_list=data['project_status_list'], structure=data['structure'], image_format=data['image_format'])
DBSession.add(data['project'])
DBSession.commit()
data['task_status_list'] = StatusList.query.filter_by(target_entity_type='Task').first()
data['asset_status_list'] = StatusList.query.filter_by(target_entity_type='Asset').first()
data['shot_status_list'] = StatusList.query.filter_by(target_entity_type='Shot').first()
data['sequence_status_list'] = StatusList.query.filter_by(target_entity_type='Sequence').first()
data['character_type'] = Type(name='Character', code='CHAR', target_entity_type='Asset')
data['character_design_type'] = Type(name='Character Design', code='chardesign', target_entity_type='Task')
data['anim_type'] = Type(name='Animation', code='anim', target_entity_type='Task')
data['model_type'] = Type(name='Model', code='model', target_entity_type='Task')
data['look_development_type'] = Type(name='Look Development', code='lookdev', target_entity_type='Task')
data['rig_type'] = Type(name='Rig', code='rig', target_entity_type='Task')
    data['exterior_type'] = Type(name='Exterior', code='exterior', target_entity_type='Asset')
data['building_type'] = Type(name='Building', code='building', target_entity_type='Asset')
data['layout_type'] = Type(name='Layout', code='layout', target_entity_type='Task')
data['prop_type'] = Type(name='Prop', code='prop', target_entity_type='Asset')
data['vegetation_type'] = Type(name='Vegetation', code='vegetation', target_entity_type='Task')
data['task1'] = Task(name='Test Task 1', project=data['project'])
data['task2'] = Task(name='Test Task 2', project=data['project'])
data['task3'] = Task(name='Test Task 3', project=data['project'])
data['task4'] = Task(name='Test Task 4', parent=data['task1'])
data['task5'] = Task(name='Test Task 5', parent=data['task1'])
data['task6'] = Task(name='Test Task 6', parent=data['task1'])
data['asset1'] = Asset(name='Asset 1', code='asset1', type=data['character_type'], project=data['project'])
data['asset1_model'] = Task(name='Model', type=data['model_type'], parent=data['asset1'])
data['asset1_lookdev'] = Task(name='LookDev', type=data['look_development_type'], parent=data['asset1'])
data['asset2'] = Asset(name='Asset 2', code='asset2', type=data['character_type'], parent=data['task4'])
data['asset2_model'] = Task(name='Model', type=data['model_type'], parent=data['asset2'])
data['asset2_lookdev'] = Task(name='LookDev', type=data['look_development_type'], parent=data['asset2'])
data['sequence1'] = Sequence(name='Sequence1', code='SEQ1', project=data['project'])
data['sequence2'] = Sequence(name='Sequence2', code='SEQ2', parent=data['task2'])
data['shot1'] = Shot(code='SH001', project=data['project'])
data['shot2'] = Shot(code='SH002', parent=data['sequence1'])
data['shot3'] = Shot(code='SH003', parent=data['sequence2'])
data['shot3_anim'] = Task(name='Anim', type=data['anim_type'], parent=data['shot3'])
data['assets'] = Task(name='Assets', project=data['project'])
data['characters'] = Task(name='Characters', parent=data['assets'])
data['char1'] = Asset(name='Char1', code='Char1', type=data['character_type'], parent=data['characters'])
data['char1_char_design'] = Task(name='Character Design', type=data['character_design_type'], parent=data['char1'])
data['char1_model'] = Task(name='Model', parent=data['char1'], type=data['model_type'], depends=[data['char1_char_design']])
data['char1_look_dev'] = Task(name='Look Dev', parent=data['char1'], type=data['look_development_type'], depends=[data['char1_model']])
data['char1_rig'] = Task(name='Rig', parent=data['char1'], type=data['rig_type'], depends=[data['char1_model']])
data['environments'] = Task(name='Environments', parent=data['assets'])
data['exteriors'] = Task(name='Exteriors', parent=data['environments'])
data['ext1'] = Asset(name='Ext1', code='Ext1', type=data['exterior_type'], parent=data['exteriors'])
data['ext2'] = Asset(name='Ext2', code='Ext2', type=data['exterior_type'], parent=data['exteriors'])
data['building1'] = Asset(name='Building1', code='Building1', type=data['building_type'], parent=data['ext1'])
data['building1_layout'] = Task(name='Layout', type=data['layout_type'], parent=data['building1'])
data['building1_look_dev'] = Task(name='LookDev', type=data['look_development_type'], parent=data['building1'])
data['building1_props'] = Task(name='Props', parent=data['building1'])
data['building1_yapi'] = Asset(name='YAPI', code='YAPI', type=data['building_type'], parent=data['building1_props'])
data['building1_yapi_model'] = Task(name='Model', type=data['model_type'], parent=data['building1_yapi'])
data['building1_yapi_look_dev'] = Task(name='LookDev', type=data['look_development_type'], parent=data['building1_yapi'], depends=[data['building1_yapi_model']])
data['building1_layout'].depends.append(data['building1_yapi_model'])
data['building2'] = Asset(name='Building2', code='Building2', type=data['building_type'], parent=data['ext1'])
data['building2_layout'] = Task(name='Layout', type=data['layout_type'], parent=data['building2'])
data['building2_look_dev'] = Task(name='LookDev', type=data['look_development_type'], parent=data['building2'])
data['building2_props'] = Task(name='Props', parent=data['building2'])
data['building2_yapi'] = Asset(name='YAPI', code='YAPI', type=data['building_type'], parent=data['building2_props'])
data['building2_yapi_model'] = Task(name='Model', type=data['model_type'], parent=data['building2_yapi'])
data['building2_yapi_look_dev'] = Task(name='LookDev', type=data['look_development_type'], parent=data['building2_yapi'], depends=[data['building2_yapi_model']])
data['building2_layout'].depends.append(data['building2_yapi_model'])
data['ext1_layout'] = Task(name='Layout', type=data['layout_type'], parent=data['ext1'])
data['ext1_look_dev'] = Task(name='LookDev', type=data['look_development_type'], parent=data['ext1'], depends=[data['ext1_layout']])
data['ext2_model'] = Task(name='Model', type=data['model_type'], parent=data['ext2'])
data['ext2_look_dev'] = Task(name='LookDev', type=data['look_development_type'], parent=data['ext2'], depends=[data['ext2_model']])
data['ext2_layout'] = Task(name='Layout', type=data['layout_type'], parent=data['ext2'], depends=[data['ext2_look_dev']])
data['ext1_props'] = Task(name='Props', parent=data['ext1'])
data['prop1'] = Asset(name='Prop1', code='Prop1', type=data['prop_type'], parent=data['ext1_props'])
data['prop1_model'] = Task(name='Model', type=data['model_type'], parent=data['prop1'])
data['prop1_look_dev'] = Task(name='LookDev', type=data['look_development_type'], parent=data['prop1'])
data['ext1_vegetation'] = Task(name='Vegetation', parent=data['ext1'], type=data['vegetation_type'])
DBSession.add_all([data['repo1'], data['status_new'], data['status_wip'], data['status_comp'], data['project_status_list'], data['project'], data['task_status_list'], data['asset_status_list'], data['shot_status_list'], data['sequence_status_list'], data['task1'], data['task2'], data['task3'], data['task4'], data['task5'], data['task6'], data['asset1'], data['asset2'], data['shot1'], data['shot2'], data['shot3'], data['sequence1'], data['sequence2'], data['task_template'], data['asset_template'], data['shot_template'], data['sequence_template'], data['character_design_type'], data['model_type'], data['look_development_type'], data['rig_type'], data['exterior_type'], data['building_type'], data['layout_type'], data['prop_type'], data['vegetation_type'], data['assets'], data['characters'], data['char1'], data['char1_char_design'], data['char1_model'], data['char1_look_dev'], data['char1_rig'], data['environments'], data['exteriors'], data['ext1'], data['ext2'], data['building1'], data['building1_layout'], data['building1_look_dev'], data['building1_props'], data['building1_yapi'], data['building1_yapi_model'], data['building1_yapi_look_dev'], data['building2'], data['building2_layout'], data['building2_look_dev'], data['building2_props'], data['building2_yapi'], data['building2_yapi_model'], data['building2_yapi_look_dev'], data['ext1_layout'], data['ext1_look_dev'], data['ext1_props'], data['ext2_model'], data['ext2_look_dev'], data['ext2_layout'], data['prop1'], data['prop1_model'], data['prop1_look_dev'], data['ext1_vegetation']])
DBSession.commit()
data['asset2_model_main_v001'] = create_version(data['asset2_model'], 'Main')
data['asset2_model_main_v002'] = create_version(data['asset2_model'], 'Main')
data['asset2_model_main_v003'] = create_version(data['asset2_model'], 'Main')
data['asset2_model_take1_v001'] = create_version(data['asset2_model'], 'Take1')
data['asset2_model_take1_v002'] = create_version(data['asset2_model'], 'Take1')
data['asset2_model_take1_v003'] = create_version(data['asset2_model'], 'Take1')
data['asset2_lookdev_main_v001'] = create_version(data['asset2_lookdev'], 'Main')
data['asset2_lookdev_main_v002'] = create_version(data['asset2_lookdev'], 'Main')
data['asset2_lookdev_main_v003'] = create_version(data['asset2_lookdev'], 'Main')
data['asset2_lookdev_take1_v001'] = create_version(data['asset2_lookdev'], 'Take1')
data['asset2_lookdev_take1_v002'] = create_version(data['asset2_lookdev'], 'Take1')
data['asset2_lookdev_take1_v003'] = create_version(data['asset2_lookdev'], 'Take1')
data['version7'] = create_version(data['task5'], 'Main')
data['version8'] = create_version(data['task5'], 'Main')
data['version9'] = create_version(data['task5'], 'Main')
data['version10'] = create_version(data['task5'], 'Take1')
data['version11'] = create_version(data['task5'], 'Take1')
data['version12'] = create_version(data['task5'], 'Take1')
data['version13'] = create_version(data['task6'], 'Main')
data['version14'] = create_version(data['task6'], 'Main')
data['version15'] = create_version(data['task6'], 'Main')
data['version16'] = create_version(data['task6'], 'Take1')
data['version17'] = create_version(data['task6'], 'Take1')
data['version18'] = create_version(data['task6'], 'Take1')
data['shot3_anim_main_v001'] = create_version(data['shot3_anim'], 'Main')
data['shot3_anim_main_v002'] = create_version(data['shot3_anim'], 'Main')
data['shot3_anim_main_v003'] = create_version(data['shot3_anim'], 'Main')
data['shot3_anim_take1_v001'] = create_version(data['shot3_anim'], 'Take1')
data['shot3_anim_take1_v002'] = create_version(data['shot3_anim'], 'Take1')
data['shot3_anim_take1_v003'] = create_version(data['shot3_anim'], 'Take1')
data['version25'] = create_version(data['task3'], 'Main')
data['version26'] = create_version(data['task3'], 'Main')
data['version27'] = create_version(data['task3'], 'Main')
data['version28'] = create_version(data['task3'], 'Take1')
data['version29'] = create_version(data['task3'], 'Take1')
data['version30'] = create_version(data['task3'], 'Take1')
data['version31'] = create_version(data['asset1'], 'Main')
data['version32'] = create_version(data['asset1'], 'Main')
data['version33'] = create_version(data['asset1'], 'Main')
data['version34'] = create_version(data['asset1'], 'Take1')
data['version35'] = create_version(data['asset1'], 'Take1')
data['version36'] = create_version(data['asset1'], 'Take1')
data['version37'] = create_version(data['shot2'], 'Main')
data['version38'] = create_version(data['shot2'], 'Main')
data['version39'] = create_version(data['shot2'], 'Main')
data['version40'] = create_version(data['shot2'], 'Take1')
data['version41'] = create_version(data['shot2'], 'Take1')
data['version42'] = create_version(data['shot2'], 'Take1')
data['version43'] = create_version(data['shot1'], 'Main')
data['version44'] = create_version(data['shot1'], 'Main')
data['version45'] = create_version(data['shot1'], 'Main')
data['version46'] = create_version(data['shot1'], 'Take1')
data['version47'] = create_version(data['shot1'], 'Take1')
data['version48'] = create_version(data['shot1'], 'Take1')
data['char1_char_design_main_v001'] = create_version(data['char1_char_design'], 'Main')
data['char1_char_design_main_v002'] = create_version(data['char1_char_design'], 'Main')
data['char1_char_design_main_v003'] = create_version(data['char1_char_design'], 'Main')
data['char1_model_main_v001'] = create_version(data['char1_model'], 'Main')
data['char1_model_main_v002'] = create_version(data['char1_model'], 'Main')
data['char1_model_main_v003'] = create_version(data['char1_model'], 'Main')
data['char1_look_dev_main_v001'] = create_version(data['char1_look_dev'], 'Main')
data['char1_look_dev_main_v002'] = create_version(data['char1_look_dev'], 'Main')
data['char1_look_dev_main_v003'] = create_version(data['char1_look_dev'], 'Main')
data['char1_rig_main_v001'] = create_version(data['char1_rig'], 'Main')
data['char1_rig_main_v002'] = create_version(data['char1_rig'], 'Main')
data['char1_rig_main_v003'] = create_version(data['char1_rig'], 'Main')
data['building1_layout_main_v001'] = create_version(data['building1_layout'], 'Main')
data['building1_layout_main_v002'] = create_version(data['building1_layout'], 'Main')
data['building1_layout_main_v003'] = create_version(data['building1_layout'], 'Main')
data['building1_look_dev_main_v001'] = create_version(data['building1_look_dev'], 'Main')
data['building1_look_dev_main_v002'] = create_version(data['building1_look_dev'], 'Main')
data['building1_look_dev_main_v003'] = create_version(data['building1_look_dev'], 'Main')
data['building1_yapi_model_main_v001'] = create_version(data['building1_yapi_model'], 'Main')
data['building1_yapi_model_main_v002'] = create_version(data['building1_yapi_model'], 'Main')
data['building1_yapi_model_main_v003'] = create_version(data['building1_yapi_model'], 'Main')
data['building1_yapi_look_dev_main_v001'] = create_version(data['building1_yapi_look_dev'], 'Main')
data['building1_yapi_look_dev_main_v002'] = create_version(data['building1_yapi_look_dev'], 'Main')
data['building1_yapi_look_dev_main_v003'] = create_version(data['building1_yapi_look_dev'], 'Main')
data['building2_layout_main_v001'] = create_version(data['building2_layout'], 'Main')
data['building2_layout_main_v002'] = create_version(data['building2_layout'], 'Main')
data['building2_layout_main_v003'] = create_version(data['building2_layout'], 'Main')
data['building2_look_dev_main_v001'] = create_version(data['building2_look_dev'], 'Main')
data['building2_look_dev_main_v002'] = create_version(data['building2_look_dev'], 'Main')
data['building2_look_dev_main_v003'] = create_version(data['building2_look_dev'], 'Main')
data['building2_yapi_model_main_v001'] = create_version(data['building2_yapi_model'], 'Main')
data['building2_yapi_model_main_v002'] = create_version(data['building2_yapi_model'], 'Main')
data['building2_yapi_model_main_v003'] = create_version(data['building2_yapi_model'], 'Main')
data['building2_yapi_look_dev_main_v001'] = create_version(data['building2_yapi_look_dev'], 'Main')
data['building2_yapi_look_dev_main_v002'] = create_version(data['building2_yapi_look_dev'], 'Main')
data['building2_yapi_look_dev_main_v003'] = create_version(data['building2_yapi_look_dev'], 'Main')
data['ext1_layout_main_v001'] = create_version(data['ext1_layout'], 'Main')
data['ext1_layout_main_v002'] = create_version(data['ext1_layout'], 'Main')
data['ext1_layout_main_v003'] = create_version(data['ext1_layout'], 'Main')
data['ext1_look_dev_main_v001'] = create_version(data['ext1_look_dev'], 'Main')
data['ext1_look_dev_main_v002'] = create_version(data['ext1_look_dev'], 'Main')
data['ext1_look_dev_main_v003'] = create_version(data['ext1_look_dev'], 'Main')
data['prop1_model_main_v001'] = create_version(data['prop1_model'], 'Main')
data['prop1_model_main_v002'] = create_version(data['prop1_model'], 'Main')
data['prop1_model_main_v003'] = create_version(data['prop1_model'], 'Main')
data['prop1_look_dev_main_v001'] = create_version(data['prop1_look_dev'], 'Main')
data['prop1_look_dev_main_v002'] = create_version(data['prop1_look_dev'], 'Main')
data['prop1_look_dev_main_v003'] = create_version(data['prop1_look_dev'], 'Main')
data['ext1_vegetation_main_v001'] = create_version(data['ext1_vegetation'], 'Main')
data['ext1_vegetation_main_v002'] = create_version(data['ext1_vegetation'], 'Main')
data['ext1_vegetation_main_v003'] = create_version(data['ext1_vegetation'], 'Main')
data['prop1_look_dev_kisa_v001'] = create_version(data['prop1_look_dev'], 'Kisa')
data['prop1_look_dev_kisa_v002'] = create_version(data['prop1_look_dev'], 'Kisa')
data['prop1_look_dev_kisa_v003'] = create_version(data['prop1_look_dev'], 'Kisa')
data['prop1_model_kisa_v001'] = create_version(data['prop1_model'], 'Kisa')
data['prop1_model_kisa_v002'] = create_version(data['prop1_model'], 'Kisa')
data['prop1_model_kisa_v003'] = create_version(data['prop1_model'], 'Kisa')
data['ext2_model_main_v001'] = create_version(data['ext2_model'], 'Main')
data['ext2_model_main_v002'] = create_version(data['ext2_model'], 'Main')
data['ext2_model_main_v003'] = create_version(data['ext2_model'], 'Main')
data['ext2_look_dev_main_v001'] = create_version(data['ext2_look_dev'], 'Main')
data['ext2_look_dev_main_v002'] = create_version(data['ext2_look_dev'], 'Main')
data['ext2_look_dev_main_v003'] = create_version(data['ext2_look_dev'], 'Main')
data['ext2_layout_main_v001'] = create_version(data['ext2_layout'], 'Main')
data['ext2_layout_main_v002'] = create_version(data['ext2_layout'], 'Main')
data['ext2_layout_main_v003'] = create_version(data['ext2_layout'], 'Main')
DBSession.commit()
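    # Load the Arnold (mtoa) plugin if available, then author a paint-effects vegetation hierarchy and save it as the three vegetation versions.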
try:
pm.loadPlugin('mtoa')
except RuntimeError:
pass
pm.newFile(force=True)
base_transform = pm.nt.Transform(name='kksEnv___vegetation_ALL')
strokes = pm.nt.Transform(name='kks___vegetation_pfxStrokes')
strokes.v.set(0)
polygons = pm.nt.Transform(name='kks___vegetation_pfxPolygons')
paintable_geos = pm.nt.Transform(name='kks___vegetation_paintableGeos')
paintable_geos.setAttr('v', False)
pm.parent(strokes, base_transform)
pm.parent(polygons, base_transform)
pm.parent(paintable_geos, base_transform)
pm.parent(pm.nt.Transform(name='KksEnv_PFXbrush___acacia___strokes'), strokes)
pm.parent(pm.nt.Transform(name='Kks_PFXbrush___clover___strokes'), strokes)
acacia_polygons = pm.nt.Transform(name='KksEnv_PFXbrush___acacia___polygons')
pm.parent(acacia_polygons, polygons)
acacia_mesh_group = pm.nt.Transform(name='kksEnv_PFXbrush___acacia1MeshGroup')
acacia_main = pm.polyCube(name='kksEnv_PFXbrush___acacia1Main')[0]
acacia_leaf = pm.polyCube(name='kksEnv_PFXbrush___acacia1Leaf')[0]
pm.runtime.DeleteHistory()
pm.parent(acacia_mesh_group, acacia_polygons)
pm.parent(acacia_main, acacia_mesh_group)
pm.parent(acacia_leaf, acacia_mesh_group)
clover_polygons = pm.nt.Transform(name='KksEnv_PFXbrush___clover___polygons')
pm.parent(clover_polygons, polygons)
clover_mesh_group = pm.nt.Transform(name='kksEnv_PFXbrush___clover1MeshGroup')
clover_main = pm.polyCube(name='kksEnv_PFXbrush___clover1Main')[0]
clover_leaf = pm.polyCube(name='kksEnv_PFXbrush___clover1Leaf')[0]
pm.runtime.DeleteHistory()
pm.parent(clover_mesh_group, clover_polygons)
pm.parent(clover_main, clover_mesh_group)
pm.parent(clover_leaf, clover_mesh_group)
maya_env.save_as(version=data['ext1_vegetation_main_v001'])
maya_env.save_as(version=data['ext1_vegetation_main_v002'])
maya_env.save_as(version=data['ext1_vegetation_main_v003'])
pm.newFile(force=True)
root_node = pm.nt.Transform(name='prop1')
kulp = pm.polyCube(name='kulp')
pm.parent(kulp[0], root_node)
pm.runtime.DeleteHistory()
maya_env.save_as(data['prop1_model_main_v001'])
maya_env.save_as(data['prop1_model_main_v002'])
maya_env.save_as(data['prop1_model_main_v003'])
data['prop1_model_main_v003'].is_published = True
maya_env.save_as(data['prop1_model_kisa_v001'])
maya_env.save_as(data['prop1_model_kisa_v002'])
maya_env.save_as(data['prop1_model_kisa_v003'])
data['prop1_model_kisa_v003'].is_published = True
pm.newFile(force=True)
maya_env.save_as(data['prop1_look_dev_main_v001'])
maya_env.reference(data['prop1_model_main_v003'])
mat = pm.createSurfaceShader('aiStandard', name='kulp_aiStandard')
pm.sets(mat[1], fe=pm.ls(type='mesh'))
maya_env.save_as(data['prop1_look_dev_main_v001'])
maya_env.save_as(data['prop1_look_dev_main_v002'])
maya_env.save_as(data['prop1_look_dev_main_v003'])
data['prop1_look_dev_main_v003'].is_published = True
pm.newFile(force=True)
maya_env.save_as(data['prop1_look_dev_kisa_v001'])
maya_env.reference(data['prop1_model_kisa_v003'])
mat = pm.createSurfaceShader('aiStandard', name='kulp_aiStandard')
pm.sets(mat[1], fe=pm.ls(type='mesh'))
maya_env.save_as(data['prop1_look_dev_kisa_v001'])
maya_env.save_as(data['prop1_look_dev_kisa_v002'])
maya_env.save_as(data['prop1_look_dev_kisa_v003'])
data['prop1_look_dev_kisa_v003'].is_published = True
pm.newFile(force=True)
building1_yapi = pm.nt.Transform(name='building1_yapi')
some_cube = pm.polyCube(name='duvarlar')
pm.runtime.DeleteHistory(some_cube[1])
pm.parent(some_cube[0], building1_yapi)
maya_env.save_as(data['building1_yapi_model_main_v001'])
maya_env.save_as(data['building1_yapi_model_main_v002'])
maya_env.save_as(data['building1_yapi_model_main_v003'])
data['building1_yapi_model_main_v003'].is_published = True
building1_yapi.rename('building2_yapi')
maya_env.save_as(data['building2_yapi_model_main_v001'])
maya_env.save_as(data['building2_yapi_model_main_v002'])
maya_env.save_as(data['building2_yapi_model_main_v003'])
data['building2_yapi_model_main_v003'].is_published = True
pm.newFile(force=True)
maya_env.save_as(data['building1_yapi_look_dev_main_v001'])
maya_env.reference(data['building1_yapi_model_main_v003'])
mat = pm.createSurfaceShader('aiStandard', name='bina_aiStandard')
pm.sets(mat[1], fe=pm.ls(type='mesh'))
maya_env.save_as(data['building1_yapi_look_dev_main_v001'])
maya_env.save_as(data['building1_yapi_look_dev_main_v002'])
maya_env.save_as(data['building1_yapi_look_dev_main_v003'])
data['building1_yapi_look_dev_main_v003'].is_published = True
pm.listReferences()[0].replaceWith(data['building2_yapi_model_main_v003'].absolute_full_path)
maya_env.save_as(data['building2_yapi_look_dev_main_v001'])
maya_env.save_as(data['building2_yapi_look_dev_main_v002'])
maya_env.save_as(data['building2_yapi_look_dev_main_v003'])
data['building2_yapi_look_dev_main_v003'].is_published = True
    pm.newFile(force=True)
base_group = pm.nt.Transform(name='building1_layout')
maya_env.save_as(data['building1_layout_main_v001'])
ref_node = maya_env.reference(data['building1_yapi_look_dev_main_v003'])
ref_root_node = auxiliary.get_root_nodes(ref_node)
pm.parent(ref_root_node, base_group)
maya_env.save_as(data['building1_layout_main_v001'])
maya_env.save_as(data['building1_layout_main_v002'])
maya_env.save_as(data['building1_layout_main_v003'])
data['building1_layout_main_v003'].is_published = True
    pm.newFile(force=True)
base_group = pm.nt.Transform(name='building2_layout')
maya_env.save_as(data['building2_layout_main_v001'])
ref_node = maya_env.reference(data['building2_yapi_look_dev_main_v003'])
ref_root_node = auxiliary.get_root_nodes(ref_node)
pm.parent(ref_root_node, base_group)
maya_env.save_as(data['building2_layout_main_v001'])
maya_env.save_as(data['building2_layout_main_v002'])
maya_env.save_as(data['building2_layout_main_v003'])
data['building2_layout_main_v003'].is_published = True
pm.newFile(force=True)
maya_env.save_as(data['building1_look_dev_main_v001'])
maya_env.reference(data['building1_layout_main_v003'])
maya_env.save_as(data['building1_look_dev_main_v001'])
maya_env.save_as(data['building1_look_dev_main_v002'])
maya_env.save_as(data['building1_look_dev_main_v003'])
data['building1_look_dev_main_v003'].is_published = True
pm.newFile(force=True)
maya_env.save_as(data['building2_look_dev_main_v001'])
maya_env.reference(data['building2_layout_main_v003'])
maya_env.save_as(data['building2_look_dev_main_v001'])
maya_env.save_as(data['building2_look_dev_main_v002'])
maya_env.save_as(data['building2_look_dev_main_v003'])
data['building2_look_dev_main_v003'].is_published = True
pm.newFile(force=True)
maya_env.save_as(data['ext1_layout_main_v001'])
maya_env.reference(data['building1_layout_main_v003'])
maya_env.reference(data['building2_layout_main_v003'])
root_node = pm.nt.Transform(name='ext1_layout')
building1_layout = pm.ls('building1_layout', r=1)[0]
building2_layout = pm.ls('building2_layout', r=1)[0]
pm.parent(building1_layout, root_node)
pm.parent(building2_layout, root_node)
building1_layout.setAttr('t', (10, 0, 0))
building2_layout.setAttr('t', (0, 0, 10))
ref_node = maya_env.reference(data['ext1_vegetation_main_v003'])
ref_root_node = auxiliary.get_root_nodes(ref_node)
pm.parent(ref_root_node, root_node)
maya_env.save_as(data['ext1_layout_main_v001'])
maya_env.save_as(data['ext1_layout_main_v002'])
maya_env.save_as(data['ext1_layout_main_v003'])
data['ext1_layout_main_v003'].is_published = True
pm.newFile(force=True)
maya_env.save_as(data['ext1_look_dev_main_v001'])
maya_env.reference(data['ext1_layout_main_v003'])
maya_env.save_as(data['ext1_look_dev_main_v001'])
maya_env.save_as(data['ext1_look_dev_main_v002'])
maya_env.save_as(data['ext1_look_dev_main_v003'])
data['ext1_look_dev_main_v003'].is_published = True
    pm.newFile(force=True)
(yield data) |
class OptionPlotoptionsSunburstSonificationContexttracksActivewhen(Options):
    @property
    def crossingDown(self):
        return self._config_get(None)
    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)
    @property
    def crossingUp(self):
        return self._config_get(None)
    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)
    @property
    def max(self):
        return self._config_get(None)
    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)
    @property
    def min(self):
        return self._config_get(None)
    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)
    @property
    def prop(self):
        return self._config_get(None)
    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class ACE():
def __init__(self, ace, modifier):
self.type_name = ace['TypeName']
ace = ace['Ace']
        self.object_type_guid = '00000000-0000-0000-0000-000000000000'
self.object_type_friendly = ''
if (self.type_name == 'ACCESS_ALLOWED_OBJECT_ACE'):
if ace.hasFlag(ace.ACE_OBJECT_TYPE_PRESENT):
object_type = ace['ObjectType']
self.object_type_guid = self.guid_to_string(object_type)
try:
self.object_type_friendly = MS_PKI_GUIDS[self.object_type_guid]
except KeyError:
pass
self.access_mask = ace['Mask']['Mask']
self.sid = ace['Sid'].formatCanonical()
self.identity = modifier.resolve_sid(self.sid)
self.rights = self.resolve_flags(ace['Mask'])
def guid_to_string(self, guid):
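        # Render the 16-byte GUID in canonical form; the first three fields are little-endian, hence the byte swaps.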
return '{:02x}{:02x}{:02x}{:02x}-{:02x}{:02x}-{:02x}{:02x}-{:02x}{:02x}-{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}'.format(guid[3], guid[2], guid[1], guid[0], guid[5], guid[4], guid[7], guid[6], guid[8], guid[9], guid[10], guid[11], guid[12], guid[13], guid[14], guid[15])
def resolve_flags(self, access_mask):
rights = ''
for flag in ACCESS_MASK_FLAGS:
if access_mask.hasPriv(ACCESS_MASK_FLAGS[flag][1]):
rights += (', ' + ACCESS_MASK_FLAGS[flag][0])
for flag in ACCESS_ALLOWED_OBJECT_ACE_FLAGS:
if access_mask.hasPriv(ACCESS_ALLOWED_OBJECT_ACE_FLAGS[flag][1]):
rights += (', ' + ACCESS_ALLOWED_OBJECT_ACE_FLAGS[flag][0])
return rights[2:]
def print_ace(self):
print()
print(f'ActiveDirectoryRights: {self.rights}')
if (self.object_type_friendly != ''):
print(f'ObjectType: {self.object_type_guid} ({self.object_type_friendly})')
else:
print(f'ObjectType: {self.object_type_guid}')
print(f'AccessControlType: {self.type_name}')
print(f'IdentityReference: {self.identity}') |
class P1_Affine(object):
thisown = property((lambda x: x.this.own()), (lambda x, v: x.this.own(v)), doc='The membership flag')
__repr__ = _swig_repr
def __init__(self, *args):
_blst.P1_Affine_swiginit(self, _blst.new_P1_Affine(*args))
dup = _swig_new_instance_method(_blst.P1_Affine_dup)
to_jacobian = _swig_new_instance_method(_blst.P1_Affine_to_jacobian)
serialize = _swig_new_instance_method(_blst.P1_Affine_serialize)
compress = _swig_new_instance_method(_blst.P1_Affine_compress)
on_curve = _swig_new_instance_method(_blst.P1_Affine_on_curve)
in_group = _swig_new_instance_method(_blst.P1_Affine_in_group)
is_inf = _swig_new_instance_method(_blst.P1_Affine_is_inf)
is_equal = _swig_new_instance_method(_blst.P1_Affine_is_equal)
core_verify = _swig_new_instance_method(_blst.P1_Affine_core_verify)
generator = _swig_new_static_method(_blst.P1_Affine_generator)
__swig_destroy__ = _blst.delete_P1_Affine |
@pytest.mark.parametrize('event_definition_path', pathlib.Path('.').glob('definitions/Eiffel*Event/*.yml'))
def test_history_table_contains_valid_release(event_definition_path, manifest):
event_type = event_definition_path.parent.name
event_version = event_definition_path.stem
definition = definition_loader.load(event_definition_path)
for entry in definition.get('_history', []):
edition = entry.get('introduced_in', None)
if (edition is not None):
assert manifest.is_edition_tag(edition), f"Nonexistent edition '{edition}' in history table for {event_type} {event_version}" |
def split_list_by_content_size(obj_list: List[str], content_size: int=102400) -> List[List[str]]:
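    # Recursively bisect obj_list until each chunk's concatenated content fits within content_size.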
if (content_size < 1):
content_size = 1
if ((len(obj_list) == 1) or (len(''.join(obj_list)) <= content_size)):
return [obj_list]
mid = math.ceil((len(obj_list) / 2))
ll = []
ll.extend(split_list_by_content_size(obj_list[:mid], content_size=content_size))
ll.extend(split_list_by_content_size(obj_list[mid:], content_size=content_size))
return ll |
class InetScopeType(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec
subtypeSpec += ConstraintsUnion(SingleValueConstraint(*(1, 2, 3, 4, 5, 8, 14)))
namedValues = NamedValues(*(('adminLocal', 4), ('global', 14), ('interfaceLocal', 1), ('linkLocal', 2), ('organizationLocal', 8), ('siteLocal', 5), ('subnetLocal', 3)))
if mibBuilder.loadTexts:
description = 'Represents a scope type. This textual convention can be used in cases where a\nMIB has to represent different scope types and there is no context information,\nsuch as an InetAddress object, that implicitly defines the scope type. Note\nthat not all possible values have been assigned yet, but they may be assigned\nin future revisions of this specification. Applications should therefore be\nable to deal with values not yet assigned.\n' |
def sanitize_address(address, encoding='utf-8'):
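    # Split the display name from the address, MIME-encode the name, and IDNA-encode non-ASCII domains.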
if isinstance(address, str):
address = parseaddr(address)
(name, address) = address
name = Header(name, encoding).encode()
try:
address.encode('ascii')
except UnicodeEncodeError:
        if ('@' in address):
            (localpart, domain) = address.split('@', 1)
            localpart = str(Header(localpart, encoding))
            domain = domain.encode('idna').decode('ascii')
            address = '@'.join([localpart, domain])
else:
address = Header(address, encoding).encode()
return formataddr((name, address)) |
class TimeZoneValidator(object):
def __init__(self, coerce_function):
self.coerce_function = coerce_function
def __call__(self, form, field):
try:
self.coerce_function(str(field.data))
except Exception:
msg = u'Not a valid timezone (e.g. "America/New_York", "Africa/Johannesburg", "Asia/Singapore").'
raise ValidationError(field.gettext(msg)) |
def chain_for_config(trinity_config: TrinityConfig, event_bus: EndpointAPI) -> Iterator[AsyncChainAPI]:
if trinity_config.has_app_config(Eth1AppConfig):
eth1_app_config = trinity_config.get_app_config(Eth1AppConfig)
with chain_for_eth1_config(trinity_config, eth1_app_config, event_bus) as eth1_chain:
(yield eth1_chain)
else:
raise Exception('Unsupported Node Type') |
def identify_from_file(file_path: Path, api_key: str) -> dict:
(dt, longitude, latitude) = extract_exif(file_path)
params = {'images': [encode_file(file_path)], 'datetime': (int(dt.timestamp()) if dt else None), 'longitude': longitude, 'latitude': latitude}
headers = {'Content-Type': 'application/json', 'Api-Key': api_key}
    response = requests.post(API_URL, json=params, headers=headers)  # API_URL is a hypothetical stand-in; the literal endpoint string was lost in extraction
assert (response.status_code < 300), response.text
return response.json() |
def shempty(shape, dtype=float):
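    # Allocate an array backed by an anonymous shared mmap so forked workers can share it; plain numpy.empty suffices for a single process.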
if numeric.isint(shape):
shape = (shape,)
else:
assert all((numeric.isint(sh) for sh in shape))
dtype = numpy.dtype(dtype)
size = util.product(map(int, shape), int(dtype.itemsize))
if ((size == 0) or (maxprocs.current == 1)):
return numpy.empty(shape, dtype)
return numpy.frombuffer(mmap.mmap((- 1), size), dtype).reshape(shape) |
def test_inbox_get():
agent_address = 'Agent0'
receiver_address = 'Agent1'
    msg = DefaultMessage(performative=DefaultMessage.Performative.BYTES, content=b'hello')
msg.to = receiver_address
multiplexer = Multiplexer([_make_dummy_connection()])
envelope = Envelope(to=receiver_address, sender=agent_address, message=msg)
multiplexer.in_queue.put(envelope)
inbox = InBox(multiplexer)
assert (inbox.get() == envelope), 'Checks if the returned envelope is the same with the queued envelope.' |
class OptionYaxisCrosshair(Options):
    @property
    def className(self):
        return self._config_get(None)
    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)
    @property
    def color(self):
        return self._config_get('#cccccc')
    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)
    @property
    def dashStyle(self):
        return self._config_get('Solid')
    @dashStyle.setter
    def dashStyle(self, text: str):
        self._config(text, js_type=False)
    @property
    def snap(self):
        return self._config_get(True)
    @snap.setter
    def snap(self, flag: bool):
        self._config(flag, js_type=False)
    @property
    def width(self):
        return self._config_get(1)
    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)
    @property
    def zIndex(self):
        return self._config_get(2)
    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class RemoteConnector():
MAX_RETRIES = 5
def __init__(self, parse_config: bool=False, **kwargs):
es_args = ['cloud_id', 'ignore_ssl_errors', 'elasticsearch_url', 'es_user', 'es_password', 'timeout']
kibana_args = ['cloud_id', 'ignore_ssl_errors', 'kibana_url', 'kibana_user', 'kibana_password', 'space', 'kibana_cookie', 'provider_type', 'provider_name']
if parse_config:
es_kwargs = {arg: getdefault(arg)() for arg in es_args}
kibana_kwargs = {arg: getdefault(arg)() for arg in kibana_args}
try:
if ('max_retries' not in es_kwargs):
es_kwargs['max_retries'] = self.MAX_RETRIES
self.es_client = get_elasticsearch_client(**es_kwargs, **kwargs)
except ClientError:
self.es_client = None
try:
self.kibana_client = get_kibana_client(**kibana_kwargs, **kwargs)
except HTTPError:
self.kibana_client = None
def auth_es(self, *, cloud_id: Optional[str]=None, ignore_ssl_errors: Optional[bool]=None, elasticsearch_url: Optional[str]=None, es_user: Optional[str]=None, es_password: Optional[str]=None, timeout: Optional[int]=None, **kwargs) -> Elasticsearch:
if ('max_retries' not in kwargs):
kwargs['max_retries'] = self.MAX_RETRIES
self.es_client = get_elasticsearch_client(cloud_id=cloud_id, ignore_ssl_errors=ignore_ssl_errors, elasticsearch_url=elasticsearch_url, es_user=es_user, es_password=es_password, timeout=timeout, **kwargs)
return self.es_client
def auth_kibana(self, *, cloud_id: Optional[str]=None, ignore_ssl_errors: Optional[bool]=None, kibana_url: Optional[str]=None, kibana_user: Optional[str]=None, kibana_password: Optional[str]=None, space: Optional[str]=None, kibana_cookie: Optional[str]=None, provider_type: Optional[str]=None, provider_name: Optional[str]=None, **kwargs) -> Kibana:
self.kibana_client = get_kibana_client(cloud_id=cloud_id, ignore_ssl_errors=ignore_ssl_errors, kibana_url=kibana_url, kibana_user=kibana_user, kibana_password=kibana_password, space=space, kibana_cookie=kibana_cookie, provider_type=provider_type, provider_name=provider_name, **kwargs)
return self.kibana_client |
class Wavedrom():
def draw_regs(self, imgdir, rmap):
imgdir.mkdir(exist_ok=True)
bits = config.globcfg['data_width']
lanes = ((bits // 16) if (bits > 16) else 1)
for reg in rmap:
reg_wd = {'reg': [], 'config': {'bits': bits, 'lanes': lanes, 'fontsize': 10}}
bit_pos = (- 1)
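            # Walk the bit fields in order, inserting anonymous filler entries for reserved gaps between fields.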
for bf in reg:
if ((bit_pos == (- 1)) and (bf.lsb > 0)):
reg_wd['reg'].append({'bits': bf.lsb})
elif ((bf.lsb - bit_pos) > 1):
reg_wd['reg'].append({'bits': ((bf.lsb - bit_pos) - 1)})
name = bf.name
name_max_len = (5 * bf.width)
if (len(bf.name) > name_max_len):
name = (bf.name[:(name_max_len - 1)] + '..')
reg_wd['reg'].append({'name': name, 'attr': bf.access, 'bits': bf.width})
bit_pos = bf.msb
if ((bits - 1) > bit_pos):
reg_wd['reg'].append({'bits': ((bits - bit_pos) - 1)})
wavedrom.render(json.dumps(reg_wd)).saveas(str((imgdir / ('%s.svg' % reg.name.lower())))) |
class SigningHandler(Handler):
SUPPORTED_PROTOCOL = SigningMessage.protocol_id
    def setup(self) -> None:
        pass
def handle(self, message: Message) -> None:
signing_msg = cast(SigningMessage, message)
signing_dialogues = cast(SigningDialogues, self.context.signing_dialogues)
signing_dialogue = cast(Optional[SigningDialogue], signing_dialogues.update(signing_msg))
if (signing_dialogue is None):
self._handle_unidentified_dialogue(signing_msg)
return
if (signing_msg.performative is SigningMessage.Performative.SIGNED_TRANSACTION):
self._handle_signed_transaction(signing_msg, signing_dialogue)
elif (signing_msg.performative is SigningMessage.Performative.ERROR):
self._handle_error(signing_msg, signing_dialogue)
else:
self._handle_invalid(signing_msg, signing_dialogue)
    def teardown(self) -> None:
        pass
def _handle_unidentified_dialogue(self, signing_msg: SigningMessage) -> None:
self.context.logger.info(f'received invalid signing message={signing_msg}, unidentified dialogue.')
def _handle_signed_transaction(self, signing_msg: SigningMessage, signing_dialogue: SigningDialogue) -> None:
self.context.logger.info('transaction signing was successful.')
ledger_api_dialogue = signing_dialogue.associated_ledger_api_dialogue
last_ledger_api_msg = ledger_api_dialogue.last_incoming_message
if (last_ledger_api_msg is None):
raise ValueError('Could not retrieve last message in ledger api dialogue')
ledger_api_msg = ledger_api_dialogue.reply(performative=LedgerApiMessage.Performative.SEND_SIGNED_TRANSACTION, target_message=last_ledger_api_msg, signed_transaction=signing_msg.signed_transaction)
self.context.outbox.put_message(message=ledger_api_msg)
self.context.logger.info('sending transaction to ledger.')
def _handle_error(self, signing_msg: SigningMessage, signing_dialogue: SigningDialogue) -> None:
self.context.logger.info(f'transaction signing was not successful. Error_code={signing_msg.error_code} in dialogue={signing_dialogue}')
signing_msg_ = cast(Optional[SigningMessage], signing_dialogue.last_outgoing_message)
if ((signing_msg_ is not None) and (signing_msg_.performative == SigningMessage.Performative.SIGN_TRANSACTION)):
tx_behaviour = cast(TransactionBehaviour, self.context.behaviours.transaction)
ledger_api_dialogue = signing_dialogue.associated_ledger_api_dialogue
tx_behaviour.failed_processing(ledger_api_dialogue)
def _handle_invalid(self, signing_msg: SigningMessage, signing_dialogue: SigningDialogue) -> None:
self.context.logger.warning(f'cannot handle signing message of performative={signing_msg.performative} in dialogue={signing_dialogue}.') |
@pytest.mark.django_db(transaction=True)  # presumed pytest.mark.django_db; decorator prefix lost in extraction
def test_load_table_to_delta_for_detached_award_procurement(spark, s3_unittest_data_bucket, hive_unittest_metastore_db):
baker.make('transactions.SourceProcurementTransaction', detached_award_procurement_id='4', created_at=datetime.fromtimestamp(0), updated_at=datetime.fromtimestamp(0), federal_action_obligation=1000001, _fill_optional=True)
baker.make('transactions.SourceProcurementTransaction', detached_award_procurement_id='5', created_at=datetime.fromtimestamp(0), updated_at=datetime.fromtimestamp(0), federal_action_obligation=1000001, _fill_optional=True)
verify_delta_table_loaded_to_delta(spark, 'detached_award_procurement', s3_unittest_data_bucket) |
class Statistics(object):
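    # Borg pattern: every instance shares state through _shared_state.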
_shared_state = {}
_storage = None
_registered_keys = None
def __init__(self, storage=None):
self.__dict__ = self._shared_state
if (self._storage is None):
if (not isinstance(storage, StorageClientAsync)):
raise TypeError('Must be a valid Async Storage object')
self._storage = storage
async def _init(self):
if (self._registered_keys is None):
(await self._load_keys())
async def update_bulk(self, stat_list):
if (not isinstance(stat_list, dict)):
raise TypeError('stat_list must be a dict')
try:
payload = {'updates': []}
for (k, v) in stat_list.items():
payload_item = PayloadBuilder().WHERE(['key', '=', k]).EXPR(['value', '+', v]).payload()
payload['updates'].append(json.loads(payload_item))
(await self._storage.update_tbl('statistics', json.dumps(payload, sort_keys=False)))
except Exception as ex:
_logger.exception(ex, 'Unable to bulk update statistics')
raise
async def update(self, key, value_increment):
if (not isinstance(key, str)):
raise TypeError('key must be a string')
if (not isinstance(value_increment, int)):
raise ValueError('value must be an integer')
try:
payload = PayloadBuilder().WHERE(['key', '=', key]).EXPR(['value', '+', value_increment]).payload()
(await self._storage.update_tbl('statistics', payload))
except Exception as ex:
msg = 'Unable to update statistics value based on statistics_key {} and value_increment {}'.format(key, value_increment)
_logger.exception(ex, msg)
raise
async def add_update(self, sensor_stat_dict):
for (key, value_increment) in sensor_stat_dict.items():
try:
payload = PayloadBuilder().WHERE(['key', '=', key]).EXPR(['value', '+', value_increment]).payload()
result = (await self._storage.update_tbl('statistics', payload))
if (result['response'] != 'updated'):
raise KeyError
except KeyError:
_logger.exception('Statistics key %s has not been registered', key)
raise
except Exception as ex:
msg = 'Unable to update statistics value based on statistics_key {} and value_increment {}'.format(key, value_increment)
_logger.exception(ex, msg)
raise
async def register(self, key, description):
if (key in self._registered_keys):
return
if (len(self._registered_keys) == 0):
(await self._load_keys())
try:
payload = PayloadBuilder().INSERT(key=key, description=description, value=0, previous_value=0).payload()
(await self._storage.insert_into_tbl('statistics', payload))
self._registered_keys.append(key)
except Exception as ex:
(await self._load_keys())
if (key not in self._registered_keys):
_logger.exception(ex, 'Unable to create new statistic {} key.'.format(key))
raise
async def _load_keys(self):
self._registered_keys = []
try:
payload = PayloadBuilder().SELECT('key').payload()
results = (await self._storage.query_tbl_with_payload('statistics', payload))
for row in results['rows']:
self._registered_keys.append(row['key'])
except Exception as ex:
_logger.exception(ex, 'Failed to retrieve statistics keys') |
class OptionSeriesBulletSonificationTracksMapping(Options):
    @property
    def frequency(self) -> 'OptionSeriesBulletSonificationTracksMappingFrequency':
        return self._config_sub_data('frequency', OptionSeriesBulletSonificationTracksMappingFrequency)
    @property
    def gapBetweenNotes(self) -> 'OptionSeriesBulletSonificationTracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionSeriesBulletSonificationTracksMappingGapbetweennotes)
    @property
    def highpass(self) -> 'OptionSeriesBulletSonificationTracksMappingHighpass':
        return self._config_sub_data('highpass', OptionSeriesBulletSonificationTracksMappingHighpass)
    @property
    def lowpass(self) -> 'OptionSeriesBulletSonificationTracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionSeriesBulletSonificationTracksMappingLowpass)
    @property
    def noteDuration(self) -> 'OptionSeriesBulletSonificationTracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionSeriesBulletSonificationTracksMappingNoteduration)
    @property
    def pan(self) -> 'OptionSeriesBulletSonificationTracksMappingPan':
        return self._config_sub_data('pan', OptionSeriesBulletSonificationTracksMappingPan)
    @property
    def pitch(self) -> 'OptionSeriesBulletSonificationTracksMappingPitch':
        return self._config_sub_data('pitch', OptionSeriesBulletSonificationTracksMappingPitch)
    @property
    def playDelay(self) -> 'OptionSeriesBulletSonificationTracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionSeriesBulletSonificationTracksMappingPlaydelay)
    @property
    def rate(self) -> 'OptionSeriesBulletSonificationTracksMappingRate':
        return self._config_sub_data('rate', OptionSeriesBulletSonificationTracksMappingRate)
    @property
    def text(self):
        return self._config_get(None)
    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)
    @property
    def time(self) -> 'OptionSeriesBulletSonificationTracksMappingTime':
        return self._config_sub_data('time', OptionSeriesBulletSonificationTracksMappingTime)
    @property
    def tremolo(self) -> 'OptionSeriesBulletSonificationTracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionSeriesBulletSonificationTracksMappingTremolo)
    @property
    def volume(self) -> 'OptionSeriesBulletSonificationTracksMappingVolume':
        return self._config_sub_data('volume', OptionSeriesBulletSonificationTracksMappingVolume)
class Version(namedtuple('Version', ['major', 'minor', 'micro', 'release', 'pre', 'post', 'dev'])):
def __new__(cls, major, minor, micro, release='final', pre=0, post=0, dev=0):
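        # Validate part types first, then enforce pre/post/dev consistency for the given release type.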
for value in (major, minor, micro, pre, post):
if (not (isinstance(value, int) and (value >= 0))):
raise ValueError("All version parts except 'release' should be integers.")
if (release not in REL_MAP):
raise ValueError(f"'{release}' is not a valid release type.")
if ('.dev-candidate' < release < 'final'):
if (pre == 0):
raise ValueError('Implicit pre-releases not allowed.')
elif dev:
raise ValueError('Version is not a development release.')
elif post:
raise ValueError('Post-releases are not allowed with pre-releases.')
elif (release < 'alpha'):
if ((release > '.dev') and (pre == 0)):
raise ValueError('Implicit pre-release not allowed.')
elif post:
raise ValueError('Post-releases are not allowed with pre-releases.')
elif pre:
raise ValueError('Version is not a pre-release.')
elif dev:
raise ValueError('Version is not a development release.')
return super().__new__(cls, major, minor, micro, release, pre, post, dev)
def _is_pre(self):
return (self.pre > 0)
def _is_dev(self):
return bool((self.release < 'alpha'))
def _is_post(self):
return (self.post > 0)
def _get_dev_status(self):
return DEV_STATUS[self.release]
def _get_canonical(self):
if (self.micro == 0):
ver = f'{self.major}.{self.minor}'
else:
ver = f'{self.major}.{self.minor}.{self.micro}'
if self._is_pre():
ver += f'{REL_MAP[self.release]}{self.pre}'
if self._is_post():
ver += f'.post{self.post}'
if self._is_dev():
ver += f'.dev{self.dev}'
return ver |
def test_recover_from_public_key_class(key_api, private_key):
signature = key_api.ecdsa_sign(MSGHASH, private_key)
public_key = key_api.PublicKey.recover_from_msg_hash(MSGHASH, signature)
assert (public_key == key_api.PublicKey.recover_from_msg(MSG, signature))
assert (public_key == private_key.public_key) |
@mock.patch.object(source_tree.SourceTree, 'get_previous_commit_hash')  # presumed mock.patch.object; decorator prefix lost in extraction
def test_get_image_hashes_from_disk_source_fail(mock_get_previous_commit_hash):
job_control = proto_control.JobControl(remote=False, scavenging_benchmark=True)
_generate_default_benchmark_images(job_control)
_generate_default_envoy_source(job_control)
mock_get_previous_commit_hash.return_value = ''
manager = source_manager.SourceManager(job_control)
tree = manager.get_source_tree(proto_source.SourceRepository.SourceIdentity.SRCID_ENVOY)
hashes = {}
with pytest.raises(source_manager.SourceManagerError) as source_error:
hashes = manager.get_image_hashes_from_disk_source(tree, 'expected_baseline_hash')
assert (not hashes)
assert (str(source_error.value) == 'Received empty commit hash prior to [expected_baseline_hash]') |
class categorical_datasource(CategoricalDataSource):
def __init__(self, fnames_file, desc_file, feat_name, feats_dir, feats_dict=None):
super(categorical_datasource, self).__init__(fnames_file, desc_file, feat_name, feats_dir, feats_dict)
def __getitem__(self, idx):
assert (self.feat_type == 'categorical')
fname = str(self.filenames_array[idx]).strip()
clean_fname = ('clean_fileid_' + fname.split('_fileid_')[1])
clean_fname = (((self.feats_dir + '/') + clean_fname) + '.feats')
if (self.feat_name == 'quant'):
clean_fname += '.npy'
return populate_quantsarray(clean_fname, self.feats_dir)
else:
print('Unknown feature type: ', self.feat_name)
sys.exit() |
class OptionSeriesAreasplinerangeSonificationDefaultspeechoptionsMapping(Options):
    @property
    def pitch(self) -> 'OptionSeriesAreasplinerangeSonificationDefaultspeechoptionsMappingPitch':
        return self._config_sub_data('pitch', OptionSeriesAreasplinerangeSonificationDefaultspeechoptionsMappingPitch)
    @property
    def playDelay(self) -> 'OptionSeriesAreasplinerangeSonificationDefaultspeechoptionsMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionSeriesAreasplinerangeSonificationDefaultspeechoptionsMappingPlaydelay)
    @property
    def rate(self) -> 'OptionSeriesAreasplinerangeSonificationDefaultspeechoptionsMappingRate':
        return self._config_sub_data('rate', OptionSeriesAreasplinerangeSonificationDefaultspeechoptionsMappingRate)
    @property
    def text(self):
        return self._config_get(None)
    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)
    @property
    def time(self) -> 'OptionSeriesAreasplinerangeSonificationDefaultspeechoptionsMappingTime':
        return self._config_sub_data('time', OptionSeriesAreasplinerangeSonificationDefaultspeechoptionsMappingTime)
    @property
    def volume(self) -> 'OptionSeriesAreasplinerangeSonificationDefaultspeechoptionsMappingVolume':
        return self._config_sub_data('volume', OptionSeriesAreasplinerangeSonificationDefaultspeechoptionsMappingVolume)
class MovusStation(BikeShareStation):
def __init__(self, name, lat, lng, info, has_ebikes):
super(MovusStation, self).__init__()
self.name = name
self.latitude = float(lat)
self.longitude = float(lng)
if ('Actualizandose' in info):
self.bikes = 0
return
if has_ebikes:
rgx = 'mecanicas:\\s(\\d+).*electricas:\\s(\\d+).*libres:\\s(\\d+)'
(bikes, ebikes, free) = re.search(rgx, info).groups()
self.bikes = (int(bikes) + int(ebikes))
self.free = int(free)
self.extra = {'ebikes': int(ebikes)}
else:
rgx = 'Totales=(\\d+).*disponibles=(\\d+).*libres=(-?\\d+)'
(slots, bikes, free) = re.search(rgx, info).groups()
self.bikes = int(bikes)
self.free = max(int(free), 0)
self.extra = {'slots': int(slots)} |
class LeafLookup():
def __init__(self, sources: Dict[(int, str)], sinks: Dict[(int, str)], features: Dict[(int, str)]) -> None:
self._lookup: Dict[(SharedTextKind, Dict[(int, str)])] = {SharedTextKind.SOURCE: sources, SharedTextKind.SINK: sinks, SharedTextKind.FEATURE: features}
    @staticmethod
    def create(session: Session) -> 'LeafLookup':
return LeafLookup({int(id): contents for (id, contents) in session.query(SharedText.id, SharedText.contents).filter((SharedText.kind == SharedTextKind.SOURCE))}, {int(id): contents for (id, contents) in session.query(SharedText.id, SharedText.contents).filter((SharedText.kind == SharedTextKind.SINK))}, {int(id): contents for (id, contents) in session.query(SharedText.id, SharedText.contents).filter((SharedText.kind == SharedTextKind.FEATURE))})
def resolve(self, ids: Sequence[int], kind: SharedTextKind) -> Set[str]:
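        # Map ids to their cached contents, silently dropping ids that are not in the lookup.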
if (kind not in [SharedTextKind.SOURCE, SharedTextKind.SINK, SharedTextKind.FEATURE]):
raise ValueError(f'Cannot resolve ids of kind `{kind}`')
lookup = self._lookup[kind]
return {lookup[id] for id in ids if (id in lookup)} |
class RatingControl(RatingWidget, BaseControl):
name = 'rating'
title = _('Rating')
description = _('Select rating of the current track')
def __init__(self):
RatingWidget.__init__(self, player=player.PLAYER)
BaseControl.__init__(self)
def do_rating_changed(self, rating):
if (player.PLAYER.current is not None):
player.PLAYER.current.set_rating(rating)
maximum = settings.get_option('rating/maximum', 5)
event.log_event('rating_changed', self, ((100 * rating) / maximum)) |
class CoinChooserPrivacy(CoinChooserRandom):
def keys(self, coins):
return [coin.keyinstance_id for coin in coins]
def penalty_func(self, tx):
out_values = [output.value for output in tx.outputs]
max_change = (max(out_values) * 1.5)
spent_amount = sum(out_values)
def penalty(buckets):
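            # Penalize many buckets, and penalize change outputs that dwarf the spent outputs (they leak which output is change).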
badness = (len(buckets) - 1)
total_input = sum((bucket.value for bucket in buckets))
change = float((total_input - spent_amount))
if (change > max_change):
badness += ((change - max_change) / (max_change + 10000))
badness += (change / (COIN * 5))
return badness
return penalty |
class Brand(BaseObject):
def __init__(self, api=None, active=None, brand_url=None, created_at=None, default=None, has_help_center=None, help_center_state=None, host_mapping=None, id=None, logo=None, name=None, subdomain=None, updated_at=None, url=None, **kwargs):
self.api = api
self.active = active
self.brand_url = brand_url
self.created_at = created_at
self.default = default
self.has_help_center = has_help_center
self.help_center_state = help_center_state
self.host_mapping = host_mapping
self.id = id
self.logo = logo
self.name = name
self.subdomain = subdomain
self.updated_at = updated_at
self.url = url
for (key, value) in kwargs.items():
setattr(self, key, value)
for key in self.to_dict():
if (getattr(self, key) is None):
try:
self._dirty_attributes.remove(key)
except KeyError:
continue
    @property
    def created(self):
        if self.created_at:
            return dateutil.parser.parse(self.created_at)
    @created.setter
    def created(self, created):
        if created:
            self.created_at = created
    @property
    def updated(self):
        if self.updated_at:
            return dateutil.parser.parse(self.updated_at)
    @updated.setter
    def updated(self, updated):
        if updated:
            self.updated_at = updated
class LiteEthPHYRGMIITX(LiteXModule):
def __init__(self, pads):
self.sink = sink = stream.Endpoint(eth_phy_description(8))
tx_ctl_obuf = Signal()
tx_data_obuf = Signal(4)
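        # Drive TX control and data through DDR output registers, then through fixed-delay IODELAY2 primitives to the pads.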
self.specials += [Instance('ODDR2', p_DDR_ALIGNMENT='C0', p_SRTYPE='ASYNC', o_Q=tx_ctl_obuf, i_C0=ClockSignal('eth_tx'), i_C1=(~ ClockSignal('eth_tx')), i_CE=1, i_D0=sink.valid, i_D1=sink.valid, i_R=ResetSignal('eth_tx'), i_S=0), Instance('IODELAY2', p_IDELAY_TYPE='FIXED', p_ODELAY_VALUE=0, p_DELAY_SRC='ODATAIN', o_DOUT=pads.tx_ctl, i_CAL=0, i_CE=0, i_CLK=0, i_IDATAIN=0, i_INC=0, i_IOCLK0=0, i_IOCLK1=0, i_ODATAIN=tx_ctl_obuf, i_RST=0, i_T=0)]
for i in range(4):
self.specials += [Instance('ODDR2', p_DDR_ALIGNMENT='C0', p_SRTYPE='ASYNC', o_Q=tx_data_obuf[i], i_C0=ClockSignal('eth_tx'), i_C1=(~ ClockSignal('eth_tx')), i_CE=1, i_D0=sink.data[i], i_D1=sink.data[(4 + i)], i_R=ResetSignal('eth_tx'), i_S=0), Instance('IODELAY2', p_IDELAY_TYPE='FIXED', p_ODELAY_VALUE=0, p_DELAY_SRC='ODATAIN', o_DOUT=pads.tx_data[i], i_CAL=0, i_CE=0, i_CLK=0, i_IDATAIN=0, i_INC=0, i_IOCLK0=0, i_IOCLK1=0, i_ODATAIN=tx_data_obuf[i], i_RST=0, i_T=0)]
self.comb += sink.ready.eq(1) |
class ServeEntity(Model):
__tablename__ = SERVER_APP_TABLE_NAME
id = Column(Integer, primary_key=True, comment='Auto increment id')
gmt_created = Column(DateTime, default=datetime.now, comment='Record creation time')
gmt_modified = Column(DateTime, default=datetime.now, comment='Record update time')
def __repr__(self):
return f"ServeEntity(id={self.id}, gmt_created='{self.gmt_created}', gmt_modified='{self.gmt_modified}')" |
class BaseFilter(object):
def __init__(self, name, options=None, data_type=None, key_name=None):
self.name = name
self.options = options
self.data_type = data_type
self.key_name = key_name
def get_options(self, view):
options = self.options
if options:
if callable(options):
options = options()
return options
return None
def validate(self, value):
try:
self.clean(value)
return True
except ValueError:
return False
def clean(self, value):
return value
def apply(self, query, value):
raise NotImplementedError()
def operation(self):
raise NotImplementedError()
def __unicode__(self):
return self.name |
def _get_or_new_item_value(item, key, subkey):
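    # Fetch the value at key, or create and attach a fresh container suited to subkey when it is missing or not a collection.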
try:
(_, value) = _get_item_key_and_value(item, key)
if (not type_util.is_dict_or_list_or_tuple(value)):
raise TypeError
except (IndexError, KeyError, TypeError):
value = _new_item_value(subkey)
_set_item_value(item, key, value)
return value |
def extractGaijinbitchTumblrCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('prohibited isekai life', 'The Prohibited Isekai Life of a Certain Transmigrating Brother and Sister', 'translated'), ("he's not a lizard but a dragon", 'Hes Not a Lizard, But a Dragon ', 'translated'), ('story of gothic lady', 'The Story of Gothic Lady Who Met a Grave Keeper in Another World', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
class OptionPlotoptionsOrganizationOnpoint(Options):
    @property
    def connectorOptions(self) -> 'OptionPlotoptionsOrganizationOnpointConnectoroptions':
        return self._config_sub_data('connectorOptions', OptionPlotoptionsOrganizationOnpointConnectoroptions)
    @property
    def id(self):
        return self._config_get(None)
    @id.setter
    def id(self, text: str):
        self._config(text, js_type=False)
    @property
    def position(self) -> 'OptionPlotoptionsOrganizationOnpointPosition':
        return self._config_sub_data('position', OptionPlotoptionsOrganizationOnpointPosition)
def extractCrimsonzayerBlogspotCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
if ('WATTT' in item['tags']):
return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
return False |
@ui_meta(characters.kyouko.ResonanceAction)  # presumed ui_meta; decorator name partially lost in extraction
class ResonanceAction():
def choose_card_text(self, act, cards):
if act.cond(cards):
return (True, f'<style=Card.Name></style>:{N.char(act.victim)}')
else:
return (False, f'<style=Card.Name></style>:{N.char(act.victim)}')
def effect_string_before(self, act):
return f'{N.char(act.source)}{N.char(act.target)}<style=Skill.Name></style>'
def ray(self, act):
(src, tgt) = (act.source, act.target)
return [(src, tgt)]
def sound_effect(self, act):
return 'thb-cv-kyouko_resonance' |
class recursive_generator(_coconut_base_callable):
__slots__ = ('func', 'reit_store')
def __init__(self, func):
self.func = func
self.reit_store = _coconut.dict()
def __call__(self, *args, **kwargs):
key = (0, args, _coconut.frozenset(kwargs.items()))
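        # Memoize per-argument reiterables; fall back to pickling when the arguments are not hashable.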
try:
_coconut.hash(key)
except _coconut.TypeError:
try:
key = (1, _coconut.pickle.dumps(key, (- 1)))
except _coconut.Exception:
raise _coconut.TypeError('recursive_generator() requires function arguments to be hashable or pickleable')
reit = self.reit_store.get(key)
if (reit is None):
reit = reiterable(self.func(*args, **kwargs))
self.reit_store[key] = reit
return reit
def __repr__(self):
return ('recursive_generator(%r)' % (self.func,))
def __reduce__(self):
return (self.__class__, (self.func,)) |
def use_front_material(source_node):
front_material = source_node.ParameterBlock.frontMtl.Value
for (parent, param, i) in ConversionManager.outputs(source_node):
ConversionManager.connect_attr(front_material, parent, ('%s[%i]' % (param.GetName(), i)))
while True:
try:
source_node.FindDependentNode().Material = front_material
except RuntimeError:
break |
def test_scripts():
faldbt = FalDbt(profiles_dir=profiles_dir, project_dir=project_dir)
assert isinstance(faldbt._global_script_paths, dict)
assert (0 == len(faldbt._global_script_paths['after']))
faldbt._global_script_paths
models = faldbt.list_models()
for model in models:
if (model.name == 'model_feature_store'):
assert (0 == len(model.get_scripts(before=False)))
if (model.name == 'model_with_scripts'):
assert (1 == len(model.get_scripts(before=False)))
assert (0 == len(model.get_scripts(before=True)))
if (model.name == 'model_with_before_scripts'):
assert (1 == len(model.get_scripts(before=True)))
assert (0 == len(model.get_scripts(before=False))) |
def run(args):
sources = args.sources.split(',')
print(f'Sources: {sources}')
for source in sources:
print(f'Pulling from source: {source} ...')
if (source == 'Twitter'):
op = OperatorTwitter()
data = pull_twitter(args, op, source)
save_twitter(args, op, source, data)
elif (source == 'Article'):
op = OperatorArticle()
data = pull_article(args, op, source)
save_article(args, op, source, data)
elif (source == 'Youtube'):
op = OperatorYoutube()
data = pull_youtube(args, op, source)
save_youtube(args, op, source, data)
elif (source == 'RSS'):
op = OperatorRSS()
data = pull_rss(args, op, source)
save_rss(args, op, source, data)
elif (source == 'Reddit'):
op = OperatorReddit()
data = pull_reddit(args, op, source)
save_reddit(args, op, source, data) |
class Manager(BaseManager):
def __init__(self, name, credentials, unit_price_4dps=False, user_agent=None):
from xero import __version__ as VERSION
self.credentials = credentials
self.name = name
self.base_url = (credentials.base_url + XERO_API_URL)
self.extra_params = ({'unitdp': 4} if unit_price_4dps else {})
self.singular = singular(name)
self.user_agent = resolve_user_agent(user_agent, getattr(credentials, 'user_agent', None))
for method_name in self.DECORATED_METHODS:
method = getattr(self, ('_%s' % method_name))
setattr(self, method_name, self._get_data(method))
if (self.name in self.OBJECT_DECORATED_METHODS.keys()):
object_decorated_methods = self.OBJECT_DECORATED_METHODS[self.name]
for method_name in object_decorated_methods:
method = getattr(self, ('_%s' % method_name))
setattr(self, method_name, self._get_data(method)) |
def filter_firewall_wildcard_fqdn_custom_data(json):
option_list = ['color', 'comment', 'name', 'uuid', 'visibility', 'wildcard_fqdn']
json = remove_invalid_fields(json)
dictionary = {}
for attribute in option_list:
if ((attribute in json) and (json[attribute] is not None)):
dictionary[attribute] = json[attribute]
return dictionary |
def deprecated(alternative=None):
def real_decorator(func: _AnnotatedFunctionT) -> _AnnotatedFunctionT:
        @functools.wraps(func)  # presumed functools.wraps; decorator name lost in extraction
def new_func(*args, **kwargs):
msg = 'Call to deprecated function {0}.'.format(func.__name__)
if alternative:
msg += ' Use {0} instead'.format(alternative)
warnings.warn_explicit(msg, category=DeprecationWarning, filename=func.__code__.co_filename, lineno=(func.__code__.co_firstlineno + 1))
return func(*args, **kwargs)
return new_func
return real_decorator |
class TestPrecisionScore(SimpleClassificationTestTopK):
name = 'Precision Score'
def get_value(self, result: DatasetClassificationQuality):
return result.precision
def get_description(self, value: Numeric) -> str:
return f'The Precision Score is {value:.3g}. The test threshold is {self.get_condition()}' |
class OptionPlotoptionsVectorEvents(Options):
    @property
    def afterAnimate(self):
        return self._config_get(None)
    @afterAnimate.setter
    def afterAnimate(self, value: Any):
        self._config(value, js_type=False)
    @property
    def checkboxClick(self):
        return self._config_get(None)
    @checkboxClick.setter
    def checkboxClick(self, value: Any):
        self._config(value, js_type=False)
    @property
    def click(self):
        return self._config_get(None)
    @click.setter
    def click(self, value: Any):
        self._config(value, js_type=False)
    @property
    def hide(self):
        return self._config_get(None)
    @hide.setter
    def hide(self, value: Any):
        self._config(value, js_type=False)
    @property
    def legendItemClick(self):
        return self._config_get(None)
    @legendItemClick.setter
    def legendItemClick(self, value: Any):
        self._config(value, js_type=False)
    @property
    def mouseOut(self):
        return self._config_get(None)
    @mouseOut.setter
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)
    @property
    def mouseOver(self):
        return self._config_get(None)
    @mouseOver.setter
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)
    @property
    def show(self):
        return self._config_get(None)
    @show.setter
    def show(self, value: Any):
        self._config(value, js_type=False)
def test_save_to_json(temp_output_dir, mocker):
model = mocker.MagicMock(name='model')
model().loss.__truediv__().detach.return_value = torch.tensor(1)
mock_tensor = mocker.MagicMock(name='tensor')
batch = {'input': mock_tensor}
train_dataloader = [batch, batch, batch, batch, batch]
eval_dataloader = None
tokenizer = mocker.MagicMock()
optimizer = mocker.MagicMock()
lr_scheduler = mocker.MagicMock()
gradient_accumulation_steps = 1
train_config = mocker.MagicMock()
train_config.enable_fsdp = False
train_config.use_fp16 = False
train_config.run_validation = False
train_config.gradient_clipping = False
train_config.save_metrics = True
train_config.output_dir = temp_output_dir
results = train(model, train_dataloader, eval_dataloader, tokenizer, optimizer, lr_scheduler, gradient_accumulation_steps, train_config, local_rank=0)
assert (results['metrics_filename'] not in ['', None])
assert os.path.isfile(results['metrics_filename']) |
def filter_log_tacacsplusaccounting3_filter_data(json):
option_list = ['cli_cmd_audit', 'config_change_audit', 'login_audit']
json = remove_invalid_fields(json)
dictionary = {}
for attribute in option_list:
if ((attribute in json) and (json[attribute] is not None)):
dictionary[attribute] = json[attribute]
return dictionary |
class OptionSeriesPyramid3dSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    @property
    def mapFunction(self):
        return self._config_get(None)
    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    @property
    def mapTo(self):
        return self._config_get(None)
    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    @property
    def max(self):
        return self._config_get(None)
    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)
    @property
    def min(self):
        return self._config_get(None)
    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)
    @property
    def within(self):
        return self._config_get(None)
    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesFunnel3dSonificationTracksMappingHighpassResonance(Options):
    @property
    def mapFunction(self):
        return self._config_get(None)
    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    @property
    def mapTo(self):
        return self._config_get(None)
    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    @property
    def max(self):
        return self._config_get(None)
    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)
    @property
    def min(self):
        return self._config_get(None)
    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)
    @property
    def within(self):
        return self._config_get(None)
    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class IndexTransforms(TestCase, Common, Edges):
def setUp(self):
super().setUp()
self.seq = nutils.transformseq.IndexTransforms(ndims=2, length=4, offset=2)
self.check = [(nutils.transform.Index(2, i),) for i in range(2, 6)]
self.checkmissing = ((nutils.transform.Index(1, 0),), (nutils.transform.Index(2, (- 1)),), (nutils.transform.Index(2, 6),))
self.checkrefs = References.uniform(triangle, 4)
self.checktodims = 2
self.checkfromdims = 2 |
def _calc_score(priority: int, days_delta: float) -> float:
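    # Tasks due within half a day sort almost purely by deadline, with priority as a tiny tiebreaker; otherwise priority is weighted in directly.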
prio_score = (1 + (((priority - 1) / 99) * (PRIORITY_SCALE_FACTOR - 1)))
if (days_delta < 0.5):
return (days_delta + (prio_score / (PRIORITY_MOD * 10000)))
else:
return ((PRIORITY_SCALE_FACTOR * days_delta) + (PRIORITY_MOD * prio_score)) |
@bot.message_handler(func=(lambda m: True), content_types=['new_chat_participant'])  # presumed bot.message_handler; decorator prefix lost in extraction
def on_user_joins(message):
if (not is_api_group(message.chat.id)):
return
name = message.new_chat_participant.first_name
if (hasattr(message.new_chat_participant, 'last_name') and (message.new_chat_participant.last_name is not None)):
name += u' {}'.format(message.new_chat_participant.last_name)
if (hasattr(message.new_chat_participant, 'username') and (message.new_chat_participant.username is not None)):
name += u' ({})'.format(message.new_chat_participant.username)
bot.reply_to(message, text_messages['welcome'].format(name=name)) |
class SetImageTextureNodesTosRGB(Operator):
bl_idname = 'anima_toolbox.render_set_image_texture_nodes_to_srgb'
bl_label = 'set to sRGB'
bl_description = 'Sets the selected Image Texture nodes color space attribute to sRGB'
bl_icon = None
def execute(self, context):
from anima.dcc.blender import auxiliary
r = auxiliary.Render()
r.set_selected_image_texture_nodes_to_srgb()
return {'FINISHED'} |
def test_clean_plus_PINT_smooth(output_dir):
clean_nii_sm0 = os.path.join(output_dir, 'output_clean_s0.dtseries.nii')
run(['ciftify_clean_img', '--debug', '--drop-dummy=3', '--clean-config={}'.format(cleaning_config), '--confounds-tsv={}'.format(confounds_tsv), '--output-file={}'.format(clean_nii_sm0), test_dtseries])
run(['ciftify_PINT_vertices', '--pcorr', '--pre-smooth', '8', clean_nii_sm0, left_surface, right_surface, os.path.join(ciftify.config.find_ciftify_global(), 'PINT', 'Yeo7_2011_80verts.csv'), os.path.join(output_dir, 'testsub_clean_sm0_sm8')])
assert os.path.exists(os.path.join(output_dir, 'testsub_clean_sm0_sm8_tvertex_meants.csv'))
fixture_meants_t = read_meants_csv(os.path.join(get_test_data_path(), 'PINT', 'pint_clean_sm0_sm8_tvertex_meants.csv'))
new_meants_t = read_meants_csv(os.path.join(output_dir, 'testsub_clean_sm0_sm8_tvertex_meants.csv'))
assert (fixture_meants_t == new_meants_t).all().all()
assert os.path.isfile(os.path.join(output_dir, 'testsub_clean_sm0_sm8_summary.csv'))
fixture_summary = pd.read_csv(pint_summary)
new_summary = pd.read_csv(os.path.join(output_dir, 'testsub_clean_sm0_sm8_summary.csv'))
assert (fixture_summary == new_summary).all().all()
assert os.path.isfile(os.path.join(output_dir, 'testsub_clean_sm0_sm8_pvertex_meants.csv'))
fixture_meants_p = read_meants_csv(os.path.join(get_test_data_path(), 'PINT', 'pint_clean_sm0_sm8_pvertex_meants.csv'))
new_meants_p = read_meants_csv(os.path.join(output_dir, 'testsub_clean_sm0_sm8_pvertex_meants.csv'))
assert (fixture_meants_p == new_meants_p).all().all() |
class TestFilesHelperValidation(OpenEventTestCase):
def getsizes(self, file):
im = Image.open(file)
return im.size
def test_uploaded_image_local(self):
with self.app.test_request_context():
file_content = 'data:image/gif;base64, R0lGODlhEAAQAMQAAORHHOVSKudfOulrSOp3WOyDZu6QdvCchPGolfO0o/XBs/ fNwfjZ0frl3/zy7////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAACH5BAkAABAALAAAAAAQABAAAAVVICSOZGlCQAosJ6mu7f iyZeKqNKToQGDsM8hBADgUXoGAiqhSvp5QAnQKGIgUhwFUYLCVDFCrKUE1lBavAViFIDl TImbKC5Gm2hB0SlBCBMQiB0UjIQA7'
uploaded_img = uploaded_image(file_content=file_content)
file_path = uploaded_img.file_path
actual_file_path = ((self.app.config.get('BASE_DIR') + '/static/uploads/') + uploaded_img.filename)
assert (file_path == actual_file_path)
assert os.path.exists(file_path)
def test_upload_single_file(self):
class FileObj(BytesIO):
def close(self):
pass
class MyRequest(Request):
def _get_file_stream(*args, **kwargs):
return FileObj()
self.app.request_class = MyRequest
@self.app.route('/test_upload', methods=['POST'])
def upload():
files = request.files['file']
file_uploaded = uploaded_file(files=files)
return jsonify({'path': file_uploaded.file_path, 'name': file_uploaded.filename})
with self.app.test_request_context():
client = self.app.test_client()
resp = client.post('/test_upload', data={'file': (BytesIO(b'1,2,3,4'), 'test_file.csv')})
data = resp.get_json()
file_path = data['path']
filename = data['name']
actual_file_path = ((self.app.config.get('BASE_DIR') + '/static/uploads/') + filename)
assert (file_path == actual_file_path)
assert os.path.exists(file_path)
def test_upload_multiple_file(self):
class FileObj(BytesIO):
def close(self):
pass
class MyRequest(Request):
def _get_file_stream(*args, **kwargs):
return FileObj()
self.app.request_class = MyRequest
@self.app.route('/test_upload_multi', methods=['POST'])
def upload_multi():
files = request.files.getlist('files[]')
file_uploaded = uploaded_file(files=files, multiple=True)
files_uploaded = []
for file in file_uploaded:
files_uploaded.append({'path': file.file_path, 'name': file.filename})
return jsonify({'files': files_uploaded})
with self.app.test_request_context():
client = self.app.test_client()
resp = client.post('/test_upload_multi', data={'files[]': [(BytesIO(b'1,2,3,4'), 'test_file.csv'), (BytesIO(b'10,20,30,40'), 'test_file2.csv')]})
datas = resp.get_json()['files']
for data in datas:
file_path = data['path']
filename = data['name']
actual_file_path = ((self.app.config.get('BASE_DIR') + '/static/uploads/') + filename)
assert (file_path == actual_file_path)
assert os.path.exists(file_path)
def test_create_save_resized_image(self):
with self.app.test_request_context():
image_url_test = image_link
width = 500
height = 200
aspect_ratio = False
upload_path = 'test'
resized_image_url = create_save_resized_image(image_url_test, width, aspect_ratio, height, upload_path, ext='png')
resized_image_path = urlparse(resized_image_url).path
resized_image_file = (self.app.config.get('BASE_DIR') + resized_image_path)
(resized_width, resized_height) = self.getsizes(resized_image_file)
assert os.path.exists(resized_image_file)
assert (resized_width == width)
assert (resized_height == height)
def test_create_save_image_sizes(self):
with self.app.test_request_context():
image_url_test = image_link
image_sizes_type = 'event-image'
width_large = 1300
width_thumbnail = 500
width_icon = 75
image_sizes = create_save_image_sizes(image_url_test, image_sizes_type)
image_sizes = {url_name: urlparse(image_sizes[url_name]).path for url_name in image_sizes}
resized_image_url = image_sizes['original_image_url']
resized_image_url_large = image_sizes['large_image_url']
resized_image_url_thumbnail = image_sizes['thumbnail_image_url']
resized_image_url_icon = image_sizes['icon_image_url']
resized_image_file = (self.app.config.get('BASE_DIR') + resized_image_url)
resized_image_file_large = (self.app.config.get('BASE_DIR') + resized_image_url_large)
resized_image_file_thumbnail = (self.app.config.get('BASE_DIR') + resized_image_url_thumbnail)
resized_image_file_icon = (self.app.config.get('BASE_DIR') + resized_image_url_icon)
(resized_width_large, _) = self.getsizes(resized_image_file_large)
(resized_width_thumbnail, _) = self.getsizes(resized_image_file_thumbnail)
(resized_width_icon, _) = self.getsizes(resized_image_file_icon)
assert os.path.exists(resized_image_file)
assert (resized_width_large == width_large)
assert (resized_width_thumbnail == width_thumbnail)
assert (resized_width_icon == width_icon) |
class FromReader(UtilsFromReader):
def __init__(self, ffrom, call0, data_pointers, code_pointers, call0_blocks, data_pointers_blocks, code_pointers_blocks):
super().__init__(ffrom)
self._write_zeros_to_from(call0_blocks, call0, overwrite_size=3)
if (data_pointers_blocks is not None):
self._write_zeros_to_from(data_pointers_blocks, data_pointers)
if (code_pointers_blocks is not None):
self._write_zeros_to_from(code_pointers_blocks, code_pointers) |
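# BIP32-style child key derivation: indices with the top bit set (>= 2**31) are
# hardened and derive from the parent private key; otherwise the parent public
# key is used. Either way the data is HMAC-SHA512'd with the chain code as the
# key; the left half tweaks the parent key modulo the curve order and the right
# half becomes the child chain code. The rare BIP32 edge cases (il >= curve
# order, or a zero child key) are not handled here.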
def derive_child_key_from_index(private_key: bytes, chain_code: bytes, index: int) -> Tuple[(bytes, bytes)]:
parsed_private_key = PrivateKey(private_key)
public_key = parsed_private_key.public_key.public_key_bytes
is_hardened = (index & (1 << 31))
if is_hardened:
data_bytes = ((b'\x00' + private_key) + index.to_bytes(4, 'big'))
else:
data_bytes = (public_key + index.to_bytes(4, 'big'))
(il_bytes, ir_bytes) = split_hmac(hmac.digest(chain_code, data_bytes, 'sha512'))
il_int = int.from_bytes(il_bytes, byteorder='big', signed=False)
private_key_int = int.from_bytes(private_key, byteorder='big', signed=False)
new_private_key_int = ((il_int + private_key_int) % parsed_private_key.curve.order)
new_private_key_bytes = new_private_key_int.to_bytes(32, 'big')
return (new_private_key_bytes, ir_bytes) |
@pytest.fixture(scope='function')
def ac_system_with_privacy_declaration(db: Session) -> System:
system = System.create(db=db, data={'fides_key': f'ac_system{uuid.uuid4()}', 'vendor_id': 'gacp.8', 'name': 'Test AC System', 'organization_fides_key': 'default_organization', 'system_type': 'Service'})
PrivacyDeclaration.create(db=db, data={'system_id': system.id, 'data_use': 'functional.storage', 'legal_basis_for_processing': 'Consent', 'features': ['Link different devices']})
PrivacyDeclaration.create(db=db, data={'system_id': system.id, 'data_use': 'analytics.reporting.content_performance', 'legal_basis_for_processing': 'Legitimate interests', 'features': ['Link different devices']})
PrivacyDeclaration.create(db=db, data={'system_id': system.id, 'data_use': 'marketing.advertising', 'legal_basis_for_processing': 'Consent', 'features': ['Link different devices']})
return system |
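# Contract tests for the atomic-batch API: writes made inside an atomic_batch()
# context are visible through the batch immediately, are committed on a clean
# exit, are rolled back if the block raises, and the batch handle becomes
# unusable once the context closes.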
class AtomicDatabaseBatchAPITestSuite():
def test_atomic_batch_set_and_get(self, atomic_db: AtomicDatabaseAPI) -> None:
with atomic_db.atomic_batch() as batch:
batch.set(b'1', b'2')
assert (batch.get(b'1') == b'2')
assert (atomic_db.get(b'1') == b'2')
def test_atomic_db_cannot_recursively_batch(self, atomic_db: AtomicDatabaseAPI) -> None:
with atomic_db.atomic_batch() as batch:
assert (not hasattr(batch, 'atomic_batch'))
def test_atomic_db_with_set_and_delete_batch(self, atomic_db: AtomicDatabaseAPI) -> None:
atomic_db[b'key-1'] = b'origin'
with atomic_db.atomic_batch() as batch:
batch.delete(b'key-1')
assert (b'key-1' not in batch)
with pytest.raises(KeyError):
assert batch[b'key-1']
with pytest.raises(KeyError):
atomic_db[b'key-1']
def test_atomic_db_unbatched_sets_are_immediate(self, atomic_db: AtomicDatabaseAPI) -> None:
atomic_db[b'1'] = b'A'
with atomic_db.atomic_batch() as batch:
atomic_db[b'1'] = b'B'
assert (batch[b'1'] == b'B')
batch[b'1'] = b'C1'
atomic_db[b'1'] = b'C2'
assert (batch[b'1'] == b'C1')
assert (atomic_db[b'1'] == b'C1')
def test_atomic_db_unbatched_deletes_are_immediate(self, atomic_db: AtomicDatabaseAPI) -> None:
atomic_db[b'1'] = b'A'
with atomic_db.atomic_batch() as batch:
assert (b'1' in batch)
del atomic_db[b'1']
assert (b'1' not in batch)
batch[b'1'] = b'C1'
atomic_db[b'1'] = b'C2'
assert (batch[b'1'] == b'C1')
assert (atomic_db[b'1'] == b'C1')
def test_atomic_db_cannot_use_batch_after_context(self, atomic_db: AtomicDatabaseAPI) -> None:
atomic_db[b'1'] = b'A'
with atomic_db.atomic_batch() as batch:
batch[b'1'] = b'B'
with pytest.raises(ValidationError):
batch[b'1'] = b'C'
with pytest.raises(ValidationError):
batch.set(b'1', b'C')
with pytest.raises(ValidationError):
batch[b'1']
with pytest.raises(ValidationError):
batch.get(b'1')
with pytest.raises(ValidationError):
assert (b'1' in batch)
with pytest.raises(ValidationError):
batch.exists(b'1')
with pytest.raises(ValidationError):
del batch[b'1']
with pytest.raises(ValidationError):
batch.delete(b'1')
assert (atomic_db[b'1'] == b'B')
def test_atomic_db_with_reverted_delete_batch(self, atomic_db: AtomicDatabaseAPI) -> None:
class CustomException(Exception):
pass
atomic_db[b'key-1'] = b'origin'
with pytest.raises(CustomException):
with atomic_db.atomic_batch() as batch:
batch.delete(b'key-1')
assert (b'key-1' not in batch)
with pytest.raises(KeyError):
assert batch[b'key-1']
raise CustomException('pretend something went wrong')
assert (atomic_db[b'key-1'] == b'origin')
def test_atomic_db_temporary_state_dropped_across_batches(self, atomic_db: AtomicDatabaseAPI) -> None:
class CustomException(Exception):
pass
atomic_db[b'key-1'] = b'origin'
with pytest.raises(CustomException):
with atomic_db.atomic_batch() as batch:
batch.delete(b'key-1')
batch.set(b'key-2', b'val-2')
raise CustomException('pretend something went wrong')
with atomic_db.atomic_batch() as batch:
assert (batch[b'key-1'] == b'origin')
assert (b'key-2' not in batch)
def test_atomic_db_with_exception_batch(self, atomic_db: AtomicDatabaseAPI) -> None:
atomic_db.set(b'key-1', b'value-1')
try:
with atomic_db.atomic_batch() as batch:
batch.set(b'key-1', b'new-value-1')
batch.set(b'key-2', b'value-2')
raise Exception
except Exception:
pass
assert (atomic_db.get(b'key-1') == b'value-1')
with pytest.raises(KeyError):
atomic_db[b'key-2'] |
def _get_uploaded_file_binary(request_file: FileStorage) -> (bytes | None):
if (not request_file):
return None
with TemporaryDirectory(prefix='fact_upload_', dir=config.common.temp_dir_path) as tmp_dir:
tmp_file_path = (Path(tmp_dir) / 'upload.bin')
try:
request_file.save(str(tmp_file_path))
return tmp_file_path.read_bytes()
except OSError:
logging.error('Encountered error when trying to read uploaded file:', exc_info=True)
return None |
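# Every part except the last spans from its own offset to the next part's
# offset; the last part runs to the end of the file. Assumes the list is
# ordered so that parts[i]['index'] == i. For example, parts with offsets 0 and
# 4096 on a 10000-byte file get sizes 4096 and 5904.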
def calculate_part_sizes(rosfile: Path, parts: List[dict]):
for part in parts:
if is_last_part(part['index'], len(parts)):
part['size'] = (rosfile.stat().st_size - part['offset'])
else:
part['size'] = (parts[(part['index'] + 1)]['offset'] - part['offset']) |
def delete_import_record(year, month):
print('delete_import_record')
import_records = load_import_records()
logs = import_records['prescribing']
new_logs = [r for r in logs if (f'prescribing_v2/{year}_{month}' not in r['imported_file'])]
assert (len(logs) == (len(new_logs) + 1))
import_records['prescribing'] = new_logs
dump_import_records(import_records) |
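# Collects <img> sources from a parsed page, keeps those that look like S3
# URLs, probes each bucket once, then reports the permission test results.
# Helpers such as _normalize_url, _is_s3_url and _test_s3_bucket_permissions
# are expected to come from the storage handler base classes.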
class StorageExplorer(AmazonS3Handler, GoogleStorageHandler, AzureStorageHandler):
def __init__(self, host, logger):
super().__init__(host, logger)
self.host = host
self.logger = logger
self.buckets_found = set()
@staticmethod
def _get_image_sources_from_html(soup):
images = soup.select('img')
return {img.get('src') for img in images if img.get('src')}
def _add_to_found_storage(self, storage_url):
storage_url = self._normalize_url(storage_url)
bucket = S3Bucket(storage_url)
if (bucket.url not in self.storage_urls_found):
try:
res = self.request_handler.send('GET', url=storage_url)
if self._is_amazon_s3_bucket(res):
self.storage_urls_found.add(bucket.url)
self.s3_buckets.add(bucket)
except RequestHandlerException:
pass
def run(self, soup):
img_srcs = self._get_image_sources_from_html(soup)
urls = {src for src in img_srcs if self._is_s3_url(src)}
for url in urls:
self._add_to_found_storage(url)
if self.s3_buckets:
self.logger.info('{} S3 buckets discovered. Testing for permissions'.format(COLORED_COMBOS.NOTIFY))
for bucket in self.s3_buckets:
if (bucket.no_scheme_url in self.storage_urls_found):
continue
else:
self._test_s3_bucket_permissions(bucket)
if (self.num_files_found > 0):
self.logger.info('{} Found {}{}{} sensitive files in S3 buckets. inspect web scan logs for more information.'.format(COLORED_COMBOS.GOOD, COLOR.GREEN, self.num_files_found, COLOR.RESET))
elif any((b.vulnerable for b in self.s3_buckets)):
self.logger.info("{} No sensitive files found in target's cloud storage".format(COLORED_COMBOS.BAD))
else:
self.logger.info("{} Could not access target's cloud storage. All permissions are set properly".format(COLORED_COMBOS.BAD)) |
def import_horizon_roxapi(project, name, category, stype, realisation):
roxutils = RoxUtils(project, readonly=True)
_check_stypes_names_category(roxutils, stype, name, category)
proj = roxutils.project
args = _roxapi_import_surface(proj, name, category, stype, realisation)
roxutils.safe_close()
return args |
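# Creates a labelled snapshot through the cloud API's create_image action.
# Relies on module-level `base_url` and `headers` globals (presumably set up
# elsewhere with the endpoint and authorization token).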
def create_snapshot(server_id, snapshot_desc):
url = (((base_url + '/servers/') + str(server_id)) + '/actions/create_image')
r = requests.post(url=url, json={'description': snapshot_desc, 'type': 'snapshot', 'labels': {'AUTOBACKUP': ''}}, headers=headers)
if (not r.ok):
print(f'Snapshot for Server #{server_id} could not be created: {r.reason}')
print(r.text)
else:
image_id = r.json()['image']['id']
print(f'Snapshot #{image_id} (Server #{server_id}) has been created') |
def main():
global config, options
parser = ArgumentParser()
common.setup_global_opts(parser)
parser.add_argument('appid', nargs='*', help=_('application ID of file to operate on'))
parser.add_argument('--auto', action='store_true', default=False, help=_('Process auto-updates'))
parser.add_argument('--autoonly', action='store_true', default=False, help=_('Only process apps with auto-updates'))
parser.add_argument('--commit', action='store_true', default=False, help=_('Commit changes'))
parser.add_argument('--allow-dirty', action='store_true', default=False, help=_('Run on git repo that has uncommitted changes'))
metadata.add_metadata_arguments(parser)
options = parser.parse_args()
metadata.warnings_action = options.W
config = common.read_config(options)
if (not options.allow_dirty):
status = subprocess.check_output(['git', 'status', '--porcelain'])
if status:
logging.error(_('Build metadata git repo has uncommitted changes!'))
sys.exit(1)
allapps = metadata.read_metadata()
apps = common.read_app_args(options.appid, allapps, False)
processed = []
failed = dict()
exit_code = 0
for (appid, app) in apps.items():
if (options.autoonly and (app.AutoUpdateMode in ('None', 'Static'))):
logging.debug(_('Nothing to do for {appid}.').format(appid=appid))
continue
msg = _('Processing {appid}').format(appid=appid)
logging.info(msg)
try:
checkupdates_app(app)
processed.append(appid)
except Exception as e:
msg = _('...checkupdate failed for {appid} : {error}').format(appid=appid, error=e)
logging.error(msg)
logging.debug(traceback.format_exc())
failed[appid] = str(e)
exit_code = 1
status_update_json(processed, failed)
sys.exit(exit_code) |
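# Fetches an avatar: a friend's or MP's portrait when only userName is given,
# a chatroom portrait when only chatroomUserName is given, and a member's
# portrait inside that chatroom when both are given. Returns the raw bytes if
# picDir is None; otherwise writes the image to picDir and returns a status
# ReturnValue.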
def get_head_img(self, userName=None, chatroomUserName=None, picDir=None):
params = {'userName': (userName or chatroomUserName or self.storageClass.userName), 'skey': self.loginInfo['skey'], 'type': 'big'}
url = ('%s/webwxgeticon' % self.loginInfo['url'])
if (chatroomUserName is None):
infoDict = (self.storageClass.search_friends(userName=userName) or self.storageClass.search_mps(userName=userName))
if (infoDict is None):
return ReturnValue({'BaseResponse': {'ErrMsg': 'No friend found', 'Ret': (- 1001)}})
elif (userName is None):
url = ('%s/webwxgetheadimg' % self.loginInfo['url'])
else:
chatroom = self.storageClass.search_chatrooms(userName=chatroomUserName)
if (chatroom is None):
return ReturnValue({'BaseResponse': {'ErrMsg': 'No chatroom found', 'Ret': (- 1001)}})
if ('EncryChatRoomId' in chatroom):
params['chatroomid'] = chatroom['EncryChatRoomId']
params['chatroomid'] = (params.get('chatroomid') or chatroom['UserName'])
headers = {'User-Agent': self.user_agent}
r = self.s.get(url, params=params, stream=True, headers=headers)
tempStorage = io.BytesIO()
for block in r.iter_content(1024):
tempStorage.write(block)
if (picDir is None):
return tempStorage.getvalue()
with open(picDir, 'wb') as f:
f.write(tempStorage.getvalue())
tempStorage.seek(0)
return ReturnValue({'BaseResponse': {'ErrMsg': 'Successfully downloaded', 'Ret': 0}, 'PostFix': utils.get_image_postfix(tempStorage.read(20))}) |
def FQ12(lib):
if (lib == bn128):
return bn128_FQ12
elif (lib == optimized_bn128):
return optimized_bn128_FQ12
elif (lib == bls12_381):
return bls12_381_FQ12
elif (lib == optimized_bls12_381):
return optimized_bls12_381_FQ12
else:
raise Exception('Library Not Found') |
@pytest.mark.parametrize('dynasty_logout_delay, error', [((- 42), VALIDATION_ERROR), ((- 1), VALIDATION_ERROR), (0, TRANSACTION_FAILED), (1, TRANSACTION_FAILED), (2, None), (3, None), (100, None), (3000000, None)])
def test_init_dynasty_logout_delay(dynasty_logout_delay, error, casper_args, deploy_casper_contract, assert_tx_failed):
casper = deploy_casper_contract(casper_args, initialize_contract=False)
if error:
assert_tx_failed((lambda : casper.functions.init(*casper_args).transact()), error)
return
casper.functions.init(*casper_args).transact()
assert (casper.functions.DYNASTY_LOGOUT_DELAY().call() == dynasty_logout_delay) |
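# Builds a signed reset-password token using a jwcrypto-style API (assumed from
# the JWT/make_signed_token calls): the claims bind the token to the user id
# and to a fingerprint of the current password hash, so tokens issued before a
# password change can be rejected.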
def generate_jwt(user: User) -> str:
claims = {'sub': str(user.id), 'password_fgpt': password_helper.hash(user.hashed_password), 'aud': RESET_PASSWORD_TOKEN_AUDIENCE}
signing_key = user.tenant.get_sign_jwk()
token = jwt.JWT(header={'alg': 'RS256', 'kid': signing_key['kid']}, claims=claims)
token.make_signed_token(signing_key)
return token.serialize() |
@click.command()  # the original decorator name was truncated upstream; a bare Click command is assumed
@click.option('--from', 'param_from', default=None, type=click.DateTime(DATETIME_FORMATS), help='Start time limit for statistics.')
@click.option('--to', 'param_to', default=None, type=click.DateTime(DATETIME_FORMATS), help='End time limit for statistics.')
@_decorator  # truncated decorator name preserved as-is from the source
def statistics(param_from: Optional[datetime], param_to: Optional[datetime]):
(from_timestamp, to_timestamp) = _parse_from_to_timestamps(param_from, param_to)
statistics_data = API_CLIENT.get_statistics(from_time=from_timestamp, to_time=to_timestamp)
if JSON_OUTPUT:
_print_json(statistics_data)
return
text = ('From: %s\n' % _timestamp_ms_to_date(statistics_data['from']))
text += ('To: %s\n\n' % _timestamp_ms_to_date(statistics_data['to']))
headers = ['Total', 'Income', 'Expense', '#IncomeCategories', '#ExpenseCategories']
values = ['total', 'totalIncome', 'totalExpense', (lambda x: len(x.get('incomeItems'))), (lambda x: len(x.get('expenseItems')))]
text += _create_table_from_dict(headers, value_functions=values, data=[statistics_data])
text += '\n\n'
headers = ['Category', 'Income', 'Expense', 'Total']
keys = ['id', 'income', 'expense', 'total']
text += _create_table_from_dict(headers, keys, statistics_data['items'], numalign='right')
click.echo(text.strip()) |
def extractHighenergytranslationsWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return False
if ('WATTT' in item['tags']):
return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
return False |
def make_qrels(query_dict, corpus_dict, qrels_file='./data/qrels.train.tsv', writer_file='./data/query_doc.csv', test_file='./data/query_doc_test.csv', test_num=1000):
reader = csv.reader(open(qrels_file), delimiter='\t')
writer = csv.writer(open(writer_file, 'w'))
test_writer = csv.writer(open(test_file, 'w'))
reader = [line for line in reader]
shuffle(reader)
train_lines = reader[:(- test_num)]
test_lines = reader[(- test_num):]
print(len(train_lines), len(test_lines))
max_len = 0
writer.writerow(['query', 'doc'])
test_writer.writerow(['query', 'doc'])
for line in tqdm(train_lines):
q_id = int(line[0])
v_id = int(line[2])
q = query_dict[q_id]
v = corpus_dict[v_id]
writer.writerow([q, v])
max_len = max(len(q), max_len)
max_len = max(len(v), max_len)
for line in tqdm(test_lines):
q_id = int(line[0])
v_id = int(line[2])
q = query_dict[q_id]
v = corpus_dict[v_id]
test_writer.writerow([q, v])
max_len = max(len(q), max_len)
max_len = max(len(v), max_len)
print(max_len) |
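# Plugs objects into a module: (name, obj) tuples are bound under the given
# name, classes and functions under their own __name__, and anything else is
# rejected. Hypothetical usage: update(sys.modules[__name__], [('answer', 42), helper_fn]).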
def update(module, objects):
for obj in objects:
if isinstance(obj, tuple):
try:
(n, o) = obj
except ValueError:
raise ValueError(("expected (name, object) and got '%r'" % obj))
setattr(module, n, o)
elif (inspect.isclass(obj) or inspect.isfunction(obj)):
setattr(module, obj.__name__, obj)
else:
raise ValueError(("cannot plug '%r'" % obj)) |
class OpenMlJob(Runner):
async def __call__(self, es, params):
import elasticsearch
job_id = mandatory(params, 'job-id', self)
try:
(await es.ml.open_job(job_id=job_id))
except elasticsearch.BadRequestError:
(await es.perform_request(method='POST', path=f'/_xpack/ml/anomaly_detectors/{job_id}/_open'))
def __repr__(self, *args, **kwargs):
return 'open-ml-job' |
def _process_test_protocol(specification: Path, package_path: Path) -> None:
specification_content = specification.read_text()
_save_specification_in_temporary_file(package_path.name, specification_content)
_generate_protocol(package_path)
_fix_generated_protocol(package_path)
replacements = [(f'from packages.fetchai.protocols.{package_path.name}', f'from tests.data.generator.{package_path.name}')]
replace_in_directory(package_path.name, replacements)
_set_copyright_header(Path(PROTOCOLS_PLURALS, package_path.name))
run_isort_and_black(Path(PROTOCOLS_PLURALS, package_path.name), cwd=str(ROOT_DIR))
_fingerprint_protocol(package_path.name)
_update_original_protocol(package_path) |
def extractNoonascribbleBlogspotCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
class OptionSeriesWaterfallOnpoint(Options):
@property
def connectorOptions(self) -> 'OptionSeriesWaterfallOnpointConnectoroptions':
return self._config_sub_data('connectorOptions', OptionSeriesWaterfallOnpointConnectoroptions)
@property
def id(self):
return self._config_get(None)
@id.setter
def id(self, text: str):
self._config(text, js_type=False)
@property
def position(self) -> 'OptionSeriesWaterfallOnpointPosition':
return self._config_sub_data('position', OptionSeriesWaterfallOnpointPosition) |