Dataset schema (per-record fields, with observed string-length ranges):
nwo: string, 5 to 106 chars
sha: string, 40 chars
path: string, 4 to 174 chars
language: string, 1 distinct value
identifier: string, 1 to 140 chars
parameters: string, 0 to 87.7k chars
argument_list: string, 1 distinct value
return_statement: string, 0 to 426k chars
docstring: string, 0 to 64.3k chars
docstring_summary: string, 0 to 26.3k chars
docstring_tokens: list
function: string, 18 chars to 4.83M chars
function_tokens: list
url: string, 83 to 304 chars
AIChallenger/AI_Challenger_2018
f0e4376152c8fe5a098ed92a973cec96b13e1a24
Baselines/autonomous_driving_perception208_baseline/detection/utils/config_util.py
python
_update_focal_loss_alpha
(configs, alpha)
Updates the alpha value for a sigmoid focal loss. The configs dictionary is updated in place, and hence not returned. Args: configs: Dictionary of configuration objects. See outputs from get_configs_from_pipeline_file() or get_configs_from_multiple_files(). alpha: Class weight multiplier for sigmoid loss. Raises: TypeError: If the classification loss is not `weighted_sigmoid_focal`.
Updates the alpha value for a sigmoid focal loss.
[ "Updates", "the", "alpha", "value", "for", "a", "sigmoid", "focal", "loss", "." ]
def _update_focal_loss_alpha(configs, alpha):
  """Updates the alpha value for a sigmoid focal loss.

  The configs dictionary is updated in place, and hence not returned.

  Args:
    configs: Dictionary of configuration objects. See outputs from
      get_configs_from_pipeline_file() or get_configs_from_multiple_files().
    alpha: Class weight multiplier for sigmoid loss.

  Raises:
    TypeError: If the classification loss is not `weighted_sigmoid_focal`.
  """
  classification_loss = _get_classification_loss(configs["model"])
  classification_loss_type = classification_loss.WhichOneof(
      "classification_loss")
  if classification_loss_type != "weighted_sigmoid_focal":
    raise TypeError("Classification loss must be `weighted_sigmoid_focal`.")
  classification_loss.weighted_sigmoid_focal.alpha = alpha
[ "def", "_update_focal_loss_alpha", "(", "configs", ",", "alpha", ")", ":", "classification_loss", "=", "_get_classification_loss", "(", "configs", "[", "\"model\"", "]", ")", "classification_loss_type", "=", "classification_loss", ".", "WhichOneof", "(", "\"classificati...
https://github.com/AIChallenger/AI_Challenger_2018/blob/f0e4376152c8fe5a098ed92a973cec96b13e1a24/Baselines/autonomous_driving_perception208_baseline/detection/utils/config_util.py#L568-L586
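A minimal sketch of the same pattern (in-place mutation guarded by a WhichOneof type check), using hypothetical stand-in classes; FakeLoss and FakeFocal are made up and are not part of the TF Object Detection API:

class FakeFocal:
    alpha = 0.25

class FakeLoss:
    # stand-in for a protobuf oneof-backed loss config
    weighted_sigmoid_focal = FakeFocal()
    def WhichOneof(self, _group):
        return "weighted_sigmoid_focal"

configs = {"model": FakeLoss()}

def update_alpha(configs, alpha):
    loss = configs["model"]
    if loss.WhichOneof("classification_loss") != "weighted_sigmoid_focal":
        raise TypeError("Classification loss must be `weighted_sigmoid_focal`.")
    loss.weighted_sigmoid_focal.alpha = alpha  # mutated in place, nothing returned

update_alpha(configs, 0.5)
assert configs["model"].weighted_sigmoid_focal.alpha == 0.5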
QCoDeS/Qcodes
3cda2cef44812e2aa4672781f2423bf5f816f9f9
qcodes/instrument_drivers/rohde_schwarz/ZNB.py
python
ZNBChannel.__init__
( self, parent: "ZNB", name: str, channel: int, vna_parameter: Optional[str] = None, existing_trace_to_bind_to: Optional[str] = None, )
Args: parent: Instrument that this channel is bound to. name: Name to use for this channel. channel: channel on the VNA to use vna_parameter: Name of parameter on the vna that this should measure such as S12. If left empty this will fall back to `name`. existing_trace_to_bind_to: Name of an existing trace on the VNA. If supplied try to bind to an existing trace with this name rather than creating a new trace.
Args: parent: Instrument that this channel is bound to. name: Name to use for this channel. channel: channel on the VNA to use vna_parameter: Name of parameter on the vna that this should measure such as S12. If left empty this will fall back to `name`. existing_trace_to_bind_to: Name of an existing trace on the VNA. If supplied try to bind to an existing trace with this name rather than creating a new trace.
[ "Args", ":", "parent", ":", "Instrument", "that", "this", "channel", "is", "bound", "to", ".", "name", ":", "Name", "to", "use", "for", "this", "channel", ".", "channel", ":", "channel", "on", "the", "VNA", "to", "use", "vna_parameter", ":", "Name", "o...
def __init__(
    self,
    parent: "ZNB",
    name: str,
    channel: int,
    vna_parameter: Optional[str] = None,
    existing_trace_to_bind_to: Optional[str] = None,
) -> None:
    """
    Args:
        parent: Instrument that this channel is bound to.
        name: Name to use for this channel.
        channel: channel on the VNA to use
        vna_parameter: Name of parameter on the vna that this should measure
            such as S12. If left empty this will fall back to `name`.
        existing_trace_to_bind_to: Name of an existing trace on the VNA. If
            supplied try to bind to an existing trace with this name rather
            than creating a new trace.
    """
    n = channel
    self._instrument_channel = channel
    # Additional wait when adjusting instrument timeout to sweep time.
    self._additional_wait = 1
    if vna_parameter is None:
        vna_parameter = name
    self._vna_parameter = vna_parameter
    super().__init__(parent, name)

    if existing_trace_to_bind_to is None:
        self._tracename = f"Trc{channel}"
    else:
        traces = self._parent.ask("CONFigure:TRACe:CATalog?")
        if existing_trace_to_bind_to not in traces:
            raise RuntimeError(
                f"Trying to bind to"
                f" {existing_trace_to_bind_to} "
                f"which is not in {traces}"
            )
        self._tracename = existing_trace_to_bind_to

    # map hardware channel to measurement
    # hardware channels are mapped one to one to QCoDeS channels
    # we are not using sub traces within channels.
    if existing_trace_to_bind_to is None:
        self.write(
            f"CALC{self._instrument_channel}:PAR:SDEF"
            f" '{self._tracename}', '{self._vna_parameter}'"
        )

    # Source power is dependent on model, but not well documented.
    # Here we assume -60 dBm for ZNB20, the others are set,
    # due to lack of knowledge, to -80 dBm as of before the edit.
    full_modelname = self._parent.get_idn()["model"]
    if full_modelname is not None:
        model = full_modelname.split("-")[0]
    else:
        raise RuntimeError("Could not determine ZNB model")
    self._model_min_source_power = {
        "ZNB4": -80,
        "ZNB8": -80,
        "ZNB20": -60,
        "ZNB40": -60,
    }
    if model not in self._model_min_source_power.keys():
        raise RuntimeError(f"Unsupported ZNB model: {model}")
    self._min_source_power: float
    self._min_source_power = self._model_min_source_power[model]

    self.add_parameter(
        name="vna_parameter",
        label="VNA parameter",
        get_cmd=f"CALC{self._instrument_channel}:PAR:MEAS? "
        f"'{self._tracename}'",
        get_parser=self._strip,
    )
    self.add_parameter(
        name="power",
        label="Power",
        unit="dBm",
        get_cmd=f"SOUR{n}:POW?",
        set_cmd=f"SOUR{n}:POW {{:.4f}}",
        get_parser=float,
        vals=vals.Numbers(self._min_source_power, 25),
    )
    self.add_parameter(
        name="bandwidth",
        label="Bandwidth",
        unit="Hz",
        get_cmd=f"SENS{n}:BAND?",
        set_cmd=self._set_bandwidth,
        get_parser=int,
        vals=vals.Enum(
            *np.append(10 ** 6, np.kron([1, 1.5, 2, 3, 5, 7], 10 ** np.arange(6)))
        ),
        docstring="Measurement bandwidth of the IF filter. "
        "The inverse of this sets the integration "
        "time per point. "
        "There is an 'increased bandwidth option' "
        "(p. 4 of manual) that does not get taken "
        "into account here.",
    )
    self.add_parameter(
        name="avg",
        label="Averages",
        unit="",
        get_cmd=f"SENS{n}:AVER:COUN?",
        set_cmd=f"SENS{n}:AVER:COUN {{:.4f}}",
        get_parser=int,
        vals=vals.Ints(1, 5000),
    )
    self.add_parameter(
        name="start",
        get_cmd=f"SENS{n}:FREQ:START?",
        set_cmd=self._set_start,
        get_parser=float,
        vals=vals.Numbers(self._parent._min_freq, self._parent._max_freq - 10),
    )
    self.add_parameter(
        name="stop",
        get_cmd=f"SENS{n}:FREQ:STOP?",
        set_cmd=self._set_stop,
        get_parser=float,
        vals=vals.Numbers(self._parent._min_freq + 1, self._parent._max_freq),
    )
    self.add_parameter(
        name="center",
        get_cmd=f"SENS{n}:FREQ:CENT?",
        set_cmd=self._set_center,
        get_parser=float,
        vals=vals.Numbers(
            self._parent._min_freq + 0.5, self._parent._max_freq - 10
        ),
    )
    self.add_parameter(
        name="span",
        get_cmd=f"SENS{n}:FREQ:SPAN?",
        set_cmd=self._set_span,
        get_parser=float,
        vals=vals.Numbers(1, self._parent._max_freq - self._parent._min_freq),
    )
    self.add_parameter(
        name="npts",
        get_cmd=f"SENS{n}:SWE:POIN?",
        set_cmd=self._set_npts,
        get_parser=int,
    )
    self.add_parameter(
        name="status",
        get_cmd=f"CONF:CHAN{n}:MEAS?",
        set_cmd=f"CONF:CHAN{n}:MEAS {{}}",
        get_parser=int,
    )
    self.add_parameter(
        name="format",
        get_cmd=partial(self._get_format, tracename=self._tracename),
        set_cmd=self._set_format,
        val_mapping={
            "dB": "MLOG\n",
            "Linear Magnitude": "MLIN\n",
            "Phase": "PHAS\n",
            "Unwr Phase": "UPH\n",
            "Polar": "POL\n",
            "Smith": "SMIT\n",
            "Inverse Smith": "ISM\n",
            "SWR": "SWR\n",
            "Real": "REAL\n",
            "Imaginary": "IMAG\n",
            "Delay": "GDEL\n",
            "Complex": "COMP\n",
        },
    )
    self.add_parameter(
        name="trace_mag_phase",
        start=self.start(),
        stop=self.stop(),
        npts=self.npts(),
        channel=n,
        parameter_class=FrequencySweepMagPhase,
    )
    self.add_parameter(
        name="trace",
        start=self.start(),
        stop=self.stop(),
        npts=self.npts(),
        channel=n,
        parameter_class=FrequencySweep,
    )
    self.add_parameter(
        name="electrical_delay",
        label="Electrical delay",
        get_cmd=f"SENS{n}:CORR:EDEL2:TIME?",
        set_cmd=f"SENS{n}:CORR:EDEL2:TIME {{}}",
        get_parser=float,
        unit="s",
    )
    self.add_parameter(
        name="sweep_time",
        label="Sweep time",
        get_cmd=f"SENS{n}:SWE:TIME?",
        get_parser=float,
        unit="s",
    )
    self.add_parameter(
        name="sweep_type",
        get_cmd=f"SENS{n}:SWE:TYPE?",
        set_cmd=self._set_sweep_type,
        val_mapping={
            "Linear": "LIN\n",
            "Logarithmic": "LOG\n",
            "Power": "POW\n",
            "CW_Time": "CW\n",
            "CW_Point": "POIN\n",
            "Segmented": "SEGM\n",
        },
        docstring="The sweep_type parameter is used to set "
        "the type of measurement sweeps. It "
        "allows switching the default linear "
        "VNA sweep type to other types. Note that "
        "at the moment only the linear and "
        "CW_Point modes have supporting "
        "measurement parameters.",
    )
    self.add_parameter(
        name="cw_frequency",
        get_cmd=f"SENS{n}:FREQ:CW?",
        set_cmd=self._set_cw_frequency,
        get_parser=float,
        vals=vals.Numbers(
            self._parent._min_freq + 0.5, self._parent._max_freq - 10
        ),
        docstring="Parameter for setting frequency and "
        "querying for it when VNA sweep type is "
        "set to CW_Point mode.",
    )
    self.add_parameter(
        "cw_check_sweep_first",
        parameter_class=ManualParameter,
        initial_value=True,
        vals=vals.Bool(),
        docstring="Parameter that enables a few commands "
        "which are called before each get in "
        "continuous wave mode checking whether "
        "the vna is setup correctly. Is recommended "
        "to be turned on, but can be turned off if "
        "one wants to minimize overhead in fast "
        "measurements.",
    )
    self.add_parameter(
        name="trace_fixed_frequency",
        npts=self.npts(),
        bandwidth=self.bandwidth(),
        parameter_class=FixedFrequencyTraceIQ,
    )
    self.add_parameter(
        name="point_fixed_frequency", parameter_class=FixedFrequencyPointIQ
    )
    self.add_parameter(
        name="point_fixed_frequency_mag_phase",
        parameter_class=FixedFrequencyPointMagPhase,
    )
    self.add_parameter(
        name="averaging_enabled",
        initial_value=False,
        get_cmd=None,
        set_cmd=self._enable_averaging,
        vals=vals.Bool(),
        val_mapping=create_on_off_val_mapping(on_val="ON", off_val="OFF"),
    )
    self.add_parameter(
        name="auto_sweep_time_enabled",
        initial_value=False,
        get_cmd=None,
        set_cmd=self._enable_auto_sweep_time,
        vals=vals.Bool(),
        val_mapping=create_on_off_val_mapping(on_val="ON", off_val="OFF"),
        docstring="When enabled, the (minimum) sweep time is "
        "calculated internally using the other channel settings "
        "and zero delay",
    )
    self.add_function(
        "set_electrical_delay_auto", call_cmd=f"SENS{n}:CORR:EDEL:AUTO ONCE"
    )
    self.add_function(
        "autoscale",
        call_cmd=f"DISPlay:TRACe1:Y:SCALe:AUTO ONCE, {self._tracename}",
    )
[ "def", "__init__", "(", "self", ",", "parent", ":", "\"ZNB\"", ",", "name", ":", "str", ",", "channel", ":", "int", ",", "vna_parameter", ":", "Optional", "[", "str", "]", "=", "None", ",", "existing_trace_to_bind_to", ":", "Optional", "[", "str", "]", ...
https://github.com/QCoDeS/Qcodes/blob/3cda2cef44812e2aa4672781f2423bf5f816f9f9/qcodes/instrument_drivers/rohde_schwarz/ZNB.py#L277-L573
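A hedged usage sketch, assuming QCoDeS is installed and an instrument answers at the (made-up) VISA address; the ZNB driver constructs ZNBChannel objects itself:

from qcodes.instrument_drivers.rohde_schwarz.ZNB import ZNB

vna = ZNB("vna", "TCPIP0::192.0.2.1::inst0::INSTR")  # address is hypothetical
ch = vna.channels[0]   # a ZNBChannel created by the driver
ch.start(1e9)          # sweep from 1 GHz ...
ch.stop(2e9)           # ... to 2 GHz
trace = ch.trace()     # acquire a frequency sweep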
gitpython-developers/gitdb
2913a6454c9dfc803679dc5f75315e2d821ee977
gitdb/util.py
python
file_contents_ro
(fd, stream=False, allow_mmap=True)
return contents
:return: read-only contents of the file represented by the file descriptor fd :param fd: file descriptor opened for reading :param stream: if False, random access is provided, otherwise the stream interface is provided. :param allow_mmap: if True, it's allowed to map the contents into memory, which allows large files to be handled and accessed efficiently. The file-descriptor will change its position if this is False
:return: read-only contents of the file represented by the file descriptor fd
[ ":", "return", ":", "read", "-", "only", "contents", "of", "the", "file", "represented", "by", "the", "file", "descriptor", "fd" ]
def file_contents_ro(fd, stream=False, allow_mmap=True):
    """:return: read-only contents of the file represented by the file descriptor fd

    :param fd: file descriptor opened for reading
    :param stream: if False, random access is provided, otherwise the stream
        interface is provided.
    :param allow_mmap: if True, it's allowed to map the contents into memory,
        which allows large files to be handled and accessed efficiently.
        The file-descriptor will change its position if this is False"""
    try:
        if allow_mmap:
            # supports stream and random access
            try:
                return mmap.mmap(fd, 0, access=mmap.ACCESS_READ)
            except EnvironmentError:
                # python 2.4 issue, 0 wants to be the actual size
                return mmap.mmap(fd, os.fstat(fd).st_size, access=mmap.ACCESS_READ)
            # END handle python 2.4
    except OSError:
        pass
    # END exception handling

    # read manually
    contents = os.read(fd, os.fstat(fd).st_size)
    if stream:
        return _RandomAccessBytesIO(contents)
    return contents
[ "def", "file_contents_ro", "(", "fd", ",", "stream", "=", "False", ",", "allow_mmap", "=", "True", ")", ":", "try", ":", "if", "allow_mmap", ":", "# supports stream and random access", "try", ":", "return", "mmap", ".", "mmap", "(", "fd", ",", "0", ",", ...
https://github.com/gitpython-developers/gitdb/blob/2913a6454c9dfc803679dc5f75315e2d821ee977/gitdb/util.py#L163-L189
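For context, a self-contained demonstration of the mmap fast path above, stdlib only:

import mmap
import os
import tempfile

# write a small file, open a read-only descriptor, and map it the way
# file_contents_ro does when allow_mmap is True
with tempfile.NamedTemporaryFile(delete=False) as tmp:
    tmp.write(b"hello gitdb")
fd = os.open(tmp.name, os.O_RDONLY)
try:
    contents = mmap.mmap(fd, 0, access=mmap.ACCESS_READ)  # read-only, random access
    assert contents[:5] == b"hello"
    contents.close()
finally:
    os.close(fd)
    os.unlink(tmp.name)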
pycurl/pycurl
23afe62edb66a751604974bef5dad63edd79d3de
python/curl/__init__.py
python
Curl.get_info
(self, *args)
return self.handle.getinfo(*args)
Get information about retrieval.
Get information about retrieval.
[ "Get", "information", "about", "retrieval", "." ]
def get_info(self, *args):
    "Get information about retrieval."
    return self.handle.getinfo(*args)
[ "def", "get_info", "(", "self", ",", "*", "args", ")", ":", "return", "self", ".", "handle", ".", "getinfo", "(", "*", "args", ")" ]
https://github.com/pycurl/pycurl/blob/23afe62edb66a751604974bef5dad63edd79d3de/python/curl/__init__.py#L124-L126
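A hedged usage sketch, assuming pycurl's bundled high-level curl module is on the path; get_info just forwards pycurl's info constants:

import pycurl
import curl  # the high-level wrapper this record comes from

c = curl.Curl("https://example.org")
c.get()
print(c.get_info(pycurl.RESPONSE_CODE))  # e.g. 200
c.close()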
CT83/SmoothStream
7fbac6183008cdaeb99970ccadb344f113e90d50
camera/Camera.py
python
Camera.__init__
(self, height=RESOLUTION_H, width=RESOLUTION_W)
[]
def __init__(self, height=RESOLUTION_H, width=RESOLUTION_W):
    self.current_frame = None
    self.height = height
    self.width = width
    self.camera = None
[ "def", "__init__", "(", "self", ",", "height", "=", "RESOLUTION_H", ",", "width", "=", "RESOLUTION_W", ")", ":", "self", ".", "current_frame", "=", "None", "self", ".", "height", "=", "height", "self", ".", "width", "=", "width", "self", ".", "camera", ...
https://github.com/CT83/SmoothStream/blob/7fbac6183008cdaeb99970ccadb344f113e90d50/camera/Camera.py#L10-L14
nasa-jpl-memex/memex-explorer
d2910496238359b3676b4467721017fc82f0b324
source/apps/crawl_space/viz/stream.py
python
NutchUrlTrails.__init__
(self, crawl_name, num_urls=DEFAULT_NUM_URLS)
Create a NutchUrlTrails instance for visualizing a running Nutch crawl in real-time using Bokeh :param crawl_name: The name of the crawl (as identified by the queue) :param num_urls: The number of URLs to display in the visualization :return: A NutchUrlTrails instance
Create a NutchUrlTrails instance for visualizing a running Nutch crawl in real-time using Bokeh :param crawl_name: The name of the crawl (as identified by the queue) :param num_urls: The number of URLs to display in the visualization :return: A NutchUrlTrails instance
[ "Create", "a", "NutchUrlTrails", "instance", "for", "visualizing", "a", "running", "Nutch", "crawl", "in", "real", "-", "time", "using", "Bokeh", ":", "param", "name", ":", "The", "name", "of", "the", "crawl", "(", "as", "identified", "by", "the", "queue",...
def __init__(self, crawl_name, num_urls=DEFAULT_NUM_URLS):
    """
    Create a NutchUrlTrails instance for visualizing a running Nutch crawl
    in real-time using Bokeh

    :param crawl_name: The name of the crawl (as identified by the queue)
    :param num_urls: The number of URLs to display in the visualization
    :return: A NutchUrlTrails instance
    """
    self.crawl_name = crawl_name
    self.num_urls = num_urls
    self.open_urls = {}
    self.closed_urls = {}
    self.old_segments = None
    self.old_circles = None
    self.session = Session()
    self.session.use_doc(self.crawl_name)
    self.document = Document()

    con = Connection()
    exchange = Exchange(EXCHANGE_NAME, 'direct', durable=False)
    queue = Queue(crawl_name, exchange=exchange, routing_key=crawl_name)
    self.queue = con.SimpleQueue(name=queue)
[ "def", "__init__", "(", "self", ",", "crawl_name", ",", "num_urls", "=", "DEFAULT_NUM_URLS", ")", ":", "self", ".", "crawl_name", "=", "crawl_name", "self", ".", "num_urls", "=", "num_urls", "self", ".", "open_urls", "=", "{", "}", "self", ".", "closed_url...
https://github.com/nasa-jpl-memex/memex-explorer/blob/d2910496238359b3676b4467721017fc82f0b324/source/apps/crawl_space/viz/stream.py#L95-L117
qiueer/zabbix
31983dedbd59d917ecd71bb6f36b35302673a783
Mongodb/scripts/qiueer/python/QLog.py
python
Log.get_map_level
(self,level="debug")
[]
def get_map_level(self, level="debug"):
    level = str(level).lower()
    # print "get_map_level:", level
    if level == "debug":
        return logging.DEBUG
    if level == "info":
        return logging.INFO
    if level == "warn":
        return logging.WARN
    if level == "error":
        return logging.ERROR
    if level == "critical":
        return logging.CRITICAL
[ "def", "get_map_level", "(", "self", ",", "level", "=", "\"debug\"", ")", ":", "level", "=", "str", "(", "level", ")", ".", "lower", "(", ")", "#print \"get_map_level:\",level", "if", "level", "==", "\"debug\"", ":", "return", "logging", ".", "DEBUG", "if"...
https://github.com/qiueer/zabbix/blob/31983dedbd59d917ecd71bb6f36b35302673a783/Mongodb/scripts/qiueer/python/QLog.py#L119-L131
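Note the chain of ifs falls through to an implicit None for unrecognized names; a dict lookup with a default is a tighter equivalent (a standalone sketch, not part of QLog):

import logging

_LEVELS = {
    "debug": logging.DEBUG,
    "info": logging.INFO,
    "warn": logging.WARN,
    "error": logging.ERROR,
    "critical": logging.CRITICAL,
}

def map_level(level="debug"):
    # unlike get_map_level, unknown names fall back to DEBUG instead of None
    return _LEVELS.get(str(level).lower(), logging.DEBUG)

assert map_level("INFO") == logging.INFO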
oracle/oci-python-sdk
3c1604e4e212008fb6718e2f68cdb5ef71fd5793
examples/showoci/showoci_service.py
python
ShowOCIService.__load_core_network_single_privateip
(self, virtual_network, ip_id, return_name=True)
[]
def __load_core_network_single_privateip(self, virtual_network, ip_id, return_name=True):
    try:
        if 'privateip' not in ip_id:
            return ""
        arr = virtual_network.get_private_ip(
            ip_id,
            retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY
        ).data
        if arr:
            if return_name:
                return str(arr.ip_address) + " - " + str(arr.display_name)
            else:
                return str(arr.ip_address)
        return ""
    except oci.exceptions.ServiceError as e:
        if self.__check_service_error(e.code):
            pass
        raise
    except Exception as e:
        self.__print_error("__get_core_network_privateip", e)
        return ""
[ "def", "__load_core_network_single_privateip", "(", "self", ",", "virtual_network", ",", "ip_id", ",", "return_name", "=", "True", ")", ":", "try", ":", "if", "'privateip'", "not", "in", "ip_id", ":", "return", "\"\"", "arr", "=", "virtual_network", ".", "get_...
https://github.com/oracle/oci-python-sdk/blob/3c1604e4e212008fb6718e2f68cdb5ef71fd5793/examples/showoci/showoci_service.py#L3495-L3519
CGCookie/retopoflow
3d8b3a47d1d661f99ab0aeb21d31370bf15de35e
retopoflow/rf/rf_target.py
python
RetopoFlow_Target.get_vis_accel
(self, force=False)
return self.accel_vis_accel
[]
def get_vis_accel(self, force=False):
    target_version = self.get_target_version(selection=False)
    view_version = self.get_view_version()

    recompute = self.accel_recompute
    recompute |= target_version != self.accel_target_version
    recompute |= view_version != self.accel_view_version
    recompute |= self.accel_vis_verts is None
    recompute |= self.accel_vis_edges is None
    recompute |= self.accel_vis_faces is None
    recompute |= self.accel_vis_accel is None
    recompute |= options['visible bbox factor'] != self._last_visible_bbox_factor
    recompute |= options['visible dist offset'] != self._last_visible_dist_offset
    recompute |= options['selection occlusion test'] != self._last_selection_occlusion_test
    recompute |= options['selection backface test'] != self._last_selection_backface_test
    recompute &= not self.accel_defer_recomputing
    recompute &= not self._nav and (time.time() - self._nav_time) > 0.25
    recompute &= self._draw_count != self._last_draw_count

    self.accel_recompute = False

    if force or recompute:
        # print(f'RECOMPUTE VIS ACCEL {random.random()}')
        # print(f'  accel recompute: {self.accel_recompute}')
        # print(f'  target change: {target_version != self.accel_target_version}')
        # print(f'  view change: {view_version != self.accel_view_version} ({self.accel_view_version.get_hash() if self.accel_view_version else None}, {view_version.get_hash()})')
        # print(f'  geom change: {self.accel_vis_verts is None} {self.accel_vis_edges is None} {self.accel_vis_faces is None} {self.accel_vis_accel is None}')
        # print(f'  bbox change: {options["visible bbox factor"] != self._last_visible_bbox_factor}')
        # print(f'  dist offset change: {options["visible dist offset"] != self._last_visible_dist_offset}')
        # print(f'  navigating: {not self._nav} {time.time() - self._nav_time > 0.25}')
        # print(f'  draw change: {self._draw_count != self._last_draw_count}')
        self.accel_target_version = target_version
        self.accel_view_version = view_version
        self.accel_vis_verts = self.visible_verts()
        self.accel_vis_edges = self.visible_edges(verts=self.accel_vis_verts)
        self.accel_vis_faces = self.visible_faces(verts=self.accel_vis_verts)
        self.accel_vis_accel = Accel2D(self.accel_vis_verts, self.accel_vis_edges, self.accel_vis_faces, self.get_point2D)
        self._last_visible_bbox_factor = options['visible bbox factor']
        self._last_visible_dist_offset = options['visible dist offset']
        self._last_selection_occlusion_test = options['selection occlusion test']
        self._last_selection_backface_test = options['selection backface test']
        self._last_draw_count = self._draw_count
    else:
        self.accel_vis_verts = {bmv for bmv in self.accel_vis_verts if bmv.is_valid} if self.accel_vis_verts is not None else None
        self.accel_vis_edges = {bme for bme in self.accel_vis_edges if bme.is_valid} if self.accel_vis_edges is not None else None
        self.accel_vis_faces = {bmf for bmf in self.accel_vis_faces if bmf.is_valid} if self.accel_vis_faces is not None else None

    return self.accel_vis_accel
[ "def", "get_vis_accel", "(", "self", ",", "force", "=", "False", ")", ":", "target_version", "=", "self", ".", "get_target_version", "(", "selection", "=", "False", ")", "view_version", "=", "self", ".", "get_view_version", "(", ")", "recompute", "=", "self"...
https://github.com/CGCookie/retopoflow/blob/3d8b3a47d1d661f99ab0aeb21d31370bf15de35e/retopoflow/rf/rf_target.py#L122-L169
psychopy/psychopy
01b674094f38d0e0bd51c45a6f66f671d7041696
psychopy/data/utils.py
python
checkValidFilePath
(filepath, makeValid=True)
return True
Checks whether file path location (e.g. is a valid folder) This should also check whether we have write-permissions to the folder but doesn't currently do that! added in: 1.90.00
Checks whether file path location (e.g. is a valid folder)
[ "Checks", "whether", "file", "path", "location", "(", "e", ".", "g", ".", "is", "a", "valid", "folder", ")" ]
def checkValidFilePath(filepath, makeValid=True):
    """Checks whether file path location (e.g. is a valid folder)

    This should also check whether we have write-permissions to the folder
    but doesn't currently do that!

    added in: 1.90.00
    """
    folder = os.path.split(os.path.abspath(filepath))[0]
    if not os.path.isdir(folder):
        os.makedirs(folder)  # spit an error if we fail
    return True
[ "def", "checkValidFilePath", "(", "filepath", ",", "makeValid", "=", "True", ")", ":", "folder", "=", "os", ".", "path", ".", "split", "(", "os", ".", "path", ".", "abspath", "(", "filepath", ")", ")", "[", "0", "]", "if", "not", "os", ".", "path",...
https://github.com/psychopy/psychopy/blob/01b674094f38d0e0bd51c45a6f66f671d7041696/psychopy/data/utils.py#L48-L59
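A usage sketch, assuming psychopy is installed (the import path follows this record) and using a throwaway temp directory:

import os
import tempfile
from psychopy.data.utils import checkValidFilePath

target = os.path.join(tempfile.mkdtemp(), "sub", "data.csv")
checkValidFilePath(target)  # creates the missing "sub" folder
assert os.path.isdir(os.path.dirname(target))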
pantsbuild/pex
473c6ac732ed4bc338b4b20a9ec930d1d722c9b4
pex/vendor/_vendored/pip/pip/_vendor/pyparsing.py
python
Or.streamline
(self)
return self
[]
def streamline(self):
    super(Or, self).streamline()
    if __compat__.collect_all_And_tokens:
        self.saveAsList = any(e.saveAsList for e in self.exprs)
    return self
[ "def", "streamline", "(", "self", ")", ":", "super", "(", "Or", ",", "self", ")", ".", "streamline", "(", ")", "if", "__compat__", ".", "collect_all_And_tokens", ":", "self", ".", "saveAsList", "=", "any", "(", "e", ".", "saveAsList", "for", "e", "in",...
https://github.com/pantsbuild/pex/blob/473c6ac732ed4bc338b4b20a9ec930d1d722c9b4/pex/vendor/_vendored/pip/pip/_vendor/pyparsing.py#L4120-L4124
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/sympy/polys/galoistools.py
python
gf_trace_map
(a, b, c, n, f, p, K)
return gf_compose_mod(a, V, f, p, K), U
Compute polynomial trace map in ``GF(p)[x]/(f)``. Given a polynomial ``f`` in ``GF(p)[x]``, polynomials ``a``, ``b``, ``c`` in the quotient ring ``GF(p)[x]/(f)`` such that ``b = c**t (mod f)`` for some positive power ``t`` of ``p``, and a positive integer ``n``, returns a mapping:: a -> a**t**n, a + a**t + a**t**2 + ... + a**t**n (mod f) In factorization context, ``b = x**p mod f`` and ``c = x mod f``. This way we can efficiently compute trace polynomials in equal degree factorization routine, much faster than with other methods, like iterated Frobenius algorithm, for large degrees. Examples ======== >>> from sympy.polys.domains import ZZ >>> from sympy.polys.galoistools import gf_trace_map >>> gf_trace_map([1, 2], [4, 4], [1, 1], 4, [3, 2, 4], 5, ZZ) ([1, 3], [1, 3]) References ========== .. [1] [Gathen92]_
Compute polynomial trace map in ``GF(p)[x]/(f)``.
[ "Compute", "polynomial", "trace", "map", "in", "GF", "(", "p", ")", "[", "x", "]", "/", "(", "f", ")", "." ]
def gf_trace_map(a, b, c, n, f, p, K):
    """
    Compute polynomial trace map in ``GF(p)[x]/(f)``.

    Given a polynomial ``f`` in ``GF(p)[x]``, polynomials ``a``, ``b``,
    ``c`` in the quotient ring ``GF(p)[x]/(f)`` such that ``b = c**t
    (mod f)`` for some positive power ``t`` of ``p``, and a positive
    integer ``n``, returns a mapping::

       a -> a**t**n, a + a**t + a**t**2 + ... + a**t**n (mod f)

    In factorization context, ``b = x**p mod f`` and ``c = x mod f``.
    This way we can efficiently compute trace polynomials in equal
    degree factorization routine, much faster than with other methods,
    like iterated Frobenius algorithm, for large degrees.

    Examples
    ========

    >>> from sympy.polys.domains import ZZ
    >>> from sympy.polys.galoistools import gf_trace_map

    >>> gf_trace_map([1, 2], [4, 4], [1, 1], 4, [3, 2, 4], 5, ZZ)
    ([1, 3], [1, 3])

    References
    ==========

    .. [1] [Gathen92]_

    """
    u = gf_compose_mod(a, b, f, p, K)
    v = b

    if n & 1:
        U = gf_add(a, u, p, K)
        V = b
    else:
        U = a
        V = c

    n >>= 1

    while n:
        u = gf_add(u, gf_compose_mod(u, v, f, p, K), p, K)
        v = gf_compose_mod(v, v, f, p, K)

        if n & 1:
            U = gf_add(U, gf_compose_mod(u, V, f, p, K), p, K)
            V = gf_compose_mod(v, V, f, p, K)

        n >>= 1

    return gf_compose_mod(a, V, f, p, K), U
[ "def", "gf_trace_map", "(", "a", ",", "b", ",", "c", ",", "n", ",", "f", ",", "p", ",", "K", ")", ":", "u", "=", "gf_compose_mod", "(", "a", ",", "b", ",", "f", ",", "p", ",", "K", ")", "v", "=", "b", "if", "n", "&", "1", ":", "U", "=...
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/sympy/polys/galoistools.py#L1297-L1350
djblets/djblets
0496e1ec49e43d43d776768c9fc5b6f8af56ec2c
djblets/cache/serials.py
python
generate_ajax_serial
()
Generate a template-based AJAX serial number for requests and ETags. The serial number can be appended to filenames involving dynamic loads of URLs in order to make a URL that can be cached forever without fear of change. This will crawl the template files (using directories in :django:setting:`TEMPLATE_DIRS`), figuring out the latest timestamp, and return that value.
Generate a template-based AJAX serial number for requests and ETags.
[ "Generate", "a", "template", "-", "based", "AJAX", "serial", "number", "for", "requests", "and", "ETags", "." ]
def generate_ajax_serial():
    """Generate a template-based AJAX serial number for requests and ETags.

    The serial number can be appended to filenames involving dynamic loads
    of URLs in order to make a URL that can be cached forever without fear
    of change.

    This will crawl the template files (using directories in
    :django:setting:`TEMPLATE_DIRS`), figuring out the latest timestamp, and
    return that value.
    """
    AJAX_SERIAL = getattr(settings, "AJAX_SERIAL", 0)

    if not AJAX_SERIAL:
        template_dirs = itertools.chain.from_iterable(
            template_settings.get('DIRS', [])
            for template_settings in getattr(settings, 'TEMPLATES', None)
        )

        for template_path in template_dirs:
            for root, dirs, files in os.walk(template_path):
                for name in files:
                    mtime = int(os.stat(os.path.join(root, name)).st_mtime)

                    if mtime > AJAX_SERIAL:
                        AJAX_SERIAL = mtime

        setattr(settings, "AJAX_SERIAL", AJAX_SERIAL)
[ "def", "generate_ajax_serial", "(", ")", ":", "AJAX_SERIAL", "=", "getattr", "(", "settings", ",", "\"AJAX_SERIAL\"", ",", "0", ")", "if", "not", "AJAX_SERIAL", ":", "template_dirs", "=", "itertools", ".", "chain", ".", "from_iterable", "(", "template_settings",...
https://github.com/djblets/djblets/blob/0496e1ec49e43d43d776768c9fc5b6f8af56ec2c/djblets/cache/serials.py#L53-L80
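The core of the computation is a newest-mtime walk over the template directories; a stdlib-only sketch of that step:

import os

def latest_mtime(template_dirs):
    # walk every directory and keep the newest whole-second mtime found
    latest = 0
    for template_path in template_dirs:
        for root, _dirs, files in os.walk(template_path):
            for name in files:
                mtime = int(os.stat(os.path.join(root, name)).st_mtime)
                latest = max(latest, mtime)
    return latest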
saltstack/salt
fae5bc757ad0f1716483ce7ae180b451545c2058
salt/utils/zfs.py
python
from_bool_alt
(value)
return from_bool(value)
Convert zfs bool_alt to python bool
Convert zfs bool_alt to python bool
[ "Convert", "zfs", "bool_alt", "to", "python", "bool" ]
def from_bool_alt(value):
    """
    Convert zfs bool_alt to python bool
    """
    return from_bool(value)
[ "def", "from_bool_alt", "(", "value", ")", ":", "return", "from_bool", "(", "value", ")" ]
https://github.com/saltstack/salt/blob/fae5bc757ad0f1716483ce7ae180b451545c2058/salt/utils/zfs.py#L477-L481
TarrySingh/Artificial-Intelligence-Deep-Learning-Machine-Learning-Tutorials
5bb97d7e3ffd913abddb4cfa7d78a1b4c868890e
tensorflow_dl_models/research/object_detection/core/post_processing.py
python
batch_multiclass_non_max_suppression
(boxes, scores, score_thresh, iou_thresh, max_size_per_class, max_total_size=0, clip_window=None, change_coordinate_frame=False, num_valid_boxes=None, masks=None, additional_fields=None, scope=None, parallel_iterations=32)
Multi-class version of non maximum suppression that operates on a batch. This op is similar to `multiclass_non_max_suppression` but operates on a batch of boxes and scores. See documentation for `multiclass_non_max_suppression` for details. Args: boxes: A [batch_size, num_anchors, q, 4] float32 tensor containing detections. If `q` is 1 then same boxes are used for all classes otherwise, if `q` is equal to number of classes, class-specific boxes are used. scores: A [batch_size, num_anchors, num_classes] float32 tensor containing the scores for each of the `num_anchors` detections. score_thresh: scalar threshold for score (low scoring boxes are removed). iou_thresh: scalar threshold for IOU (new boxes that have high IOU overlap with previously selected boxes are removed). max_size_per_class: maximum number of retained boxes per class. max_total_size: maximum number of boxes retained over all classes. By default returns all boxes retained after capping boxes per class. clip_window: A float32 tensor of the form [y_min, x_min, y_max, x_max] representing the window to clip boxes to before performing non-max suppression. change_coordinate_frame: Whether to normalize coordinates after clipping relative to clip_window (this can only be set to True if a clip_window is provided) num_valid_boxes: (optional) a Tensor of type `int32`. A 1-D tensor of shape [batch_size] representing the number of valid boxes to be considered for each image in the batch. This parameter allows for ignoring zero paddings. masks: (optional) a [batch_size, num_anchors, q, mask_height, mask_width] float32 tensor containing box masks. `q` can be either number of classes or 1 depending on whether a separate mask is predicted per class. additional_fields: (optional) If not None, a dictionary that maps keys to tensors whose dimensions are [batch_size, num_anchors, ...]. scope: tf scope name. parallel_iterations: (optional) number of batch items to process in parallel. Returns: 'nmsed_boxes': A [batch_size, max_detections, 4] float32 tensor containing the non-max suppressed boxes. 'nmsed_scores': A [batch_size, max_detections] float32 tensor containing the scores for the boxes. 'nmsed_classes': A [batch_size, max_detections] float32 tensor containing the class for boxes. 'nmsed_masks': (optional) a [batch_size, max_detections, mask_height, mask_width] float32 tensor containing masks for each selected box. This is set to None if input `masks` is None. 'nmsed_additional_fields': (optional) a dictionary of [batch_size, max_detections, ...] float32 tensors corresponding to the tensors specified in the input `additional_fields`. This is not returned if input `additional_fields` is None. 'num_detections': A [batch_size] int32 tensor indicating the number of valid detections per batch item. Only the top num_detections[i] entries in nms_boxes[i], nms_scores[i] and nms_class[i] are valid. The rest of the entries are zero paddings. Raises: ValueError: if `q` in boxes.shape is not 1 or not equal to number of classes as inferred from scores.shape.
Multi-class version of non maximum suppression that operates on a batch.
[ "Multi", "-", "class", "version", "of", "non", "maximum", "suppression", "that", "operates", "on", "a", "batch", "." ]
def batch_multiclass_non_max_suppression(boxes,
                                         scores,
                                         score_thresh,
                                         iou_thresh,
                                         max_size_per_class,
                                         max_total_size=0,
                                         clip_window=None,
                                         change_coordinate_frame=False,
                                         num_valid_boxes=None,
                                         masks=None,
                                         additional_fields=None,
                                         scope=None,
                                         parallel_iterations=32):
  """Multi-class version of non maximum suppression that operates on a batch.

  This op is similar to `multiclass_non_max_suppression` but operates on a
  batch of boxes and scores. See documentation for
  `multiclass_non_max_suppression` for details.

  Args:
    boxes: A [batch_size, num_anchors, q, 4] float32 tensor containing
      detections. If `q` is 1 then same boxes are used for all classes
      otherwise, if `q` is equal to number of classes, class-specific boxes
      are used.
    scores: A [batch_size, num_anchors, num_classes] float32 tensor containing
      the scores for each of the `num_anchors` detections.
    score_thresh: scalar threshold for score (low scoring boxes are removed).
    iou_thresh: scalar threshold for IOU (new boxes that have high IOU overlap
      with previously selected boxes are removed).
    max_size_per_class: maximum number of retained boxes per class.
    max_total_size: maximum number of boxes retained over all classes. By
      default returns all boxes retained after capping boxes per class.
    clip_window: A float32 tensor of the form [y_min, x_min, y_max, x_max]
      representing the window to clip boxes to before performing non-max
      suppression.
    change_coordinate_frame: Whether to normalize coordinates after clipping
      relative to clip_window (this can only be set to True if a clip_window
      is provided)
    num_valid_boxes: (optional) a Tensor of type `int32`. A 1-D tensor of
      shape [batch_size] representing the number of valid boxes to be
      considered for each image in the batch. This parameter allows for
      ignoring zero paddings.
    masks: (optional) a [batch_size, num_anchors, q, mask_height, mask_width]
      float32 tensor containing box masks. `q` can be either number of classes
      or 1 depending on whether a separate mask is predicted per class.
    additional_fields: (optional) If not None, a dictionary that maps keys to
      tensors whose dimensions are [batch_size, num_anchors, ...].
    scope: tf scope name.
    parallel_iterations: (optional) number of batch items to process in
      parallel.

  Returns:
    'nmsed_boxes': A [batch_size, max_detections, 4] float32 tensor
      containing the non-max suppressed boxes.
    'nmsed_scores': A [batch_size, max_detections] float32 tensor containing
      the scores for the boxes.
    'nmsed_classes': A [batch_size, max_detections] float32 tensor
      containing the class for boxes.
    'nmsed_masks': (optional) a
      [batch_size, max_detections, mask_height, mask_width] float32 tensor
      containing masks for each selected box. This is set to None if input
      `masks` is None.
    'nmsed_additional_fields': (optional) a dictionary of
      [batch_size, max_detections, ...] float32 tensors corresponding to the
      tensors specified in the input `additional_fields`. This is not
      returned if input `additional_fields` is None.
    'num_detections': A [batch_size] int32 tensor indicating the number of
      valid detections per batch item. Only the top num_detections[i] entries
      in nms_boxes[i], nms_scores[i] and nms_class[i] are valid. The rest of
      the entries are zero paddings.

  Raises:
    ValueError: if `q` in boxes.shape is not 1 or not equal to number of
      classes as inferred from scores.shape.
  """
  q = boxes.shape[2].value
  num_classes = scores.shape[2].value
  if q != 1 and q != num_classes:
    raise ValueError('third dimension of boxes must be either 1 or equal '
                     'to the third dimension of scores')

  original_masks = masks
  original_additional_fields = additional_fields
  with tf.name_scope(scope, 'BatchMultiClassNonMaxSuppression'):
    boxes_shape = boxes.shape
    batch_size = boxes_shape[0].value
    num_anchors = boxes_shape[1].value

    if batch_size is None:
      batch_size = tf.shape(boxes)[0]
    if num_anchors is None:
      num_anchors = tf.shape(boxes)[1]

    # If num valid boxes aren't provided, create one and mark all boxes as
    # valid.
    if num_valid_boxes is None:
      num_valid_boxes = tf.ones([batch_size], dtype=tf.int32) * num_anchors

    # If masks aren't provided, create dummy masks so we can only have one copy
    # of _single_image_nms_fn and discard the dummy masks after map_fn.
    if masks is None:
      masks_shape = tf.stack([batch_size, num_anchors, 1, 0, 0])
      masks = tf.zeros(masks_shape)

    if additional_fields is None:
      additional_fields = {}

    def _single_image_nms_fn(args):
      """Runs NMS on a single image and returns padded output.

      Args:
        args: A list of tensors consisting of the following:
          per_image_boxes - A [num_anchors, q, 4] float32 tensor containing
            detections. If `q` is 1 then same boxes are used for all classes
            otherwise, if `q` is equal to number of classes, class-specific
            boxes are used.
          per_image_scores - A [num_anchors, num_classes] float32 tensor
            containing the scores for each of the `num_anchors` detections.
          per_image_masks - A [num_anchors, q, mask_height, mask_width]
            float32 tensor containing box masks. `q` can be either number of
            classes or 1 depending on whether a separate mask is predicted
            per class.
          per_image_additional_fields - (optional) A variable number of
            float32 tensors each with size [num_anchors, ...].
          per_image_num_valid_boxes - A tensor of type `int32`. A 1-D tensor
            of shape [batch_size] representing the number of valid boxes to
            be considered for each image in the batch. This parameter allows
            for ignoring zero paddings.

      Returns:
        'nmsed_boxes': A [max_detections, 4] float32 tensor containing the
          non-max suppressed boxes.
        'nmsed_scores': A [max_detections] float32 tensor containing the
          scores for the boxes.
        'nmsed_classes': A [max_detections] float32 tensor containing the
          class for boxes.
        'nmsed_masks': (optional) a [max_detections, mask_height, mask_width]
          float32 tensor containing masks for each selected box. This is set
          to None if input `masks` is None.
        'nmsed_additional_fields': (optional) A variable number of float32
          tensors each with size [max_detections, ...] corresponding to the
          input `per_image_additional_fields`.
        'num_detections': A [batch_size] int32 tensor indicating the number
          of valid detections per batch item. Only the top num_detections[i]
          entries in nms_boxes[i], nms_scores[i] and nms_class[i] are valid.
          The rest of the entries are zero paddings.
      """
      per_image_boxes = args[0]
      per_image_scores = args[1]
      per_image_masks = args[2]
      per_image_additional_fields = {
          key: value
          for key, value in zip(additional_fields, args[3:-1])
      }
      per_image_num_valid_boxes = args[-1]
      per_image_boxes = tf.reshape(
          tf.slice(per_image_boxes, 3 * [0],
                   tf.stack([per_image_num_valid_boxes, -1, -1])), [-1, q, 4])
      per_image_scores = tf.reshape(
          tf.slice(per_image_scores, [0, 0],
                   tf.stack([per_image_num_valid_boxes, -1])),
          [-1, num_classes])
      per_image_masks = tf.reshape(
          tf.slice(per_image_masks, 4 * [0],
                   tf.stack([per_image_num_valid_boxes, -1, -1, -1])),
          [-1, q, per_image_masks.shape[2].value,
           per_image_masks.shape[3].value])
      if per_image_additional_fields is not None:
        for key, tensor in per_image_additional_fields.items():
          additional_field_shape = tensor.get_shape()
          additional_field_dim = len(additional_field_shape)
          per_image_additional_fields[key] = tf.reshape(
              tf.slice(per_image_additional_fields[key],
                       additional_field_dim * [0],
                       tf.stack([per_image_num_valid_boxes] +
                                (additional_field_dim - 1) * [-1])),
              [-1] + [dim.value for dim in additional_field_shape[1:]])
      nmsed_boxlist = multiclass_non_max_suppression(
          per_image_boxes,
          per_image_scores,
          score_thresh,
          iou_thresh,
          max_size_per_class,
          max_total_size,
          clip_window=clip_window,
          change_coordinate_frame=change_coordinate_frame,
          masks=per_image_masks,
          additional_fields=per_image_additional_fields)
      padded_boxlist = box_list_ops.pad_or_clip_box_list(nmsed_boxlist,
                                                         max_total_size)
      num_detections = nmsed_boxlist.num_boxes()
      nmsed_boxes = padded_boxlist.get()
      nmsed_scores = padded_boxlist.get_field(fields.BoxListFields.scores)
      nmsed_classes = padded_boxlist.get_field(fields.BoxListFields.classes)
      nmsed_masks = padded_boxlist.get_field(fields.BoxListFields.masks)
      nmsed_additional_fields = [
          padded_boxlist.get_field(key) for key in per_image_additional_fields
      ]
      return ([nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks] +
              nmsed_additional_fields + [num_detections])

    num_additional_fields = 0
    if additional_fields is not None:
      num_additional_fields = len(additional_fields)
    num_nmsed_outputs = 4 + num_additional_fields

    batch_outputs = tf.map_fn(
        _single_image_nms_fn,
        elems=([boxes, scores, masks] + list(additional_fields.values()) +
               [num_valid_boxes]),
        dtype=(num_nmsed_outputs * [tf.float32] + [tf.int32]),
        parallel_iterations=parallel_iterations)

    batch_nmsed_boxes = batch_outputs[0]
    batch_nmsed_scores = batch_outputs[1]
    batch_nmsed_classes = batch_outputs[2]
    batch_nmsed_masks = batch_outputs[3]
    batch_nmsed_additional_fields = {
        key: value
        for key, value in zip(additional_fields, batch_outputs[4:-1])
    }
    batch_num_detections = batch_outputs[-1]

    if original_masks is None:
      batch_nmsed_masks = None

    if original_additional_fields is None:
      batch_nmsed_additional_fields = None

    return (batch_nmsed_boxes, batch_nmsed_scores, batch_nmsed_classes,
            batch_nmsed_masks, batch_nmsed_additional_fields,
            batch_num_detections)
[ "def", "batch_multiclass_non_max_suppression", "(", "boxes", ",", "scores", ",", "score_thresh", ",", "iou_thresh", ",", "max_size_per_class", ",", "max_total_size", "=", "0", ",", "clip_window", "=", "None", ",", "change_coordinate_frame", "=", "False", ",", "num_v...
https://github.com/TarrySingh/Artificial-Intelligence-Deep-Learning-Machine-Learning-Tutorials/blob/5bb97d7e3ffd913abddb4cfa7d78a1b4c868890e/tensorflow_dl_models/research/object_detection/core/post_processing.py#L165-L395
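A hedged call sketch, assuming TF 1.x (the .value shape accesses above are TF1-style) and that this module's symbols are importable; with q == 1 the same boxes serve all classes:

import tensorflow as tf

batch, anchors, classes = 2, 100, 3
boxes = tf.zeros([batch, anchors, 1, 4])        # q == 1
scores = tf.fill([batch, anchors, classes], 0.7)
(nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
 nmsed_additional_fields, num_detections) = batch_multiclass_non_max_suppression(
     boxes, scores, score_thresh=0.5, iou_thresh=0.6,
     max_size_per_class=10, max_total_size=20)
# graph mode: nmsed_boxes is [2, 20, 4] and num_detections is [2];
# evaluate the tensors inside a tf.Session() as usual for TF 1.x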
perone/Pyevolve
589b6a9b92ed1fd9ef00987bf4bfe807c4a7b7e0
examples/pyevolve_ex19_gp.py
python
eval_func
(chromosome)
return score
[]
def eval_func(chromosome):
    sz = 20
    code_comp = chromosome.getCompiledCode()
    square_accum = 0.0

    for j in xrange(sz):
        a, b = random_lists(5)
        target_list = gp_add(gp_mul(a, b), gp_mul(a, b))
        ret_list = eval(code_comp)
        square_accum += (sum(target_list) - sum(ret_list)) ** 2

    RMSE = sqrt(square_accum / float(sz))
    score = (1.0 / (RMSE + 1.0))
    return score
[ "def", "eval_func", "(", "chromosome", ")", ":", "sz", "=", "20", "code_comp", "=", "chromosome", ".", "getCompiledCode", "(", ")", "square_accum", "=", "0.0", "for", "j", "in", "xrange", "(", "sz", ")", ":", "a", ",", "b", "=", "random_lists", "(", ...
https://github.com/perone/Pyevolve/blob/589b6a9b92ed1fd9ef00987bf4bfe807c4a7b7e0/examples/pyevolve_ex19_gp.py#L43-L56
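The fitness mapping at the end, score = 1 / (RMSE + 1), squeezes any non-negative error into (0, 1]; shown standalone:

from math import sqrt

errors_sq = [4.0, 1.0, 0.0, 1.0]
rmse = sqrt(sum(errors_sq) / len(errors_sq))
score = 1.0 / (rmse + 1.0)   # rmse == 0 gives a perfect score of 1.0
assert 0.0 < score <= 1.0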
oracle/graalpython
577e02da9755d916056184ec441c26e00b70145c
graalpython/lib-python/3/asyncio/locks.py
python
BoundedSemaphore.__init__
(self, value=1, *, loop=None)
[]
def __init__(self, value=1, *, loop=None):
    if loop:
        warnings.warn("The loop argument is deprecated since Python 3.8, "
                      "and scheduled for removal in Python 3.10.",
                      DeprecationWarning, stacklevel=2)
    self._bound_value = value
    super().__init__(value, loop=loop)
[ "def", "__init__", "(", "self", ",", "value", "=", "1", ",", "*", ",", "loop", "=", "None", ")", ":", "if", "loop", ":", "warnings", ".", "warn", "(", "\"The loop argument is deprecated since Python 3.8, \"", "\"and scheduled for removal in Python 3.10.\"", ",", "...
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/asyncio/locks.py#L522-L529
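A runnable demonstration of what the bound adds over a plain Semaphore: releasing more times than acquired raises ValueError:

import asyncio

async def main():
    sem = asyncio.BoundedSemaphore(1)
    async with sem:
        pass                 # acquired and released once
    try:
        sem.release()        # one release too many
    except ValueError as exc:
        print("bounded semaphore caught over-release:", exc)

asyncio.run(main())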
smart-mobile-software/gitstack
d9fee8f414f202143eb6e620529e8e5539a2af56
python/Lib/lib-tk/Tkinter.py
python
Misc.unbind
(self, sequence, funcid=None)
Unbind for this widget for event SEQUENCE the function identified with FUNCID.
Unbind for this widget for event SEQUENCE the function identified with FUNCID.
[ "Unbind", "for", "this", "widget", "for", "event", "SEQUENCE", "the", "function", "identified", "with", "FUNCID", "." ]
def unbind(self, sequence, funcid=None):
    """Unbind for this widget for event SEQUENCE the
    function identified with FUNCID."""
    self.tk.call('bind', self._w, sequence, '')
    if funcid:
        self.deletecommand(funcid)
[ "def", "unbind", "(", "self", ",", "sequence", ",", "funcid", "=", "None", ")", ":", "self", ".", "tk", ".", "call", "(", "'bind'", ",", "self", ".", "_w", ",", "sequence", ",", "''", ")", "if", "funcid", ":", "self", ".", "deletecommand", "(", "...
https://github.com/smart-mobile-software/gitstack/blob/d9fee8f414f202143eb6e620529e8e5539a2af56/python/Lib/lib-tk/Tkinter.py#L986-L991
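A usage sketch in the Python 3 spelling (the record itself is from the Python 2 Tkinter):

import tkinter as tk

root = tk.Tk()
funcid = root.bind("<Return>", lambda event: print("enter pressed"))
# remove the binding and free the underlying Tcl command
root.unbind("<Return>", funcid)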
saltstack/salt
fae5bc757ad0f1716483ce7ae180b451545c2058
salt/modules/zonecfg.py
python
_property
(methode, zone, key, value)
return ret
internal handler for set and clear_property methode : string either set or clear zone : string name of zone key : string name of property value : string value of property
internal handler for set and clear_property
[ "internal", "handler", "for", "set", "and", "clear_property" ]
def _property(methode, zone, key, value):
    """
    internal handler for set and clear_property

    methode : string
        either set or clear
    zone : string
        name of zone
    key : string
        name of property
    value : string
        value of property
    """
    ret = {"status": True}

    # generate update script
    cfg_file = None
    if methode not in ["set", "clear"]:
        ret["status"] = False
        ret["message"] = "unknown methode {}!".format(methode)
    else:
        cfg_file = salt.utils.files.mkstemp()
        with salt.utils.files.fpopen(cfg_file, "w+", mode=0o600) as fp_:
            if methode == "set":
                if isinstance(value, dict) or isinstance(value, list):
                    value = _sanitize_value(value)
                value = str(value).lower() if isinstance(value, bool) else str(value)
                fp_.write("{} {}={}\n".format(methode, key, _sanitize_value(value)))
            elif methode == "clear":
                fp_.write("{} {}\n".format(methode, key))

    # update property
    if cfg_file:
        _dump_cfg(cfg_file)
        res = __salt__["cmd.run_all"](
            "zonecfg -z {zone} -f {path}".format(
                zone=zone,
                path=cfg_file,
            )
        )
        ret["status"] = res["retcode"] == 0
        ret["message"] = res["stdout"] if ret["status"] else res["stderr"]
        if ret["message"] == "":
            del ret["message"]
        else:
            ret["message"] = _clean_message(ret["message"])

        # cleanup config file
        if __salt__["file.file_exists"](cfg_file):
            __salt__["file.remove"](cfg_file)

    return ret
[ "def", "_property", "(", "methode", ",", "zone", ",", "key", ",", "value", ")", ":", "ret", "=", "{", "\"status\"", ":", "True", "}", "# generate update script", "cfg_file", "=", "None", "if", "methode", "not", "in", "[", "\"set\"", ",", "\"clear\"", "]"...
https://github.com/saltstack/salt/blob/fae5bc757ad0f1716483ce7ae180b451545c2058/salt/modules/zonecfg.py#L405-L457
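For reference, the script lines the two branches write to the temporary file look like this (key and value are illustrative):

key, value = "autoboot", True
set_line = "set {}={}".format(key, str(value).lower())   # "set autoboot=true"
clear_line = "clear {}".format(key)                      # "clear autoboot"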
adobe-type-tools/opentype-svg
57353dfd094b6ed2225f18482331ff895e003d48
lib/opentypesvg/utils.py
python
get_output_folder_path
(provided_folder_path, first_font_path)
return os.path.join(os.path.dirname(first_font_path), SVG_FOLDER_NAME)
If the path to the output folder was NOT provided, create a folder in the same directory where the first font is. If the path was provided, validate it. Returns a valid output folder.
If the path to the output folder was NOT provided, create a folder in the same directory where the first font is. If the path was provided, validate it. Returns a valid output folder.
[ "If", "the", "path", "to", "the", "output", "folder", "was", "NOT", "provided", "create", "a", "folder", "in", "the", "same", "directory", "where", "the", "first", "font", "is", ".", "If", "the", "path", "was", "provided", "validate", "it", ".", "Returns...
def get_output_folder_path(provided_folder_path, first_font_path):
    """
    If the path to the output folder was NOT provided, create a folder
    in the same directory where the first font is. If the path was
    provided, validate it. Returns a valid output folder.
    """
    if provided_folder_path:
        return validate_folder_path(provided_folder_path)

    return os.path.join(os.path.dirname(first_font_path), SVG_FOLDER_NAME)
[ "def", "get_output_folder_path", "(", "provided_folder_path", ",", "first_font_path", ")", ":", "if", "provided_folder_path", ":", "return", "validate_folder_path", "(", "provided_folder_path", ")", "return", "os", ".", "path", ".", "join", "(", "os", ".", "path", ...
https://github.com/adobe-type-tools/opentype-svg/blob/57353dfd094b6ed2225f18482331ff895e003d48/lib/opentypesvg/utils.py#L98-L107
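A usage sketch; the font path is made up and SVG_FOLDER_NAME is whatever constant opentypesvg.utils defines:

import os
from opentypesvg.utils import get_output_folder_path

out = get_output_folder_path(None, os.path.join("fonts", "MyFont.ttf"))
# no folder was provided, so the default sibling folder is returned:
# os.path.join("fonts", SVG_FOLDER_NAME)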
awesto/django-shop
13d9a77aff7eede74a5f363c1d540e005d88dbcd
shop/search/mixins.py
python
SearchViewMixin.get_document
(self, language)
[]
def get_document(self, language):
    documents = registry.get_documents([ProductModel])
    try:
        return next(doc for doc in documents if doc._language == language)
    except StopIteration:
        return next(doc for doc in documents if doc._language is None)
[ "def", "get_document", "(", "self", ",", "language", ")", ":", "documents", "=", "registry", ".", "get_documents", "(", "[", "ProductModel", "]", ")", "try", ":", "return", "next", "(", "doc", "for", "doc", "in", "documents", "if", "doc", ".", "_language...
https://github.com/awesto/django-shop/blob/13d9a77aff7eede74a5f363c1d540e005d88dbcd/shop/search/mixins.py#L9-L14
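The try/except StopIteration fallback generalizes to any "exact match, else default" lookup; a plain-data sketch:

docs = [{"lang": "en"}, {"lang": None}]

def pick(language):
    try:
        return next(d for d in docs if d["lang"] == language)
    except StopIteration:
        return next(d for d in docs if d["lang"] is None)  # language-neutral default

assert pick("de") == {"lang": None}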
entropy1337/infernal-twin
10995cd03312e39a48ade0f114ebb0ae3a711bb8
Modules/build/pillow/build/lib.linux-i686-2.7/PIL/PSDraw.py
python
PSDraw.image
(self, box, im, dpi=None)
Draw a PIL image, centered in the given box.
Draw a PIL image, centered in the given box.
[ "Draw", "a", "PIL", "image", "centered", "in", "the", "given", "box", "." ]
def image(self, box, im, dpi=None):
    """Draw a PIL image, centered in the given box."""
    # default resolution depends on mode
    if not dpi:
        if im.mode == "1":
            dpi = 200  # fax
        else:
            dpi = 100  # greyscale
    # image size (on paper)
    x = float(im.size[0] * 72) / dpi
    y = float(im.size[1] * 72) / dpi
    # max allowed size
    xmax = float(box[2] - box[0])
    ymax = float(box[3] - box[1])
    if x > xmax:
        y = y * xmax / x
        x = xmax
    if y > ymax:
        x = x * ymax / y
        y = ymax
    dx = (xmax - x) / 2 + box[0]
    dy = (ymax - y) / 2 + box[1]
    self._fp_write("gsave\n%f %f translate\n" % (dx, dy))
    if (x, y) != im.size:
        # EpsImagePlugin._save prints the image at (0,0,xsize,ysize)
        sx = x / im.size[0]
        sy = y / im.size[1]
        self._fp_write("%f %f scale\n" % (sx, sy))
    EpsImagePlugin._save(im, self.fp, None, 0)
    self._fp_write("\ngrestore\n")
[ "def", "image", "(", "self", ",", "box", ",", "im", ",", "dpi", "=", "None", ")", ":", "# default resolution depends on mode", "if", "not", "dpi", ":", "if", "im", ".", "mode", "==", "\"1\"", ":", "dpi", "=", "200", "# fax", "else", ":", "dpi", "=", ...
https://github.com/entropy1337/infernal-twin/blob/10995cd03312e39a48ade0f114ebb0ae3a711bb8/Modules/build/pillow/build/lib.linux-i686-2.7/PIL/PSDraw.py#L113-L142
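A hedged sketch against a recent Pillow, which still ships PSDraw with this interface; the output file name is arbitrary:

from PIL import Image, PSDraw

im = Image.new("RGB", (100, 80), "white")
with open("out.ps", "wb") as fp:
    ps = PSDraw.PSDraw(fp)
    ps.begin_document()
    ps.image((72, 72, 300, 300), im, dpi=100)  # centered within the box, aspect kept
    ps.end_document()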
hpfeeds/hpfeeds
539e738d7831ea6a246116acd4f2268ce7c0ef49
hpfeeds/blocking/reactor.py
python
Reactor.__init__
(self, protocol_class, connector)
[]
def __init__(self, protocol_class, connector):
    self.protocol_class = protocol_class
    self.connector = connector
    self.closing = False
    self.when_connected = threading.Event()
    self._outbox = queue.Queue()
[ "def", "__init__", "(", "self", ",", "protocol_class", ",", "connector", ")", ":", "self", ".", "protocol_class", "=", "protocol_class", "self", ".", "connector", "=", "connector", "self", ".", "closing", "=", "False", "self", ".", "when_connected", "=", "th...
https://github.com/hpfeeds/hpfeeds/blob/539e738d7831ea6a246116acd4f2268ce7c0ef49/hpfeeds/blocking/reactor.py#L19-L26
descarteslabs/descarteslabs-python
ace8a1a89d58b75df1bcaa613a4b3544d7bdc4be
descarteslabs/common/ibis/deserialization/compiler.py
python
all_equal
(left, right, cache=None)
return left == right
Check whether two objects `left` and `right` are equal. Parameters ---------- left : Union[object, Expr, Node] right : Union[object, Expr, Node] cache : Optional[Dict[Tuple[Node, Node], bool]] A dictionary indicating whether two Nodes are equal
Check whether two objects `left` and `right` are equal. Parameters ---------- left : Union[object, Expr, Node] right : Union[object, Expr, Node] cache : Optional[Dict[Tuple[Node, Node], bool]] A dictionary indicating whether two Nodes are equal
[ "Check", "whether", "two", "objects", "left", "and", "right", "are", "equal", ".", "Parameters", "----------", "left", ":", "Union", "[", "object", "Expr", "Node", "]", "right", ":", "Union", "[", "object", "Expr", "Node", "]", "cache", ":", "Optional", ...
def all_equal(left, right, cache=None):
    """Check whether two objects `left` and `right` are equal.

    Parameters
    ----------
    left : Union[object, Expr, Node]
    right : Union[object, Expr, Node]
    cache : Optional[Dict[Tuple[Node, Node], bool]]
        A dictionary indicating whether two Nodes are equal
    """
    if cache is None:
        cache = {}

    if util.is_iterable(left):
        # check that left and right are equal length iterables and that all
        # of their elements are equal
        return (
            util.is_iterable(right)
            and len(left) == len(right)
            and all(
                itertools.starmap(
                    functools.partial(all_equal, cache=cache), zip(left, right)
                )
            )
        )

    if hasattr(left, "equals"):
        if isinstance(left, ir.GeoSpatialValue):
            return super(ir.GeoSpatialValue, left).equals(right, cache=cache)
        if isinstance(left, ir.GeoSpatialColumn):
            return super(ir.GeoSpatialColumn, left).equals(right, cache=cache)
        else:
            return left.equals(right, cache=cache)

    return left == right
[ "def", "all_equal", "(", "left", ",", "right", ",", "cache", "=", "None", ")", ":", "if", "cache", "is", "None", ":", "cache", "=", "{", "}", "if", "util", ".", "is_iterable", "(", "left", ")", ":", "# check that left and right are equal length iterables and...
https://github.com/descarteslabs/descarteslabs-python/blob/ace8a1a89d58b75df1bcaa613a4b3544d7bdc4be/descarteslabs/common/ibis/deserialization/compiler.py#L39-L72
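A simplified stand-in, with no ibis dependency, showing the recursive shape of the comparison on plain sequences:

def all_equal_plain(left, right):
    # element-wise recursion on sequences, plain == elsewhere
    if isinstance(left, (list, tuple)):
        return (isinstance(right, (list, tuple))
                and len(left) == len(right)
                and all(map(all_equal_plain, left, right)))
    return left == right

assert all_equal_plain([1, [2, 3]], [1, [2, 3]])
assert not all_equal_plain([1, [2, 3]], [1, [2, 4]])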
asciidisco/plugin.video.netflix
ceb2638a9676f5839250dadfd079b9e4e4bdd759
resources/lib/Navigation.py
python
Navigation.open_settings
(self, url)
return Addon(url).openSettings()
Opens a foreign settings dialog
Opens a foreign settings dialog
[ "Opens", "a", "foreign", "settings", "dialog" ]
def open_settings(self, url):
    """Opens a foreign settings dialog"""
    url = 'inputstream.adaptive' if url == 'is' else url
    from xbmcaddon import Addon
    return Addon(url).openSettings()
[ "def", "open_settings", "(", "self", ",", "url", ")", ":", "url", "=", "'inputstream.adaptive'", "if", "url", "==", "'is'", "else", "url", "from", "xbmcaddon", "import", "Addon", "return", "Addon", "(", "url", ")", ".", "openSettings", "(", ")" ]
https://github.com/asciidisco/plugin.video.netflix/blob/ceb2638a9676f5839250dadfd079b9e4e4bdd759/resources/lib/Navigation.py#L1074-L1078
fortharris/Pcode
147962d160a834c219e12cb456abc130826468e4
Extensions/Projects/ProjectManager/ProjectView/ProjectView.py
python
SearchThread.run
(self)
[]
def run(self):
    resultsDict = {}
    for root, dirs, files in os.walk(self.projectDir):
        for i in files:
            if not self.filterDisabled:
                # keep only Python sources (.py / .pyw) when the filter is on
                if not i.endswith(('.py', '.pyw')):
                    continue
            if i.startswith(self.searchName):
                if root in resultsDict:
                    resultsDict[root].append(i)
                else:
                    resultsDict[root] = [i]
    self.foundList.emit(resultsDict)
[ "def", "run", "(", "self", ")", ":", "resultsDict", "=", "{", "}", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "self", ".", "projectDir", ")", ":", "for", "i", "in", "files", ":", "if", "not", "self", ".", "filterDisab...
https://github.com/fortharris/Pcode/blob/147962d160a834c219e12cb456abc130826468e4/Extensions/Projects/ProjectManager/ProjectView/ProjectView.py#L580-L594
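str.endswith accepts a tuple of suffixes, which keeps the extension filter above to a single call:

names = ["a.py", "b.pyw", "c.txt", "py_notes.md"]
kept = [n for n in names if n.endswith((".py", ".pyw"))]
assert kept == ["a.py", "b.pyw"]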
pypa/pip
7f8a6844037fb7255cfd0d34ff8e8cf44f2598d4
src/pip/_internal/req/req_uninstall.py
python
StashedUninstallPathSet.rollback
(self)
Undoes the uninstall by moving stashed files back.
Undoes the uninstall by moving stashed files back.
[ "Undoes", "the", "uninstall", "by", "moving", "stashed", "files", "back", "." ]
def rollback(self) -> None:
    """Undoes the uninstall by moving stashed files back."""
    for p in self._moves:
        logger.info("Moving to %s\n from %s", *p)

    for new_path, path in self._moves:
        try:
            logger.debug("Replacing %s from %s", new_path, path)
            if os.path.isfile(new_path) or os.path.islink(new_path):
                os.unlink(new_path)
            elif os.path.isdir(new_path):
                rmtree(new_path)
            renames(path, new_path)
        except OSError as ex:
            logger.error("Failed to restore %s", new_path)
            logger.debug("Exception: %s", ex)

    self.commit()
[ "def", "rollback", "(", "self", ")", "->", "None", ":", "for", "p", "in", "self", ".", "_moves", ":", "logger", ".", "info", "(", "\"Moving to %s\\n from %s\"", ",", "*", "p", ")", "for", "new_path", ",", "path", "in", "self", ".", "_moves", ":", "tr...
https://github.com/pypa/pip/blob/7f8a6844037fb7255cfd0d34ff8e8cf44f2598d4/src/pip/_internal/req/req_uninstall.py#L277-L294
NVIDIA/NeMo
5b0c0b4dec12d87d3cd960846de4105309ce938e
nemo/collections/asr/modules/rnnt.py
python
RNNTJoint.joint
(self, f: torch.Tensor, g: torch.Tensor)
return res
Compute the joint step of the network. Here, B = Batch size T = Acoustic model timesteps U = Target sequence length H1, H2 = Hidden dimensions of the Encoder / Decoder respectively H = Hidden dimension of the Joint hidden step. V = Vocabulary size of the Decoder (excluding the RNNT blank token). NOTE: The implementation of this model is slightly modified from the original paper. The original paper proposes the following steps : (enc, dec) -> Expand + Concat + Sum [B, T, U, H1+H2] -> Forward through joint hidden [B, T, U, H] -- *1 *1 -> Forward through joint final [B, T, U, V + 1]. We instead split the joint hidden into joint_hidden_enc and joint_hidden_dec and act as follows: enc -> Forward through joint_hidden_enc -> Expand [B, T, 1, H] -- *1 dec -> Forward through joint_hidden_dec -> Expand [B, 1, U, H] -- *2 (*1, *2) -> Sum [B, T, U, H] -> Forward through joint final [B, T, U, V + 1]. Args: f: Output of the Encoder model. A torch.Tensor of shape [B, T, H1] g: Output of the Decoder model. A torch.Tensor of shape [B, U, H2] Returns: Logits / log softmaxed tensor of shape (B, T, U, V + 1).
Compute the joint step of the network.
[ "Compute", "the", "joint", "step", "of", "the", "network", "." ]
def joint(self, f: torch.Tensor, g: torch.Tensor) -> torch.Tensor: """ Compute the joint step of the network. Here, B = Batch size T = Acoustic model timesteps U = Target sequence length H1, H2 = Hidden dimensions of the Encoder / Decoder respectively H = Hidden dimension of the Joint hidden step. V = Vocabulary size of the Decoder (excluding the RNNT blank token). NOTE: The implementation of this model is slightly modified from the original paper. The original paper proposes the following steps : (enc, dec) -> Expand + Concat + Sum [B, T, U, H1+H2] -> Forward through joint hidden [B, T, U, H] -- *1 *1 -> Forward through joint final [B, T, U, V + 1]. We instead split the joint hidden into joint_hidden_enc and joint_hidden_dec and act as follows: enc -> Forward through joint_hidden_enc -> Expand [B, T, 1, H] -- *1 dec -> Forward through joint_hidden_dec -> Expand [B, 1, U, H] -- *2 (*1, *2) -> Sum [B, T, U, H] -> Forward through joint final [B, T, U, V + 1]. Args: f: Output of the Encoder model. A torch.Tensor of shape [B, T, H1] g: Output of the Decoder model. A torch.Tensor of shape [B, U, H2] Returns: Logits / log softmaxed tensor of shape (B, T, U, V + 1). """ # f = [B, T, H1] f = self.enc(f) f.unsqueeze_(dim=2) # (B, T, 1, H) # g = [B, U, H2] g = self.pred(g) g.unsqueeze_(dim=1) # (B, 1, U, H) inp = f + g # [B, T, U, H] del f, g res = self.joint_net(inp) # [B, T, U, V + 1] del inp if self.preserve_memory: torch.cuda.empty_cache() # If log_softmax is automatic if self.log_softmax is None: if not res.is_cuda: # Use log softmax only if on CPU res = res.log_softmax(dim=-1) else: if self.log_softmax: res = res.log_softmax(dim=-1) return res
[ "def", "joint", "(", "self", ",", "f", ":", "torch", ".", "Tensor", ",", "g", ":", "torch", ".", "Tensor", ")", "->", "torch", ".", "Tensor", ":", "# f = [B, T, H1]", "f", "=", "self", ".", "enc", "(", "f", ")", "f", ".", "unsqueeze_", "(", "dim"...
https://github.com/NVIDIA/NeMo/blob/5b0c0b4dec12d87d3cd960846de4105309ce938e/nemo/collections/asr/modules/rnnt.py#L945-L1002
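A shape-only sketch of the broadcast sum at the heart of the joint step above, in plain torch (not the NeMo module itself, just the [B, T, 1, H] + [B, 1, U, H] trick the docstring describes):

import torch

B, T, U, H = 2, 5, 3, 8
f = torch.randn(B, T, H)   # encoder output, already projected to the joint size H
g = torch.randn(B, U, H)   # decoder output, already projected to the joint size H

# expand each along the axis it lacks and let broadcasting do the sum
joint = f.unsqueeze(2) + g.unsqueeze(1)   # [B, T, 1, H] + [B, 1, U, H]
assert joint.shape == (B, T, U, H)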
zenodo/zenodo
3c45e52a742ad5a0a7788a67b02fbbc15ab4d8d5
zenodo/modules/records/serializers/schemas/legacyjson.py
python
FileSchemaV1.dump_checksum
(self, obj)
return hashval
Dump checksum.
Dump checksum.
[ "Dump", "checksum", "." ]
def dump_checksum(self, obj): """Dump checksum.""" checksum = obj.get('checksum') if not checksum: return missing algo, hashval = checksum.split(':') if algo != 'md5': return missing return hashval
[ "def", "dump_checksum", "(", "self", ",", "obj", ")", ":", "checksum", "=", "obj", ".", "get", "(", "'checksum'", ")", "if", "not", "checksum", ":", "return", "missing", "algo", ",", "hashval", "=", "checksum", ".", "split", "(", "':'", ")", "if", "a...
https://github.com/zenodo/zenodo/blob/3c45e52a742ad5a0a7788a67b02fbbc15ab4d8d5/zenodo/modules/records/serializers/schemas/legacyjson.py#L55-L64
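A quick illustration of the checksum parsing above (the hash value is made up):

checksum = 'md5:9e107d9d372bb6826bd81d3542a419d6'
algo, hashval = checksum.split(':')
assert algo == 'md5'        # any other algorithm is dropped by the schema
assert hashval == '9e107d9d372bb6826bd81d3542a419d6'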
Pymol-Scripts/Pymol-script-repo
bcd7bb7812dc6db1595953dfa4471fa15fb68c77
modules/pdb2pqr/contrib/ZSI-2.1-a1/ZSI/TC.py
python
TypeCode.checktype
(self, elt, ps)
See if the type of the "elt" element is what we're looking for. Return the element's type. Parameters: elt -- the DOM element being parsed ps -- the ParsedSoap object.
See if the type of the "elt" element is what we're looking for. Return the element's type. Parameters: elt -- the DOM element being parsed ps -- the ParsedSoap object.
[ "See", "if", "the", "type", "of", "the", "elt", "element", "is", "what", "we", "re", "looking", "for", ".", "Return", "the", "element", "s", "type", ".", "Parameters", ":", "elt", "--", "the", "DOM", "element", "being", "parsed", "ps", "--", "the", "...
def checktype(self, elt, ps): '''See if the type of the "elt" element is what we're looking for. Return the element's type. Parameters: elt -- the DOM element being parsed ps -- the ParsedSoap object. ''' typeName = _find_type(elt) if typeName is None or typeName == "": return (None,None) # Parse the QNAME. prefix,typeName = SplitQName(typeName) uri = ps.GetElementNSdict(elt).get(prefix) if uri is None: raise EvaluateException('Malformed type attribute (bad NS)', ps.Backtrace(elt)) #typeName = list[1] parselist,errorlist = self.get_parse_and_errorlist() if not parselist or \ (uri,typeName) in parselist or \ (_is_xsd_or_soap_ns(uri) and (None,typeName) in parselist): return (uri,typeName) raise EvaluateException( 'Type mismatch (%s namespace) (got %s wanted %s)' % \ (uri, typeName, errorlist), ps.Backtrace(elt))
[ "def", "checktype", "(", "self", ",", "elt", ",", "ps", ")", ":", "typeName", "=", "_find_type", "(", "elt", ")", "if", "typeName", "is", "None", "or", "typeName", "==", "\"\"", ":", "return", "(", "None", ",", "None", ")", "# Parse the QNAME.", "prefi...
https://github.com/Pymol-Scripts/Pymol-script-repo/blob/bcd7bb7812dc6db1595953dfa4471fa15fb68c77/modules/pdb2pqr/contrib/ZSI-2.1-a1/ZSI/TC.py#L203-L229
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
openshift/installer/vendored/openshift-ansible-3.11.28-1/roles/openshift_facts/library/openshift_facts.py
python
set_builddefaults_facts
(facts)
return facts
Set build defaults including setting proxy values from http_proxy, https_proxy, no_proxy to the more specific builddefaults and builddefaults_git vars. 1. http_proxy, https_proxy, no_proxy 2. builddefaults_* 3. builddefaults_git_* Args: facts(dict): existing facts Returns: facts(dict): Updated facts with missing values
Set build defaults including setting proxy values from http_proxy, https_proxy, no_proxy to the more specific builddefaults and builddefaults_git vars. 1. http_proxy, https_proxy, no_proxy 2. builddefaults_* 3. builddefaults_git_*
[ "Set", "build", "defaults", "including", "setting", "proxy", "values", "from", "http_proxy", "https_proxy", "no_proxy", "to", "the", "more", "specific", "builddefaults", "and", "builddefaults_git", "vars", ".", "1", ".", "http_proxy", "https_proxy", "no_proxy", "2",...
def set_builddefaults_facts(facts): """ Set build defaults including setting proxy values from http_proxy, https_proxy, no_proxy to the more specific builddefaults and builddefaults_git vars. 1. http_proxy, https_proxy, no_proxy 2. builddefaults_* 3. builddefaults_git_* Args: facts(dict): existing facts Returns: facts(dict): Updated facts with missing values """ if 'builddefaults' in facts: builddefaults = facts['builddefaults'] common = facts['common'] # Copy values from common to builddefaults if 'http_proxy' not in builddefaults and 'http_proxy' in common: builddefaults['http_proxy'] = common['http_proxy'] if 'https_proxy' not in builddefaults and 'https_proxy' in common: builddefaults['https_proxy'] = common['https_proxy'] if 'no_proxy' not in builddefaults and 'no_proxy' in common: builddefaults['no_proxy'] = common['no_proxy'] # Create git specific facts from generic values, if git specific values are # not defined. if 'git_http_proxy' not in builddefaults and 'http_proxy' in builddefaults: builddefaults['git_http_proxy'] = builddefaults['http_proxy'] if 'git_https_proxy' not in builddefaults and 'https_proxy' in builddefaults: builddefaults['git_https_proxy'] = builddefaults['https_proxy'] if 'git_no_proxy' not in builddefaults and 'no_proxy' in builddefaults: builddefaults['git_no_proxy'] = builddefaults['no_proxy'] # If we're actually defining a builddefaults config then create admission_plugin_config # then merge builddefaults[config] structure into admission_plugin_config # 'config' is the 'openshift_builddefaults_json' inventory variable if 'config' in builddefaults: if 'admission_plugin_config' not in facts['master']: # Scaffold out the full expected datastructure facts['master']['admission_plugin_config'] = {'BuildDefaults': {'configuration': {'env': {}}}} facts['master']['admission_plugin_config'].update(builddefaults['config']) if 'env' in facts['master']['admission_plugin_config']['BuildDefaults']['configuration']: delete_empty_keys(facts['master']['admission_plugin_config']['BuildDefaults']['configuration']['env']) return facts
[ "def", "set_builddefaults_facts", "(", "facts", ")", ":", "if", "'builddefaults'", "in", "facts", ":", "builddefaults", "=", "facts", "[", "'builddefaults'", "]", "common", "=", "facts", "[", "'common'", "]", "# Copy values from common to builddefaults", "if", "'htt...
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.11.28-1/roles/openshift_facts/library/openshift_facts.py#L870-L914
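A minimal sketch of the proxy copy-down logic above on a toy facts dict (the keys mirror those in the function; the real code additionally merges admission_plugin_config):

facts = {
    'common': {'http_proxy': 'http://proxy:3128', 'no_proxy': 'localhost'},
    'builddefaults': {},
}

builddefaults = facts['builddefaults']
common = facts['common']
# step 1: copy generic proxy settings down from common
for key in ('http_proxy', 'https_proxy', 'no_proxy'):
    if key not in builddefaults and key in common:
        builddefaults[key] = common[key]
# step 2: derive git-specific values from the generic ones
for key in ('http_proxy', 'https_proxy', 'no_proxy'):
    if 'git_' + key not in builddefaults and key in builddefaults:
        builddefaults['git_' + key] = builddefaults[key]

assert builddefaults['git_http_proxy'] == 'http://proxy:3128'
assert builddefaults['git_no_proxy'] == 'localhost'
assert 'git_https_proxy' not in builddefaults  # nothing to inherit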
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/config_entries.py
python
ConfigEntriesFlowManager.__init__
( self, hass: HomeAssistant, config_entries: ConfigEntries, hass_config: ConfigType, )
Initialize the config entry flow manager.
Initialize the config entry flow manager.
[ "Initialize", "the", "config", "entry", "flow", "manager", "." ]
def __init__( self, hass: HomeAssistant, config_entries: ConfigEntries, hass_config: ConfigType, ) -> None: """Initialize the config entry flow manager.""" super().__init__(hass) self.config_entries = config_entries self._hass_config = hass_config
[ "def", "__init__", "(", "self", ",", "hass", ":", "HomeAssistant", ",", "config_entries", ":", "ConfigEntries", ",", "hass_config", ":", "ConfigType", ",", ")", "->", "None", ":", "super", "(", ")", ".", "__init__", "(", "hass", ")", "self", ".", "config...
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/config_entries.py#L635-L644
rkcosmos/deepcut
deb40082efa39493d91070b7c6f9767225e6c5ce
deepcut/train.py
python
generate_best_dataset
(best_path, output_path='cleaned_data', create_val=False)
Generate CSV file for training and testing data Input ===== best_path: str, path to BEST folder which contains unzipped subfolder 'article', 'encyclopedia', 'news', 'novel' output_path: str, path to output folder, the cleaned data will be saved in the given folder name where training set will be stored in the `train` folder and testing set will be stored in the `test` folder create_val: boolean, True or False, if True, divide training set into training set and validation set in `val` folder
Generate CSV file for training and testing data
[ "Generate", "CSV", "file", "for", "training", "and", "testing", "data" ]
def generate_best_dataset(best_path, output_path='cleaned_data', create_val=False): """ Generate CSV file for training and testing data Input ===== best_path: str, path to BEST folder which contains unzipped subfolder 'article', 'encyclopedia', 'news', 'novel' output_path: str, path to output folder, the cleaned data will be saved in the given folder name where training set will be stored in the `train` folder and testing set will be stored in the `test` folder create_val: boolean, True or False, if True, divide training set into training set and validation set in `val` folder """ if not os.path.isdir(output_path): os.mkdir(output_path) if not os.path.isdir(os.path.join(output_path, 'train')): os.makedirs(os.path.join(output_path, 'train')) if not os.path.isdir(os.path.join(output_path, 'test')): os.makedirs(os.path.join(output_path, 'test')) if not os.path.isdir(os.path.join(output_path, 'val')) and create_val: os.makedirs(os.path.join(output_path, 'val')) for article_type in article_types: files = glob(os.path.join(best_path, article_type, '*.txt')) files_train, files_test = train_test_split(files, random_state=0, test_size=0.1) if create_val: files_train, files_val = train_test_split(files_train, random_state=0, test_size=0.1) val_words = generate_words(files_val) val_df = create_char_dataframe(val_words) val_df.to_csv(os.path.join(output_path, 'val', 'df_best_{}_val.csv'.format(article_type)), index=False) train_words = generate_words(files_train) test_words = generate_words(files_test) train_df = create_char_dataframe(train_words) test_df = create_char_dataframe(test_words) train_df.to_csv(os.path.join(output_path, 'train', 'df_best_{}_train.csv'.format(article_type)), index=False) test_df.to_csv(os.path.join(output_path, 'test', 'df_best_{}_test.csv'.format(article_type)), index=False) print("Save {} to CSV file".format(article_type))
[ "def", "generate_best_dataset", "(", "best_path", ",", "output_path", "=", "'cleaned_data'", ",", "create_val", "=", "False", ")", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "output_path", ")", ":", "os", ".", "mkdir", "(", "output_path", ")",...
https://github.com/rkcosmos/deepcut/blob/deb40082efa39493d91070b7c6f9767225e6c5ce/deepcut/train.py#L65-L104
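The nested split above can be checked in isolation; a sketch with a fake file list, using the same train_test_split call as the function:

from sklearn.model_selection import train_test_split

files = ['doc_{}.txt'.format(i) for i in range(100)]
files_train, files_test = train_test_split(files, random_state=0, test_size=0.1)
files_train, files_val = train_test_split(files_train, random_state=0, test_size=0.1)
assert len(files_test) == 10   # 10% of 100
assert len(files_val) == 9     # 10% of the remaining 90
assert len(files_train) == 81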
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/StdLib/Lib/turtle.py
python
TurtleScreen.window_height
(self)
return self._window_size()[1]
Return the height of the turtle window. Example (for a TurtleScreen instance named screen): >>> screen.window_height() 480
Return the height of the turtle window.
[ "Return", "the", "height", "of", "the", "turtle", "window", "." ]
def window_height(self): """ Return the height of the turtle window. Example (for a TurtleScreen instance named screen): >>> screen.window_height() 480 """ return self._window_size()[1]
[ "def", "window_height", "(", "self", ")", ":", "return", "self", ".", "_window_size", "(", ")", "[", "1", "]" ]
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/turtle.py#L1317-L1324
cbanack/comic-vine-scraper
8a7071796c61a9483079ad0e9ade56fcb7596bcd
src/py/gui/forms/finishform.py
python
FinishForm.__build_gui
(self, scraped_n, skipped_n)
Constructs and initializes the gui for this form. 'scraped_n' -> the number of books that were scraped (reported to user) 'skipped_n' -> the number of books that were skipped (reported to user)
Constructs and initializes the gui for this form. 'scraped_n' -> the number of books that were scraped (reported to user) 'skipped_n' -> the number of books that were skipped (reported to user)
[ "Constructs", "and", "initializes", "the", "gui", "for", "this", "form", ".", "scraped_n", "-", ">", "the", "number", "of", "books", "that", "were", "scraped", "(", "reported", "to", "user", ")", "skipped_n", "-", ">", "the", "number", "of", "books", "th...
def __build_gui(self, scraped_n, skipped_n): ''' Constructs and initializes the gui for this form. 'scraped_n' -> the number of books that were scraped (reported to user) 'skipped_n' -> the number of books that were skipped (reported to user) ''' # 1. --- build each gui component scrape_label = self.__build_scrape_label(scraped_n) skip_label = self.__build_skip_label(skipped_n) ok = self.__build_okbutton() # 2. --- configure this form, and add all the gui components to it self.AcceptButton = ok self.AutoScaleMode = AutoScaleMode.Font self.Text = i18n.get("FinishFormTitle").format(Resources.SCRIPT_VERSION) self.ClientSize = Size(300, 90) self.Controls.Add(scrape_label) self.Controls.Add(skip_label) self.Controls.Add(ok) # 3. --- define the keyboard focus tab traversal ordering ok.TabIndex = 0
[ "def", "__build_gui", "(", "self", ",", "scraped_n", ",", "skipped_n", ")", ":", "# 1. --- build each gui component", "scrape_label", "=", "self", ".", "__build_scrape_label", "(", "scraped_n", ")", "skip_label", "=", "self", ".", "__build_skip_label", "(", "skipped...
https://github.com/cbanack/comic-vine-scraper/blob/8a7071796c61a9483079ad0e9ade56fcb7596bcd/src/py/gui/forms/finishform.py#L41-L64
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AppServer/lib/PyAMF-0.6.1/pyamf/amf0.py
python
Encoder.writeString
(self, u)
Write a unicode to the data stream.
Write a unicode to the data stream.
[ "Write", "a", "unicode", "to", "the", "data", "stream", "." ]
def writeString(self, u): """ Write a unicode to the data stream. """ s = self.context.getBytesForString(u) self.writeBytes(s)
[ "def", "writeString", "(", "self", ",", "u", ")", ":", "s", "=", "self", ".", "context", ".", "getBytesForString", "(", "u", ")", "self", ".", "writeBytes", "(", "s", ")" ]
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/lib/PyAMF-0.6.1/pyamf/amf0.py#L505-L511
lovelylain/pyctp
fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d
example/ctp/futures/ApiStruct.py
python
QrySuperUserFunction.__init__
(self, UserID='')
[]
def __init__(self, UserID=''): self.UserID = UserID
[ "def", "__init__", "(", "self", ",", "UserID", "=", "''", ")", ":", "self", ".", "UserID", "=", "UserID" ]
https://github.com/lovelylain/pyctp/blob/fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d/example/ctp/futures/ApiStruct.py#L3090-L3091
dimagi/commcare-hq
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
corehq/messaging/smsbackends/apposit/models.py
python
SQLAppositBackend.is_ethiopia_number
(self, msg)
return phone.startswith(ETHIOPIA_COUNTRY_CODE)
[]
def is_ethiopia_number(self, msg): phone = strip_plus(msg.phone_number) return phone.startswith(ETHIOPIA_COUNTRY_CODE)
[ "def", "is_ethiopia_number", "(", "self", ",", "msg", ")", ":", "phone", "=", "strip_plus", "(", "msg", ".", "phone_number", ")", "return", "phone", ".", "startswith", "(", "ETHIOPIA_COUNTRY_CODE", ")" ]
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/messaging/smsbackends/apposit/models.py#L63-L65
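The check above reduces to normalizing a leading '+' and testing the country code; a self-contained sketch (strip_plus is reimplemented here and the phone numbers are invented; 251 is Ethiopia's calling code):

ETHIOPIA_COUNTRY_CODE = '251'

def strip_plus(number):
    # stand-in for the strip_plus helper used by the backend
    return number[1:] if number.startswith('+') else number

assert strip_plus('+251911234567').startswith(ETHIOPIA_COUNTRY_CODE)
assert not strip_plus('+14155550100').startswith(ETHIOPIA_COUNTRY_CODE)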
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.py
python
yield_lines
(strs)
Yield non-empty/non-comment lines of a string or sequence
Yield non-empty/non-comment lines of a string or sequence
[ "Yield", "non", "-", "empty", "/", "non", "-", "comment", "lines", "of", "a", "string", "or", "sequence" ]
def yield_lines(strs): """Yield non-empty/non-comment lines of a string or sequence""" if isinstance(strs, six.string_types): for s in strs.splitlines(): s = s.strip() # skip blank lines/comments if s and not s.startswith('#'): yield s else: for ss in strs: for s in yield_lines(ss): yield s
[ "def", "yield_lines", "(", "strs", ")", ":", "if", "isinstance", "(", "strs", ",", "six", ".", "string_types", ")", ":", "for", "s", "in", "strs", ".", "splitlines", "(", ")", ":", "s", "=", "s", ".", "strip", "(", ")", "# skip blank lines/comments", ...
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.py#L2253-L2264
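Usage of yield_lines on a small example, with the function restated verbatim so the snippet runs on its own (it needs six, as in pkg_resources):

import six

def yield_lines(strs):
    if isinstance(strs, six.string_types):
        for s in strs.splitlines():
            s = s.strip()
            # skip blank lines/comments
            if s and not s.startswith('#'):
                yield s
    else:
        for ss in strs:
            for s in yield_lines(ss):
                yield s

text = "requests\n# a comment\n\nsix\n"
assert list(yield_lines(text)) == ['requests', 'six']
# sequences recurse: each element is handled the same way
assert list(yield_lines([text, 'extra\n#skip'])) == ['requests', 'six', 'extra']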
EmuKit/emukit
cdcb0d070d7f1c5585260266160722b636786859
emukit/bayesian_optimization/loops/bayesian_optimization_loop.py
python
BayesianOptimizationLoop.__init__
(self, space: ParameterSpace, model: IModel, acquisition: Acquisition = None, update_interval: int = 1, batch_size: int = 1, acquisition_optimizer: AcquisitionOptimizerBase = None)
Emukit class that implements a loop for building modular Bayesian optimization :param space: Input space where the optimization is carried out. :param model: The model that approximates the underlying function :param acquisition: The acquisition function that will be used to collect new points (default, EI). If batch size is greater than one, this acquisition must output positive values only. :param update_interval: Number of iterations between optimization of model hyper-parameters. Defaults to 1. :param batch_size: How many points to evaluate in one iteration of the optimization loop. Defaults to 1. :param acquisition_optimizer: Optimizer selecting next evaluation points by maximizing acquisition. Gradient based optimizer is used if None. Defaults to None.
Emukit class that implements a loop for building modular Bayesian optimization
[ "Emukit", "class", "that", "implements", "a", "loop", "for", "building", "modular", "Bayesian", "optimization" ]
def __init__(self, space: ParameterSpace, model: IModel, acquisition: Acquisition = None, update_interval: int = 1, batch_size: int = 1, acquisition_optimizer: AcquisitionOptimizerBase = None): """ Emukit class that implements a loop for building modular Bayesian optimization :param space: Input space where the optimization is carried out. :param model: The model that approximates the underlying function :param acquisition: The acquisition function that will be used to collect new points (default, EI). If batch size is greater than one, this acquisition must output positive values only. :param update_interval: Number of iterations between optimization of model hyper-parameters. Defaults to 1. :param batch_size: How many points to evaluate in one iteration of the optimization loop. Defaults to 1. :param acquisition_optimizer: Optimizer selecting next evaluation points by maximizing acquisition. Gradient based optimizer is used if None. Defaults to None. """ self.model = model if acquisition is None: acquisition = ExpectedImprovement(model) model_updaters = FixedIntervalUpdater(model, update_interval) if acquisition_optimizer is None: acquisition_optimizer = GradientAcquisitionOptimizer(space) if batch_size == 1: _log.info("Batch size is 1, using SequentialPointCalculator") candidate_point_calculator = SequentialPointCalculator(acquisition, acquisition_optimizer) else: _log.info("Batch size is " + str(batch_size) + ", using LocalPenalizationPointCalculator") log_acquisition = LogAcquisition(acquisition) candidate_point_calculator = LocalPenalizationPointCalculator(log_acquisition, acquisition_optimizer, model, space, batch_size) loop_state = create_loop_state(model.X, model.Y) super().__init__(candidate_point_calculator, model_updaters, loop_state)
[ "def", "__init__", "(", "self", ",", "space", ":", "ParameterSpace", ",", "model", ":", "IModel", ",", "acquisition", ":", "Acquisition", "=", "None", ",", "update_interval", ":", "int", "=", "1", ",", "batch_size", ":", "int", "=", "1", ",", "acquisitio...
https://github.com/EmuKit/emukit/blob/cdcb0d070d7f1c5585260266160722b636786859/emukit/bayesian_optimization/loops/bayesian_optimization_loop.py#L23-L61
kamalgill/flask-appengine-template
11760f83faccbb0d0afe416fc58e67ecfb4643c2
src/lib/click/core.py
python
Parameter.value_from_envvar
(self, ctx)
return rv
[]
def value_from_envvar(self, ctx): rv = self.resolve_envvar_value(ctx) if rv is not None and self.nargs != 1: rv = self.type.split_envvar_value(rv) return rv
[ "def", "value_from_envvar", "(", "self", ",", "ctx", ")", ":", "rv", "=", "self", ".", "resolve_envvar_value", "(", "ctx", ")", "if", "rv", "is", "not", "None", "and", "self", ".", "nargs", "!=", "1", ":", "rv", "=", "self", ".", "type", ".", "spli...
https://github.com/kamalgill/flask-appengine-template/blob/11760f83faccbb0d0afe416fc58e67ecfb4643c2/src/lib/click/core.py#L1386-L1390
nucleic/enaml
65c2a2a2d765e88f2e1103046680571894bb41ed
enaml/workbench/ui/ui_plugin.py
python
UIPlugin._refresh_actions
(self)
Refresh the actions for the workbench window.
Refresh the actions for the workbench window.
[ "Refresh", "the", "actions", "for", "the", "workbench", "window", "." ]
def _refresh_actions(self): """ Refresh the actions for the workbench window. """ workbench = self.workbench point = workbench.get_extension_point(ACTIONS_POINT) extensions = point.extensions if not extensions: self._action_extensions.clear() self._model.menus = [] return menu_items = [] action_items = [] new_extensions = {} old_extensions = self._action_extensions for extension in extensions: if extension in old_extensions: m_items, a_items = old_extensions[extension] else: m_items, a_items = self._create_action_items(extension) new_extensions[extension] = (m_items, a_items) menu_items.extend(m_items) action_items.extend(a_items) menus = create_menus(workbench, menu_items, action_items) self._action_extensions = new_extensions self._model.menus = menus
[ "def", "_refresh_actions", "(", "self", ")", ":", "workbench", "=", "self", ".", "workbench", "point", "=", "workbench", ".", "get_extension_point", "(", "ACTIONS_POINT", ")", "extensions", "=", "point", ".", "extensions", "if", "not", "extensions", ":", "self...
https://github.com/nucleic/enaml/blob/65c2a2a2d765e88f2e1103046680571894bb41ed/enaml/workbench/ui/ui_plugin.py#L348-L375
EmilyAlsentzer/clinicalBERT
a9d91698929b7189311bba364ccdd0360e847276
lm_pretraining/create_pretraining_data.py
python
create_training_instances
(input_files, tokenizer, max_seq_length, dupe_factor, short_seq_prob, masked_lm_prob, max_predictions_per_seq, rng)
return instances
Create `TrainingInstance`s from raw text.
Create `TrainingInstance`s from raw text.
[ "Create", "TrainingInstance", "s", "from", "raw", "text", "." ]
def create_training_instances(input_files, tokenizer, max_seq_length, dupe_factor, short_seq_prob, masked_lm_prob, max_predictions_per_seq, rng): """Create `TrainingInstance`s from raw text.""" all_documents = [[]] # Input file format: # (1) One sentence per line. These should ideally be actual sentences, not # entire paragraphs or arbitrary spans of text. (Because we use the # sentence boundaries for the "next sentence prediction" task). # (2) Blank lines between documents. Document boundaries are needed so # that the "next sentence prediction" task doesn't span between documents. for input_file in input_files: with tf.gfile.GFile(input_file, "r") as reader: while True: line = tokenization.convert_to_unicode(reader.readline()) if not line: break line = line.strip() # Empty lines are used as document delimiters if not line: all_documents.append([]) tokens = tokenizer.tokenize(line) if tokens: all_documents[-1].append(tokens) # Remove empty documents all_documents = [x for x in all_documents if x] rng.shuffle(all_documents) vocab_words = list(tokenizer.vocab.keys()) instances = [] for _ in range(dupe_factor): for document_index in range(len(all_documents)): instances.extend( create_instances_from_document( all_documents, document_index, max_seq_length, short_seq_prob, masked_lm_prob, max_predictions_per_seq, vocab_words, rng)) rng.shuffle(instances) return instances
[ "def", "create_training_instances", "(", "input_files", ",", "tokenizer", ",", "max_seq_length", ",", "dupe_factor", ",", "short_seq_prob", ",", "masked_lm_prob", ",", "max_predictions_per_seq", ",", "rng", ")", ":", "all_documents", "=", "[", "[", "]", "]", "# In...
https://github.com/EmilyAlsentzer/clinicalBERT/blob/a9d91698929b7189311bba364ccdd0360e847276/lm_pretraining/create_pretraining_data.py#L175-L216
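The input format described in the comments above, as a concrete invented two-document sample:

sample = """\
He went to the store.
He bought a gallon of milk.

The note was dictated at 9am.
It was signed by the attending.
"""
# one sentence per line; the blank line separates documents, so that
# "next sentence prediction" pairs never cross a document boundary
docs = [d.splitlines() for d in sample.split('\n\n')]
assert len(docs) == 2 and all(len(d) == 2 for d in docs)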
mlrun/mlrun
4c120719d64327a34b7ee1ab08fb5e01b258b00a
mlrun/k8s_utils.py
python
format_labels
(labels)
Convert a dictionary of labels into a comma separated string
Convert a dictionary of labels into a comma separated string
[ "Convert", "a", "dictionary", "of", "labels", "into", "a", "comma", "separated", "string" ]
def format_labels(labels): """ Convert a dictionary of labels into a comma separated string """ if labels: return ",".join([f"{k}={v}" for k, v in labels.items()]) else: return ""
[ "def", "format_labels", "(", "labels", ")", ":", "if", "labels", ":", "return", "\",\"", ".", "join", "(", "[", "f\"{k}={v}\"", "for", "k", ",", "v", "in", "labels", ".", "items", "(", ")", "]", ")", "else", ":", "return", "\"\"" ]
https://github.com/mlrun/mlrun/blob/4c120719d64327a34b7ee1ab08fb5e01b258b00a/mlrun/k8s_utils.py#L560-L565
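For example, restating the one-liner so the check runs on its own:

def format_labels(labels):
    """Convert a dictionary of labels into a comma separated string"""
    if labels:
        return ",".join([f"{k}={v}" for k, v in labels.items()])
    else:
        return ""

assert format_labels({'app': 'mlrun', 'tier': 'db'}) == 'app=mlrun,tier=db'
assert format_labels({}) == ''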
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_hxb2/lib/python3.5/site-packages/compressor/cache.py
python
socket_cachekey
(key)
return 'django_compressor.%s.%s' % (socket.gethostname(), force_text(key))
[]
def socket_cachekey(key): return 'django_compressor.%s.%s' % (socket.gethostname(), force_text(key))
[ "def", "socket_cachekey", "(", "key", ")", ":", "return", "'django_compressor.%s.%s'", "%", "(", "socket", ".", "gethostname", "(", ")", ",", "force_text", "(", "key", ")", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_hxb2/lib/python3.5/site-packages/compressor/cache.py#L31-L32
google/rekall
55d1925f2df9759a989b35271b4fa48fc54a1c86
tools/layout_expert/layout_expert/c_ast/c_ast.py
python
CTypeReference.__init__
(self, name, **kw)
Initializes a CTypeReference object with type name. Args: name: a string representing the name of the referenced type.
Initializes a CTypeReference object with type name.
[ "Initializes", "a", "CTypeReference", "object", "with", "type", "name", "." ]
def __init__(self, name, **kw): """Initializes a CTypeReference object with type name. Args: name: a string representing the name of the referenced type. """ super(CTypeReference, self).__init__(**kw) self.name = name
[ "def", "__init__", "(", "self", ",", "name", ",", "*", "*", "kw", ")", ":", "super", "(", "CTypeReference", ",", "self", ")", ".", "__init__", "(", "*", "*", "kw", ")", "self", ".", "name", "=", "name" ]
https://github.com/google/rekall/blob/55d1925f2df9759a989b35271b4fa48fc54a1c86/tools/layout_expert/layout_expert/c_ast/c_ast.py#L240-L247
OpenEndedGroup/Field
4f7c8edfb01bb0ccc927b78d3c500f018a4ae37c
Contents/lib/python/string.py
python
swapcase
(s)
return s.swapcase()
swapcase(s) -> string Return a copy of the string s with upper case characters converted to lowercase and vice versa.
swapcase(s) -> string
[ "swapcase", "(", "s", ")", "-", ">", "string" ]
def swapcase(s): """swapcase(s) -> string Return a copy of the string s with upper case characters converted to lowercase and vice versa. """ return s.swapcase()
[ "def", "swapcase", "(", "s", ")", ":", "return", "s", ".", "swapcase", "(", ")" ]
https://github.com/OpenEndedGroup/Field/blob/4f7c8edfb01bb0ccc927b78d3c500f018a4ae37c/Contents/lib/python/string.py#L236-L243
saltstack/salt
fae5bc757ad0f1716483ce7ae180b451545c2058
salt/states/postgres_language.py
python
__virtual__
()
return True
Only load if the postgres module is present
Only load if the postgres module is present
[ "Only", "load", "if", "the", "postgres", "module", "is", "present" ]
def __virtual__(): """ Only load if the postgres module is present """ if "postgres.language_create" not in __salt__: return ( False, "Unable to load postgres module. Make sure `postgres.bins_dir` is set.", ) return True
[ "def", "__virtual__", "(", ")", ":", "if", "\"postgres.language_create\"", "not", "in", "__salt__", ":", "return", "(", "False", ",", "\"Unable to load postgres module. Make sure `postgres.bins_dir` is set.\"", ",", ")", "return", "True" ]
https://github.com/saltstack/salt/blob/fae5bc757ad0f1716483ce7ae180b451545c2058/salt/states/postgres_language.py#L25-L34
naftaliharris/tauthon
5587ceec329b75f7caf6d65a036db61ac1bae214
Lib/mailbox.py
python
Maildir.add
(self, message)
return uniq
Add message and return assigned key.
Add message and return assigned key.
[ "Add", "message", "and", "return", "assigned", "key", "." ]
def add(self, message): """Add message and return assigned key.""" tmp_file = self._create_tmp() try: self._dump_message(message, tmp_file) except BaseException: tmp_file.close() os.remove(tmp_file.name) raise _sync_close(tmp_file) if isinstance(message, MaildirMessage): subdir = message.get_subdir() suffix = self.colon + message.get_info() if suffix == self.colon: suffix = '' else: subdir = 'new' suffix = '' uniq = os.path.basename(tmp_file.name).split(self.colon)[0] dest = os.path.join(self._path, subdir, uniq + suffix) if isinstance(message, MaildirMessage): os.utime(tmp_file.name, (os.path.getatime(tmp_file.name), message.get_date())) # No file modification should be done after the file is moved to its # final position in order to prevent race conditions with changes # from other programs try: if hasattr(os, 'link'): os.link(tmp_file.name, dest) os.remove(tmp_file.name) else: os.rename(tmp_file.name, dest) except OSError, e: os.remove(tmp_file.name) if e.errno == errno.EEXIST: raise ExternalClashError('Name clash with existing message: %s' % dest) else: raise return uniq
[ "def", "add", "(", "self", ",", "message", ")", ":", "tmp_file", "=", "self", ".", "_create_tmp", "(", ")", "try", ":", "self", ".", "_dump_message", "(", "message", ",", "tmp_file", ")", "except", "BaseException", ":", "tmp_file", ".", "close", "(", "...
https://github.com/naftaliharris/tauthon/blob/5587ceec329b75f7caf6d65a036db61ac1bae214/Lib/mailbox.py#L267-L306
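The tmp-then-link/rename delivery pattern above, reduced to its core as a sketch (paths and names are made up; a real maildir also generates unique names and encodes flags in the info suffix):

import os

def deliver(maildir, name, payload):
    # write under tmp/, then move into new/, so readers never observe
    # a partially written message (the same flow as Maildir.add above)
    tmp_path = os.path.join(maildir, 'tmp', name)
    with open(tmp_path, 'wb') as f:
        f.write(payload)
    dest = os.path.join(maildir, 'new', name)
    if hasattr(os, 'link'):
        os.link(tmp_path, dest)   # hard link + unlink keeps tmp intact if linking fails
        os.remove(tmp_path)
    else:
        os.rename(tmp_path, dest)
    return dest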
dmlc/dgl
8d14a739bc9e446d6c92ef83eafe5782398118de
python/dgl/_deprecate/graph.py
python
DGLGraph.pop_n_repr
(self, key)
return self._node_frame.pop(key)
Get and remove the specified node repr. Parameters ---------- key : str The attribute name. Returns ------- Tensor The popped representation
Get and remove the specified node repr.
[ "Get", "and", "remove", "the", "specified", "node", "repr", "." ]
def pop_n_repr(self, key): """Get and remove the specified node repr. Parameters ---------- key : str The attribute name. Returns ------- Tensor The popped representation """ return self._node_frame.pop(key)
[ "def", "pop_n_repr", "(", "self", ",", "key", ")", ":", "return", "self", ".", "_node_frame", ".", "pop", "(", "key", ")" ]
https://github.com/dmlc/dgl/blob/8d14a739bc9e446d6c92ef83eafe5782398118de/python/dgl/_deprecate/graph.py#L2358-L2371
samuelclay/NewsBlur
2c45209df01a1566ea105e04d499367f32ac9ad2
apps/static/views.py
python
android
(request)
return render(request, 'static/android.xhtml')
[]
def android(request): return render(request, 'static/android.xhtml')
[ "def", "android", "(", "request", ")", ":", "return", "render", "(", "request", ",", "'static/android.xhtml'", ")" ]
https://github.com/samuelclay/NewsBlur/blob/2c45209df01a1566ea105e04d499367f32ac9ad2/apps/static/views.py#L48-L49
glutanimate/review-heatmap
c758478125b60a81c66c87c35b12b7968ec0a348
src/review_heatmap/libaddon/gui/basic/interface.py
python
CommonWidgetInterface._getComboCurrentValue
(self, combo_widget)
return (text, data)
Get current (item_text, item_data) tuple
Get current (item_text, item_data) tuple
[ "Get", "current", "(", "item_text", "item_data", ")", "tuple" ]
def _getComboCurrentValue(self, combo_widget): """ Get current (item_text, item_data) tuple """ index = self._getComboCurrentIndex(combo_widget) text = combo_widget.currentText() data = combo_widget.itemData(index, Qt.UserRole) return (text, data)
[ "def", "_getComboCurrentValue", "(", "self", ",", "combo_widget", ")", ":", "index", "=", "self", ".", "_getComboCurrentIndex", "(", "combo_widget", ")", "text", "=", "combo_widget", ".", "currentText", "(", ")", "data", "=", "combo_widget", ".", "itemData", "...
https://github.com/glutanimate/review-heatmap/blob/c758478125b60a81c66c87c35b12b7968ec0a348/src/review_heatmap/libaddon/gui/basic/interface.py#L912-L919
BillBillBillBill/Tickeys-linux
2df31b8665004c58a5d4ab05277f245267d96364
tickeys/kivy_32/kivy/core/image/__init__.py
python
Image.height
(self)
return self._size[1]
Image height
Image height
[ "Image", "height" ]
def height(self): '''Image height ''' return self._size[1]
[ "def", "height", "(", "self", ")", ":", "return", "self", ".", "_size", "[", "1", "]" ]
https://github.com/BillBillBillBill/Tickeys-linux/blob/2df31b8665004c58a5d4ab05277f245267d96364/tickeys/kivy_32/kivy/core/image/__init__.py#L746-L749
unknown-horizons/unknown-horizons
7397fb333006d26c3d9fe796c7bd9cb8c3b43a49
horizons/network/packets/__init__.py
python
packet.__init__
(self)
ctor
ctor
[ "ctor" ]
def __init__(self): """ctor"""
[ "def", "__init__", "(", "self", ")", ":" ]
https://github.com/unknown-horizons/unknown-horizons/blob/7397fb333006d26c3d9fe796c7bd9cb8c3b43a49/horizons/network/packets/__init__.py#L113-L114
Pymol-Scripts/Pymol-script-repo
bcd7bb7812dc6db1595953dfa4471fa15fb68c77
modules/ADT/AutoDockTools/cluster_ad.py
python
Cluster_AD.set_reference
(self, reference=None)
reference should be an index into the data array. If it's a problem to have the reference be part of the data array, reference could be a separate instance of data.
reference should be an index into the data array. If it's a problem to have the reference be part of the data array, reference could be a separate instance of data.
[ "reference", "should", "be", "an", "index", "into", "the", "data", "array", ".", "If", "it", "s", "a", "problem", "to", "have", "the", "reference", "be", "part", "of", "the", "data", "array", "reference", "could", "be", "a", "separate", "instance", "of",...
def set_reference(self, reference=None): """reference should be an index into the data array. If it's a problem to have the reference be part of the data array, reference could be a separate instance of data. """ if not reference: self.reference = self.argsort[0] # reference lowest energy conformation else: self.reference = reference self.get_coords(self.mol, self.reference)
[ "def", "set_reference", "(", "self", ",", "reference", "=", "None", ")", ":", "if", "not", "reference", ":", "self", ".", "reference", "=", "self", ".", "argsort", "[", "0", "]", "# reference lowest energy conformation", "else", ":", "self", ".", "reference"...
https://github.com/Pymol-Scripts/Pymol-script-repo/blob/bcd7bb7812dc6db1595953dfa4471fa15fb68c77/modules/ADT/AutoDockTools/cluster_ad.py#L46-L55
gbeced/pyalgotrade
ad2bcc6b25c06c66eee4a8d522ce844504d8ec62
pyalgotrade/talibext/indicator.py
python
TRIX
(ds, count, timeperiod=-2**31)
return call_talib_with_ds(ds, count, talib.TRIX, timeperiod)
1-day Rate-Of-Change (ROC) of a Triple Smooth EMA
1-day Rate-Of-Change (ROC) of a Triple Smooth EMA
[ "1", "-", "day", "Rate", "-", "Of", "-", "Change", "(", "ROC", ")", "of", "a", "Triple", "Smooth", "EMA" ]
def TRIX(ds, count, timeperiod=-2**31): """1-day Rate-Of-Change (ROC) of a Triple Smooth EMA""" return call_talib_with_ds(ds, count, talib.TRIX, timeperiod)
[ "def", "TRIX", "(", "ds", ",", "count", ",", "timeperiod", "=", "-", "2", "**", "31", ")", ":", "return", "call_talib_with_ds", "(", "ds", ",", "count", ",", "talib", ".", "TRIX", ",", "timeperiod", ")" ]
https://github.com/gbeced/pyalgotrade/blob/ad2bcc6b25c06c66eee4a8d522ce844504d8ec62/pyalgotrade/talibext/indicator.py#L850-L852
coleifer/walrus
2583ac8ba81d3f6aa43fccbe28c0c13b99a1fa9d
walrus/graph.py
python
Graph.store
(self, s, p, o)
Store a subject-predicate-object triple in the database.
Store a subject-predicate-object triple in the database.
[ "Store", "a", "subject", "-", "predicate", "-", "object", "triple", "in", "the", "database", "." ]
def store(self, s, p, o): """ Store a subject-predicate-object triple in the database. """ with self.walrus.atomic(): for key in self.keys_for_values(s, p, o): self._z[key] = 0
[ "def", "store", "(", "self", ",", "s", ",", "p", ",", "o", ")", ":", "with", "self", ".", "walrus", ".", "atomic", "(", ")", ":", "for", "key", "in", "self", ".", "keys_for_values", "(", "s", ",", "p", ",", "o", ")", ":", "self", ".", "_z", ...
https://github.com/coleifer/walrus/blob/2583ac8ba81d3f6aa43fccbe28c0c13b99a1fa9d/walrus/graph.py#L82-L88
ipython/ipykernel
0ab288fe42f3155c7c7d9257c9be8cf093d175e0
ipykernel/iostream.py
python
IOPubThread._really_send
(self, msg, *args, **kwargs)
The callback that actually sends messages
The callback that actually sends messages
[ "The", "callback", "that", "actually", "sends", "messages" ]
def _really_send(self, msg, *args, **kwargs): """The callback that actually sends messages""" mp_mode = self._check_mp_mode() if mp_mode != CHILD: # we are master, do a regular send self.socket.send_multipart(msg, *args, **kwargs) else: # we are a child, pipe to master # new context/socket for every pipe-out # since forks don't teardown politely, use ctx.term to ensure send has completed ctx, pipe_out = self._setup_pipe_out() pipe_out.send_multipart([self._pipe_uuid] + msg, *args, **kwargs) pipe_out.close() ctx.term()
[ "def", "_really_send", "(", "self", ",", "msg", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "mp_mode", "=", "self", ".", "_check_mp_mode", "(", ")", "if", "mp_mode", "!=", "CHILD", ":", "# we are master, do a regular send", "self", ".", "socket", ...
https://github.com/ipython/ipykernel/blob/0ab288fe42f3155c7c7d9257c9be8cf093d175e0/ipykernel/iostream.py#L225-L239
kozistr/Awesome-GANs
b4b9a3b8c3fd1d32c864dc5655d80c0650aebee1
awesome_gans/modules.py
python
conv1d
(x, f=64, k=3, s=1, pad='SAME', reuse=None, is_train=True, name='conv1d')
return tf.layers.conv1d( inputs=x, filters=f, kernel_size=k, strides=s, kernel_initializer=w_init, kernel_regularizer=w_reg, bias_initializer=b_init, padding=pad, reuse=reuse, name=name, )
:param x: input :param f: filters :param k: kernel size :param s: strides :param pad: padding :param reuse: reusable :param is_train: trainable :param name: scope name :return: net
:param x: input :param f: filters :param k: kernel size :param s: strides :param pad: padding :param reuse: reusable :param is_train: trainable :param name: scope name :return: net
[ ":", "param", "x", ":", "input", ":", "param", "f", ":", "filters", ":", "param", "k", ":", "kernel", "size", ":", "param", "s", ":", "strides", ":", "param", "pad", ":", "padding", ":", "param", "reuse", ":", "reusable", ":", "param", "is_train", ...
def conv1d(x, f=64, k=3, s=1, pad='SAME', reuse=None, is_train=True, name='conv1d'): """ :param x: input :param f: filters :param k: kernel size :param s: strides :param pad: padding :param reuse: reusable :param is_train: trainable :param name: scope name :return: net """ return tf.layers.conv1d( inputs=x, filters=f, kernel_size=k, strides=s, kernel_initializer=w_init, kernel_regularizer=w_reg, bias_initializer=b_init, padding=pad, reuse=reuse, name=name, )
[ "def", "conv1d", "(", "x", ",", "f", "=", "64", ",", "k", "=", "3", ",", "s", "=", "1", ",", "pad", "=", "'SAME'", ",", "reuse", "=", "None", ",", "is_train", "=", "True", ",", "name", "=", "'conv1d'", ")", ":", "return", "tf", ".", "layers",...
https://github.com/kozistr/Awesome-GANs/blob/b4b9a3b8c3fd1d32c864dc5655d80c0650aebee1/awesome_gans/modules.py#L135-L158
sknetwork-team/scikit-network
6e74a2338f53f1e54a25e7e19ab9fcf56371c275
sknetwork/linkpred/postprocessing.py
python
is_edge
(adjacency: sparse.csr_matrix, query: Union[int, Iterable, Tuple])
Given a query, return whether each edge is actually in the adjacency. Parameters ---------- adjacency : Adjacency matrix of the graph. query : int, Iterable or Tuple * If int i, queries (i, j) for all j. * If Iterable of integers, return queries (i, j) for i in query, for all j. * If tuple (i, j), queries (i, j). * If list of tuples or array of shape (n_queries, 2), queries (i, j) for each line in query. Returns ------- y_true : Union[bool, np.ndarray] For each element in the query, returns ``True`` if the edge exists in the adjacency and ``False`` otherwise. Examples -------- >>> from sknetwork.data import house >>> adjacency = house() >>> is_edge(adjacency, 0) array([False, True, False, False, True]) >>> is_edge(adjacency, [0, 1]) array([[False, True, False, False, True], [ True, False, True, False, True]]) >>> is_edge(adjacency, (0, 1)) True >>> is_edge(adjacency, [(0, 1), (0, 2)]) array([ True, False])
Given a query, return whether each edge is actually in the adjacency.
[ "Given", "a", "query", "return", "whether", "each", "edge", "is", "actually", "in", "the", "adjacency", "." ]
def is_edge(adjacency: sparse.csr_matrix, query: Union[int, Iterable, Tuple]) -> Union[bool, np.ndarray]: """Given a query, return whether each edge is actually in the adjacency. Parameters ---------- adjacency : Adjacency matrix of the graph. query : int, Iterable or Tuple * If int i, queries (i, j) for all j. * If Iterable of integers, return queries (i, j) for i in query, for all j. * If tuple (i, j), queries (i, j). * If list of tuples or array of shape (n_queries, 2), queries (i, j) for each line in query. Returns ------- y_true : Union[bool, np.ndarray] For each element in the query, returns ``True`` if the edge exists in the adjacency and ``False`` otherwise. Examples -------- >>> from sknetwork.data import house >>> adjacency = house() >>> is_edge(adjacency, 0) array([False, True, False, False, True]) >>> is_edge(adjacency, [0, 1]) array([[False, True, False, False, True], [ True, False, True, False, True]]) >>> is_edge(adjacency, (0, 1)) True >>> is_edge(adjacency, [(0, 1), (0, 2)]) array([ True, False]) """ if np.issubdtype(type(query), np.integer): return adjacency[query].toarray().astype(bool).ravel() if isinstance(query, Tuple): source, target = query neighbors = adjacency.indices[adjacency.indptr[source]:adjacency.indptr[source + 1]] return bool(np.isin(target, neighbors, assume_unique=True)) if isinstance(query, list): query = np.array(query) if isinstance(query, np.ndarray): if query.ndim == 1: return adjacency[query].toarray().astype(bool) elif query.ndim == 2 and query.shape[1] == 2: y_true = [] for edge in query: y_true.append(is_edge(adjacency, (edge[0], edge[1]))) return np.array(y_true) else: raise ValueError("Query not understood.") else: raise ValueError("Query not understood.")
[ "def", "is_edge", "(", "adjacency", ":", "sparse", ".", "csr_matrix", ",", "query", ":", "Union", "[", "int", ",", "Iterable", ",", "Tuple", "]", ")", "->", "Union", "[", "bool", ",", "np", ".", "ndarray", "]", ":", "if", "np", ".", "issubdtype", "...
https://github.com/sknetwork-team/scikit-network/blob/6e74a2338f53f1e54a25e7e19ab9fcf56371c275/sknetwork/linkpred/postprocessing.py#L13-L64
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/setuptools/__init__.py
python
Command.__init__
(self, dist, **kw)
Construct the command for dist, updating vars(self) with any keyword parameters.
Construct the command for dist, updating vars(self) with any keyword parameters.
[ "Construct", "the", "command", "for", "dist", "updating", "vars", "(", "self", ")", "with", "any", "keyword", "parameters", "." ]
def __init__(self, dist, **kw): """ Construct the command for dist, updating vars(self) with any keyword parameters. """ _Command.__init__(self, dist) vars(self).update(kw)
[ "def", "__init__", "(", "self", ",", "dist", ",", "*", "*", "kw", ")", ":", "_Command", ".", "__init__", "(", "self", ",", "dist", ")", "vars", "(", "self", ")", ".", "update", "(", "kw", ")" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/setuptools/__init__.py#L142-L148
nextstrain/ncov
71e7d593e5c97b67ad657bca41fb8e61b50c2803
workflow/lib/persistent_dict.py
python
ItemDirManager.error_clean_up
(self)
[]
def error_clean_up(self): if self.delete_on_error: self.reset()
[ "def", "error_clean_up", "(", "self", ")", ":", "if", "self", ".", "delete_on_error", ":", "self", ".", "reset", "(", ")" ]
https://github.com/nextstrain/ncov/blob/71e7d593e5c97b67ad657bca41fb8e61b50c2803/workflow/lib/persistent_dict.py#L172-L174
leo-editor/leo-editor
383d6776d135ef17d73d935a2f0ecb3ac0e99494
leo/core/leoGlobals.py
python
scanForAtLanguage
(c: Cmdr, p: Pos)
return c.target_language
Scan position p and p's ancestors looking only for @language and @ignore directives. Returns the language found, or c.target_language.
Scan position p and p's ancestors looking only for @language and @ignore directives.
[ "Scan", "position", "p", "and", "p", "s", "ancestors", "looking", "only", "for", "@language", "and", "@ignore", "directives", "." ]
def scanForAtLanguage(c: Cmdr, p: Pos): """Scan position p and p's ancestors looking only for @language and @ignore directives. Returns the language found, or c.target_language.""" # Unlike the code in x.scanAllDirectives, this code ignores @comment directives. if c and p: for p in p.self_and_parents(copy=False): d = g.get_directives_dict(p) if 'language' in d: z = d["language"] language, delim1, delim2, delim3 = g.set_language(z, 0) return language return c.target_language
[ "def", "scanForAtLanguage", "(", "c", ":", "Cmdr", ",", "p", ":", "Pos", ")", ":", "# Unlike the code in x.scanAllDirectives, this code ignores @comment directives.", "if", "c", "and", "p", ":", "for", "p", "in", "p", ".", "self_and_parents", "(", "copy", "=", "...
https://github.com/leo-editor/leo-editor/blob/383d6776d135ef17d73d935a2f0ecb3ac0e99494/leo/core/leoGlobals.py#L3631-L3643
rlgraph/rlgraph
428fc136a9a075f29a397495b4226a491a287be2
rlgraph/components/layers/nn/residual_layer.py
python
ResidualLayer.__init__
(self, residual_unit, repeats=2, scope="residual-layer", **kwargs)
Args: residual_unit (NeuralNetwork): repeats (int): The number of times that the residual unit should be repeated before applying the addition with the original input and the activation function.
Args: residual_unit (NeuralNetwork):
[ "Args", ":", "residual_unit", "(", "NeuralNetwork", ")", ":" ]
def __init__(self, residual_unit, repeats=2, scope="residual-layer", **kwargs): """ Args: residual_unit (NeuralNetwork): repeats (int): The number of times that the residual unit should be repeated before applying the addition with the original input and the activation function. """ super(ResidualLayer, self).__init__(scope=scope, **kwargs) self.residual_unit = residual_unit self.repeats = repeats # Copy the repeat_units n times and add them to this Component. self.residual_units = [self.residual_unit] + [ self.residual_unit.copy(scope=self.residual_unit.scope+"-rep"+str(i)) for i in range(repeats - 1) ] self.add_components(*self.residual_units)
[ "def", "__init__", "(", "self", ",", "residual_unit", ",", "repeats", "=", "2", ",", "scope", "=", "\"residual-layer\"", ",", "*", "*", "kwargs", ")", ":", "super", "(", "ResidualLayer", ",", "self", ")", ".", "__init__", "(", "scope", "=", "scope", ",...
https://github.com/rlgraph/rlgraph/blob/428fc136a9a075f29a397495b4226a491a287be2/rlgraph/components/layers/nn/residual_layer.py#L33-L50
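The repeat-then-add idea above, sketched in plain PyTorch rather than RLgraph (the unit here is an arbitrary small layer, and deepcopy stands in for Component.copy):

import copy
import torch
import torch.nn as nn

class Residual(nn.Module):
    def __init__(self, unit, repeats=2):
        super().__init__()
        # one original unit plus (repeats - 1) independent copies,
        # mirroring the copy-per-repetition in ResidualLayer above
        self.units = nn.ModuleList([unit] + [copy.deepcopy(unit) for _ in range(repeats - 1)])

    def forward(self, x):
        out = x
        for unit in self.units:
            out = unit(out)
        # add the original input back, then apply the activation
        return torch.relu(x + out)

block = Residual(nn.Linear(8, 8), repeats=2)
assert block(torch.randn(4, 8)).shape == (4, 8)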
chengzhengxin/groupsoftmax-simpledet
3f63a00998c57fee25241cf43a2e8600893ea462
config/cascade_r50v2_c5_red_1x.py
python
get_config
(is_train)
return General, KvstoreParam, RpnParam, RoiParam, BboxParam, DatasetParam, \ ModelParam, OptimizeParam, TestParam, \ transform, data_name, label_name, metric_list
[]
def get_config(is_train): class General: log_frequency = 10 name = __name__.rsplit("/")[-1].rsplit(".")[-1] batch_image = 2 if is_train else 1 fp16 = False class KvstoreParam: kvstore = "local" batch_image = General.batch_image gpus = [0, 1, 2, 3, 4, 5, 6, 7] fp16 = General.fp16 class NormalizeParam: # normalizer = normalizer_factory(type="syncbn", ndev=len(KvstoreParam.gpus)) normalizer = normalizer_factory(type="fixbn") class BackboneParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer class NeckParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer class reduce: channel = 1024 class RpnParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer batch_image = General.batch_image class anchor_generate: scale = (2, 4, 8, 16, 32) ratio = (0.5, 1.0, 2.0) stride = 16 image_anchor = 256 class head: conv_channel = 512 mean = (0, 0, 0, 0) std = (1, 1, 1, 1) class proposal: pre_nms_top_n = 12000 if is_train else 6000 post_nms_top_n = 2000 if is_train else 1000 nms_thr = 0.7 min_bbox_side = 0 class subsample_proposal: proposal_wo_gt = False image_roi = 256 fg_fraction = 0.25 fg_thr = 0.5 bg_thr_hi = 0.5 bg_thr_lo = 0.0 class bbox_target: num_reg_class = 2 class_agnostic = True weight = (1.0, 1.0, 1.0, 1.0) mean = (0.0, 0.0, 0.0, 0.0) std = (0.1, 0.1, 0.2, 0.2) class BboxParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer num_class = 1 + 80 image_roi = 256 batch_image = General.batch_image stage = "1st" loss_weight = 1.0 class regress_target: class_agnostic = True mean = (0.0, 0.0, 0.0, 0.0) std = (0.1, 0.1, 0.2, 0.2) class subsample_proposal: proposal_wo_gt = False image_roi = 256 fg_fraction = 0.25 fg_thr = 0.6 bg_thr_hi = 0.6 bg_thr_lo = 0.0 class bbox_target: num_reg_class = 2 class_agnostic = True weight = (1.0, 1.0, 1.0, 1.0) mean = (0.0, 0.0, 0.0, 0.0) std = (0.05, 0.05, 0.1, 0.1) class BboxParam2nd: fp16 = General.fp16 normalizer = NormalizeParam.normalizer num_class = 1 + 80 image_roi = 256 batch_image = General.batch_image stage = "2nd" loss_weight = 0.5 class regress_target: class_agnostic = True mean = (0.0, 0.0, 0.0, 0.0) std = (0.05, 0.05, 0.1, 0.1) class subsample_proposal: proposal_wo_gt = False image_roi = 256 fg_fraction = 0.25 fg_thr = 0.7 bg_thr_hi = 0.7 bg_thr_lo = 0.0 class bbox_target: num_reg_class = 2 class_agnostic = True weight = (1.0, 1.0, 1.0, 1.0) mean = (0.0, 0.0, 0.0, 0.0) std = (0.033, 0.033, 0.067, 0.067) class BboxParam3rd: fp16 = General.fp16 normalizer = NormalizeParam.normalizer num_class = 1 + 80 image_roi = 256 batch_image = General.batch_image stage = "3rd" loss_weight = 0.25 class regress_target: class_agnostic = True mean = (0.0, 0.0, 0.0, 0.0) std = (0.033, 0.033, 0.067, 0.067) class subsample_proposal: proposal_wo_gt = None image_roi = None fg_fraction = None fg_thr = None bg_thr_hi = None bg_thr_lo = None class bbox_target: num_reg_class = None class_agnostic = None weight = None mean = None std = None class RoiParam: fp16 = General.fp16 normalizer = NormalizeParam.normalizer out_size = 7 stride = 16 class DatasetParam: if is_train: image_set = ("coco_train2014", "coco_valminusminival2014") else: image_set = ("coco_minival2014", ) backbone = Backbone(BackboneParam) neck = Neck(NeckParam) rpn_head = RpnHead(RpnParam) roi_extractor = RoiExtractor(RoiParam) bbox_head = BboxHead(BboxParam) bbox_head_2nd = BboxHead(BboxParam2nd) bbox_head_3rd = BboxHead(BboxParam3rd) detector = Detector() if is_train: train_sym = detector.get_train_symbol( backbone, neck, rpn_head, roi_extractor, bbox_head, bbox_head_2nd, bbox_head_3rd ) rpn_test_sym = None test_sym = None else: train_sym = None rpn_test_sym = detector.get_rpn_test_symbol(backbone, neck, rpn_head) test_sym = detector.get_test_symbol( backbone, neck, rpn_head, roi_extractor, bbox_head, bbox_head_2nd, bbox_head_3rd ) class ModelParam: train_symbol = train_sym test_symbol = test_sym rpn_test_symbol = rpn_test_sym from_scratch = False random = True memonger = False memonger_until = "stage3_unit21_plus" class pretrain: prefix = "pretrain_model/resnet-50" epoch = 0 fixed_param = ["conv0", "stage1", "gamma", "beta"] class OptimizeParam: class optimizer: type = "sgd" lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image momentum = 0.9 wd = 0.0001 clip_gradient = None class schedule: begin_epoch = 0 end_epoch = 6 lr_iter = [60000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image), 80000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image)] class warmup: type = "gradual" lr = 0.0 iter = 3000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image) class TestParam: min_det_score = 0.05 max_det_per_image = 100 process_roidb = lambda x: x process_output = lambda x, y: x class model: prefix = "experiments/{}/checkpoint".format(General.name) epoch = OptimizeParam.schedule.end_epoch class nms: type = "nms" thr = 0.5 class coco: annotation = "data/coco/annotations/instances_minival2014.json" # data processing class ResizeParam: short = 800 long = 1200 if is_train else 2000 class PadParam: short = 800 long = 1200 max_num_gt = 100 class AnchorTarget2DParam: class generate: short = 800 // 16 long = 1200 // 16 stride = 16 scales = (2, 4, 8, 16, 32) aspects = (0.5, 1.0, 2.0) class assign: allowed_border = 0 pos_thr = 0.7 neg_thr = 0.3 min_pos_thr = 0.0 class sample: image_anchor = 256 pos_fraction = 0.5 class RenameParam: mapping = dict(image="data") from core.detection_input import ReadRoiRecord, Resize2DImageBbox, \ ConvertImageFromHwcToChw, Flip2DImageBbox, Pad2DImageBbox, \ RenameRecord, AnchorTarget2D if is_train: transform = [ ReadRoiRecord(None), Resize2DImageBbox(ResizeParam), Flip2DImageBbox(), Pad2DImageBbox(PadParam), ConvertImageFromHwcToChw(), AnchorTarget2D(AnchorTarget2DParam), RenameRecord(RenameParam.mapping) ] data_name = ["data", "im_info", "gt_bbox"] label_name = ["rpn_cls_label", "rpn_reg_target", "rpn_reg_weight"] else: transform = [ ReadRoiRecord(None), Resize2DImageBbox(ResizeParam), ConvertImageFromHwcToChw(), RenameRecord(RenameParam.mapping) ] data_name = ["data", "im_info", "im_id", "rec_id"] label_name = [] import core.detection_metric as metric rpn_acc_metric = metric.AccWithIgnore( "RpnAcc", ["rpn_cls_loss_output"], ["rpn_cls_label"] ) rpn_l1_metric = metric.L1( "RpnL1", ["rpn_reg_loss_output"], ["rpn_cls_label"] ) # for bbox, the label is generated in network so it is an output # stage1 metric box_acc_metric_1st = metric.AccWithIgnore( "RcnnAcc_1st", ["bbox_cls_loss_1st_output", "bbox_label_blockgrad_1st_output"], [] ) box_l1_metric_1st = metric.L1( "RcnnL1_1st", ["bbox_reg_loss_1st_output", "bbox_label_blockgrad_1st_output"], [] ) # stage2 metric box_acc_metric_2nd = metric.AccWithIgnore( "RcnnAcc_2nd", ["bbox_cls_loss_2nd_output", "bbox_label_blockgrad_2nd_output"], [] ) box_l1_metric_2nd = metric.L1( "RcnnL1_2nd", ["bbox_reg_loss_2nd_output", "bbox_label_blockgrad_2nd_output"], [] ) # stage3 metric box_acc_metric_3rd = metric.AccWithIgnore( "RcnnAcc_3rd", ["bbox_cls_loss_3rd_output", "bbox_label_blockgrad_3rd_output"], [] ) box_l1_metric_3rd = metric.L1( "RcnnL1_3rd", ["bbox_reg_loss_3rd_output", "bbox_label_blockgrad_3rd_output"], [] ) metric_list = [ rpn_acc_metric, rpn_l1_metric, box_acc_metric_1st, box_l1_metric_1st, box_acc_metric_2nd, box_l1_metric_2nd, box_acc_metric_3rd, box_l1_metric_3rd ] return General, KvstoreParam, RpnParam, RoiParam, BboxParam, DatasetParam, \ ModelParam, OptimizeParam, TestParam, \ transform, data_name, label_name, metric_list
[ "def", "get_config", "(", "is_train", ")", ":", "class", "General", ":", "log_frequency", "=", "10", "name", "=", "__name__", ".", "rsplit", "(", "\"/\"", ")", "[", "-", "1", "]", ".", "rsplit", "(", "\".\"", ")", "[", "-", "1", "]", "batch_image", ...
https://github.com/chengzhengxin/groupsoftmax-simpledet/blob/3f63a00998c57fee25241cf43a2e8600893ea462/config/cascade_r50v2_c5_red_1x.py#L10-L392
perfectblue/ctf-writeups
38c81bbce254885da6b485dd1dd76798120bc98c
2019/ctfzone-quals-2019/ocb2/ocb/aes.py
python
AES._expandKey
(self, key)
return expandedKey
Rijndael's key expansion expands a 128-, 192- or 256-bit key into a 176-, 208- or 240-byte key expandedKey is a pointer to a char array of large enough size key is a pointer to a non-expanded key
Rijndael's key expansion expands a 128-, 192- or 256-bit key into a 176-, 208- or 240-byte key
[ "Rijndael", "s", "key", "expansion", "expands", "a", "128", "192", "or", "256", "bit", "key", "into", "a", "176", "208", "or", "240", "byte", "key" ]
def _expandKey(self, key): """ Rijndael's key expansion expands a 128-, 192- or 256-bit key into a 176-, 208- or 240-byte key expandedKey is a pointer to a char array of large enough size key is a pointer to a non-expanded key """ size = self.keyBitSize // 8 # current expanded keySize, in bytes currentSize = 0 rconIteration = 1 # temporary 4-byte variable t = bytearray([0, 0, 0, 0]) assert self.expandedKeySize expandedKey = bytearray(self.expandedKeySize) # set the 16,24,32 bytes of the expanded key to the input key for j in range(size): expandedKey[j] = key[j] currentSize += size while currentSize < self.expandedKeySize: # assign the previous 4 bytes to the temporary value t for k in range(4): t[k] = expandedKey[(currentSize - 4) + k] # # every 16,24,32 bytes we apply the core schedule to t # and increment rconIteration afterwards # if currentSize % size == 0: t = self._core(t, rconIteration) rconIteration += 1 # For 256-bit keys, we add an extra sbox to the calculation if size == (256 // 8) and ((currentSize % size) == 16): for l in range(4): t[l] = self._getSBoxValue(t[l]) # # We XOR t with the four-byte block 16,24,32 bytes before the new expanded key. # This becomes the next four bytes in the expanded key. # for m in range(4): expandedKey[currentSize] = expandedKey[currentSize - size] ^ t[m] currentSize += 1 return expandedKey
[ "def", "_expandKey", "(", "self", ",", "key", ")", ":", "size", "=", "self", ".", "keyBitSize", "//", "8", "# current expanded keySize, in bytes", "currentSize", "=", "0", "rconIteration", "=", "1", "# temporary 4-byte variable", "t", "=", "bytearray", "(", "[",...
https://github.com/perfectblue/ctf-writeups/blob/38c81bbce254885da6b485dd1dd76798120bc98c/2019/ctfzone-quals-2019/ocb2/ocb/aes.py#L181-L226
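A quick check of the size arithmetic in the docstring above: AES expands the key into 4*(Nr+1) words, where the round count Nr is Nk+6 and Nk is the key length in 32-bit words. A minimal, stdlib-only sketch, independent of the class above:

# key bits -> expanded key bytes: 128 -> 176, 192 -> 208, 256 -> 240
expanded = {}
for key_bits in (128, 192, 256):
    nk = key_bits // 32      # key length in 32-bit words
    nr = nk + 6              # AES round count: 10, 12, 14
    expanded[key_bits] = 16 * (nr + 1)  # one 16-byte round key per round, plus the initial key
assert expanded == {128: 176, 192: 208, 256: 240}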
XX-net/XX-Net
a9898cfcf0084195fb7e69b6bc834e59aecdf14f
python3.8.2/Lib/pkg_resources/__init__.py
python
IResourceProvider.get_resource_filename
(manager, resource_name)
Return a true filesystem path for `resource_name` `manager` must be an ``IResourceManager``
Return a true filesystem path for `resource_name`
[ "Return", "a", "true", "filesystem", "path", "for", "resource_name" ]
def get_resource_filename(manager, resource_name): """Return a true filesystem path for `resource_name` `manager` must be an ``IResourceManager``"""
[ "def", "get_resource_filename", "(", "manager", ",", "resource_name", ")", ":" ]
https://github.com/XX-net/XX-Net/blob/a9898cfcf0084195fb7e69b6bc834e59aecdf14f/python3.8.2/Lib/pkg_resources/__init__.py#L528-L531
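This provider method is usually reached through the module-level helpers rather than called directly; a minimal sketch of the common entry point (the resource is chosen only for illustration):

import pkg_resources

# resolve a resource inside an installed package to a real filesystem path,
# extracting it from a zip archive first if necessary
path = pkg_resources.resource_filename('pkg_resources', '__init__.py')
print(path)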
ceph/calamari
5b8fa1424280984f434f22fd4db69e26de1c12f6
rlyeh/rlyeh/manager/request_collection.py
python
RequestCollection._on_rados_completion
(self, minion_id, request, result)
Handle JID completion from a ceph.rados_commands operation
Handle JID completion from a ceph.rados_commands operation
[ "Handle", "JID", "completion", "from", "a", "ceph", ".", "rados_commands", "operation" ]
def _on_rados_completion(self, minion_id, request, result): """ Handle JID completion from a ceph.rados_commands operation """ if request.state != UserRequest.SUBMITTED: # Unexpected, ignore. log.error("Received completion for request %s/%s in state %s" % ( request.id, request.jid, request.state )) return if result['error']: # This indicates a failure within ceph.rados_commands which was caught # by our code, like one of our Ceph commands returned an error code. # NB in future there may be UserRequest subclasses which want to receive # and handle these errors themselves, so this branch would be refactored # to allow that. log.error("Request %s experienced an error: %s" % (request.id, result['error_status'])) request.jid = None request.set_error(result['error_status']) request.complete() return try: with self._update_index(request): old_jid = request.jid request.complete_jid(result) assert request.jid != old_jid # After a jid completes, requests may start waiting for cluster # map updates, we ask ClusterMonitor to hurry up and get them on # behalf of the request. if request.awaiting_versions: assert request.fsid cluster_monitor = self._manager.clusters[request.fsid] for sync_type, version in request.awaiting_versions.items(): if version is not None: log.debug("Notifying cluster of awaited version %s/%s" % (sync_type.str, version)) cluster_monitor.on_version(minion_id, sync_type, version) # The request may be waiting for an epoch that we already have, if so # give it to the request right away for sync_type, want_version in request.awaiting_versions.items(): sync_object = cluster_monitor.get_sync_object(sync_type) if want_version and sync_type.cmp(sync_object.version, want_version) >= 0: log.info("Awaited %s %s is immediately available" % (sync_type, want_version)) request.on_map(sync_type, sync_object) except Exception as e: # Ensure that a misbehaving piece of code in a UserRequest subclass # results in a terminated job, not a zombie job log.exception("Calling complete_jid for %s/%s" % (request.id, request.jid)) request.jid = None request.set_error("Internal error %s" % e) request.complete()
[ "def", "_on_rados_completion", "(", "self", ",", "minion_id", ",", "request", ",", "result", ")", ":", "if", "request", ".", "state", "!=", "UserRequest", ".", "SUBMITTED", ":", "# Unexpected, ignore.", "log", ".", "error", "(", "\"Received completion for request ...
https://github.com/ceph/calamari/blob/5b8fa1424280984f434f22fd4db69e26de1c12f6/rlyeh/rlyeh/manager/request_collection.py#L199-L254
homebysix/recipe-robot
fc51b3134b6db7cd86641785d75a0b994ae88154
scripts/recipe_robot_lib/curler.py
python
check_url
(url, headers=None)
return url, head, None
Test a URL's headers, and switch to HTTPS if available. Args: url (str): The URL to check for HTTPS and get response headers from. headers (dict, optional): Header key/values to be added to HTTP request. Defaults to None. Returns: tuple: Tuple (string, dict, None) consisting of the checked URL (which may be an HTTPS equivalent to the provided HTTP URL), a dictionary of the header response from the URL, and a placeholder None value.
Test a URL's headers, and switch to HTTPS if available.
[ "Test", "a", "URL", "s", "headers", "and", "switch", "to", "HTTPS", "if", "available", "." ]
def check_url(url, headers=None): """Test a URL's headers, and switch to HTTPS if available. Args: url (str): The URL to check for HTTPS and get response headers from. headers (dict, optional): Header key/values to be added to HTTP request. Defaults to None. Returns: tuple: Tuple (string, dict, None) consisting of the checked URL (which may be an HTTPS equivalent to the provided HTTP URL), a dictionary of the header response from the URL, and a placeholder None value. """ # Switch to HTTPS if possible. if url.startswith("http:"): robo_print("Checking for HTTPS URL...", LogLevel.VERBOSE, 4) head, retcode = get_headers("https" + url[4:], headers=headers) if retcode == 0 and int(head.get("http_result_code")) < 400: url = "https" + url[4:] robo_print("Found HTTPS URL: %s" % url, LogLevel.VERBOSE, 4) return url, head, None else: robo_print("No usable HTTPS URL found.", LogLevel.VERBOSE, 4) # Get URL headers. head, retcode = get_headers(url, headers=headers) http_result = int(head.get("http_result_code")) if retcode == 0 and http_result < 400: return url, head, None # Try to mitigate errors. if http_result == 403: ua_safari = ( "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) " "AppleWebKit/605.1.15 (KHTML, like Gecko) " "Version/14.0.1 Safari/605.1.15" ) ua_chrome = ( "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) " "AppleWebKit/537.36 (KHTML, like Gecko) " "Chrome/87.0.4280.88 Safari/537.36" ) # Devmate URLs require a user-agent and don't need a HEAD check first. if "updates.devmate.com" in url: return url, head, ua_safari # Skip domains for which 403 is a known false positive on HEAD check. if any((x in url for x in KNOWN_403_ON_HEAD)): return url, head, None # Try again with alternate user-agents. The default headers is None, but the # retry below assigns into it, so fall back to an empty dict first. if headers is None: headers = {} for ua in (ua_safari, ua_chrome): headers["user-agent"] = ua head, retcode = get_headers(url, headers=headers) if int(head.get("http_result_code")) < 400: robo_print( "Using browser user-agent.", LogLevel.VERBOSE, 4, ) return url, head, headers["user-agent"] return url, head, None
[ "def", "check_url", "(", "url", ",", "headers", "=", "None", ")", ":", "# Switch to HTTPS if possible.", "if", "url", ".", "startswith", "(", "\"http:\"", ")", ":", "robo_print", "(", "\"Checking for HTTPS URL...\"", ",", "LogLevel", ".", "VERBOSE", ",", "4", ...
https://github.com/homebysix/recipe-robot/blob/fc51b3134b6db7cd86641785d75a0b994ae88154/scripts/recipe_robot_lib/curler.py#L300-L364
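A hedged usage sketch for check_url above; the import path is inferred from the file path in the record, and the URL is a placeholder:

from recipe_robot_lib.curler import check_url  # assumed import path

url, head, user_agent = check_url("http://example.com/app.dmg", headers={})
print(url)                            # possibly upgraded to https://...
print(head.get("http_result_code"))   # e.g. 200
if user_agent:                        # set when a browser user-agent was needed
    print("retry succeeded with:", user_agent)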
tensorflow/datasets
2e496976d7d45550508395fb2f35cf958c8a3414
tensorflow_datasets/core/features/audio_feature.py
python
Audio.repr_html
(self, ex: np.ndarray)
return (f'<audio controls src="data:audio/ogg;base64,{audio_str}" ' ' controlsList="nodownload" />')
Audio is displayed in the player.
Audio is displayed in the player.
[ "Audio", "is", "displayed", "in", "the", "player", "." ]
def repr_html(self, ex: np.ndarray) -> str: """Audio are displayed in the player.""" if self.sample_rate: rate = self.sample_rate else: # We should display an error message once to warn the user the sample # rate was auto-infered. Requirements: # * Should appear only once (even though repr_html is called once per # examples) # * Ideally should appear on Colab (while `logging.warning` is hidden # by default) rate = 16000 audio_str = utils.get_base64(lambda buff: _save_wav(buff, ex, rate)) return (f'<audio controls src="data:audio/ogg;base64,{audio_str}" ' ' controlsList="nodownload" />')
[ "def", "repr_html", "(", "self", ",", "ex", ":", "np", ".", "ndarray", ")", "->", "str", ":", "if", "self", ".", "sample_rate", ":", "rate", "=", "self", ".", "sample_rate", "else", ":", "# We should display an error message once to warn the user the sample", "#...
https://github.com/tensorflow/datasets/blob/2e496976d7d45550508395fb2f35cf958c8a3414/tensorflow_datasets/core/features/audio_feature.py#L110-L125
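The embed pattern above — serialize the waveform, base64 it, inline it as a data URI — can be reproduced with the stdlib alone. A minimal sketch with a synthetic 440 Hz tone; the real code delegates the WAV encoding to _save_wav via utils.get_base64, and tags the payload audio/ogg, whereas audio/wav is used here to match the actual bytes:

import base64, io, math, struct, wave

buff = io.BytesIO()
with wave.open(buff, "wb") as w:
    w.setnchannels(1)
    w.setsampwidth(2)       # 16-bit PCM
    w.setframerate(16000)   # the fallback rate used above
    samples = [int(3000 * math.sin(2 * math.pi * 440 * i / 16000)) for i in range(1600)]
    w.writeframes(struct.pack("<%dh" % len(samples), *samples))
audio_str = base64.b64encode(buff.getvalue()).decode("ascii")
html = f'<audio controls src="data:audio/wav;base64,{audio_str}" controlsList="nodownload" />'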
wang502/slack-sql
f6531e8bad65c09be779469c3bebdc89a28eccbf
PyGreSQL-5.0/pgdb.py
python
Cursor.callproc
(self, procname, parameters=None)
return parameters
Call a stored database procedure with the given name. The sequence of parameters must contain one entry for each input argument that the procedure expects. The result of the call is the same as this input sequence; replacement of output and input/output parameters in the return value is currently not supported. The procedure may also provide a result set as output. These can be requested through the standard fetch methods of the cursor.
Call a stored database procedure with the given name.
[ "Call", "a", "stored", "database", "procedure", "with", "the", "given", "name", "." ]
def callproc(self, procname, parameters=None): """Call a stored database procedure with the given name. The sequence of parameters must contain one entry for each input argument that the procedure expects. The result of the call is the same as this input sequence; replacement of output and input/output parameters in the return value is currently not supported. The procedure may also provide a result set as output. These can be requested through the standard fetch methods of the cursor. """ n = parameters and len(parameters) or 0 query = 'select * from "%s"(%s)' % (procname, ','.join(n * ['%s'])) self.execute(query, parameters) return parameters
[ "def", "callproc", "(", "self", ",", "procname", ",", "parameters", "=", "None", ")", ":", "n", "=", "parameters", "and", "len", "(", "parameters", ")", "or", "0", "query", "=", "'select * from \"%s\"(%s)'", "%", "(", "procname", ",", "','", ".", "join",...
https://github.com/wang502/slack-sql/blob/f6531e8bad65c09be779469c3bebdc89a28eccbf/PyGreSQL-5.0/pgdb.py#L990-L1004
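A hedged usage sketch of the DB-API call above, substituting a built-in SQL function for a stored procedure (connection parameters are placeholders):

import pgdb  # PyGreSQL's DB-API 2 module, per the record's path

con = pgdb.connect(database="testdb")   # placeholder connection parameters
cur = con.cursor()
cur.callproc("upper", ("hello",))       # executes: select * from "upper"(%s)
print(cur.fetchone())                   # the single result row, e.g. ('HELLO',)
con.close()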
pantsbuild/pex
473c6ac732ed4bc338b4b20a9ec930d1d722c9b4
pex/vendor/_vendored/setuptools/setuptools/_vendor/packaging/version.py
python
LegacyVersion.release
(self)
return None
[]
def release(self): return None
[ "def", "release", "(", "self", ")", ":", "return", "None" ]
https://github.com/pantsbuild/pex/blob/473c6ac732ed4bc338b4b20a9ec930d1d722c9b4/pex/vendor/_vendored/setuptools/setuptools/_vendor/packaging/version.py#L92-L93
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py
python
SQLCompiler.__init__
(self, dialect, statement, column_keys=None, inline=False, **kwargs)
Construct a new :class:`.SQLCompiler` object. :param dialect: :class:`.Dialect` to be used :param statement: :class:`.ClauseElement` to be compiled :param column_keys: a list of column names to be compiled into an INSERT or UPDATE statement. :param inline: whether to generate INSERT statements as "inline", e.g. not formatted to return any generated defaults :param kwargs: additional keyword arguments to be consumed by the superclass.
Construct a new :class:`.SQLCompiler` object.
[ "Construct", "a", "new", ":", "class", ":", ".", "SQLCompiler", "object", "." ]
def __init__(self, dialect, statement, column_keys=None, inline=False, **kwargs): """Construct a new :class:`.SQLCompiler` object. :param dialect: :class:`.Dialect` to be used :param statement: :class:`.ClauseElement` to be compiled :param column_keys: a list of column names to be compiled into an INSERT or UPDATE statement. :param inline: whether to generate INSERT statements as "inline", e.g. not formatted to return any generated defaults :param kwargs: additional keyword arguments to be consumed by the superclass. """ self.column_keys = column_keys # compile INSERT/UPDATE defaults/sequences inlined (no pre- # execute) self.inline = inline or getattr(statement, 'inline', False) # a dictionary of bind parameter keys to BindParameter # instances. self.binds = {} # a dictionary of BindParameter instances to "compiled" names # that are actually present in the generated SQL self.bind_names = util.column_dict() # stack which keeps track of nested SELECT statements self.stack = [] # relates label names in the final SQL to a tuple of local # column/label name, ColumnElement object (if any) and # TypeEngine. ResultProxy uses this for type processing and # column targeting self._result_columns = [] # true if the paramstyle is positional self.positional = dialect.positional if self.positional: self.positiontup = [] self._numeric_binds = dialect.paramstyle == "numeric" self.bindtemplate = BIND_TEMPLATES[dialect.paramstyle] self.ctes = None self.label_length = dialect.label_length \ or dialect.max_identifier_length # a map which tracks "anonymous" identifiers that are created on # the fly here self.anon_map = util.PopulateDict(self._process_anon) # a map which tracks "truncated" names based on # dialect.label_length or dialect.max_identifier_length self.truncated_names = {} Compiled.__init__(self, dialect, statement, **kwargs) if ( self.isinsert or self.isupdate or self.isdelete ) and statement._returning: self.returning = statement._returning if self.positional and self._numeric_binds: self._apply_numbered_params()
[ "def", "__init__", "(", "self", ",", "dialect", ",", "statement", ",", "column_keys", "=", "None", ",", "inline", "=", "False", ",", "*", "*", "kwargs", ")", ":", "self", ".", "column_keys", "=", "column_keys", "# compile INSERT/UPDATE defaults/sequences inlined...
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py#L393-L461
wucng/TensorExpand
4ea58f64f5c5082b278229b799c9f679536510b7
TensorExpand/Object detection/YOLO/tensorflow-yolo/yolo/net/yolo_net.py
python
YoloNet.inference
(self, images)
return predicts
Build the yolo model Args: images: 4-D tensor [batch_size, image_height, image_width, channels] Returns: predicts: 4-D tensor [batch_size, cell_size, cell_size, num_classes + 5 * boxes_per_cell]
Build the yolo model
[ "Build", "the", "yolo", "model" ]
def inference(self, images): """Build the yolo model Args: images: 4-D tensor [batch_size, image_height, image_width, channels] Returns: predicts: 4-D tensor [batch_size, cell_size, cell_size, num_classes + 5 * boxes_per_cell] """ conv_num = 1 temp_conv = self.conv2d('conv' + str(conv_num), images, [7, 7, 3, 64], stride=2) conv_num += 1 temp_pool = self.max_pool(temp_conv, [2, 2], 2) temp_conv = self.conv2d('conv' + str(conv_num), temp_pool, [3, 3, 64, 192], stride=1) conv_num += 1 temp_pool = self.max_pool(temp_conv, [2, 2], 2) temp_conv = self.conv2d('conv' + str(conv_num), temp_pool, [1, 1, 192, 128], stride=1) conv_num += 1 temp_conv = self.conv2d('conv' + str(conv_num), temp_conv, [3, 3, 128, 256], stride=1) conv_num += 1 temp_conv = self.conv2d('conv' + str(conv_num), temp_conv, [1, 1, 256, 256], stride=1) conv_num += 1 temp_conv = self.conv2d('conv' + str(conv_num), temp_conv, [3, 3, 256, 512], stride=1) conv_num += 1 temp_conv = self.max_pool(temp_conv, [2, 2], 2) for i in range(4): temp_conv = self.conv2d('conv' + str(conv_num), temp_conv, [1, 1, 512, 256], stride=1) conv_num += 1 temp_conv = self.conv2d('conv' + str(conv_num), temp_conv, [3, 3, 256, 512], stride=1) conv_num += 1 temp_conv = self.conv2d('conv' + str(conv_num), temp_conv, [1, 1, 512, 512], stride=1) conv_num += 1 temp_conv = self.conv2d('conv' + str(conv_num), temp_conv, [3, 3, 512, 1024], stride=1) conv_num += 1 temp_conv = self.max_pool(temp_conv, [2, 2], 2) for i in range(2): temp_conv = self.conv2d('conv' + str(conv_num), temp_conv, [1, 1, 1024, 512], stride=1) conv_num += 1 temp_conv = self.conv2d('conv' + str(conv_num), temp_conv, [3, 3, 512, 1024], stride=1) conv_num += 1 temp_conv = self.conv2d('conv' + str(conv_num), temp_conv, [3, 3, 1024, 1024], stride=1) conv_num += 1 temp_conv = self.conv2d('conv' + str(conv_num), temp_conv, [3, 3, 1024, 1024], stride=2) conv_num += 1 # temp_conv = self.conv2d('conv' + str(conv_num), temp_conv, [3, 3, 1024, 1024], stride=1) conv_num += 1 temp_conv = self.conv2d('conv' + str(conv_num), temp_conv, [3, 3, 1024, 1024], stride=1) conv_num += 1 #Fully connected layer local1 = self.local('local1', temp_conv, 49 * 1024, 4096) local1 = tf.nn.dropout(local1, keep_prob=0.5) local2 = self.local('local2', local1, 4096, self.cell_size * self.cell_size * ( self.num_classes + 5 * self.boxes_per_cell), leaky=False) local2 = tf.reshape(local2, [tf.shape(local2)[0], self.cell_size, self.cell_size, self.num_classes + 5 * self.boxes_per_cell]) predicts = local2 return predicts
[ "def", "inference", "(", "self", ",", "images", ")", ":", "conv_num", "=", "1", "temp_conv", "=", "self", ".", "conv2d", "(", "'conv'", "+", "str", "(", "conv_num", ")", ",", "images", ",", "[", "7", ",", "7", ",", "3", ",", "64", "]", ",", "st...
https://github.com/wucng/TensorExpand/blob/4ea58f64f5c5082b278229b799c9f679536510b7/TensorExpand/Object detection/YOLO/tensorflow-yolo/yolo/net/yolo_net.py#L33-L116
arsaboo/homeassistant-config
53c998986fbe84d793a0b174757154ab30e676e4
custom_components/aarlo/pyaarlo/device.py
python
ArloChildDevice.charger_type
(self)
return self._load(CHARGER_KEY, "None")
Returns how the device is recharging.
Returns how the device is recharging.
[ "Returns", "how", "the", "device", "is", "recharging", "." ]
def charger_type(self): """Returns how the device is recharging.""" return self._load(CHARGER_KEY, "None")
[ "def", "charger_type", "(", "self", ")", ":", "return", "self", ".", "_load", "(", "CHARGER_KEY", ",", "\"None\"", ")" ]
https://github.com/arsaboo/homeassistant-config/blob/53c998986fbe84d793a0b174757154ab30e676e4/custom_components/aarlo/pyaarlo/device.py#L350-L352
openhatch/oh-mainline
ce29352a034e1223141dcc2f317030bbc3359a51
vendor/packages/celery/celery/result.py
python
ResultSet.completed_count
(self)
return sum(imap(int, (result.successful() for result in self.results)))
Task completion count. :returns: the number of tasks completed.
Task completion count.
[ "Task", "completion", "count", "." ]
def completed_count(self): """Task completion count. :returns: the number of tasks completed. """ return sum(imap(int, (result.successful() for result in self.results)))
[ "def", "completed_count", "(", "self", ")", ":", "return", "sum", "(", "imap", "(", "int", ",", "(", "result", ".", "successful", "(", ")", "for", "result", "in", "self", ".", "results", ")", ")", ")" ]
https://github.com/openhatch/oh-mainline/blob/ce29352a034e1223141dcc2f317030bbc3359a51/vendor/packages/celery/celery/result.py#L308-L314
pyparallel/pyparallel
11e8c6072d48c8f13641925d17b147bf36ee0ba3
Lib/imaplib.py
python
IMAP4.rename
(self, oldmailbox, newmailbox)
return self._simple_command('RENAME', oldmailbox, newmailbox)
Rename old mailbox name to new. (typ, [data]) = <instance>.rename(oldmailbox, newmailbox)
Rename old mailbox name to new.
[ "Rename", "old", "mailbox", "name", "to", "new", "." ]
def rename(self, oldmailbox, newmailbox): """Rename old mailbox name to new. (typ, [data]) = <instance>.rename(oldmailbox, newmailbox) """ return self._simple_command('RENAME', oldmailbox, newmailbox)
[ "def", "rename", "(", "self", ",", "oldmailbox", ",", "newmailbox", ")", ":", "return", "self", ".", "_simple_command", "(", "'RENAME'", ",", "oldmailbox", ",", "newmailbox", ")" ]
https://github.com/pyparallel/pyparallel/blob/11e8c6072d48c8f13641925d17b147bf36ee0ba3/Lib/imaplib.py#L641-L646
sauce-archives/isign
26ad90f8a7930b42d090d0565e8626eb229fc73c
isign/archive.py
python
AppArchive.__init__
(self, path)
[]
def __init__(self, path): self.path = path self.relative_bundle_dir = '.' self.bundle_info = self.get_info(self.path)
[ "def", "__init__", "(", "self", ",", "path", ")", ":", "self", ".", "path", "=", "path", "self", ".", "relative_bundle_dir", "=", "'.'", "self", ".", "bundle_info", "=", "self", ".", "get_info", "(", "self", ".", "path", ")" ]
https://github.com/sauce-archives/isign/blob/26ad90f8a7930b42d090d0565e8626eb229fc73c/isign/archive.py#L151-L154
kubernetes-client/python
47b9da9de2d02b2b7a34fbe05afb44afd130d73a
kubernetes/client/api/policy_v1beta1_api.py
python
PolicyV1beta1Api.delete_namespaced_pod_disruption_budget
(self, name, namespace, **kwargs)
return self.delete_namespaced_pod_disruption_budget_with_http_info(name, namespace, **kwargs)
delete_namespaced_pod_disruption_budget # noqa: E501 delete a PodDisruptionBudget # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_namespaced_pod_disruption_budget(name, namespace, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str name: name of the PodDisruptionBudget (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. :param V1DeleteOptions body: :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: V1Status If the method is called asynchronously, returns the request thread.
delete_namespaced_pod_disruption_budget # noqa: E501
[ "delete_namespaced_pod_disruption_budget", "#", "noqa", ":", "E501" ]
def delete_namespaced_pod_disruption_budget(self, name, namespace, **kwargs): # noqa: E501 """delete_namespaced_pod_disruption_budget # noqa: E501 delete a PodDisruptionBudget # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_namespaced_pod_disruption_budget(name, namespace, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str name: name of the PodDisruptionBudget (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. :param V1DeleteOptions body: :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: V1Status If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True return self.delete_namespaced_pod_disruption_budget_with_http_info(name, namespace, **kwargs)
[ "def", "delete_namespaced_pod_disruption_budget", "(", "self", ",", "name", ",", "namespace", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "return", "self", ".", "delete_namespaced_pod_disruption_budge...
https://github.com/kubernetes-client/python/blob/47b9da9de2d02b2b7a34fbe05afb44afd130d73a/kubernetes/client/api/policy_v1beta1_api.py#L655-L685
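A hedged usage sketch for the generated client method above (kubeconfig context, PDB name and namespace are placeholders):

from kubernetes import client, config

config.load_kube_config()            # or config.load_incluster_config() inside a pod
api = client.PolicyV1beta1Api()
status = api.delete_namespaced_pod_disruption_budget(
    name="my-pdb", namespace="default")  # returns a V1Status
print(status.status)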
AstusRush/AMaDiA
e2ad87318d9dd30bc24428e05c29cb32a29c83aa
External_Libraries/python_control_master/control/frdata.py
python
FrequencyResponseData.__add__
(self, other)
return FRD(self.fresp + other.fresp, other.omega)
Add two LTI objects (parallel connection).
Add two LTI objects (parallel connection).
[ "Add", "two", "LTI", "objects", "(", "parallel", "connection", ")", "." ]
def __add__(self, other): """Add two LTI objects (parallel connection).""" if isinstance(other, FRD): # verify that the frequencies match if len(other.omega) != len(self.omega) or \ (other.omega != self.omega).any(): warn("Frequency points do not match; expect " "truncation and interpolation.") # Convert the second argument to a frequency response function. # or re-base the frd to the current omega (if needed) other = _convertToFRD(other, omega=self.omega) # Check that the input-output sizes are consistent. if self.inputs != other.inputs: raise ValueError("The first summand has %i input(s), but the \ second has %i." % (self.inputs, other.inputs)) if self.outputs != other.outputs: raise ValueError("The first summand has %i output(s), but the \ second has %i." % (self.outputs, other.outputs)) return FRD(self.fresp + other.fresp, other.omega)
[ "def", "__add__", "(", "self", ",", "other", ")", ":", "if", "isinstance", "(", "other", ",", "FRD", ")", ":", "# verify that the frequencies match", "if", "len", "(", "other", ".", "omega", ")", "!=", "len", "(", "self", ".", "omega", ")", "or", "(", ...
https://github.com/AstusRush/AMaDiA/blob/e2ad87318d9dd30bc24428e05c29cb32a29c83aa/External_Libraries/python_control_master/control/frdata.py#L185-L207
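A hedged sketch of the parallel connection above, assuming the python-control package from the record's path; both systems share a frequency grid, so the truncation warning does not fire:

import numpy as np
from control import FRD  # assumes python-control is installed

omega = np.array([0.1, 1.0, 10.0])
sys1 = FRD(np.array([1.0 + 0j, 0.5 + 0j, 0.1 + 0j]), omega)
sys2 = FRD(np.array([0.2 + 0j, 0.2 + 0j, 0.2 + 0j]), omega)
parallel = sys1 + sys2        # pointwise sum of the two frequency responses
print(parallel.fresp[0, 0])   # [1.2+0.j 0.7+0.j 0.3+0.j]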
pypa/pipenv
b21baade71a86ab3ee1429f71fbc14d4f95fb75d
pipenv/vendor/requirementslib/models/utils.py
python
extras_to_string
(extras)
return "[{0}]".format(",".join(sorted(set(extras))))
Turn a list of extras into a string. :param List[str] extras: a list of extras to format :return: A string of extras :rtype: str
Turn a list of extras into a string.
[ "Turn", "a", "list", "of", "extras", "into", "a", "string", "." ]
def extras_to_string(extras): # type: (Iterable[S]) -> S """Turn a list of extras into a string. :param List[str] extras: a list of extras to format :return: A string of extras :rtype: str """ if isinstance(extras, str): if extras.startswith("["): return extras else: extras = [extras] if not extras: return "" return "[{0}]".format(",".join(sorted(set(extras))))
[ "def", "extras_to_string", "(", "extras", ")", ":", "# type: (Iterable[S]) -> S", "if", "isinstance", "(", "extras", ",", "str", ")", ":", "if", "extras", ".", "startswith", "(", "\"[\"", ")", ":", "return", "extras", "else", ":", "extras", "=", "[", "extr...
https://github.com/pypa/pipenv/blob/b21baade71a86ab3ee1429f71fbc14d4f95fb75d/pipenv/vendor/requirementslib/models/utils.py#L205-L220
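A few worked calls for the helper above, assuming it is importable from the module in the record's path:

from requirementslib.models.utils import extras_to_string  # assumed import path

print(extras_to_string(["socks", "security", "socks"]))  # [security,socks] -- deduplicated, sorted
print(extras_to_string("tests"))                         # [tests]
print(extras_to_string("[all]"))                         # [all] -- already formatted, passed through
print(extras_to_string([]))                              # empty string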
sqlalchemy/sqlalchemy
eb716884a4abcabae84a6aaba105568e925b7d27
lib/sqlalchemy/sql/type_api.py
python
TypeDecorator.type_engine
(self, dialect)
Return a dialect-specific :class:`.TypeEngine` instance for this :class:`.TypeDecorator`. In most cases this returns a dialect-adapted form of the :class:`.TypeEngine` type represented by ``self.impl``. Makes usage of :meth:`dialect_impl`. Behavior can be customized here by overriding :meth:`load_dialect_impl`.
Return a dialect-specific :class:`.TypeEngine` instance for this :class:`.TypeDecorator`.
[ "Return", "a", "dialect", "-", "specific", ":", "class", ":", ".", "TypeEngine", "instance", "for", "this", ":", "class", ":", ".", "TypeDecorator", "." ]
def type_engine(self, dialect): """Return a dialect-specific :class:`.TypeEngine` instance for this :class:`.TypeDecorator`. In most cases this returns a dialect-adapted form of the :class:`.TypeEngine` type represented by ``self.impl``. Makes usage of :meth:`dialect_impl`. Behavior can be customized here by overriding :meth:`load_dialect_impl`. """ adapted = dialect.type_descriptor(self) if not isinstance(adapted, type(self)): return adapted else: return self.load_dialect_impl(dialect)
[ "def", "type_engine", "(", "self", ",", "dialect", ")", ":", "adapted", "=", "dialect", ".", "type_descriptor", "(", "self", ")", "if", "not", "isinstance", "(", "adapted", ",", "type", "(", "self", ")", ")", ":", "return", "adapted", "else", ":", "ret...
https://github.com/sqlalchemy/sqlalchemy/blob/eb716884a4abcabae84a6aaba105568e925b7d27/lib/sqlalchemy/sql/type_api.py#L1491-L1506
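A hedged sketch of how type_engine resolves a dialect-specific type for a TypeDecorator; the decorator class is hypothetical, and cache_ok is required by newer SQLAlchemy versions:

from sqlalchemy import types
from sqlalchemy.dialects import postgresql

class TrimmedChar(types.TypeDecorator):  # hypothetical decorator
    impl = types.CHAR                    # the generic impl to adapt per dialect
    cache_ok = True

# yields the dialect-adapted form (by default, via load_dialect_impl -> self.impl)
adapted = TrimmedChar().type_engine(postgresql.dialect())
print(type(adapted))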
IronLanguages/main
a949455434b1fda8c783289e897e78a9a0caabb5
External.LCA_RESTRICTED/Languages/IronPython/27/Lib/imaplib.py
python
IMAP4.response
(self, code)
return self._untagged_response(code, [None], code.upper())
Return data for response 'code' if received, or None. Old value for response 'code' is cleared. (code, [data]) = <instance>.response(code)
Return data for response 'code' if received, or None.
[ "Return", "data", "for", "response", "code", "if", "received", "or", "None", "." ]
def response(self, code): """Return data for response 'code' if received, or None. Old value for response 'code' is cleared. (code, [data]) = <instance>.response(code) """ return self._untagged_response(code, [None], code.upper())
[ "def", "response", "(", "self", ",", "code", ")", ":", "return", "self", ".", "_untagged_response", "(", "code", ",", "[", "None", "]", ",", "code", ".", "upper", "(", ")", ")" ]
https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/IronPython/27/Lib/imaplib.py#L303-L310
merrychap/shellen
c0c5f8325dd3f0decf03e55d57c4714c797a2dea
shellen/shell.py
python
Shellen.__create_handlers
(self)
[]
def __create_handlers(self): self.handlers = { (self.RHELP, self.help), (self.RQUIT, self.quit), (self.RASM, self.asm), (self.RDSM, self.dsm), (self.RARCHS, self.archs), (self.RRUN, self.run), (self.RSETARCH, self.setarch), (self.RCLEAR, self.clear), (self.RSYSCALL, self.sys), (self.RSETOS, self.setos), (self.RVSYS, self.sysv), (self.RSHELL, self.shell) }
[ "def", "__create_handlers", "(", "self", ")", ":", "self", ".", "handlers", "=", "{", "(", "self", ".", "RHELP", ",", "self", ".", "help", ")", ",", "(", "self", ".", "RQUIT", ",", "self", ".", "quit", ")", ",", "(", "self", ".", "RASM", ",", "...
https://github.com/merrychap/shellen/blob/c0c5f8325dd3f0decf03e55d57c4714c797a2dea/shellen/shell.py#L108-L122
arizvisa/ida-minsc
8627a60f047b5e55d3efeecde332039cd1a16eea
base/database.py
python
address.prevref
(cls, **count)
return cls.prevref(ui.current.address(), count.pop('count', 1))
Return the previous address from the current one that has anything referencing it.
Return the previous address from the current one that has anything referencing it.
[ "Return", "the", "previous", "address", "from", "the", "current", "one", "that", "has", "anything", "referencing", "it", "." ]
def prevref(cls, **count): '''Return the previous address from the current one that has anything referencing it.''' return cls.prevref(ui.current.address(), count.pop('count', 1))
[ "def", "prevref", "(", "cls", ",", "*", "*", "count", ")", ":", "return", "cls", ".", "prevref", "(", "ui", ".", "current", ".", "address", "(", ")", ",", "count", ".", "pop", "(", "'count'", ",", "1", ")", ")" ]
https://github.com/arizvisa/ida-minsc/blob/8627a60f047b5e55d3efeecde332039cd1a16eea/base/database.py#L2612-L2614
liangliangyy/DjangoBlog
51d3cb9a29964904b6d59da3b771bb2454fd16ee
djangoblog/whoosh_cn_backend.py
python
WhooshSearchBackend.calculate_page
(self, start_offset=0, end_offset=None)
return page_num, page_length
[]
def calculate_page(self, start_offset=0, end_offset=None): # Prevent against Whoosh throwing an error. Requires an end_offset # greater than 0. if end_offset is not None and end_offset <= 0: end_offset = 1 # Determine the page. page_num = 0 if end_offset is None: end_offset = 1000000 if start_offset is None: start_offset = 0 page_length = end_offset - start_offset if page_length and page_length > 0: page_num = int(start_offset / page_length) # Increment because Whoosh uses 1-based page numbers. page_num += 1 return page_num, page_length
[ "def", "calculate_page", "(", "self", ",", "start_offset", "=", "0", ",", "end_offset", "=", "None", ")", ":", "# Prevent against Whoosh throwing an error. Requires an end_offset", "# greater than 0.", "if", "end_offset", "is", "not", "None", "and", "end_offset", "<=", ...
https://github.com/liangliangyy/DjangoBlog/blob/51d3cb9a29964904b6d59da3b771bb2454fd16ee/djangoblog/whoosh_cn_backend.py#L324-L346
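A worked example of the arithmetic above: asking for results 20..30 lands on Whoosh page 3.

start_offset, end_offset = 20, 30
page_length = end_offset - start_offset      # 10 hits per page
page_num = int(start_offset / page_length)   # 2 (zero-based page index)
page_num += 1                                # Whoosh pages are 1-based -> 3
assert (page_num, page_length) == (3, 10)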
SeldonIO/alibi-detect
b5ec53cfadcd8e3463d400259f2ea1b752ed1812
alibi_detect/cd/lsdd.py
python
LSDDDrift.predict
(self, x: Union[np.ndarray, list], return_p_val: bool = True, return_distance: bool = True)
return self._detector.predict(x, return_p_val, return_distance)
Predict whether a batch of data has drifted from the reference data. Parameters ---------- x Batch of instances. return_p_val Whether to return the p-value of the permutation test. return_distance Whether to return the LSDD metric between the new batch and reference data. Returns ------- Dictionary containing 'meta' and 'data' dictionaries. 'meta' has the model's metadata. 'data' contains the drift prediction and optionally the p-value, threshold and LSDD metric.
Predict whether a batch of data has drifted from the reference data.
[ "Predict", "whether", "a", "batch", "of", "data", "has", "drifted", "from", "the", "reference", "data", "." ]
def predict(self, x: Union[np.ndarray, list], return_p_val: bool = True, return_distance: bool = True) \ -> Dict[Dict[str, str], Dict[str, Union[int, float]]]: """ Predict whether a batch of data has drifted from the reference data. Parameters ---------- x Batch of instances. return_p_val Whether to return the p-value of the permutation test. return_distance Whether to return the LSDD metric between the new batch and reference data. Returns ------- Dictionary containing 'meta' and 'data' dictionaries. 'meta' has the model's metadata. 'data' contains the drift prediction and optionally the p-value, threshold and LSDD metric. """ return self._detector.predict(x, return_p_val, return_distance)
[ "def", "predict", "(", "self", ",", "x", ":", "Union", "[", "np", ".", "ndarray", ",", "list", "]", ",", "return_p_val", ":", "bool", "=", "True", ",", "return_distance", ":", "bool", "=", "True", ")", "->", "Dict", "[", "Dict", "[", "str", ",", ...
https://github.com/SeldonIO/alibi-detect/blob/b5ec53cfadcd8e3463d400259f2ea1b752ed1812/alibi_detect/cd/lsdd.py#L90-L110
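A hedged end-to-end sketch for the detector above; random arrays stand in for real features, and the constructor arguments follow alibi-detect conventions that should be checked against the installed version:

import numpy as np
from alibi_detect.cd import LSDDDrift

x_ref = np.random.randn(200, 5).astype(np.float32)   # reference sample
cd = LSDDDrift(x_ref, backend='tensorflow', p_val=0.05)
x_test = np.random.randn(100, 5).astype(np.float32)  # batch to test for drift
preds = cd.predict(x_test, return_p_val=True, return_distance=True)
print(preds['data']['is_drift'], preds['data']['p_val'], preds['data']['distance'])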
IntelAI/models
1d7a53ccfad3e6f0e7378c9e3c8840895d63df8c
models/recommendation/pytorch/dlrm/training/bfloat16/mlperf_logger.py
python
mlperf_submission_log
(benchmark)
Logs information needed for MLPerf submission
Logs information needed for MLPerf submission
[ "Logs", "information", "needed", "for", "MLPerf", "submission" ]
def mlperf_submission_log(benchmark): """ Logs information needed for MLPerf submission """ config_logger(benchmark) log_event( key=constants.SUBMISSION_BENCHMARK, value=benchmark, ) log_event( key=constants.SUBMISSION_ORG, value='reference_implementation') log_event( key=constants.SUBMISSION_DIVISION, value='closed') log_event( key=constants.SUBMISSION_STATUS, value='onprem') log_event( key=constants.SUBMISSION_PLATFORM, value='reference_implementation') log_event( key=constants.SUBMISSION_ENTRY, value="reference_implementation") log_event( key=constants.SUBMISSION_POC_NAME, value='reference_implementation') log_event( key=constants.SUBMISSION_POC_EMAIL, value='reference_implementation')
[ "def", "mlperf_submission_log", "(", "benchmark", ")", ":", "config_logger", "(", "benchmark", ")", "log_event", "(", "key", "=", "constants", ".", "SUBMISSION_BENCHMARK", ",", "value", "=", "benchmark", ",", ")", "log_event", "(", "key", "=", "constants", "."...
https://github.com/IntelAI/models/blob/1d7a53ccfad3e6f0e7378c9e3c8840895d63df8c/models/recommendation/pytorch/dlrm/training/bfloat16/mlperf_logger.py#L93-L131
rembo10/headphones
b3199605be1ebc83a7a8feab6b1e99b64014187c
lib/cherrypy/_cpserver.py
python
Server._set_bind_addr
(self, value)
[]
def _set_bind_addr(self, value): if value is None: self.socket_file = None self.socket_host = None self.socket_port = None elif isinstance(value, basestring): self.socket_file = value self.socket_host = None self.socket_port = None else: try: self.socket_host, self.socket_port = value self.socket_file = None except ValueError: raise ValueError("bind_addr must be a (host, port) tuple " "(for TCP sockets) or a string (for Unix " "domain sockets), not %r" % value)
[ "def", "_set_bind_addr", "(", "self", ",", "value", ")", ":", "if", "value", "is", "None", ":", "self", ".", "socket_file", "=", "None", "self", ".", "socket_host", "=", "None", "self", ".", "socket_port", "=", "None", "elif", "isinstance", "(", "value",...
https://github.com/rembo10/headphones/blob/b3199605be1ebc83a7a8feab6b1e99b64014187c/lib/cherrypy/_cpserver.py#L178-L194
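The setter above accepts three shapes of value; a hedged sketch of each, via the public bind_addr property it backs:

import cherrypy

cherrypy.server.bind_addr = ('127.0.0.1', 8080)  # TCP: a (host, port) tuple
# cherrypy.server.bind_addr = '/tmp/app.sock'    # Unix domain socket: a string
# cherrypy.server.bind_addr = None               # clears file, host and port
# any other value raises the ValueError shown above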
Erotemic/ubelt
221d5f6262d5c8e78638e1a38e3adcc9cc9a15e9
ubelt/timerit.py
python
Timerit.print
(self, verbose=1)
Prints human readable report using the print function Args: verbose (int): verbosity level SeeAlso: :func:`Timerit.report` Example: >>> import math >>> Timerit(num=10).call(math.factorial, 50).print(verbose=1) Timed best=...s, mean=...s >>> Timerit(num=10).call(math.factorial, 50).print(verbose=2) Timed for: 10 loops, best of 3 time per loop: best=...s, mean=...s >>> Timerit(num=10).call(math.factorial, 50).print(verbose=3) Timed for: 10 loops, best of 3 body took: ... time per loop: best=...s, mean=...s
Prints human readable report using the print function
[ "Prints", "human", "readable", "report", "using", "the", "print", "function" ]
def print(self, verbose=1): """ Prints human readable report using the print function Args: verbose (int): verbosity level SeeAlso: :func:`Timerit.report` Example: >>> import math >>> Timerit(num=10).call(math.factorial, 50).print(verbose=1) Timed best=...s, mean=...s >>> Timerit(num=10).call(math.factorial, 50).print(verbose=2) Timed for: 10 loops, best of 3 time per loop: best=...s, mean=...s >>> Timerit(num=10).call(math.factorial, 50).print(verbose=3) Timed for: 10 loops, best of 3 body took: ... time per loop: best=...s, mean=...s """ print(self.report(verbose=verbose))
[ "def", "print", "(", "self", ",", "verbose", "=", "1", ")", ":", "print", "(", "self", ".", "report", "(", "verbose", "=", "verbose", ")", ")" ]
https://github.com/Erotemic/ubelt/blob/221d5f6262d5c8e78638e1a38e3adcc9cc9a15e9/ubelt/timerit.py#L537-L559
intel/virtual-storage-manager
00706ab9701acbd0d5e04b19cc80c6b66a2973b8
source/vsm-dashboard/vsm_dashboard/api/vsm.py
python
start_server
(request, servers=None)
return vsmclient(request).servers.start(servers)
Start servers. servers = [{'id': 1}, {'id': 2}]
Start servers. servers = [{'id': 1}, {'id': 2}]
[ "Start", "servers", ".", "servers", "=", "[", "{", "id", ":", "1", "}", "{", "id", ":", "2", "}", "]" ]
def start_server(request, servers=None): """Start servers. servers = [{'id': 1}, {'id': 2}] """ return vsmclient(request).servers.start(servers)
[ "def", "start_server", "(", "request", ",", "servers", "=", "None", ")", ":", "return", "vsmclient", "(", "request", ")", ".", "servers", ".", "start", "(", "servers", ")" ]
https://github.com/intel/virtual-storage-manager/blob/00706ab9701acbd0d5e04b19cc80c6b66a2973b8/source/vsm-dashboard/vsm_dashboard/api/vsm.py#L121-L125
wger-project/wger
3a17a2cf133d242d1f8c357faa53cf675a7b3223
wger/exercises/models/image.py
python
ExerciseImage.delete
(self, *args, **kwargs)
Reset all cached infos
Reset all cached infos
[ "Reset", "all", "cached", "infos" ]
def delete(self, *args, **kwargs): """ Reset all cached infos """ super(ExerciseImage, self).delete(*args, **kwargs) for language in Language.objects.all(): delete_template_fragment_cache('muscle-overview', language.id) delete_template_fragment_cache('exercise-overview', language.id) delete_template_fragment_cache('exercise-overview-mobile', language.id) delete_template_fragment_cache('equipment-overview', language.id) # Make sure there is always a main image if not ExerciseImage.objects.accepted().filter( exercise_base=self.exercise_base, is_main=True ).count() and ExerciseImage.objects.accepted().filter(exercise_base=self.exercise_base ).filter(is_main=False).count(): image = ExerciseImage.objects.accepted() \ .filter(exercise_base=self.exercise_base, is_main=False)[0] image.is_main = True image.save()
[ "def", "delete", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "super", "(", "ExerciseImage", ",", "self", ")", ".", "delete", "(", "*", "args", ",", "*", "*", "kwargs", ")", "for", "language", "in", "Language", ".", "objects", ...
https://github.com/wger-project/wger/blob/3a17a2cf133d242d1f8c357faa53cf675a7b3223/wger/exercises/models/image.py#L142-L163
tendenci/tendenci
0f2c348cc0e7d41bc56f50b00ce05544b083bf1d
tendenci/apps/photos/utils/EXIF.py
python
s2n_motorola
(str)
return x
[]
def s2n_motorola(str): x = 0 for c in str: x = (x << 8) | ord(c) return x
[ "def", "s2n_motorola", "(", "str", ")", ":", "x", "=", "0", "for", "c", "in", "str", ":", "x", "=", "(", "x", "<<", "8", ")", "|", "ord", "(", "c", ")", "return", "x" ]
https://github.com/tendenci/tendenci/blob/0f2c348cc0e7d41bc56f50b00ce05544b083bf1d/tendenci/apps/photos/utils/EXIF.py#L1175-L1179
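The loop above accumulates a big-endian ("Motorola" byte order) integer one byte at a time; the ord() call marks it as Python 2 string handling. On Python 3 the same conversion is a single stdlib call — a minimal equivalent sketch:

def s2n_motorola_py3(data: bytes) -> int:
    # big-endian bytes-to-int, equivalent to the shift-or loop above
    return int.from_bytes(data, "big")

assert s2n_motorola_py3(b"\x01\x02\x03") == 0x010203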
huawei-noah/vega
d9f13deede7f2b584e4b1d32ffdb833856129989
vega/tools/inference.py
python
_get_model
(args)
return model
Get model.
Get model.
[ "Get", "model", "." ]
def _get_model(args): """Get model.""" from vega.model_zoo import ModelZoo model = ModelZoo.get_model(args.model_desc, args.model) if vega.is_torch_backend(): if args.device == "GPU": model = model.cuda() model.eval() return model
[ "def", "_get_model", "(", "args", ")", ":", "from", "vega", ".", "model_zoo", "import", "ModelZoo", "model", "=", "ModelZoo", ".", "get_model", "(", "args", ".", "model_desc", ",", "args", ".", "model", ")", "if", "vega", ".", "is_torch_backend", "(", ")...
https://github.com/huawei-noah/vega/blob/d9f13deede7f2b584e4b1d32ffdb833856129989/vega/tools/inference.py#L65-L73
larryhastings/gilectomy
4315ec3f1d6d4f813cc82ce27a24e7f784dbfc1a
Tools/pybench/CommandLine.py
python
Application.check_files
(self,filelist)
return None
Apply some user defined checks on the files given in filelist. This may modify filelist in place. A typical application is checking that at least n files are given. If this method returns anything other than None, the process is terminated with the return value as exit code.
Apply some user defined checks on the files given in filelist.
[ "Apply", "some", "user", "defined", "checks", "on", "the", "files", "given", "in", "filelist", "." ]
def check_files(self,filelist): """ Apply some user defined checks on the files given in filelist. This may modify filelist in place. A typical application is checking that at least n files are given. If this method returns anything other than None, the process is terminated with the return value as exit code. """ return None
[ "def", "check_files", "(", "self", ",", "filelist", ")", ":", "return", "None" ]
https://github.com/larryhastings/gilectomy/blob/4315ec3f1d6d4f813cc82ce27a24e7f784dbfc1a/Tools/pybench/CommandLine.py#L482-L493
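A hedged sketch of the hook in a hypothetical subclass; per the docstring, a non-None return value becomes the process exit code:

class MyTool(Application):  # Application as defined in CommandLine.py above
    def check_files(self, filelist):
        # require at least one input file
        if not filelist:
            print('error: at least one file argument is required')
            return 1        # terminate with exit status 1
        return None         # accept the file list and continue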
enzienaudio/hvcc
30e47328958d600c54889e2a254c3f17f2b2fd06
generators/ir2c/SignalBiquad.py
python
SignalBiquad.get_C_def
(clazz, obj_type, obj_id)
[]
def get_C_def(clazz, obj_type, obj_id): if obj_type == "__biquad_k~f": return ["SignalBiquad_k sBiquad_k_{0};".format(obj_id)] elif obj_type == "__biquad~f": return ["SignalBiquad sBiquad_s_{0};".format(obj_id)] else: raise Exception()
[ "def", "get_C_def", "(", "clazz", ",", "obj_type", ",", "obj_id", ")", ":", "if", "obj_type", "==", "\"__biquad_k~f\"", ":", "return", "[", "\"SignalBiquad_k sBiquad_k_{0};\"", ".", "format", "(", "obj_id", ")", "]", "elif", "obj_type", "==", "\"__biquad~f\"", ...
https://github.com/enzienaudio/hvcc/blob/30e47328958d600c54889e2a254c3f17f2b2fd06/generators/ir2c/SignalBiquad.py#L34-L40
makerbot/ReplicatorG
d6f2b07785a5a5f1e172fb87cb4303b17c575d5d
skein_engines/skeinforge-35/fabmetheus_utilities/geometry/geometry_utilities/evaluate_enumerables/string_attribute.py
python
_getAccessibleAttribute
(attributeName, stringObject)
return None
Get the accessible attribute.
Get the accessible attribute.
[ "Get", "the", "accessible", "attribute", "." ]
def _getAccessibleAttribute(attributeName, stringObject): 'Get the accessible attribute.' if attributeName in globalNativeFunctionSet: return getattr(stringObject, attributeName, None) if attributeName in globalAccessibleAttributeSet: stringAttribute = StringAttribute(stringObject) return getattr(stringAttribute, attributeName, None) return None
[ "def", "_getAccessibleAttribute", "(", "attributeName", ",", "stringObject", ")", ":", "if", "attributeName", "in", "globalNativeFunctionSet", ":", "return", "getattr", "(", "stringObject", ",", "attributeName", ",", "None", ")", "if", "attributeName", "in", "global...
https://github.com/makerbot/ReplicatorG/blob/d6f2b07785a5a5f1e172fb87cb4303b17c575d5d/skein_engines/skeinforge-35/fabmetheus_utilities/geometry/geometry_utilities/evaluate_enumerables/string_attribute.py#L19-L26
CvvT/dumpDex
92ab3b7e996194a06bf1dd5538a4954e8a5ee9c1
python/idaapi.py
python
boolvec_t.insert
(self, *args)
return _idaapi.boolvec_t_insert(self, *args)
insert(self, it, x) -> qvector< bool >::iterator
insert(self, it, x) -> qvector< bool >::iterator
[ "insert", "(", "self", "it", "x", ")", "-", ">", "qvector<", "bool", ">", "::", "iterator" ]
def insert(self, *args): """ insert(self, it, x) -> qvector< bool >::iterator """ return _idaapi.boolvec_t_insert(self, *args)
[ "def", "insert", "(", "self", ",", "*", "args", ")", ":", "return", "_idaapi", ".", "boolvec_t_insert", "(", "self", ",", "*", "args", ")" ]
https://github.com/CvvT/dumpDex/blob/92ab3b7e996194a06bf1dd5538a4954e8a5ee9c1/python/idaapi.py#L1715-L1719
JacquesLucke/animation_nodes
b1e3ace8dcb0a771fd882fc3ac4e490b009fa0d1
animation_nodes/tree_info/forest_data.py
python
ForestData.findLinksSkippingReroutes
(self)
[]
def findLinksSkippingReroutes(self): rerouteNodes = self.rerouteNodes nonRerouteNodes = filter(lambda n: n not in rerouteNodes, self.nodes) socketsByNode = self.socketsByNode linkedSockets = self.linkedSockets iterLinkedSockets = self.iterLinkedSockets chainIterable = chain.from_iterable for node in nonRerouteNodes: for socket in chainIterable(socketsByNode[node]): linkedSockets[socket] = tuple(iterLinkedSockets(socket, set()))
[ "def", "findLinksSkippingReroutes", "(", "self", ")", ":", "rerouteNodes", "=", "self", ".", "rerouteNodes", "nonRerouteNodes", "=", "filter", "(", "lambda", "n", ":", "n", "not", "in", "rerouteNodes", ",", "self", ".", "nodes", ")", "socketsByNode", "=", "s...
https://github.com/JacquesLucke/animation_nodes/blob/b1e3ace8dcb0a771fd882fc3ac4e490b009fa0d1/animation_nodes/tree_info/forest_data.py#L88-L99
cgarrard/osgeopy-code
bc85f4ec7a630b53502ee491e400057b67cdab22
Chapter13/listing13_3.py
python
plot_layer
(filename, symbol, layer_index=0, **kwargs)
Plots an OGR layer using the given symbol.
Plots an OGR layer using the given symbol.
[ "Plots", "an", "OGR", "layer", "using", "the", "given", "symbol", "." ]
def plot_layer(filename, symbol, layer_index=0, **kwargs): """Plots an OGR layer using the given symbol.""" ds = ogr.Open(filename) for row in ds.GetLayer(layer_index): geom = row.geometry() geom_type = geom.GetGeometryType() # Polygons if geom_type == ogr.wkbPolygon: plot_polygon(geom, symbol, **kwargs) # Multipolygons elif geom_type == ogr.wkbMultiPolygon: for i in range(geom.GetGeometryCount()): subgeom = geom.GetGeometryRef(i) plot_polygon(subgeom, symbol, **kwargs) # Lines elif geom_type == ogr.wkbLineString: plot_line(geom, symbol, **kwargs) # Multilines elif geom_type == ogr.wkbMultiLineString: for i in range(geom.GetGeometryCount()): subgeom = geom.GetGeometryRef(i) plot_line(subgeom, symbol, **kwargs) # Points elif geom_type == ogr.wkbPoint: plot_point(geom, symbol, **kwargs) # Multipoints elif geom_type == ogr.wkbMultiPoint: for i in range(geom.GetGeometryCount()): subgeom = geom.GetGeometryRef(i) plot_point(subgeom, symbol, **kwargs)
[ "def", "plot_layer", "(", "filename", ",", "symbol", ",", "layer_index", "=", "0", ",", "*", "*", "kwargs", ")", ":", "ds", "=", "ogr", ".", "Open", "(", "filename", ")", "for", "row", "in", "ds", ".", "GetLayer", "(", "layer_index", ")", ":", "geo...
https://github.com/cgarrard/osgeopy-code/blob/bc85f4ec7a630b53502ee491e400057b67cdab22/Chapter13/listing13_3.py#L39-L74
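A hedged usage sketch for the listing above; the shapefile name is a placeholder, and symbol plus **kwargs are handed through to the matplotlib-based plot_* helpers:

import matplotlib.pyplot as plt

plot_layer('countries.shp', 'k-', layer_index=0, linewidth=0.5)  # placeholder dataset
plt.axis('equal')
plt.show()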