_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3
values | text stringlengths 75 19.8k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
def tryReduceAnd(sig, val):
    """
    Attempt static reduction of the expression "sig & val".

    :return: sig if val is all ones, val if val is zero,
        None if the expression can not be statically reduced
    """
    full_mask = sig._dtype.all_mask()
    if not val._isFullVld():
        return None
    v = val.val
    if v == full_mask:
        return sig
    if v == 0:
        return val
def tryReduceXor(sig, val):
    """
    Attempt static reduction of the expression "sig ^ val".

    :return: val if val has no valid bits, ~sig if val is all ones,
        sig if val is zero, None if no static reduction is possible
    """
    full_mask = sig._dtype.all_mask()
    if not val.vldMask:
        return val
    if not val._isFullVld():
        return None
    v = val.val
    if v == full_mask:
        return ~sig
    if v == 0:
        return sig
def getBaseNameScope(cls):
    """
    Build the root of the name space, pre-populated with
    the serializer's reserved keywords.
    """
    scope = NameScope(False)
    scope.setLevel(1)
    scope[0].update(cls._keywords_dict)
    return scope
def serializationDecision(cls, obj, serializedClasses,
                          serializedConfiguredUnits):
    """
    Decide whether this unit should be serialized, possibly fixing
    its name to match an already serialized unit.

    :param obj: object to serialize
    :param serializedClasses: dict {unitCls : unitobj}
    :param serializedConfiguredUnits: (unitCls, paramsValues) : unitObj
        where paramsValues are named tuple name:value
    """
    if isinstance(obj, Entity):
        isDeclaration = True
        unit = obj.origin
    elif isinstance(obj, Architecture):
        isDeclaration = False
        unit = obj.entity.origin
    else:
        # anything other than entity/architecture is always serialized
        return True
    assert isinstance(unit, Unit)
    decisionFn = unit._serializeDecision
    if decisionFn is None:
        # no custom decision function: always serialize
        return True
    prevPriv = serializedClasses.get(unit.__class__, None)
    doSerialize, nextPriv = decisionFn(unit, obj, isDeclaration, prevPriv)
    serializedClasses[unit.__class__] = nextPriv
    return doSerialize
def HdlType(cls, typ: HdlType, ctx: SerializerCtx, declaration=False):
    """
    Serialize a HdlType instance by dispatching on its class.

    :raises NotImplementedError: for an unsupported type class
    """
    if isinstance(typ, Bits):
        return cls.HdlType_bits(typ, ctx, declaration=declaration)
    if isinstance(typ, HEnum):
        return cls.HdlType_enum(typ, ctx, declaration=declaration)
    if isinstance(typ, HArray):
        return cls.HdlType_array(typ, ctx, declaration=declaration)
    if isinstance(typ, Integer):
        return cls.HdlType_int(typ, ctx, declaration=declaration)
    if isinstance(typ, HBool):
        return cls.HdlType_bool(typ, ctx, declaration=declaration)
    raise NotImplementedError("type declaration is not implemented"
                              " for type %s"
                              % (typ.name))
def IfContainer(cls, ifc: IfContainer, ctx: SerializerCtx):
    """
    Serialize an IfContainer (if/elif/else statement) instance.

    :param ifc: statement to serialize
    :param ctx: serialization context carrying indentation and name scope
    """
    # branch bodies are rendered one indent level deeper
    childCtx = ctx.withIndent()
    def asHdl(statements):
        return [cls.asHdl(s, childCtx) for s in statements]
    try:
        cond = cls.condAsHdl(ifc.cond, True, ctx)
    except UnsupportedEventOpErr as e:
        # condition contains an event operator which can not be rendered
        # as a plain expression; handled by the cond is None branch below
        cond = None
    if cond is None:
        # event-operator condition: the if statement collapses to its
        # ifTrue body, so there must be no elif/else to lose
        assert not ifc.elIfs
        assert not ifc.ifFalse
        stmBuff = [cls.asHdl(s, ctx) for s in ifc.ifTrue]
        return "\n".join(stmBuff)
    elIfs = []
    ifTrue = ifc.ifTrue
    ifFalse = ifc.ifFalse
    if ifFalse is None:
        ifFalse = []
    for c, statements in ifc.elIfs:
        try:
            elIfs.append((cls.condAsHdl(c, True, ctx), asHdl(statements)))
        except UnsupportedEventOpErr as e:
            if len(ifc.elIfs) == 1 and not ifFalse:
                # register expression is in valid format and this
                # is just register with asynchronous reset or etc...
                ifFalse = statements
            else:
                raise e
    return cls.ifTmpl.render(
        indent=getIndent(ctx.indent),
        cond=cond,
        ifTrue=asHdl(ifTrue),
        elIfs=elIfs,
        ifFalse=asHdl(ifFalse))
def getBaseCond(c):
    """
    If the condition is a negation, return the original condition
    together with a flag saying that it was negated.

    :return: tuple (condition, isNegated)
    """
    try:
        drivers = c.drivers
    except AttributeError:
        # constants and other driverless objects can not be negations
        return (c, False)
    if len(drivers) == 1:
        driver = next(iter(drivers))
        if isinstance(driver, Operator) and driver.operator == AllOps.NOT:
            return (driver.operands[0], True)
    return (c, False)
def simBitsT(width: int, signed: Union[bool, None]):
    """
    Construct a SimBitsT instance, memoized by (width, signed).
    """
    key = (width, signed)
    cached = __simBitsTCache.get(key)
    if cached is None:
        cached = SimBitsT(width, signed)
        __simBitsTCache[key] = cached
    return cached
def getConstName(self, val):
    """
    Get a constant name for a value; the same name is reused
    when an equal value was requested before.
    """
    try:
        return self._cache[val]
    except KeyError:
        pass
    if isinstance(val.val, int):
        baseName = "const_%d_" % val.val
    else:
        baseName = "const_"
    checked = self.nameCheckFn(baseName, val)
    self._cache[val] = checked
    return checked
def _cut_off_drivers_of(self, sig: RtlSignalBase):
    """
    Cut this assignment off its parent statement if it drives sig.

    :return: self when self drives sig, None otherwise
    """
    if self.dst is not sig:
        return None
    self.parentStm = None
    return self
def _loadFromArray(self, dtype: HdlType, bitAddr: int) -> int:
    """
    Parse HArray type into this transaction template instance.

    :return: bit address of its end
    """
    itemCnt = evalParam(dtype.size).val
    self.itemCnt = itemCnt
    # a single child template is shared by all array items
    item = TransTmpl(dtype.elmType, 0, parent=self, origin=self.origin)
    self.children = item
    return bitAddr + itemCnt * item.bitAddrEnd
def _loadFromHStruct(self, dtype: HdlType, bitAddr: int):
    """
    Parse HStruct type into this transaction template instance.

    :return: bit address of its end
    """
    for field in dtype.fields:
        fieldT = field.dtype
        if field.name is None:
            # padding field: just advance the address, no child is made
            bitAddr += fieldT.bit_length()
        else:
            child = TransTmpl(fieldT, bitAddr, parent=self, origin=field)
            self.children.append(child)
            bitAddr = child.bitAddrEnd
    return bitAddr
def _loadFromHType(self, dtype: HdlType, bitAddr: int) -> None:
    """
    Parse any HDL type into this transaction template instance
    by dispatching on the type class.

    :raises TypeError: if dtype is not a supported HdlType
    """
    self.bitAddr = bitAddr
    if isinstance(dtype, Bits):
        loader = self._loadFromBits
    elif isinstance(dtype, HStruct):
        loader = self._loadFromHStruct
    elif isinstance(dtype, HArray):
        loader = self._loadFromArray
    elif isinstance(dtype, HStream):
        loader = self._loadFromHStream
    elif isinstance(dtype, HUnion):
        loader = self._loadFromUnion
    else:
        raise TypeError("expected instance of HdlType", dtype)
    self.bitAddrEnd = loader(dtype, bitAddr)
    # only union children represent a choice between alternatives
    self.childrenAreChoice = isinstance(dtype, HUnion)
def getItemWidth(self) -> int:
    """
    Only for transactions derived from HArray.

    :return: width of a single item in the original array
    :raises TypeError: if this template was not derived from HArray
    """
    if not isinstance(self.dtype, HArray):
        raise TypeError()
    totalWidth = self.bitAddrEnd - self.bitAddr
    return totalWidth // self.itemCnt
def walkFlatten(self, offset: int=0,
                shouldEnterFn=_default_shouldEnterFn,
                otherObjItCtx: ObjIteratorCtx =_DummyIteratorCtx()
                ) -> Generator[
        Union[Tuple[Tuple[int, int], 'TransTmpl'], 'OneOfTransaction'],
        None, None]:
    """
    Walk fields in instance of TransTmpl

    :param offset: optional offset for all children in this TransTmpl
    :param shouldEnterFn: function(transTmpl) which should return
        (shouldEnter, shouldUse) where shouldEnter is flag that means
        iterator should look inside of this actual object
        and shouldUse flag means that this field should be used
        (=generator should yield it)
    :param otherObjItCtx: context manager used to keep an iterator over
        another object structure in sync with this walk
    :return: generator of tuples ((startBitAddress, endBitAddress),
        TransTmpl instance)
    """
    t = self.dtype
    base = self.bitAddr + offset
    end = self.bitAddrEnd + offset
    shouldEnter, shouldYield = shouldEnterFn(self)
    if shouldYield:
        yield ((base, end), self)
    if shouldEnter:
        if isinstance(t, Bits):
            # leaf type, nothing to descend into
            pass
        elif isinstance(t, HStruct):
            for ch in self.children:
                with otherObjItCtx(ch.origin.name):
                    yield from ch.walkFlatten(
                        offset,
                        shouldEnterFn,
                        otherObjItCtx)
        elif isinstance(t, HArray):
            # all items share a single child template; walk it once per
            # item with the base address shifted by the item size
            itemSize = (self.bitAddrEnd - self.bitAddr) // self.itemCnt
            for i in range(self.itemCnt):
                with otherObjItCtx(i):
                    yield from self.children.walkFlatten(
                        base + i * itemSize,
                        shouldEnterFn,
                        otherObjItCtx)
        elif isinstance(t, HUnion):
            # union: the caller has to pick one of the alternatives
            yield OneOfTransaction(self, offset, shouldEnterFn,
                                   self.children)
        elif isinstance(t, HStream):
            assert len(self.children) == 1
            yield StreamTransaction(self, offset, shouldEnterFn,
                                    self.children[0])
        else:
            raise TypeError(t)
def signFix(val, width):
    """
    Reinterpret a positive int with the sign bit set as the equivalent
    negative int (two's complement view of a width-bit value).
    """
    if val > 0:
        signBit = 1 << (width - 1)
        if val & signBit:
            val -= mask(width) + 1
    return val
q272216 | SwitchContainer._merge_with_other_stm | test | def _merge_with_other_stm(self, other: "IfContainer") -> None:
"""
Merge other statement to this statement
"""
merge = self._merge_statement_lists
newCases = []
for (c, caseA), (_, caseB) in zip(self.cases, other.cases):
newCases.append((c, merge(caseA, caseB)))
self.cases = newCases
if self.default is not None:
self.default = merge(self.default, other.default)
self._on_merge(other) | python | {
"resource": ""
} |
def getIndent(indentNum):
    """
    Cached indent getter function

    :param indentNum: number of indentation levels
    :return: string with indentNum repetitions of the indent unit
    """
    try:
        return _indentCache[indentNum]
    except KeyError:
        # str * int is the idiomatic (and C-speed) replacement for
        # joining a list comprehension of repeated strings
        i = _indent * indentNum
        _indentCache[indentNum] = i
        return i
def nameAvailabilityCheck(obj, propName, prop):
    """
    Ensure that a property of this name is not already defined on obj.

    :raises IntfLvlConfErr: if obj already has a non-None attribute
        of this name
    """
    current = getattr(obj, propName, None)
    if current is not None:
        raise IntfLvlConfErr("%r already has property %s old:%s new:%s" %
                             (obj, propName, repr(current), prop))
def _registerParameter(self, pName, parameter) -> None:
    """
    Register a Param object on an interface level object.
    """
    nameAvailabilityCheck(self, pName, parameter)
    # resolve name in this scope; a parameter without a name
    # (or with _name missing entirely) takes this one
    if getattr(parameter, "_name", None) is None:
        parameter._name = pName
    # add name in this scope
    parameter._registerScope(pName, self)
    if parameter.hasGenericName:
        parameter.name = pName
    if parameter._parent is None:
        parameter._parent = self
    self._params.append(parameter)
def _updateParamsFrom(self, otherObj:"PropDeclrCollector", updater, exclude:set, prefix:str) -> None:
    """
    Update all parameters which are defined on self from otherObj

    :param otherObj: other object which Param instances should be updated
    :param updater: updater function(self, myParameter, onOtherParameterName, otherParameter)
    :param exclude: iterable of parameter on otherObj object which should be excluded
    :param prefix: prefix which should be added to name of parameters of this object before matching
        parameter name on parent
    """
    # parameters actually found in the exclude set, used for the
    # sanity assertion at the end
    excluded = set()
    if exclude is not None:
        exclude = set(exclude)
    for myP in self._params:
        # name of this parameter as seen from otherObj
        pPName = prefix + myP._scopes[self][1]
        try:
            otherP = getattr(otherObj, pPName)
            if not isinstance(otherP, Param):
                continue
        except AttributeError:
            # otherObj has no matching parameter, nothing to update
            continue
        if exclude and otherP in exclude:
            excluded.add(otherP)
            continue
        updater(self, myP, otherP)
    if exclude is not None:
        # assert that what should be excluded really exists
        assert excluded == exclude
def _registerUnit(self, uName, unit):
    """
    Register a subunit object on an interface level object.
    """
    nameAvailabilityCheck(self, uName, unit)
    assert unit._parent is None
    unit._name = uName
    unit._parent = self
    self._units.append(unit)
def _registerInterface(self, iName, intf, isPrivate=False):
    """
    Register an interface object on an interface level object.

    :param isPrivate: when True the interface is internal and
        not exposed to the outside world
    """
    nameAvailabilityCheck(self, iName, intf)
    assert intf._parent is None
    intf._parent = self
    intf._name = iName
    intf._ctx = self._ctx
    registry = (self._private_interfaces
                if isPrivate else self._interfaces)
    intf._isExtern = not isPrivate
    registry.append(intf)
q272223 | PropDeclrCollector._registerArray | test | def _registerArray(self, name, items):
"""
Register array of items on interface level object
"""
items._parent = self
items._name = name
for i, item in enumerate(items):
setattr(self, "%s_%d" % (name, i), item) | python | {
"resource": ""
} |
def singleDriver(self):
    """
    Return the driver of this signal if it has exactly one.

    :raises NoDriverErr: if the signal has no driver
    :raises MultipleDriversErr: if the signal has more than one driver
    """
    drivers = self.drivers
    if not drivers:
        raise NoDriverErr(self)
    if len(drivers) > 1:
        raise MultipleDriversErr(self)
    return drivers[0]
def staticEval(self):
    """
    Recursively evaluate this operator statically and store the
    result value on the result signal.
    """
    for operand in self.operands:
        operand.staticEval()
    self.result._val = self.evalFn()
def withRes(opDef, operands, resT, outputs=()):
    """
    Create operator with result signal

    :param opDef: operator definition
    :param operands: operands of the operator
    :param resT: data type of result signal
    :param outputs: iterable of signals which are outputs
        from this operator
    """
    # NOTE: default changed from a shared mutable [] to an immutable ()
    # (mutable default argument anti-pattern); outputs is only iterated,
    # so callers are unaffected
    op = Operator(opDef, operands)
    out = RtlSignal(getCtxFromOps(operands), None, resT)
    out._const = arr_all(op.operands, isConst)
    out.drivers.append(op)
    out.origin = op
    op.result = out
    op.registerSignals(outputs)
    if out._const:
        # all operands constant -> result can be evaluated right away
        out.staticEval()
    return out
def withIndent(self, indent=1):
    """
    Create a shallow copy of this context with the indent
    increased by `indent` levels.
    """
    newCtx = copy(self)
    newCtx.indent += indent
    return newCtx
def _tryConnect(src, unit, intfName):
    """
    Try to connect src to the interface of the given name on unit.
    Do nothing if the interface is missing or already driven.
    """
    try:
        dst = getattr(unit, intfName)
    except AttributeError:
        return
    if dst._sig.drivers:
        # already driven, leave it alone
        return
    connect(src, dst)
def propagateClk(obj):
    """
    Propagate the "clk" clock signal to all subcomponents.
    """
    clk = obj.clk
    for unit in obj._units:
        _tryConnect(clk, unit, 'clk')
def propagateClkRstn(obj):
    """
    Propagate the "clk" clock and the negative reset "rst_n"
    signal to all subcomponents.
    """
    clk = obj.clk
    rst_n = obj.rst_n
    for unit in obj._units:
        _tryConnect(clk, unit, 'clk')
        _tryConnect(rst_n, unit, 'rst_n')
        # subcomponents with a positive reset get the inverted signal
        _tryConnect(~rst_n, unit, 'rst')
def propagateClkRst(obj):
    """
    Propagate the "clk" clock and the "rst" reset signal
    to all subcomponents.
    """
    clk = obj.clk
    rst = obj.rst
    for unit in obj._units:
        _tryConnect(clk, unit, 'clk')
        # subcomponents with a negative reset get the inverted signal
        _tryConnect(~rst, unit, 'rst_n')
        _tryConnect(rst, unit, 'rst')
def propagateRstn(obj):
    """
    Propagate the negative reset "rst_n" signal to all subcomponents.
    """
    rst_n = obj.rst_n
    for unit in obj._units:
        _tryConnect(rst_n, unit, 'rst_n')
        # subcomponents with a positive reset get the inverted signal
        _tryConnect(~rst_n, unit, 'rst')
def propagateRst(obj):
    """
    Propagate the "rst" reset signal to all subcomponents.
    """
    rst = obj.rst
    for unit in obj._units:
        # subcomponents with a negative reset get the inverted signal
        _tryConnect(~rst, unit, 'rst_n')
        _tryConnect(rst, unit, 'rst')
def iterBits(sigOrVal: Union[RtlSignal, Value], bitsInOne: int=1,
             skipPadding: bool=True, fillup: bool=False):
    """
    Iterate over bits in a vector.

    :param sigOrVal: signal or value to iterate over
    :param bitsInOne: number of bits in one chunk
    :param skipPadding: if True padding is skipped in dense types
    :param fillup: flag forwarded to BitWalker controlling fill-up
        of the last incomplete chunk
    """
    walker = BitWalker(sigOrVal, skipPadding, fillup)
    chunks = ceil(sigOrVal._dtype.bit_length() / bitsInOne)
    for _ in range(chunks):
        yield walker.get(bitsInOne)
    walker.assertIsOnEnd()
def _serializeExclude_eval(parentUnit, obj, isDeclaration, priv):
    """
    Always decide not to serialize obj.

    :param priv: private data for this function,
        the first unit of this class
    :return: tuple (do serialize this object, next priv)
    """
    if isDeclaration:
        # rename the entity to the shared name; it is never serialized
        prepareEntity(obj, parentUnit.__class__.__name__, priv)
    if priv is None:
        priv = parentUnit
    return False, priv
q272236 | _serializeOnce_eval | test | def _serializeOnce_eval(parentUnit, obj, isDeclaration, priv):
"""
Decide to serialize only first obj of it's class
:param priv: private data for this function
(first object with class == obj.__class__)
:return: tuple (do serialize this object, next priv)
where priv is private data for this function
(first object with class == obj.__class__)
"""
clsName = parentUnit.__class__.__name__
if isDeclaration:
obj.name = clsName
if priv is None:
priv = parentUnit
elif isDeclaration:
# prepare entity which will not be serialized
prepareEntity(obj, clsName, parentUnit)
serialize = priv is parentUnit
return serialize, priv | python | {
"resource": ""
} |
def _serializeParamsUniq_eval(parentUnit, obj, isDeclaration, priv):
    """
    Decide to serialize only objects with a unique combination
    of class and parameter values.

    :param priv: private data for this function
        ({frozen_params: obj})
    :return: tuple (do serialize this object, next priv)
    """
    params = paramsToValTuple(parentUnit)
    if priv is None:
        priv = {}
    if not isDeclaration:
        # definitions are serialized only for the representative unit
        return priv[params] is parentUnit, priv
    prevUnit = priv.get(params)
    if prevUnit is None:
        # first unit with this parameter combination
        priv[params] = parentUnit
        return True, priv
    # duplicate: rename entity to the already serialized one and skip
    prepareEntity(obj, prevUnit._entity.name, prevUnit)
    return False, priv
def _getFullName(self):
    """get all name hierarchy separated by '.' """
    fullName = ""
    node = self
    while isinstance(node, (InterfaceBase, HObjList)):
        part = getattr(node, "_name", '')
        if fullName == '':
            fullName = part
        else:
            fullName = part + '.' + fullName
        node = getattr(node, "_parent", None)
    return fullName
q272239 | HObjList._make_association | test | def _make_association(self, *args, **kwargs):
"""
Delegate _make_association on items
:note: doc in :func:`~hwt.synthesizer.interfaceLevel.propDeclCollector._make_association`
"""
for o in self:
o._make_association(*args, **kwargs) | python | {
"resource": ""
} |
def simPrepare(unit: Unit, modelCls: Optional[SimModel]=None,
               targetPlatform=DummyPlatform(),
               dumpModelIn: str=None, onAfterToRtl=None):
    """
    Create a simulation model, connect it with the interfaces of the
    original unit and decorate them with agents.

    :param unit: interface level unit to prepare for simulation
    :param modelCls: class of the rtl simulation model to run on;
        generated from unit when None
    :param targetPlatform: target platform for this synthesis
    :param dumpModelIn: folder where sim model files are written
        (model is constructed only in memory when None)
    :param onAfterToRtl: callback fn(unit, modelCls) invoked after
        the unit is synthesised to rtl
    :return: tuple (fully loaded unit with connected sim model,
        connected simulation model,
        simulation processes of agents)
    """
    if modelCls is None:
        modelCls = toSimModel(
            unit, targetPlatform=targetPlatform, dumpModelIn=dumpModelIn)
    else:
        # still synthesise in order to instantiate the unit hierarchy
        toSimModel(unit)
    if onAfterToRtl:
        onAfterToRtl(unit, modelCls)
    reconnectUnitSignalsToModel(unit, modelCls)
    model = modelCls()
    procs = autoAddAgents(unit)
    return unit, model, procs
def toSimModel(unit, targetPlatform=DummyPlatform(), dumpModelIn=None):
    """
    Create a simulation model class for unit.

    :param unit: interface level unit to prepare for simulation
    :param targetPlatform: target platform for this synthesis
    :param dumpModelIn: folder where sim model files are written
        (otherwise the sim model is constructed only in memory)
    """
    sim_code = toRtl(unit,
                     targetPlatform=targetPlatform,
                     saveTo=dumpModelIn,
                     serializer=SimModelSerializer)
    if dumpModelIn is None:
        simModule = ModuleType('simModule')
        # python supports only ~100 opened brackets
        # if exceeded it throws MemoryError: s_push: parser stack overflow
        exec(sim_code, simModule.__dict__)
    else:
        modelDir = os.path.join(os.getcwd(), dumpModelIn)
        alreadyOnPath = modelDir in sys.path
        if not alreadyOnPath:
            sys.path.insert(0, modelDir)
        # force a re-import in case the module was generated before
        if unit._name in sys.modules:
            del sys.modules[unit._name]
        simModule = importlib.import_module(unit._name)
        if not alreadyOnPath:
            sys.path.remove(modelDir)
    return simModule.__dict__[unit._name]
def reconnectUnitSignalsToModel(synthesisedUnitOrIntf, modelCls):
    """
    Replace signals of the unit with signals of the simulation model
    so original unit interfaces can be used for communication.

    :param synthesisedUnitOrIntf: interface whose signals should be
        replaced by signals from modelCls
    :param modelCls: simulation model from which replacement signals
        are taken
    """
    subInterfaces = synthesisedUnitOrIntf._interfaces
    if not subInterfaces:
        # leaf interface: take the model signal of the same name
        sig = synthesisedUnitOrIntf
        sig._sigInside = getattr(modelCls, sig._sigInside.name)
    else:
        # proxies are destroyed on original interfaces and only proxies
        # on array items will remain
        for intf in subInterfaces:
            reconnectUnitSignalsToModel(intf, modelCls)
def simUnitVcd(simModel, stimulFunctions, outputFile=sys.stdout,
               until=100 * Time.ns):
    """
    Syntax sugar for running a simulation with VCD output.
    If outputFile is a string it is opened as a file.

    :return: hdl simulator object
    """
    assert isinstance(simModel, SimModel), \
        "Class of SimModel is required (got %r)" % (simModel)
    if not isinstance(outputFile, str):
        # already a file-like object
        return _simUnitVcd(simModel, stimulFunctions,
                           outputFile, until)
    d = os.path.dirname(outputFile)
    if d:
        os.makedirs(d, exist_ok=True)
    with open(outputFile, 'w') as f:
        return _simUnitVcd(simModel, stimulFunctions,
                           f, until)
def onTWriteCallback__init(self, sim):
    """
    Process which injects the write-callback loop into the simulator
    and registers it on the t and o signals.
    """
    yield from self.onTWriteCallback(sim)
    for subIntf in (self.intf.t, self.intf.o):
        subIntf._sigInside.registerWriteCallback(
            self.onTWriteCallback,
            self.getEnable)
def connectSig(self, signal):
    """
    Connect an external signal to this port item on a subunit.

    :raises HwtSyntaxError: if the port is already associated
        with another signal
    """
    d = self.direction
    if d == DIRECTION.IN:
        occupant, attr = self.src, "src"
        registry = signal.endpoints
    elif d == DIRECTION.OUT:
        occupant, attr = self.dst, "dst"
        registry = signal.drivers
    else:
        raise NotImplementedError(self)
    if occupant is not None:
        raise HwtSyntaxError(
            "Port %s is already associated with %r"
            % (self.name, occupant))
    setattr(self, attr, signal)
    registry.append(self)
    signal.hidden = False
    signal.ctx.subUnits.add(self.unit)
def registerInternSig(self, signal):
    """
    Connect an internal signal to this port item; this connection is
    used by the simulator and only output port items will be connected.

    :raises HwtSyntaxError: if the port is already associated
        with another signal
    """
    d = self.direction
    if d == DIRECTION.OUT:
        occupant, attr = self.src, "src"
    elif d == DIRECTION.IN:
        occupant, attr = self.dst, "dst"
    else:
        raise NotImplementedError(self.direction)
    if occupant is not None:
        raise HwtSyntaxError(
            "Port %s is already associated with %s"
            % (self.name, str(occupant)))
    setattr(self, attr, signal)
def connectInternSig(self):
    """
    Connect the signal from the internal side of this component
    to this port.
    """
    direction = self.direction
    if direction == DIRECTION.OUT:
        self.src.endpoints.append(self)
    elif direction == DIRECTION.IN or direction == DIRECTION.INOUT:
        self.dst.drivers.append(self)
    else:
        raise NotImplementedError(direction)
def getInternSig(self):
    """
    Return the signal inside the unit which owns this port.
    """
    direction = self.direction
    if direction == DIRECTION.IN:
        return self.dst
    if direction == DIRECTION.OUT:
        return self.src
    raise NotImplementedError(direction)
def isEvDependentOn(sig, process) -> bool:
    """
    Check whether an hdl process has an event dependency on a signal.
    """
    if sig is None:
        return False
    return any(process in procs
               for procs in (sig.simFallingSensProcs,
                             sig.simRisingSensProcs))
def _add_process(self, proc, priority) -> None:
    """
    Schedule process at the current simulation time with the
    specified priority

    :param proc: process (generator) to schedule
    :param priority: scheduling priority within this time slot
    """
    self._events.push(self.now, priority, proc)
def _addHdlProcToRun(self, trigger: SimSignal, proc) -> None:
    """
    Add an hdl process to the execution queue.

    :param trigger: instance of SimSignal
    :param proc: python generator function representing HDL process
    """
    # the first process scheduled in this time slot has to plan applying
    # of values at the end of this time
    # (apply on end of this time to minimize process reevaluation)
    if not self._applyValPlaned:
        self._scheduleApplyValues()
    if not isEvDependentOn(trigger, proc):
        self._combProcsToRun.append(proc)
    elif self.now != 0:
        self._seqProcsToRun.append(proc)
    # else: event dependent processes are skipped on startup
def _scheduleCombUpdateDoneEv(self) -> Event:
    """
    Schedule the combUpdateDoneEv event which lets agents know that the
    current delta step is ending and combinational values are stable.
    """
    assert not self._combUpdateDonePlaned, self.now
    ev = Event(self)
    ev.process_to_wake.append(self.__deleteCombUpdateDoneEv())
    self._add_process(ev, PRIORITY_AGENTS_UPDATE_DONE)
    self._combUpdateDonePlaned = True
    self.combUpdateDoneEv = ev
    return ev
def _scheduleApplyValues(self) -> None:
    """
    Plan application of stashed values to signals at the end of this
    delta step
    """
    assert not self._applyValPlaned, self.now
    self._add_process(self._applyValues(), PRIORITY_APPLY_COMB)
    self._applyValPlaned = True
    if self._runSeqProcessesPlaned:
        # if runSeqProcesses is already scheduled
        return
    assert not self._seqProcsToRun and not self._runSeqProcessesPlaned, self.now
    # sequential (event dependent) processes run after combinational
    # values have been applied
    self._add_process(self._runSeqProcesses(), PRIORITY_APPLY_SEQ)
    self._runSeqProcessesPlaned = True
def _conflictResolveStrategy(self, newValue: set)\
        -> Tuple[Callable[[Value], bool], bool]:
    """
    Resolve write conflicts for a signal.

    :param newValue: action tuple produced by a process; 3 items for
        an array-item update, 2 items for a simple signal update
    :return: tuple (updater function, isEvDependent flag)
    """
    # writes never invalidate values here
    invalidate = False
    if len(newValue) == 3:
        # update for an item in an array
        val, indexes, isEvDependent = newValue
        return (mkArrayUpdater(val, indexes, invalidate), isEvDependent)
    # update for a simple signal
    val, isEvDependent = newValue
    return (mkUpdater(val, invalidate), isEvDependent)
def _runCombProcesses(self) -> None:
    """
    Delta step for combinational processes: run each process and
    collect the values it produced for later application.
    """
    for proc in self._combProcsToRun:
        outputs = self._outputContainers[proc]
        proc(self, outputs)
        for sigName, sig in outputs._all_signals:
            newVal = getattr(outputs, sigName)
            if newVal is None:
                # value is latched
                continue
            updater, isEvDependent = self._conflictResolveStrategy(newVal)
            # prepare update for the apply phase
            self._valuesToApply.append(
                (sig, updater, isEvDependent, proc))
            setattr(outputs, sigName, None)
    self._combProcsToRun = UniqList()
def _runSeqProcesses(self) -> Generator[None, None, None]:
    """
    Delta step for event dependent processes: run all of them first,
    then apply their output values in a second phase
    """
    updates = []
    for proc in self._seqProcsToRun:
        try:
            outContainer = self._outputContainers[proc]
        except KeyError:
            # a process does not have to have outputs
            outContainer = None
        proc(self, outContainer)
        if outContainer is not None:
            updates.append(outContainer)
    self._seqProcsToRun = UniqList()
    self._runSeqProcessesPlaned = False
    # second phase: write collected values to signals
    for cont in updates:
        for sigName, sig in cont._all_signals:
            newVal = getattr(cont, sigName)
            if newVal is not None:
                v = self._conflictResolveStrategy(newVal)
                updater, _ = v
                sig.simUpdateVal(self, updater)
                setattr(cont, sigName, None)
    # the unreachable yield turns this function into a generator
    # so the scheduler can treat it as a simulation process
    return
    yield
def _applyValues(self) -> Generator[None, None, None]:
    """
    Perform delta step by writing stacked values to signals
    """
    va = self._valuesToApply
    self._applyValPlaned = False
    # log if there are items to log
    lav = self.config.logApplyingValues
    if va and lav:
        lav(self, va)
    self._valuesToApply = []
    # apply values to signals, values can overwrite each other
    # but each signal should be driven by only one process and
    # it should resolve value collision
    addSp = self._seqProcsToRun.append
    for s, vUpdater, isEventDependent, comesFrom in va:
        if isEventDependent:
            # now=0 and this was process initialization or async reg
            addSp(comesFrom)
        else:
            # regular combinational process
            s.simUpdateVal(self, vUpdater)
    self._runCombProcesses()
    # processes triggered from simUpdateVal can add new values
    if self._valuesToApply and not self._applyValPlaned:
        self._scheduleApplyValues()
    # the unreachable yield turns this function into a generator
    # so the scheduler can treat it as a simulation process
    return
    yield
def read(self, sig) -> Value:
    """
    Read a value from a signal or an interface.
    """
    try:
        val = sig._val
    except AttributeError:
        # sig is an interface: unwrap it to its inner signal
        val = sig._sigInside._val
    return val.clone()
def write(self, val, sig: SimSignal)-> None:
    """
    Write value to signal or interface.
    """
    # get target RtlSignal (unwrap an interface to its inner signal)
    try:
        simSensProcs = sig.simSensProcs
    except AttributeError:
        sig = sig._sigInside
        simSensProcs = sig.simSensProcs
    # type cast of input value to the signal's type
    t = sig._dtype
    if isinstance(val, Value):
        v = val.clone()
        v = v._auto_cast(t)
    else:
        v = t.fromPy(val)
    # can not update value in signal directly due to signal proxies
    sig.simUpdateVal(self, lambda curentV: (
        valueHasChanged(curentV, v), v))
    if not self._applyValPlaned:
        if not (simSensProcs or
                sig.simRisingSensProcs or
                sig.simFallingSensProcs):
            # signal value was changed but there are no sensitive processes
            # to it because of this _applyValues is never planed
            # and should be
            self._scheduleApplyValues()
        elif (sig._writeCallbacks or
              sig._writeCallbacksToEn):
            # signal write did not cause any change on any other signal
            # but there are still simulation agents waiting on
            # updateComplete event
            self._scheduleApplyValues()
def add_process(self, proc) -> None:
    """
    Add process to the event queue at the current simulation time
    with default priority

    :param proc: process (generator) to schedule
    """
    self._events.push(self.now, PRIORITY_NORMAL, proc)
def simUnit(self, synthesisedUnit: Unit, until: float, extraProcesses=()):
    """
    Run simulation for Unit instance

    :param synthesisedUnit: unit to simulate
    :param until: simulation end time
    :param extraProcesses: iterable of functions(sim) returning extra
        simulation processes to register
    """
    # NOTE: default changed from a shared mutable [] to an immutable ()
    # (mutable default argument anti-pattern); the value is only
    # iterated, so callers are unaffected
    beforeSim = self.config.beforeSim
    if beforeSim is not None:
        beforeSim(self, synthesisedUnit)
    add_proc = self.add_process
    for p in extraProcesses:
        add_proc(p(self))
    self._initUnitSignals(synthesisedUnit)
    self.run(until)
def _mkOp(fn):
    """
    Build a variadic reduction operator from a binary function.

    :param fn: function performing the binary operation
    """
    def op(*operands, key=None) -> RtlSignalBase:
        """
        :param operands: variadic input operands
        :param key: optional function applied to every operand
            before processing
        """
        assert operands, operands
        if key is not None:
            operands = map(key, operands)
        it = iter(operands)
        acc = next(it)
        for item in it:
            acc = fn(acc, item)
        return acc
    return op
def ternaryOpsToIf(statements):
    """
    Convert every assignment driven by a ternary operator into an
    equivalent :class:`IfContainer`; other statements pass through.
    """
    stms = []
    for st in statements:
        if isinstance(st, Assignment):
            try:
                if not isinstance(st.src, RtlSignalBase):
                    raise DoesNotContainsTernary()
                d = st.src.singleDriver()
                if not isinstance(d, Operator) or d.operator != AllOps.TERNARY:
                    raise DoesNotContainsTernary()
                else:
                    # cond ? a : b  ->  if cond: dst = a else: dst = b
                    ops = d.operands
                    ifc = IfContainer(ops[0],
                                      [Assignment(ops[1], st.dst)],
                                      [Assignment(ops[2], st.dst)]
                                      )
                    stms.append(ifc)
                    continue
            except (MultipleDriversErr, DoesNotContainsTernary):
                # keep the assignment unchanged
                pass
            except NoDriverErr:
                # a source with no driver must be an interface signal or
                # have a valid default value, otherwise it is a bug
                assert (hasattr(st.src, "_interface")
                        and st.src._interface is not None)\
                    or st.src.defVal.vldMask, st.src
        stms.append(st)
    return stms
"resource": ""
} |
def HWProcess(cls, proc, ctx):
    """
    Serialize a HWProcess object as a VHDL process.

    :param proc: process to serialize
    :param ctx: serialization context (name scope, indent level, ...)
    """
    body = proc.statements
    extraVars = []
    extraVarsSerialized = []
    # Statements with control flow force a real VHDL process block.
    hasToBeVhdlProcess = arr_any(body,
                                 lambda x: isinstance(x,
                                                      (IfContainer,
                                                       SwitchContainer,
                                                       WhileContainer,
                                                       WaitStm)))
    sensitivityList = sorted(
        map(lambda s: cls.sensitivityListItem(s, ctx),
            proc.sensitivityList))
    if hasToBeVhdlProcess:
        childCtx = ctx.withIndent()
    else:
        childCtx = copy(ctx)

    def createTmpVarFn(suggestedName, dtype):
        # Allocate a virtual-only temporary variable in the process scope.
        s = RtlSignal(None, None, dtype, virtualOnly=True)
        s.name = ctx.scope.checkedName(suggestedName, s)
        s.hidden = False
        serializedS = cls.SignalItem(s, childCtx, declaration=True)
        extraVars.append(s)
        extraVarsSerialized.append(serializedS)
        return s

    childCtx.createTmpVarFn = createTmpVarFn
    statemets = [cls.asHdl(s, childCtx) for s in body]
    proc.name = ctx.scope.checkedName(proc.name, proc)

    # Initialize the temporaries created while serializing the body.
    extraVarsInit = []
    for s in extraVars:
        if isinstance(s.defVal, RtlSignalBase) or s.defVal.vldMask:
            a = Assignment(s.defVal, s, virtualOnly=True)
            extraVarsInit.append(cls.Assignment(a, childCtx))
        else:
            assert s.drivers, s
            for d in s.drivers:
                extraVarsInit.append(cls.asHdl(d, childCtx))

    _hasToBeVhdlProcess = hasToBeVhdlProcess
    hasToBeVhdlProcess = extraVars or hasToBeVhdlProcess
    if hasToBeVhdlProcess and not _hasToBeVhdlProcess:
        # The temporaries promoted this to a process after the body was
        # serialized without the extra indent level; add it now.
        oneIndent = getIndent(1)
        statemets = list(map(lambda x: oneIndent + x, statemets))

    return cls.processTmpl.render(
        indent=getIndent(ctx.indent),
        name=proc.name,
        hasToBeVhdlProcess=hasToBeVhdlProcess,
        extraVars=extraVarsSerialized,
        sensitivityList=", ".join(sensitivityList),
        statements=extraVarsInit + statemets
    )
"resource": ""
} |
def hash_distance(left_hash, right_hash):
    """
    Return the Hamming distance between two equal-length hash strings.

    :raises ValueError: if the strings differ in length
    """
    if len(left_hash) != len(right_hash):
        raise ValueError('Hamming distance requires two strings of equal length')
    return sum(1 for a, b in zip(left_hash, right_hash) if a != b)
"resource": ""
} |
def average_hash(image_path, hash_size=8):
    """
    Compute the average hash (aHash) of the image at *image_path*.

    :param image_path: path of an image file readable by PIL
    :param hash_size: edge length of the downscaled image; the hash
        has ``hash_size ** 2`` bits
    :return: the hash as a zero-padded lowercase hex string
    """
    with open(image_path, 'rb') as f:
        # Downscale and convert to 8-bit grayscale ('L').
        # Image.LANCZOS replaces Image.ANTIALIAS, which was removed in
        # Pillow 10 (ANTIALIAS had been an alias of LANCZOS since 2.7).
        image = Image.open(f).resize(
            (hash_size, hash_size), Image.LANCZOS).convert('L')
        pixels = list(image.getdata())
    avg = sum(pixels) / len(pixels)
    # One bit per pixel: 1 if brighter than the average, else 0.
    bits = "".join('1' if pixel > avg else '0' for pixel in pixels)
    # 4 bits per hex digit -> hash_size**2 // 4 hex chars, zero padded.
    hashformat = "0{hashlength}x".format(hashlength=hash_size ** 2 // 4)
    return format(int(bits, 2), hashformat)
"resource": ""
} |
def distance(image_path, other_image_path):
    """Return the Hamming distance between the average hashes of two images."""
    return hash_distance(average_hash(image_path),
                         average_hash(other_image_path))
"resource": ""
} |
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Vizio media player platform."""
    host = config.get(CONF_HOST)
    token = config.get(CONF_ACCESS_TOKEN)
    name = config.get(CONF_NAME)
    volume_step = config.get(CONF_VOLUME_STEP)
    device_type = config.get(CONF_DEVICE_CLASS)

    device = VizioDevice(host, token, name, volume_step, device_type)
    if device.validate_setup() is False:
        _LOGGER.error("Failed to set up Vizio platform, "
                      "please check if host and API key are correct")
        return
    elif (token is None or token == "") and device_type == "tv":
        # A TV requires an auth token; a soundbar does not.
        _LOGGER.error("Failed to set up Vizio platform, "
                      "if device_class is 'tv' then an auth_token needs "
                      "to be provided, otherwise if device_class is "
                      "'soundbar' then add the right device_class to config")
        return

    if config.get(CONF_SUPPRESS_WARNING):
        # Opt-in suppression of urllib3's self-signed-cert warnings.
        from requests.packages import urllib3
        _LOGGER.warning("InsecureRequestWarning is disabled "
                        "because of Vizio platform configuration")
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
    add_entities([device], True)
"resource": ""
} |
def update(self):
    """Retrieve the latest state of the device."""
    power = self._device.get_power_state()
    if not power:
        # None means the device was unreachable, False means it is off.
        self._state = None if power is None else STATE_OFF
        self._volume_level = None
        self._current_input = None
        self._available_inputs = None
        return

    self._state = STATE_ON
    volume = self._device.get_current_volume()
    if volume is not None:
        self._volume_level = float(volume) / self._max_volume
    current = self._device.get_current_input()
    if current is not None:
        self._current_input = current.meta_name
    inputs = self._device.get_inputs()
    if inputs is not None:
        self._available_inputs = [item.name for item in inputs]
"resource": ""
} |
def mute_volume(self, mute):
    """Mute or unmute the device's volume."""
    action = self._device.mute_on if mute else self._device.mute_off
    action()
"resource": ""
} |
def volume_up(self):
    """Increase the device volume by one configured step."""
    # Keep the cached level in sync before issuing the command.
    delta = self._volume_step / self._max_volume
    self._volume_level += delta
    self._device.vol_up(num=self._volume_step)
"resource": ""
} |
def volume_down(self):
    """Decrease the device volume by one configured step."""
    # Keep the cached level in sync before issuing the command.
    delta = self._volume_step / self._max_volume
    self._volume_level -= delta
    self._device.vol_down(num=self._volume_step)
"resource": ""
} |
def set_volume_level(self, volume):
    """Set the volume to *volume* (0.0 .. 1.0) via stepped commands."""
    current = self._volume_level
    if current is None:
        # Device state unknown; nothing to compare against.
        return
    if volume > current:
        steps = int(self._max_volume * (volume - current))
        self._volume_level = volume
        self._device.vol_up(num=steps)
    elif volume < current:
        steps = int(self._max_volume * (current - volume))
        self._volume_level = volume
        self._device.vol_down(num=steps)
"resource": ""
} |
def reset(self):
    '''Restore the standard shogi starting position.'''
    # One bitboard per piece type, indexed by piece-type constant.
    self.piece_bb = [
        BB_VOID,                                   # NONE
        BB_RANK_C | BB_RANK_G,                     # PAWN
        BB_A1 | BB_I1 | BB_A9 | BB_I9,             # LANCE
        BB_A2 | BB_A8 | BB_I2 | BB_I8,             # KNIGHT
        BB_A3 | BB_A7 | BB_I3 | BB_I7,             # SILVER
        BB_A4 | BB_A6 | BB_I4 | BB_I6,             # GOLD
        BB_B2 | BB_H8,                             # BISHOP
        BB_B8 | BB_H2,                             # ROOK
        BB_A5 | BB_I5,                             # KING
        BB_VOID,                                   # PROM_PAWN
        BB_VOID,                                   # PROM_LANCE
        BB_VOID,                                   # PROM_KNIGHT
        BB_VOID,                                   # PROM_SILVER
        BB_VOID,                                   # PROM_BISHOP
        BB_VOID,                                   # PROM_ROOK
    ]
    self.pieces_in_hand = [collections.Counter(), collections.Counter()]
    self.occupied = Occupied(BB_RANK_G | BB_H2 | BB_H8 | BB_RANK_I,
                             BB_RANK_A | BB_B2 | BB_B8 | BB_RANK_C)
    self.king_squares = [I5, A5]
    # Mirror the bitboards into a per-square piece-type array.
    self.pieces = [NONE for i in SQUARES]
    for i in SQUARES:
        mask = BB_SQUARES[i]
        for piece_type in PIECE_TYPES:
            if mask & self.piece_bb[piece_type]:
                self.pieces[i] = piece_type
    self.turn = BLACK
    self.move_number = 1
    self.captured_piece_stack = collections.deque()
    self.move_stack = collections.deque()
    self.incremental_zobrist_hash = self.board_zobrist_hash(DEFAULT_RANDOM_ARRAY)
    self.transpositions = collections.Counter((self.zobrist_hash(), ))
"resource": ""
} |
def piece_at(self, square):
    '''Return the Piece on *square*, or None if the square is empty.'''
    mask = BB_SQUARES[square]
    # WHITE occupancy decides the color; everything else is BLACK.
    color = int(bool(self.occupied[WHITE] & mask))
    piece_type = self.piece_type_at(square)
    if piece_type:
        return Piece(piece_type, color)
"resource": ""
} |
def remove_piece_at(self, square, into_hand=False):
    '''
    Remove the piece on *square* if present, optionally moving it
    into the side-to-move's hand.
    '''
    piece_type = self.piece_type_at(square)
    if piece_type == NONE:
        return
    if into_hand:
        self.add_piece_into_hand(piece_type, self.turn)

    mask = BB_SQUARES[square]
    self.piece_bb[piece_type] ^= mask
    color = int(bool(self.occupied[WHITE] & mask))
    self.pieces[square] = NONE
    self.occupied.ixor(mask, color, square)

    # Update the incremental Zobrist hash: piece indices interleave
    # black/white per piece type.
    if color == BLACK:
        piece_index = (piece_type - 1) * 2
    else:
        piece_index = (piece_type - 1) * 2 + 1
    self.incremental_zobrist_hash ^= DEFAULT_RANDOM_ARRAY[81 * piece_index + 9 * rank_index(square) + file_index(square)]
"resource": ""
} |
def set_piece_at(self, square, piece, from_hand=False, into_hand=False):
    '''
    Place *piece* on *square*, replacing any existing piece.
    Optionally take the new piece from hand and/or capture the old
    one into hand.
    '''
    if from_hand:
        self.remove_piece_from_hand(piece.piece_type, self.turn)
    self.remove_piece_at(square, into_hand)

    self.pieces[square] = piece.piece_type
    mask = BB_SQUARES[square]
    piece_type = piece.piece_type
    self.piece_bb[piece_type] |= mask
    if piece_type == KING:
        # keep the cached king location up to date
        self.king_squares[piece.color] = square
    self.occupied.ixor(mask, piece.color, square)

    # Update the incremental Zobrist hash: piece indices interleave
    # black/white per piece type.
    if piece.color == BLACK:
        piece_index = (piece.piece_type - 1) * 2
    else:
        piece_index = (piece.piece_type - 1) * 2 + 1
    self.incremental_zobrist_hash ^= DEFAULT_RANDOM_ARRAY[81 * piece_index + 9 * rank_index(square) + file_index(square)]
"resource": ""
} |
def is_suicide_or_check_by_dropping_pawn(self, move):
    '''
    Check whether *move* would leave the own king in check or give
    an (illegal) check by dropping a pawn.
    '''
    # Play the move on the board, inspect, then take it back.
    self.push(move)
    try:
        suicide = self.was_suicide()
        pawn_drop_check = self.was_check_by_dropping_pawn(move)
    finally:
        self.pop()
    return suicide or pawn_drop_check
"resource": ""
} |
def was_suicide(self):
    '''
    Check whether the opponent's king (the side that just moved) is
    attacked. Such a position can only arise from an illegal move.
    '''
    opponent = self.turn ^ 1
    return self.is_attacked_by(self.turn, self.king_squares[opponent])
"resource": ""
} |
def is_game_over(self):
    '''
    Check whether the game is over due to checkmate, stalemate or
    fourfold repetition.
    '''
    # No legal move at all covers both checkmate and stalemate.
    try:
        next(self.generate_legal_moves().__iter__())
    except StopIteration:
        return True
    return bool(self.is_fourfold_repetition())
"resource": ""
} |
def is_checkmate(self):
    '''Check whether the current position is a checkmate.'''
    if not self.is_check():
        return False
    # In check: checkmate iff no legal move exists.
    try:
        next(self.generate_legal_moves().__iter__())
    except StopIteration:
        return True
    return False
"resource": ""
} |
def is_fourfold_repetition(self):
    '''
    The game ends when a position occurs for the fourth time on
    consecutive alternating moves.
    '''
    # The position must have appeared at least four times.
    return self.transpositions[self.zobrist_hash()] >= 4
"resource": ""
} |
def pop(self):
    '''
    Restore the previous position and return the move taken back.
    '''
    move = self.move_stack.pop()
    # Remove the current position from the transposition table.
    self.transpositions.subtract((self.zobrist_hash(), ))
    self.move_number -= 1
    captured_piece_type = self.captured_piece_stack.pop()
    captured_piece_color = self.turn

    # A null move only swaps the turn.
    if not move:
        self.turn ^= 1
        return move

    # Restore the piece to its source square (or back into hand for drops),
    # undoing any promotion first.
    piece_type = self.piece_type_at(move.to_square)
    if move.promotion:
        piece_type = PIECE_PROMOTED.index(piece_type)
    if move.from_square is None:
        self.add_piece_into_hand(piece_type, self.turn ^ 1)
    else:
        self.set_piece_at(move.from_square, Piece(piece_type, self.turn ^ 1))

    # Restore the target square, putting any captured piece back.
    if captured_piece_type:
        self.remove_piece_from_hand(captured_piece_type, captured_piece_color ^ 1)
        self.set_piece_at(move.to_square, Piece(captured_piece_type, captured_piece_color))
    else:
        self.remove_piece_at(move.to_square)

    self.turn ^= 1
    return move
"resource": ""
} |
def sfen(self):
    '''
    Return the SFEN representation of the current position.
    '''
    sfen = []
    empty = 0

    # Board part: ranks separated by '/', runs of empty squares as digits.
    for square in SQUARES:
        piece = self.piece_at(square)
        if not piece:
            empty += 1
        else:
            if empty:
                sfen.append(str(empty))
                empty = 0
            sfen.append(piece.symbol())
        if BB_SQUARES[square] & BB_FILE_1:
            # End of a rank.
            if empty:
                sfen.append(str(empty))
                empty = 0
            if square != I1:
                sfen.append('/')
    sfen.append(' ')

    # Side to move.
    if self.turn == WHITE:
        sfen.append('w')
    else:
        sfen.append('b')
    sfen.append(' ')

    # Pieces in hand ('-' when both hands are empty).
    pih_len = 0
    for color in COLORS:
        p = self.pieces_in_hand[color]
        pih_len += len(p)
        for piece_type in sorted(p.keys(), reverse=True):
            if p[piece_type] >= 1:
                if p[piece_type] > 1:
                    sfen.append(str(p[piece_type]))
                piece = Piece(piece_type, color)
                sfen.append(piece.symbol())
    if pih_len == 0:
        sfen.append('-')
    sfen.append(' ')

    # Move count.
    sfen.append(str(self.move_number))
    return ''.join(sfen)
"resource": ""
} |
def push_usi(self, usi):
    '''
    Parse a move in USI coordinate notation, make the move and push
    it on the move stack.

    Raises `ValueError` if it is neither legal nor a null move.
    Returns the move.
    '''
    move = Move.from_usi(usi)
    self.push(move)
    return move
"resource": ""
} |
def zobrist_hash(self, array=None):
    '''
    Return a Zobrist hash of the current position.
    '''
    # Hash in the board setup.
    zobrist_hash = self.board_zobrist_hash(array)
    if array is None:
        array = DEFAULT_RANDOM_ARRAY

    if self.turn == WHITE:
        zobrist_hash ^= array[2268]

    # Pieces-in-hand pattern:
    # 19 * 5 * 5 * 5 * 5 * 3 * 3 = 106875 < 2**17,
    # so encoding only the black hand is enough in a normal game state.
    i = (
        self.pieces_in_hand[BLACK][ROOK] * 35625 +
        self.pieces_in_hand[BLACK][BISHOP] * 11875 +
        self.pieces_in_hand[BLACK][GOLD] * 2375 +
        self.pieces_in_hand[BLACK][SILVER] * 475 +
        self.pieces_in_hand[BLACK][KNIGHT] * 95 +
        self.pieces_in_hand[BLACK][LANCE] * 19 +
        self.pieces_in_hand[BLACK][PAWN])
    # XOR in one random value per set bit of the encoded hand.
    bit = bit_scan(i)
    while bit != -1 and bit is not None:
        zobrist_hash ^= array[2269 + bit]
        bit = bit_scan(i, bit + 1)
    return zobrist_hash
"resource": ""
} |
def symbol(self):
    '''
    Return the piece symbol (`p`, `l`, `n`, ...); uppercase for black.
    '''
    sym = PIECE_SYMBOLS[self.piece_type]
    return sym.upper() if self.color == BLACK else sym
"resource": ""
} |
def from_symbol(cls, symbol):
    '''
    Create a Piece instance from a piece symbol: lowercase means
    white, uppercase means black.

    Raises `ValueError` if the symbol is invalid.
    '''
    if symbol.lower() == symbol:
        return cls(PIECE_SYMBOLS.index(symbol), WHITE)
    return cls(PIECE_SYMBOLS.index(symbol.lower()), BLACK)
"resource": ""
} |
def usi(self):
    '''
    Return the USI string for the move, e.g. `7a8a` (or `7a8a+` for
    a promotion, `P*5e` for a drop, `0000` for a null move).
    '''
    if not self:
        return '0000'
    if self.drop_piece_type:
        return '{0}*{1}'.format(PIECE_SYMBOLS[self.drop_piece_type].upper(),
                                SQUARE_NAMES[self.to_square])
    suffix = '+' if self.promotion else ''
    return SQUARE_NAMES[self.from_square] + SQUARE_NAMES[self.to_square] + suffix
"resource": ""
} |
def from_usi(cls, usi):
    '''
    Parse a USI move string (`0000`, drops like `P*5e`, plain moves,
    or promotions suffixed with `+`).

    Raises `ValueError` if the USI string is invalid.
    '''
    if usi == '0000':
        return cls.null()
    if len(usi) == 4:
        if usi[1] == '*':
            # drop: piece symbol, '*', destination square
            piece = Piece.from_symbol(usi[0])
            return cls(None, SQUARE_NAMES.index(usi[2:4]), False, piece.piece_type)
        return cls(SQUARE_NAMES.index(usi[0:2]), SQUARE_NAMES.index(usi[2:4]))
    if len(usi) == 5 and usi[4] == '+':
        return cls(SQUARE_NAMES.index(usi[0:2]), SQUARE_NAMES.index(usi[2:4]), True)
    raise ValueError('expected usi string to be of length 4 or 5')
"resource": ""
} |
def parse_commits(data):
    '''Accept a string and parse it into many commits.
    Parse and yield each commit-dictionary.
    This function is a generator.
    '''
    for raw in RE_COMMIT.finditer(data):
        full_commit = raw.groups()[0]
        parts = RE_COMMIT.match(full_commit).groupdict()
        yield parse_commit(parts)
"resource": ""
} |
def parse_commit(parts):
    '''Accept a parsed single commit. Some of the named groups
    require further processing, so parse those groups.
    Return a dictionary representing the completely parsed
    commit.
    '''
    commit = {
        'commit': parts['commit'],
        'tree': parts['tree'],
    }
    # One parent line per parent commit.
    commit['parents'] = [
        parse_parent_line(parentline)
        for parentline in parts['parents'].splitlines()
    ]
    commit['author'] = parse_author_line(parts['author'])
    commit['committer'] = parse_committer_line(parts['committer'])
    # Message lines may be dropped (None) by parse_message_line.
    message_lines = [
        parse_message_line(msgline)
        for msgline in parts['message'].split("\n")
    ]
    commit['message'] = "\n".join(
        msgline
        for msgline in message_lines
        if msgline is not None
    )
    commit['changes'] = [
        parse_numstat_line(numstat)
        for numstat in parts['numstats'].splitlines()
    ]
    return commit
"resource": ""
} |
def load_config_from_cli(config: GoodConf, argv: List[str]) -> List[str]:
    """Load config, checking CLI arguments for a config file."""
    # NOTE(review): this generator is consumed via ``with ... as args``
    # elsewhere, which presumably relies on a @contextmanager decorator
    # not visible here — confirm against the original source.
    # Monkey patch Django's command parser so every subcommand
    # also accepts the --config flag.
    from django.core.management.base import BaseCommand
    original_parser = BaseCommand.create_parser

    def patched_parser(self, prog_name, subcommand):
        parser = original_parser(self, prog_name, subcommand)
        argparser_add_argument(parser, config)
        return parser

    BaseCommand.create_parser = patched_parser
    try:
        # Extract the config argument before Django sees the argv.
        parser = argparse.ArgumentParser(add_help=False)
        argparser_add_argument(parser, config)
        config_arg, default_args = parser.parse_known_args(argv)
        config.load(config_arg.config)
        yield default_args
    finally:
        # Put that create_parser back where it came from or so help me!
        BaseCommand.create_parser = original_parser
"resource": ""
} |
def execute_from_command_line_with_config(config: GoodConf, argv: List[str]):
    """Load config, then run Django's execute_from_command_line."""
    with load_config_from_cli(config, argv) as remaining_args:
        from django.core.management import execute_from_command_line
        execute_from_command_line(remaining_args)
"resource": ""
} |
def argparser_add_argument(parser: argparse.ArgumentParser, config: GoodConf):
    """Add the config-file argument to an existing argparser."""
    help = "Config file."
    if config.file_env_var:
        help += (" Can also be configured via the "
                 "environment variable: {}".format(config.file_env_var))
    if config.default_files:
        help += (" Defaults to the first file that exists from "
                 "[{}].".format(', '.join(config.default_files)))
    parser.add_argument('-C', '--config', metavar='FILE', help=help)
"resource": ""
} |
def load(self, filename: str = None):
    """Locate the config file (explicit arg, env var, then defaults)
    and set values from it; fall back to environment variables."""
    if filename:
        self.config_file = _find_file(filename)
    else:
        if self.file_env_var and self.file_env_var in os.environ:
            self.config_file = _find_file(os.environ[self.file_env_var])
        if not self.config_file:
            # First default file that exists wins.
            for filename in self.default_files:
                self.config_file = _find_file(filename, require=False)
                if self.config_file:
                    break
    if self.config_file:
        config = _load_config(self.config_file)
        log.info("Loading config from %s", self.config_file)
    else:
        config = {}
        log.info("No config file specified. "
                 "Loading with environment variables.")
    self.set_values(config)
"resource": ""
} |
def generate_yaml(cls, **override):
    """
    Dump the initial config as YAML, annotated with the class
    docstring and per-key help comments.
    """
    import ruamel.yaml
    yaml = ruamel.yaml.YAML()

    # First pass: dump the raw initial values.
    yaml_str = StringIO()
    yaml.dump(cls.get_initial(**override), stream=yaml_str)
    yaml_str.seek(0)

    # Reload as a round-trip document so comments can be attached.
    dict_from_yaml = yaml.load(yaml_str)
    if cls.__doc__:
        dict_from_yaml.yaml_set_start_comment('\n' + cls.__doc__ + '\n\n')
    for k in dict_from_yaml.keys():
        if cls._values[k].help:
            dict_from_yaml.yaml_set_comment_before_after_key(
                k, before='\n' + cls._values[k].help)

    # Second pass: dump the annotated document.
    yaml_str = StringIO()
    yaml.dump(dict_from_yaml, yaml_str)
    yaml_str.seek(0)
    return yaml_str.read()
"resource": ""
} |
def generate_markdown(cls):
    """
    Document the config values in markdown.
    """
    lines = []
    if cls.__doc__:
        lines.extend(['# {}'.format(cls.__doc__), ''])
    for key, value in cls._values.items():
        lines.append('* **{}** '.format(key))
        if value.required:
            # mark required values on the same bullet line
            lines[-1] = lines[-1] + '_REQUIRED_ '
        if value.help:
            lines.append(' {} '.format(value.help))
        lines.append(' type: `{}` '.format(value.cast_as.__name__))
        if value.default is not None:
            lines.append(' default: `{}` '.format(value.default))
    return '\n'.join(lines)
"resource": ""
} |
def cast(self, val: str):
    """Convert *val* to the type requested by ``cast_as``, preferring a
    specialised ``cast_as_<typename>`` method when one is defined."""
    method_name = 'cast_as_{}'.format(self.cast_as.__name__.lower())
    try:
        return getattr(self, method_name)(val)
    except AttributeError:
        # no specialised caster: call the type constructor directly
        return self.cast_as(val)
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.