function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def __init__(
self,
*,
routes: Optional[List["MatchedRoute"]] = None,
**kwargs | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def __init__(
self,
*,
route: "RouteProperties",
message: Optional["RoutingMessage"] = None,
twin: Optional["RoutingTwin"] = None,
**kwargs | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def __init__(
self,
*,
result: Optional[Union[str, "TestResultStatus"]] = None,
details: Optional["TestRouteResultDetails"] = None,
**kwargs | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def __init__(
self,
*,
compilation_errors: Optional[List["RouteCompilationError"]] = None,
**kwargs | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def __init__(
self,
*,
id: Optional[str] = None,
type: Optional[str] = None,
unit: Optional[str] = None,
current_value: Optional[int] = None,
limit: Optional[int] = None,
name: Optional["Name"] = None,
**kwargs | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def __init__(
self,
*,
value: Optional[List["UserSubscriptionQuota"]] = None,
**kwargs | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def _get_template(template_string):
if __is_18:
return engines["django"].from_string(template_string)
else:
return loader.get_template_from_string(template_string) | mbi/django-simple-captcha | [
1296,
306,
1296,
33,
1333550136
] |
def test(request):
class CaptchaTestForm(forms.Form):
subject = forms.CharField(max_length=100)
sender = forms.EmailField()
captcha = CaptchaField(help_text="asdasd")
return _test(request, CaptchaTestForm) | mbi/django-simple-captcha | [
1296,
306,
1296,
33,
1333550136
] |
def test_custom_generator(request):
class CaptchaTestModelForm(forms.ModelForm):
subject = forms.CharField(max_length=100)
sender = forms.EmailField()
captcha = CaptchaField(generator=lambda: ("111111", "111111"))
class Meta:
model = User
fields = ("subject", "sender", "captcha")
return _test(request, CaptchaTestModelForm) | mbi/django-simple-captcha | [
1296,
306,
1296,
33,
1333550136
] |
def test_per_form_format(request):
class CaptchaTestFormatForm(forms.Form):
captcha = CaptchaField(
help_text="asdasd",
error_messages=dict(invalid="TEST CUSTOM ERROR MESSAGE"),
output_format=(
"%(image)s testPerFieldCustomFormatString "
"%(hidden_field)s %(text_field)s"
),
)
return _test(request, CaptchaTestFormatForm) | mbi/django-simple-captcha | [
1296,
306,
1296,
33,
1333550136
] |
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file) | dz0ny/mopidy-api-explorer | [
20,
5,
20,
2,
1403610839
] |
def __init__ (self, id, status, response, ident = None):
rcache.Result.__init__ (self, status, ident)
self.node = id
self.__response = response | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def reraise (self):
if self.status_code >= 300:
try:
self.__response.expt
except AttributeError:
# redircting to HTTPError
raise exceptions.HTTPError ("%d %s" % (self.status_code, self.reason))
else:
self.__response.raise_for_status ()
return self | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def cache (self, timeout = 60, cache_if = (200,)):
if not timeout:
return
if self.status != NORMAL or self.status_code not in cache_if:
return
rcache.Result.cache (self, timeout)
return self | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def one (self, cache = None, cache_if = (200,)):
try:
return self.fetch (cache, cache_if, True)
except (PGIntegrityError, sqlite3.IntegrityError):
# primary or unique index error
raise exceptions.HTTPError ("409 Conflict") | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def __init__ (self, results, ident = None):
self.results = results
self.status_code = [rs.status_code for rs in results]
rcache.Result.__init__ (self, [rs.status for rs in self.results], ident) | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def data (self):
return [r.data for r in self.results] | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def text (self):
return [r.text for r in self.results] | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def cache (self, timeout = 60, cache_if = (200,)):
if [_f for _f in [rs.status != NORMAL or rs.status_code not in cache_if for rs in self.results] if _f]:
return
rcache.Result.cache (self, timeout)
return self | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def one (self, cache = None, cache_if = (200,)):
self.cache (cache, cache_if)
return [r.one () for r in self.results] | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def __init__ (self, cv, id, ident = None, filterfunc = None, cachefs = None, callback = None):
self._cv = cv
self.id = id
self.ident = ident
self.filterfunc = filterfunc
self.cachefs = cachefs
self.callback = callback
self.creation_time = time.time ()
self.status = UNSENT
self.result = None
self.handler = None | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def get_status (self):
with self._cv:
return self.status | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def set_status (self, code, result = None):
with self._cv:
self.status = code
if result:
self.result = result
return code | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def do_filter (self):
if self.filterfunc:
self.filterfunc (self.result) | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def handle_result (self, handler):
if self.get_status () == TIMEOUT:
# timeout, ignore
return
response = handler.response
# DON'T do_filter here, it blocks select loop
if response.code >= 700:
if response.code == 702:
status = TIMEOUT
else:
status = NETERR
else:
status = NORMAL
result = Result (self.id, status, response, self.ident)
cakey = response.request.get_cache_key ()
if self.cachefs and cakey and response.max_age:
self.cachefs.save (
cakey,
response.get_header ("content-type"), response.content,
response.max_age, 0
)
handler.callback = None
handler.response = None
self.set_status (status, result)
tuple_cb (self, self.callback) | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def __init__ (self,
cluster,
uri,
params = None,
reqtype = "get",
headers = None,
auth = None,
meta = None,
use_cache = False,
mapreduce = True,
filter = None,
callback = None,
cache = None,
timeout = 10,
origin = None,
cachefs = None,
logger = None
):
self._uri = uri
self._params = params
self._headers = headers
self._reqtype = reqtype
self._auth = auth
self.set_defaults (cluster, meta, use_cache, mapreduce, filter, callback, cache, timeout, origin, logger, cachefs)
if not self._reqtype.lower ().endswith ("rpc"):
self._build_request ("", self._params) | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def add_proto (cls, name, class_):
cls.proto_map [name] = class_ | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def __del__ (self):
self._cv = None
self._results = [] | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def _add_header (self, n, v):
if self._headers is None:
self._headers = {}
self._headers [n] = v | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def _build_request (self, method, params):
self._cached_request_args = (method, params) # backup for retry
if self._use_cache and rcache.the_rcache:
self._cached_result = rcache.the_rcache.get (self._get_ident (), self._use_cache)
if self._cached_result is not None:
self._cached_result.meta = self._meta
self._callback and tuple_cb (self._cached_result, self._callback)
return
else:
self._use_cache = False
requests = 0
while self._avails ():
if self._cluster.get_name () != "__socketpool__":
asyncon = self._get_connection (None)
else:
asyncon = self._get_connection (self._uri)
self._auth = self._auth or asyncon.get_auth ()
_reqtype = self._reqtype.lower ()
rs = Dispatcher (
self._cv, asyncon.address,
ident = not self._mapreduce and self._get_ident () or None,
filterfunc = self._filter, cachefs = self._cachefs,
callback = self._collect
)
self._requests [rs] = asyncon
args = (params, self._headers, self._auth, self._logger, self._meta)
try:
if _reqtype in ("ws", "wss"):
handler = ws_request_handler.RequestHandler
request = ws_request.Request (self._uri, *args)
else:
if not self._use_cache:
self._add_header ("Cache-Control", "no-cache")
handler = http_request_handler.RequestHandler
try:
class_ = self.proto_map [_reqtype]
except KeyError:
if _reqtype == "upload":
request = http_request.HTTPMultipartRequest (self._uri, _reqtype, *args)
else:
request = http_request.HTTPRequest (self._uri, _reqtype, *args)
else:
request = class_ (self._uri, method, *args)
requests += self._handle_request (request, rs, asyncon, handler)
except:
self._logger ("Request Creating Failed", "fail")
self._logger.trace ()
rs.request_failed ()
asyncon.set_active (False)
continue
if requests:
self._request = request # sample for unitest
trigger.wakeup ()
if _reqtype [-3:] == "rpc":
return self | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def _get_connection (self, id = None):
if id is None: id = self._nodes.pop ()
else: self._nodes = []
asyncon = self._cluster.get (id)
self._setup (asyncon)
return asyncon | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def _cancel (self):
with self._cv:
self._canceled = True | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def _fail_log (self, status):
if self._origin:
self._logger ("backend status is {}, {} at {} LINE {}: {}".format (
status, self._origin [3], self._origin [1], self._origin [2], self._origin [4][0].strip ()
), "debug") | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def _do_callback (self, callback):
result = self.dispatch (wait = False)
tuple_cb (result, callback) | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def rerequest (self):
self._build_request (*self._cached_request_args) | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def set_callback (self, callback, reqid = None, timeout = None):
if reqid is not None:
self._meta ["__reqid"] = reqid
if self._cv:
with self._cv:
requests = self._requests
self._callback = callback
else:
# already finished or will use cache
requests = self._requests
self._callback = callback
if not requests:
return self._do_callback (callback)
timeout and self.reset_timeout (timeout) | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def _wait (self, timeout = None):
timeout and self.reset_timeout (timeout)
remain = self._timeout - (time.time () - self._init_time)
if remain > 0:
with self._cv:
if self._requests and not self._canceled:
self._cv.wait (remain)
self._canceled = True
requests = list (self._requests.items ())
for rs, asyncon in requests:
rs.set_status (TIMEOUT)
asyncon.handle_abort () # abort imme
self._collect (rs, failed = True) | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def dispatch_or_throw (self, cache = None, cache_if = (200,), timeout = None):
return self.dispatch (cache, cache_if, reraise = True, timeout = timeout) | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def wait (self, timeout = None, reraise = False):
return self.dispatch (reraise = reraise, timeout = timeout) | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def commit (self, timeout = None):
return self.wait (timeout, True) | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def fetch (self, cache = None, cache_if = (200,), timeout = None):
res = self._cached_result or self.dispatch (timeout = timeout, reraise = True)
return res.fetch (cache or self._cache_timeout, cache_if) | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def then (self, func):
from ..tasks import Future
return Future (self, self._timeout, **self._meta).then (func) | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def __init__(self, send, name):
self.__send = send
self.__name = name | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def __call__(self, *args):
return self.__send(self.__name, args) | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def __init__ (self, __class, *args, **kargs):
self.__class = __class
self.__args = args
self.__kargs = kargs | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def __exit__ (self, type, value, tb):
pass | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def __request (self, method, params):
cdc = self.__class (*self.__args, **self.__kargs)
cdc._build_request (method, params)
return cdc | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def __init__ (self, cluster, logger, cachesfs):
self.cluster = cluster
self.logger = logger
self.cachesfs = cachesfs | hansroh/skitai | [
3,
1,
3,
1,
1482310026
] |
def train(args, extra_args):
env_type, env_id = get_env_type(args.env)
print('env_type: {}'.format(env_type))
total_timesteps = int(args.num_timesteps)
seed = args.seed
learn = get_learn_function(args.alg)
alg_kwargs = get_learn_function_defaults(args.alg, env_type)
alg_kwargs.update(extra_args)
env = build_env(args)
if args.save_video_interval != 0:
env = VecVideoRecorder(env, osp.join(logger.Logger.CURRENT.dir, "videos"), record_video_trigger=lambda x: x % args.save_video_interval == 0, video_length=args.save_video_length)
if args.network:
alg_kwargs['network'] = args.network
else:
if alg_kwargs.get('network') is None:
alg_kwargs['network'] = get_default_network(env_type)
print('Training {} on {}:{} with arguments \n{}'.format(args.alg, env_type, env_id, alg_kwargs))
model = learn(
env=env,
seed=seed,
total_timesteps=total_timesteps,
**alg_kwargs
)
return model, env | dsbrown1331/CoRL2019-DREX | [
43,
11,
43,
22,
1570219415
] |
def get_env_type(env_id):
if env_id in _game_envs.keys():
env_type = env_id
env_id = [g for g in _game_envs[env_type]][0]
else:
env_type = None
for g, e in _game_envs.items():
if env_id in e:
env_type = g
break
assert env_type is not None, 'env_id {} is not recognized in env types'.format(env_id, _game_envs.keys())
return env_type, env_id | dsbrown1331/CoRL2019-DREX | [
43,
11,
43,
22,
1570219415
] |
def get_alg_module(alg, submodule=None):
submodule = submodule or alg
try:
# first try to import the alg module from baselines
alg_module = import_module('.'.join(['baselines', alg, submodule]))
except ImportError:
# then from rl_algs
alg_module = import_module('.'.join(['rl_' + 'algs', alg, submodule]))
return alg_module | dsbrown1331/CoRL2019-DREX | [
43,
11,
43,
22,
1570219415
] |
def get_learn_function_defaults(alg, env_type):
try:
alg_defaults = get_alg_module(alg, 'defaults')
kwargs = getattr(alg_defaults, env_type)()
except (ImportError, AttributeError):
kwargs = {}
return kwargs | dsbrown1331/CoRL2019-DREX | [
43,
11,
43,
22,
1570219415
] |
def parse(v):
assert isinstance(v, str)
try:
return eval(v)
except (NameError, SyntaxError):
return v | dsbrown1331/CoRL2019-DREX | [
43,
11,
43,
22,
1570219415
] |
def main():
# configure logger, disable logging in child MPI processes (with rank > 0)
arg_parser = common_arg_parser()
args, unknown_args = arg_parser.parse_known_args()
extra_args = parse_cmdline_kwargs(unknown_args)
if MPI is None or MPI.COMM_WORLD.Get_rank() == 0:
rank = 0
logger.configure()
else:
logger.configure(format_strs=[])
rank = MPI.COMM_WORLD.Get_rank()
model, env = train(args, extra_args)
env.close()
if args.save_path is not None and rank == 0:
save_path = osp.expanduser(args.save_path)
model.save(save_path)
if args.play:
logger.log("Running trained model")
env = build_env(args)
obs = env.reset()
def initialize_placeholders(nlstm=128,**kwargs):
return np.zeros((args.num_env or 1, 2*nlstm)), np.zeros((1))
state, dones = initialize_placeholders(**extra_args)
while True:
actions, _, state, _ = model.step(obs,S=state, M=dones)
obs, _, done, _ = env.step(actions)
env.render()
done = done.any() if isinstance(done, np.ndarray) else done
if done:
obs = env.reset()
env.close() | dsbrown1331/CoRL2019-DREX | [
43,
11,
43,
22,
1570219415
] |
def kelvin_dict_to(d, target_temperature_unit):
"""
Converts all the values in a dict from Kelvin temperatures to the
specified temperature format.
:param d: the dictionary containing Kelvin temperature values
:type d: dict
:param target_temperature_unit: the target temperature unit, may be:
'celsius' or 'fahrenheit'
:type target_temperature_unit: str
:returns: a dict with the same keys as the input dict and converted
temperature values as values
:raises: *ValueError* when unknown target temperature units are provided
"""
if target_temperature_unit == 'kelvin':
return d
elif target_temperature_unit == 'celsius':
return {key: kelvin_to_celsius(d[key]) for key in d}
elif target_temperature_unit == 'fahrenheit':
return {key: kelvin_to_fahrenheit(d[key]) for key in d}
else:
raise ValueError("Invalid value for target temperature conversion \
unit") | csparpa/pyowm | [
746,
159,
746,
16,
1378111431
] |
def kelvin_to_fahrenheit(kelvintemp):
"""
Converts a numeric temperature from Kelvin degrees to Fahrenheit degrees
:param kelvintemp: the Kelvin temperature
:type kelvintemp: int/long/float
:returns: the float Fahrenheit temperature
:raises: *TypeError* when bad argument types are provided
"""
if kelvintemp < 0:
raise ValueError(__name__ +
": negative temperature values not allowed")
fahrenheittemp = (kelvintemp - KELVIN_OFFSET) * \
FAHRENHEIT_DEGREE_SCALE + FAHRENHEIT_OFFSET
return float("{0:.2f}".format(fahrenheittemp)) | csparpa/pyowm | [
746,
159,
746,
16,
1378111431
] |
def metric_wind_dict_to_km_h(d):
"""
Converts all the wind values in a dict from meters/sec
to km/hour.
:param d: the dictionary containing metric values
:type d: dict
:returns: a dict with the same keys as the input dict and values converted
to km/hour
"""
result = {}
for key, value in d.items():
if key != 'deg': # do not convert wind degree
result[key] = value * KM_PER_HOUR_FOR_ONE_METER_PER_SEC
else:
result[key] = value
return result | csparpa/pyowm | [
746,
159,
746,
16,
1378111431
] |
def metric_wind_dict_to_beaufort(d):
"""
Converts all the wind values in a dict from meters/sec
to the corresponding Beaufort scale level (which is not an exact number but rather
represents a range of wind speeds - see: https://en.wikipedia.org/wiki/Beaufort_scale).
Conversion table: https://www.windfinder.com/wind/windspeed.htm
:param d: the dictionary containing metric values
:type d: dict
:returns: a dict with the same keys as the input dict and values converted
to Beaufort level
"""
result = {}
for key, value in d.items():
if key != 'deg': # do not convert wind degree
if value <= 0.2:
bf = 0
elif 0.2 < value <= 1.5:
bf = 1
elif 1.5 < value <= 3.3:
bf = 2
elif 3.3 < value <= 5.4:
bf = 3
elif 5.4 < value <= 7.9:
bf = 4
elif 7.9 < value <= 10.7:
bf = 5
elif 10.7 < value <= 13.8:
bf = 6
elif 13.8 < value <= 17.1:
bf = 7
elif 17.1 < value <= 20.7:
bf = 8
elif 20.7 < value <= 24.4:
bf = 9
elif 24.4 < value <= 28.4:
bf = 10
elif 28.4 < value <= 32.6:
bf = 11
else:
bf = 12
result[key] = bf
else:
result[key] = value
return result | csparpa/pyowm | [
746,
159,
746,
16,
1378111431
] |
def get_log():
return logging.getLogger(__name__.split('.')[0]) | thefactory/marathon-python | [
199,
152,
199,
25,
1398275002
] |
def default(self, obj):
if hasattr(obj, 'json_repr'):
return self.default(obj.json_repr())
if isinstance(obj, datetime.datetime):
return obj.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
if isinstance(obj, collections.Iterable) and not isinstance(obj, str):
try:
return {k: self.default(v) for k, v in obj.items()}
except AttributeError:
return [self.default(e) for e in obj]
return obj | thefactory/marathon-python | [
199,
152,
199,
25,
1398275002
] |
def default(self, obj):
if hasattr(obj, 'json_repr'):
return self.default(obj.json_repr(minimal=True))
if isinstance(obj, datetime.datetime):
return obj.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
if isinstance(obj, collections.Iterable) and not isinstance(obj, str):
try:
return {k: self.default(v) for k, v in obj.items() if (v or v in (False, 0))}
except AttributeError:
return [self.default(e) for e in obj if (e or e in (False, 0))]
return obj | thefactory/marathon-python | [
199,
152,
199,
25,
1398275002
] |
def to_snake_case(camel_str):
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', camel_str)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() | thefactory/marathon-python | [
199,
152,
199,
25,
1398275002
] |
def __init__(self, **kw):
self.__dict__.update(kw)
self.version = version | reflectometry/direfl | [
2,
1,
2,
1,
1326382128
] |
def add_argument_group(name):
arg = parser.add_argument_group(name)
arg_lists.append(arg)
return arg | MichelDeudon/neural-combinatorial-optimization-rl-tensorflow | [
221,
63,
221,
1,
1493586256
] |
def get_config():
config, unparsed = parser.parse_known_args()
return config, unparsed | MichelDeudon/neural-combinatorial-optimization-rl-tensorflow | [
221,
63,
221,
1,
1493586256
] |
def main():
parser = argparse.ArgumentParser(description=__doc__.strip())
parser.add_argument('-p', '--ports', type=int, default=4, help="number of ports")
parser.add_argument('-n', '--name', type=str, help="module name")
parser.add_argument('-o', '--output', type=str, help="output file name")
args = parser.parse_args()
try:
generate(**args.__dict__)
except IOError as ex:
print(ex)
exit(1) | alexforencich/xfcp | [
39,
18,
39,
6,
1478123312
] |
def get_market_price_summary(asset1, asset2, with_last_trades=0):
# DEPRECATED 1.5
result = assets_trading.get_market_price_summary(asset1, asset2, with_last_trades)
return result if result is not None else False
#^ due to current bug in our jsonrpc stack, just return False if None is returned | CounterpartyXCP/counterblock | [
15,
68,
15,
21,
1390186591
] |
def get_market_cap_history(start_ts=None, end_ts=None):
now_ts = calendar.timegm(time.gmtime())
if not end_ts: # default to current datetime
end_ts = now_ts
if not start_ts: # default to 30 days before the end date
start_ts = end_ts - (30 * 24 * 60 * 60)
data = {}
results = {}
#^ format is result[market_cap_as][asset] = [[block_time, market_cap], [block_time2, market_cap2], ...]
for market_cap_as in (config.XCP, config.BTC):
caps = config.mongo_db.asset_marketcap_history.aggregate([
{"$match": {
"market_cap_as": market_cap_as,
"block_time": {
"$gte": datetime.datetime.utcfromtimestamp(start_ts)
} if end_ts == now_ts else {
"$gte": datetime.datetime.utcfromtimestamp(start_ts),
"$lte": datetime.datetime.utcfromtimestamp(end_ts)
}
}},
{"$project": {
"year": {"$year": "$block_time"},
"month": {"$month": "$block_time"},
"day": {"$dayOfMonth": "$block_time"},
"hour": {"$hour": "$block_time"},
"asset": 1,
"market_cap": 1,
}},
{"$sort": {"block_time": pymongo.ASCENDING}},
{"$group": {
"_id": {"asset": "$asset", "year": "$year", "month": "$month", "day": "$day", "hour": "$hour"},
"market_cap": {"$avg": "$market_cap"}, # use the average marketcap during the interval
}},
])
data[market_cap_as] = {}
for e in caps:
interval_time = int(calendar.timegm(datetime.datetime(e['_id']['year'], e['_id']['month'], e['_id']['day'], e['_id']['hour']).timetuple()) * 1000)
data[market_cap_as].setdefault(e['_id']['asset'], [])
data[market_cap_as][e['_id']['asset']].append([interval_time, e['market_cap']])
results[market_cap_as] = []
for asset in data[market_cap_as]:
#for z in data[market_cap_as][asset]: assert z[0] and z[0] > 0 and z[1] and z[1] >= 0
results[market_cap_as].append(
{'name': asset, 'data': sorted(data[market_cap_as][asset], key=operator.itemgetter(0))})
return results | CounterpartyXCP/counterblock | [
15,
68,
15,
21,
1390186591
] |
def get_market_info(assets):
assets_market_info = list(config.mongo_db.asset_market_info.find({'asset': {'$in': assets}}, {'_id': 0}))
extended_asset_info = config.mongo_db.asset_extended_info.find({'asset': {'$in': assets}})
extended_asset_info_dict = {}
for e in extended_asset_info:
if not e.get('disabled', False): # skip assets marked disabled
extended_asset_info_dict[e['asset']] = e
for a in assets_market_info:
if a['asset'] in extended_asset_info_dict and extended_asset_info_dict[a['asset']].get('processed', False):
extended_info = extended_asset_info_dict[a['asset']]
a['extended_image'] = bool(extended_info.get('image', ''))
a['extended_description'] = extended_info.get('description', '')
a['extended_website'] = extended_info.get('website', '')
a['extended_pgpsig'] = extended_info.get('pgpsig', '')
else:
a['extended_image'] = a['extended_description'] = a['extended_website'] = a['extended_pgpsig'] = ''
return assets_market_info | CounterpartyXCP/counterblock | [
15,
68,
15,
21,
1390186591
] |
def get_market_info_leaderboard(limit=100):
"""returns market leaderboard data for both the XCP and BTC markets"""
# do two queries because we limit by our sorted results, and we might miss an asset with a high BTC trading value
# but with little or no XCP trading activity, for instance if we just did one query
assets_market_info_xcp = list(config.mongo_db.asset_market_info.find({}, {'_id': 0}).sort('market_cap_in_{}'.format(config.XCP.lower()), pymongo.DESCENDING).limit(limit))
assets_market_info_btc = list(config.mongo_db.asset_market_info.find({}, {'_id': 0}).sort('market_cap_in_{}'.format(config.BTC.lower()), pymongo.DESCENDING).limit(limit))
assets_market_info = {
config.XCP.lower(): [a for a in assets_market_info_xcp if a['price_in_{}'.format(config.XCP.lower())]],
config.BTC.lower(): [a for a in assets_market_info_btc if a['price_in_{}'.format(config.BTC.lower())]]
}
# throw on extended info, if it exists for a given asset
assets = list(set([a['asset'] for a in assets_market_info[config.XCP.lower()]] + [a['asset'] for a in assets_market_info[config.BTC.lower()]]))
extended_asset_info = config.mongo_db.asset_extended_info.find({'asset': {'$in': assets}})
extended_asset_info_dict = {}
for e in extended_asset_info:
if not e.get('disabled', False): # skip assets marked disabled
extended_asset_info_dict[e['asset']] = e
for r in (assets_market_info[config.XCP.lower()], assets_market_info[config.BTC.lower()]):
for a in r:
if a['asset'] in extended_asset_info_dict:
extended_info = extended_asset_info_dict[a['asset']]
if 'extended_image' not in a or 'extended_description' not in a or 'extended_website' not in a:
continue # asset has been recognized as having a JSON file description, but has not been successfully processed yet
a['extended_image'] = bool(extended_info.get('image', ''))
a['extended_description'] = extended_info.get('description', '')
a['extended_website'] = extended_info.get('website', '')
else:
a['extended_image'] = a['extended_description'] = a['extended_website'] = ''
return assets_market_info | CounterpartyXCP/counterblock | [
15,
68,
15,
21,
1390186591
] |
def get_market_price_history(asset1, asset2, start_ts=None, end_ts=None, as_dict=False):
"""Return block-by-block aggregated market history data for the specified asset pair, within the specified date range.
@returns List of lists (or list of dicts, if as_dict is specified).
* If as_dict is False, each embedded list has 8 elements [block time (epoch in MS), open, high, low, close, volume, # trades in block, block index]
* If as_dict is True, each dict in the list has the keys: block_time (epoch in MS), block_index, open, high, low, close, vol, count
Aggregate on an an hourly basis
"""
now_ts = calendar.timegm(time.gmtime())
if not end_ts: # default to current datetime
end_ts = now_ts
if not start_ts: # default to 180 days before the end date
start_ts = end_ts - (180 * 24 * 60 * 60)
base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
# get ticks -- open, high, low, close, volume
result = config.mongo_db.trades.aggregate([
{"$match": {
"base_asset": base_asset,
"quote_asset": quote_asset,
"block_time": {
"$gte": datetime.datetime.utcfromtimestamp(start_ts)
} if end_ts == now_ts else {
"$gte": datetime.datetime.utcfromtimestamp(start_ts),
"$lte": datetime.datetime.utcfromtimestamp(end_ts)
}
}},
{"$project": {
"year": {"$year": "$block_time"},
"month": {"$month": "$block_time"},
"day": {"$dayOfMonth": "$block_time"},
"hour": {"$hour": "$block_time"},
"block_index": 1,
"unit_price": 1,
"base_quantity_normalized": 1 # to derive volume
}},
{"$group": {
"_id": {"year": "$year", "month": "$month", "day": "$day", "hour": "$hour"},
"open": {"$first": "$unit_price"},
"high": {"$max": "$unit_price"},
"low": {"$min": "$unit_price"},
"close": {"$last": "$unit_price"},
"vol": {"$sum": "$base_quantity_normalized"},
"count": {"$sum": 1},
}},
{"$sort": SON([("_id.year", pymongo.ASCENDING), ("_id.month", pymongo.ASCENDING), ("_id.day", pymongo.ASCENDING), ("_id.hour", pymongo.ASCENDING)])},
])
result = list(result)
if not len(result):
return False
midline = [((r['high'] + r['low']) / 2.0) for r in result]
if as_dict:
for i in range(len(result)):
result[i]['interval_time'] = int(calendar.timegm(datetime.datetime(
result[i]['_id']['year'], result[i]['_id']['month'], result[i]['_id']['day'], result[i]['_id']['hour']).timetuple()) * 1000)
result[i]['midline'] = midline[i]
del result[i]['_id']
return result
else:
list_result = []
for i in range(len(result)):
list_result.append([
int(calendar.timegm(datetime.datetime(
result[i]['_id']['year'], result[i]['_id']['month'], result[i]['_id']['day'], result[i]['_id']['hour']).timetuple()) * 1000),
result[i]['open'], result[i]['high'], result[i]['low'], result[i]['close'], result[i]['vol'],
result[i]['count'], midline[i]
])
return list_result | CounterpartyXCP/counterblock | [
15,
68,
15,
21,
1390186591
] |
def get_trade_history(asset1=None, asset2=None, start_ts=None, end_ts=None, limit=50):
"""
Gets last N of trades within a specific date range (normally, for a specified asset pair, but this can
be left blank to get any/all trades).
"""
assert (asset1 and asset2) or (not asset1 and not asset2) # cannot have one asset, but not the other
if limit > 500:
raise Exception("Requesting history of too many trades")
now_ts = calendar.timegm(time.gmtime())
if not end_ts: # default to current datetime
end_ts = now_ts
if not start_ts: # default to 30 days before the end date
start_ts = end_ts - (30 * 24 * 60 * 60)
filters = {
"block_time": {
"$gte": datetime.datetime.utcfromtimestamp(start_ts)
} if end_ts == now_ts else {
"$gte": datetime.datetime.utcfromtimestamp(start_ts),
"$lte": datetime.datetime.utcfromtimestamp(end_ts)
}
}
if asset1 and asset2:
base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
filters["base_asset"] = base_asset
filters["quote_asset"] = quote_asset
last_trades = config.mongo_db.trades.find(filters, {'_id': 0}).sort("block_time", pymongo.DESCENDING).limit(limit)
if not last_trades.count():
return False # no suitable trade data to form a market price
last_trades = list(last_trades)
return last_trades | CounterpartyXCP/counterblock | [
15,
68,
15,
21,
1390186591
] |
def get_o_pct(o):
if o['give_asset'] == config.BTC: # NB: fee_provided could be zero here
pct_fee_provided = float((D(o['fee_provided_remaining']) / D(o['give_quantity'])))
else:
pct_fee_provided = None
if o['get_asset'] == config.BTC: # NB: fee_required could be zero here
pct_fee_required = float((D(o['fee_required_remaining']) / D(o['get_quantity'])))
else:
pct_fee_required = None
return pct_fee_provided, pct_fee_required | CounterpartyXCP/counterblock | [
15,
68,
15,
21,
1390186591
] |
def make_book(orders, isBidBook):
book = {}
for o in orders:
if o['give_asset'] == base_asset:
if base_asset == config.BTC and o['give_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF:
continue # filter dust orders, if necessary
give_quantity = blockchain.normalize_quantity(o['give_quantity'], base_asset_info['divisible'])
get_quantity = blockchain.normalize_quantity(o['get_quantity'], quote_asset_info['divisible'])
unit_price = float((D(get_quantity) / D(give_quantity)))
remaining = blockchain.normalize_quantity(o['give_remaining'], base_asset_info['divisible'])
else:
if quote_asset == config.BTC and o['give_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF:
continue # filter dust orders, if necessary
give_quantity = blockchain.normalize_quantity(o['give_quantity'], quote_asset_info['divisible'])
get_quantity = blockchain.normalize_quantity(o['get_quantity'], base_asset_info['divisible'])
unit_price = float((D(give_quantity) / D(get_quantity)))
remaining = blockchain.normalize_quantity(o['get_remaining'], base_asset_info['divisible'])
id = "%s_%s_%s" % (base_asset, quote_asset, unit_price)
#^ key = {base}_{bid}_{unit_price}, values ref entries in book
book.setdefault(id, {'unit_price': unit_price, 'quantity': 0, 'count': 0})
book[id]['quantity'] += remaining # base quantity outstanding
book[id]['count'] += 1 # num orders at this price level
book = sorted(iter(book.values()), key=operator.itemgetter('unit_price'), reverse=isBidBook)
#^ convert to list and sort -- bid book = descending, ask book = ascending
return book | CounterpartyXCP/counterblock | [
15,
68,
15,
21,
1390186591
] |
def get_order_book_simple(asset1, asset2, min_pct_fee_provided=None, max_pct_fee_required=None):
    """Return the order book for an asset pair, applying the same BTC fee
    filters to both the bid and the ask side of the book."""
    # DEPRECATED 1.5
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    return _get_order_book(
        base_asset, quote_asset,
        bid_book_min_pct_fee_provided=min_pct_fee_provided,
        bid_book_max_pct_fee_required=max_pct_fee_required,
        ask_book_min_pct_fee_provided=min_pct_fee_provided,
        ask_book_max_pct_fee_required=max_pct_fee_required)
15,
68,
15,
21,
1390186591
] |
def get_order_book_buysell(buy_asset, sell_asset, pct_fee_provided=None, pct_fee_required=None):
    """Return the order book for a buy/sell pair, translating the caller's
    BTC fee preferences into per-book fee filters for _get_order_book, then
    restricting raw_orders to open orders that sell what the caller buys."""
    # DEPRECATED 1.5
    base_asset, quote_asset = util.assets_to_asset_pair(buy_asset, sell_asset)
    # Per-book fee filters, keyed by the _get_order_book keyword they feed.
    fee_filters = {
        'bid_book_min_pct_fee_provided': None,
        'bid_book_min_pct_fee_required': None,
        'bid_book_max_pct_fee_required': None,
        'ask_book_min_pct_fee_provided': None,
        'ask_book_min_pct_fee_required': None,
        'ask_book_max_pct_fee_required': None,
    }
    if base_asset == config.BTC:
        if buy_asset == config.BTC:
            # BTC is the base asset and we're buying it: we're buying the BASE and we
            # require a BTC fee (we're on the bid (bottom) book and want a lower price)
            # - show BASE buyers (bid book) that require a BTC fee >= what we require (our side of the book)
            # - show BASE sellers (ask book) that provide a BTC fee >= what we require
            fee_filters['bid_book_min_pct_fee_required'] = pct_fee_required  # my competition at the given fee required
            fee_filters['ask_book_min_pct_fee_provided'] = pct_fee_required
        elif sell_asset == config.BTC:
            # BTC is the base asset and we're selling it: we're selling the BASE and we
            # provide a BTC fee (we're on the ask (top) book and want a higher price)
            # - show BASE buyers (bid book) that provide a BTC fee >= what we provide
            # - show BASE sellers (ask book) that require a BTC fee <= what we provide (our side of the book)
            fee_filters['bid_book_max_pct_fee_required'] = pct_fee_provided
            fee_filters['ask_book_min_pct_fee_provided'] = pct_fee_provided  # my competition at the given fee provided
    elif quote_asset == config.BTC:
        assert base_asset == config.XCP  # only time when this is the case
        if buy_asset == config.BTC:
            # BTC is the quote asset and we're buying it: we're selling the BASE and we
            # require a BTC fee (we're on the ask (top) book and want a higher price)
            # - show BASE buyers (bid book) that provide a BTC fee >= what we require
            # - show BASE sellers (ask book) that require a BTC fee >= what we require (our side of the book)
            fee_filters['bid_book_min_pct_fee_provided'] = pct_fee_required
            fee_filters['ask_book_min_pct_fee_required'] = pct_fee_required  # my competition at the given fee required
        elif sell_asset == config.BTC:
            # BTC is the quote asset and we're selling it: we're buying the BASE and we
            # provide a BTC fee (we're on the bid (bottom) book and want a lower price)
            # - show BASE buyers (bid book) that provide a BTC fee >= what we provide (our side of the book)
            # - show BASE sellers (ask book) that require a BTC fee <= what we provide
            fee_filters['bid_book_min_pct_fee_provided'] = pct_fee_provided  # my competition at the given fee provided
            fee_filters['ask_book_max_pct_fee_required'] = pct_fee_provided
    result = _get_order_book(base_asset, quote_asset, **fee_filters)
    # filter down raw_orders to be only open sell orders for what the caller is buying
    result['raw_orders'] = [o for o in result['raw_orders'] if o['give_asset'] == buy_asset]
    return result
15,
68,
15,
21,
1390186591
] |
def get_users_pairs(addresses=None, max_pairs=12):
    """Return the asset pairs for the given addresses (delegates to dex).

    Args:
        addresses: optional list of addresses to restrict to; defaults to
            an empty list. (The original signature used a mutable default
            argument, which is shared across calls if mutated.)
        max_pairs: maximum number of pairs to return.
    """
    if addresses is None:
        addresses = []
    return dex.get_users_pairs(addresses, max_pairs, quote_assets=[config.XCP, config.XBTC])
15,
68,
15,
21,
1390186591
] |
def get_market_orders(asset1, asset2, addresses=None, min_fee_provided=0.95, max_fee_required=0.95):
    """Return open market orders for an asset pair (delegates to dex).

    Args:
        asset1, asset2: the two assets of the pair, in either order.
        addresses: optional list of addresses to restrict to; defaults to
            an empty list. (The original signature used a mutable default
            argument, which is shared across calls if mutated.)
        min_fee_provided, max_fee_required: BTC fee filter thresholds.
    """
    if addresses is None:
        addresses = []
    return dex.get_market_orders(asset1, asset2, addresses, None, min_fee_provided, max_fee_required)
15,
68,
15,
21,
1390186591
] |
def get_market_trades(asset1, asset2, addresses=None, limit=50):
    """Return recent trades for an asset pair (delegates to dex).

    Args:
        asset1, asset2: the two assets of the pair, in either order.
        addresses: optional list of addresses to restrict to; defaults to
            an empty list. (The original signature used a mutable default
            argument, which is shared across calls if mutated.)
        limit: maximum number of trades to return.
    """
    if addresses is None:
        addresses = []
    return dex.get_market_trades(asset1, asset2, addresses, limit)
15,
68,
15,
21,
1390186591
] |
def get_markets_list(quote_asset=None, order_by=None):
    """Return the list of markets, optionally filtered by quote asset and
    sorted by the given field (delegates to dex)."""
    kwargs = {'quote_asset': quote_asset, 'order_by': order_by}
    return dex.get_markets_list(**kwargs)
15,
68,
15,
21,
1390186591
] |
def get_market_details(asset1, asset2, min_fee_provided=0.95, max_fee_required=0.95):
    """Return detailed market information for an asset pair, applying the
    given BTC fee filter thresholds (delegates to dex)."""
    args = (asset1, asset2, min_fee_provided, max_fee_required)
    return dex.get_market_details(*args)
15,
68,
15,
21,
1390186591
] |
def task_compile_asset_market_info():
    """Recompile cached per-asset market info, then reschedule itself.

    Runs assets_trading.compile_asset_market_info() once, then re-queues
    this same function to run again after COMPILE_ASSET_MARKET_INFO_PERIOD.
    """
    assets_trading.compile_asset_market_info()
    # all done for this run...call again in a bit
    start_task(task_compile_asset_market_info, delay=COMPILE_ASSET_MARKET_INFO_PERIOD)
15,
68,
15,
21,
1390186591
] |
def parse_trade_book(msg, msg_data):
    """Record a completed order match as a trade document in MongoDB.

    Processes an order_matches message when either (a) it is an update to
    'completed' status (a BTC-involved trade whose BTCpay settled), or
    (b) neither side of the match is BTC (settles immediately). Computes
    normalized base/quote quantities and unit prices (rounded to 8 places)
    and inserts the trade into config.mongo_db.trades.

    Returns 'ABORT_THIS_MESSAGE_PROCESSING' for BTC matches under the dust
    limit; otherwise returns None after inserting the trade.
    """
    # book trades
    if(msg['category'] == 'order_matches' and
       ((msg['command'] == 'update' and msg_data['status'] == 'completed') or  # for a trade with BTC involved, but that is settled (completed)
        ('forward_asset' in msg_data and msg_data['forward_asset'] != config.BTC and msg_data['backward_asset'] != config.BTC)
        )
       ):  # or for a trade without BTC on either end
        if msg['command'] == 'update' and msg_data['status'] == 'completed':
            # an order is being updated to a completed status (i.e. a BTCpay has completed)
            # order_match_id is "<tx0_hash>_<tx1_hash>" (two 64-char hashes)
            tx0_hash, tx1_hash = msg_data['order_match_id'][:64], msg_data['order_match_id'][65:]
            # get the order_match this btcpay settles
            order_match = util.jsonrpc_api(
                "get_order_matches",
                {'filters': [
                    {'field': 'tx0_hash', 'op': '==', 'value': tx0_hash},
                    {'field': 'tx1_hash', 'op': '==', 'value': tx1_hash}]
                 }, abort_on_error=False)['result'][0]
        else:
            assert msg_data['status'] == 'completed'  # should not enter a pending state for non BTC matches
            order_match = msg_data
        forward_asset_info = config.mongo_db.tracked_assets.find_one({'asset': order_match['forward_asset']})
        backward_asset_info = config.mongo_db.tracked_assets.find_one({'asset': order_match['backward_asset']})
        assert forward_asset_info and backward_asset_info
        base_asset, quote_asset = util.assets_to_asset_pair(order_match['forward_asset'], order_match['backward_asset'])
        # don't create trade records from order matches with BTC that are under the dust limit
        if((order_match['forward_asset'] == config.BTC and
            order_match['forward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF)
           or (order_match['backward_asset'] == config.BTC and
               order_match['backward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF)):
            logger.debug("Order match %s ignored due to %s under dust limit." % (order_match['tx0_hash'] + order_match['tx1_hash'], config.BTC))
            return 'ABORT_THIS_MESSAGE_PROCESSING'
        # take divisible trade quantities to floating point
        forward_quantity = blockchain.normalize_quantity(order_match['forward_quantity'], forward_asset_info['divisible'])
        backward_quantity = blockchain.normalize_quantity(order_match['backward_quantity'], backward_asset_info['divisible'])
        # compose trade
        trade = {
            'block_index': config.state['cur_block']['block_index'],
            'block_time': config.state['cur_block']['block_time_obj'],
            'message_index': msg['message_index'],  # secondary temporal ordering off of when
            'order_match_id': order_match['tx0_hash'] + '_' + order_match['tx1_hash'],
            'order_match_tx0_index': order_match['tx0_index'],
            'order_match_tx1_index': order_match['tx1_index'],
            'order_match_tx0_address': order_match['tx0_address'],
            'order_match_tx1_address': order_match['tx1_address'],
            'base_asset': base_asset,
            'quote_asset': quote_asset,
            'base_quantity': order_match['forward_quantity'] if order_match['forward_asset'] == base_asset else order_match['backward_quantity'],
            'quote_quantity': order_match['backward_quantity'] if order_match['forward_asset'] == base_asset else order_match['forward_quantity'],
            'base_quantity_normalized': forward_quantity if order_match['forward_asset'] == base_asset else backward_quantity,
            'quote_quantity_normalized': backward_quantity if order_match['forward_asset'] == base_asset else forward_quantity,
        }
        # unit price = quote per base; also store the inverse. Both rounded to 8 places.
        d = D(trade['quote_quantity_normalized']) / D(trade['base_quantity_normalized'])
        d = d.quantize(EIGHT_PLACES, rounding=decimal.ROUND_HALF_EVEN, context=decimal.Context(prec=30))
        trade['unit_price'] = float(d)
        d = D(trade['base_quantity_normalized']) / D(trade['quote_quantity_normalized'])
        d = d.quantize(EIGHT_PLACES, rounding=decimal.ROUND_HALF_EVEN, context=decimal.Context(prec=30))
        trade['unit_price_inverse'] = float(d)
        # NOTE(review): Collection.insert is the legacy pymongo API; confirm the
        # pinned pymongo version before migrating to insert_one.
        config.mongo_db.trades.insert(trade)
        # fixed typo in the original log message ("Procesed" -> "Processed")
        logger.info("Processed Trade from tx %s :: %s" % (msg['message_index'], trade))
15,
68,
15,
21,
1390186591
] |
def init():
    """Ensure the MongoDB indexes used by the trade/market-info collections.

    Declares every index as (collection, key spec, options) and creates
    them in order, matching the original one-call-per-index version.
    """
    db = config.mongo_db
    index_specs = [
        # trades
        (db.trades,
         [("base_asset", pymongo.ASCENDING),
          ("quote_asset", pymongo.ASCENDING),
          ("block_time", pymongo.DESCENDING)], {}),
        # tasks.py and elsewhere (for singlular block_index index access)
        (db.trades,
         [("block_index", pymongo.ASCENDING),
          ("base_asset", pymongo.ASCENDING),
          ("quote_asset", pymongo.ASCENDING)], {}),
        # asset_market_info
        (db.asset_market_info, 'asset', {'unique': True}),
        # asset_marketcap_history
        (db.asset_marketcap_history, 'block_index', {}),
        # tasks.py
        (db.asset_marketcap_history,
         [("market_cap_as", pymongo.ASCENDING),
          ("asset", pymongo.ASCENDING),
          ("block_index", pymongo.DESCENDING)], {}),
        # api.py
        (db.asset_marketcap_history,
         [("market_cap_as", pymongo.ASCENDING),
          ("block_time", pymongo.DESCENDING)], {}),
        # asset_pair_market_info -- event.py, api.py
        (db.asset_pair_market_info,
         [("base_asset", pymongo.ASCENDING),
          ("quote_asset", pymongo.ASCENDING)], {'unique': True}),
        (db.asset_pair_market_info, 'last_updated', {}),
    ]
    for collection, keys, options in index_specs:
        collection.ensure_index(keys, **options)
15,
68,
15,
21,
1390186591
] |
def start_tasks():
    """Kick off the recurring market-info compilation tasks."""
    for task in (task_compile_asset_pair_market_info, task_compile_asset_market_info):
        start_task(task)
15,
68,
15,
21,
1390186591
] |
def adicionar_novo_movimento_estoque(self, itens_mvmt_obj, pform, lista_produtos, lista_produtos_estocados):
    """Apply one stock-movement item to the product's stock totals.

    Depending on whether self.object is an EntradaEstoque (stock entry),
    SaidaEstoque (stock exit) or TransferenciaEstoque (transfer between
    locations), the moved quantity is added to / removed from the
    per-location ProdutoEstocado records and the product's overall
    estoque_atual. Modified objects are appended to lista_produtos and
    lista_produtos_estocados for the caller to persist; nothing is saved
    here. On an exit or transfer larger than the origin location's stock,
    itens_mvmt_obj.quantidade is clamped in place to what is available.
    """
    prod = itens_mvmt_obj.produto
    lista_produtos.append(prod)
    # Adjust the product's current stock value according to the movement type
    if prod.estoque_atual is not None and isinstance(self.object, EntradaEstoque):
        # Stock entry: add the quantity at the destination location
        prod_estocado = ProdutoEstocado.objects.get_or_create(
            local=self.object.local_dest, produto=itens_mvmt_obj.produto)[0]
        prod_estocado.quantidade = prod_estocado.quantidade + itens_mvmt_obj.quantidade
        lista_produtos_estocados.append(prod_estocado)
        prod.estoque_atual = prod.estoque_atual + itens_mvmt_obj.quantidade
    elif prod.estoque_atual is not None and isinstance(self.object, SaidaEstoque):
        # Stock exit: remove from the origin location, clamping to what is stocked there
        prod_estocado = ProdutoEstocado.objects.get_or_create(
            local=self.object.local_orig, produto=itens_mvmt_obj.produto)[0]
        if itens_mvmt_obj.quantidade > prod_estocado.quantidade:
            itens_mvmt_obj.quantidade = prod_estocado.quantidade
            prod_estocado.quantidade = Decimal('0.00')
        else:
            prod_estocado.quantidade = prod_estocado.quantidade - itens_mvmt_obj.quantidade
        lista_produtos_estocados.append(prod_estocado)
        # NOTE(review): this check runs after the quantity may have been clamped
        # above, so the form error reflects the clamped amount -- confirm intended.
        if prod.estoque_atual < itens_mvmt_obj.quantidade:
            pform.add_error('quantidade', 'Quantidade retirada do estoque maior que o estoque atual (' +
                            str(prod.estoque_atual).replace('.', ',') + ') do produto.')
        else:
            prod.estoque_atual = prod.estoque_atual - itens_mvmt_obj.quantidade
    elif isinstance(self.object, TransferenciaEstoque):
        # Transfer: move quantity from the origin location to the destination,
        # clamping to what the origin actually holds
        prod_estocado_orig = ProdutoEstocado.objects.get_or_create(
            local=self.object.local_estoque_orig, produto=itens_mvmt_obj.produto)[0]
        prod_estocado_dest = ProdutoEstocado.objects.get_or_create(
            local=self.object.local_estoque_dest, produto=itens_mvmt_obj.produto)[0]
        if itens_mvmt_obj.quantidade > prod_estocado_orig.quantidade:
            itens_mvmt_obj.quantidade = prod_estocado_orig.quantidade
            prod_estocado_orig.quantidade = Decimal('0.00')
        else:
            prod_estocado_orig.quantidade = prod_estocado_orig.quantidade - \
                itens_mvmt_obj.quantidade
        prod_estocado_dest.quantidade = prod_estocado_dest.quantidade + \
            itens_mvmt_obj.quantidade
        lista_produtos_estocados.append(prod_estocado_orig)
        lista_produtos_estocados.append(prod_estocado_dest)
343,
223,
343,
40,
1481383504
] |
def get_success_message(self, cleaned_data):
    """Interpolate the success message with the cleaned form data plus the
    saved object's primary key (available under the 'pk' key)."""
    params = dict(cleaned_data)
    params['pk'] = self.object.pk
    return self.success_message % params
343,
223,
343,
40,
1481383504
] |
def get(self, request, *args, **kwargs):
    """Render a blank movement form (dated today) plus an empty item formset."""
    self.object = None
    form = self.get_form_class()()
    # pre-fill the movement date with today's date (dd/mm/yyyy)
    form.initial['data_movimento'] = datetime.today().strftime('%d/%m/%Y')
    itens_form = ItensMovimentoFormSet(prefix='itens_form')
    context = self.get_context_data(form=form, itens_form=itens_form,)
    return self.render_to_response(context)
343,
223,
343,
40,
1481383504
] |
def view_context(self, context):
    """Add the page title and back-navigation URL for the stock-entry form."""
    context.update({
        'title_complete': 'ADICIONAR ENTRADA EM ESTOQUE',
        'return_url': reverse_lazy('estoque:listaentradasestoqueview'),
    })
    return context
343,
223,
343,
40,
1481383504
] |
def view_context(self, context):
    """Add the page title and back-navigation URL for the stock-exit form."""
    context.update({
        'title_complete': 'ADICIONAR SAÍDA EM ESTOQUE',
        'return_url': reverse_lazy('estoque:listasaidasestoqueview'),
    })
    return context
return context | thiagopena/djangoSIGE | [
343,
223,
343,
40,
1481383504
] |
def view_context(self, context):
    """Add the page title and back-navigation URL for the stock-transfer form."""
    context.update({
        'title_complete': 'ADICIONAR TRANSFERÊNCIA EM ESTOQUE',
        'return_url': reverse_lazy('estoque:listatransferenciasestoqueview'),
    })
    return context
343,
223,
343,
40,
1481383504
] |
def get_context_data(self, **kwargs):
    """Build the base list-view context, then let the subclass hook extend it."""
    context = super(MovimentoEstoqueBaseListView, self).get_context_data(**kwargs)
    context = self.view_context(context)
    return context
343,
223,
343,
40,
1481383504
] |
def view_context(self, context):
    """Set the page title for the combined stock-movement list."""
    context.update(title_complete='TODAS AS MOVIMENTAÇÕES DE ESTOQUE')
    return context
343,
223,
343,
40,
1481383504
] |
def post(self, request, *args, **kwargs):
    """Bulk-delete the stock movements whose checkboxes were ticked.

    POST keys whose value is "on" are movement ids; each id is looked up
    in the three concrete movement models in turn and the first match is
    deleted. Always redirects to success_url, whether or not the user had
    delete permission.
    """
    if self.check_user_delete_permission(request, MovimentoEstoque):
        for key, value in request.POST.items():
            if value != "on":
                continue
            # Find which concrete movement model this id belongs to.
            # BUG FIX: the original left `instance` unbound (or stale from the
            # previous iteration) when no model matched the id, which could
            # raise NameError or delete an already-deleted object again.
            for model in (EntradaEstoque, SaidaEstoque, TransferenciaEstoque):
                instance = model.objects.filter(id=key).first()
                if instance is not None:
                    instance.delete()
                    break
    return redirect(self.success_url)
343,
223,
343,
40,
1481383504
] |
def view_context(self, context):
    """Add the page title and the add-entry URL for the stock-entry list."""
    context.update({
        'title_complete': 'ENTRADAS EM ESTOQUE',
        'add_url': reverse_lazy('estoque:addentradaestoqueview'),
    })
    return context
343,
223,
343,
40,
1481383504
] |
def view_context(self, context):
    """Add the page title and the add-exit URL for the stock-exit list."""
    context.update({
        'title_complete': 'SAÍDAS EM ESTOQUE',
        'add_url': reverse_lazy('estoque:addsaidaestoqueview'),
    })
    return context
343,
223,
343,
40,
1481383504
] |
def view_context(self, context):
    """Add the page title and the add-transfer URL for the transfer list."""
    context.update({
        'title_complete': 'TRANSFERÊNCIAS EM ESTOQUE',
        'add_url': reverse_lazy('estoque:addtransferenciaestoqueview'),
    })
    return context
343,
223,
343,
40,
1481383504
] |
def get_context_data(self, **kwargs):
    """Build the base detail-view context, then let the subclass hook extend it."""
    context = super(DetalharMovimentoEstoqueBaseView, self).get_context_data(**kwargs)
    context = self.view_context(context)
    return context
343,
223,
343,
40,
1481383504
] |
def view_context(self, context):
    """Title the stock-entry detail page with the movement id and set the back URL."""
    context.update({
        'title_complete': 'MOVIMENTO DE ENTRADA EM ESTOQUE N°' + str(self.object.id),
        'return_url': reverse_lazy('estoque:listaentradasestoqueview'),
    })
    return context
343,
223,
343,
40,
1481383504
] |
def view_context(self, context):
    """Title the stock-exit detail page with the movement id and set the back URL."""
    context.update({
        'title_complete': 'MOVIMENTO DE SAÍDA EM ESTOQUE N°' + str(self.object.id),
        'return_url': reverse_lazy('estoque:listasaidasestoqueview'),
    })
    return context
343,
223,
343,
40,
1481383504
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.