content stringlengths 1 1.04M | input_ids listlengths 1 774k | ratio_char_token float64 0.38 22.9 | token_count int64 1 774k |
|---|---|---|---|
import xmlrpc.client
import getpass
import os
import time
from prettytable import PrettyTable
SERVER_IP = 'localhost'
SERVER_PORT = '8000'
server = xmlrpc.client.ServerProxy(
'http://{ip}:{port}'.format(ip=SERVER_IP, port=SERVER_PORT)
)
while True:
if menu_awal() == 1:
os.system('clear')
usr_user = input('Username :')
usr_pass = getpass.getpass('Password :')
if server.login_user(usr_user, usr_pass) :
while True :
time.sleep(0.5)
os.system('cls')
if menu_user() == 1:
if server.cek_peserta(usr_user):
print("Anda sudah melakukan kuis")
time.sleep(2)
break
else :
no = 1
print("Mulai Kuis")
print("Selesaikan dalam 10 menit")
soal = server.get_soal()
jawab = []
for i in soal :
if (time.time() > waktu_selesai):
print("Waktu habis")
time.sleep(3)
break
t = PrettyTable([no +"."+ i[1]])
t.align[i[1]] = 'l'
t.add_row(["a. %s"%i[3]])
t.add_row(["b. %s"%i[4]])
t.add_row(["c. %s"%i[5]])
t.add_row(["d. %s"%i[6]])
print(t)
# print(i[1])
# print("a. ",i[3])
# print("b. ",i[4])
# print("c. ",i[5])
# print("d. ",i[6])
no += 1
print()
while True:
jaw = input("masukkan jawaban(a/b/c/d) : ")
if (jaw == 'a') or (jaw == 'b') or (jaw == 'c') or (jaw == 'd'):
break
else :
print("jawaban tidak benar")
jawab.append(jaw)
nilai = 0
for i in range(len(jawab)) :
if (soal[i][2] == jawab[i]):
nilai += 5
print("Nilai anda adalah : ",nilai)
server.upload_nilai(nilai,usr_user,usr_pass)
print("Nilai anda sudah diupload")
for i in range((len(soal)-len(jawab))):
jawab.append('-')
server.upload_soal_peserta(soal,usr_user,jawab)
elif menu_user() == 2:
os.system('cls')
nilai = server.lihat_nilai(usr_user)
if not nilai :
print(server.get_np(usr_user),", anda belum mulai kuis" )
time.sleep(2)
else :
print("Hai ",server.get_np(usr_user)," nilai anda adalah ",nilai)
print("Enter untuk lanjutkan")
input()
elif menu_user() == 3:
os.system("cls")
jawaban = server.lihat_jawaban(usr_user)
if not jawaban :
print(server.get_np(usr_user),", anda belum mulai kuis" )
time.sleep(2)
else :
print("---Lihat Jawaban---")
t = PrettyTable(['Soal', 'Jawaban Anda', 'Kunci Jawaban'])
for isi in jawaban:
t.add_row(isi)
print(t)
print('Enter to lanjutkan')
input()
elif menu_user() == 4:
valid_user == False
print("Log Out Successful")
time.sleep(0.5)
os.system('cls')
break
elif menu_awal() == 2:
os.system("cls")
| [
11748,
35555,
81,
14751,
13,
16366,
201,
198,
11748,
651,
6603,
201,
198,
11748,
28686,
201,
198,
11748,
640,
201,
198,
6738,
2495,
11487,
1330,
20090,
10962,
201,
198,
201,
198,
35009,
5959,
62,
4061,
796,
705,
36750,
6,
201,
198,
35... | 1.439596 | 3,071 |
import logging
from django.conf.urls import include, url
from django.core.exceptions import ImproperlyConfigured
from corehq.apps.reports.standard.forms.reports import ReprocessXFormErrorView
from corehq.apps.userreports.reports.view import (
ConfigurableReportView,
CustomConfigurableReportDispatcher,
)
from corehq.apps.userreports.views import (
ConfigureReport,
EditReportInBuilder,
ReportBuilderDataSourceSelect,
ReportBuilderPaywallActivatingSubscription,
ReportBuilderPaywallPricing,
ReportPreview,
)
from .dispatcher import (
CustomProjectReportDispatcher,
ProjectReportDispatcher,
)
from .filters import urls as filter_urls
from .util import get_installed_custom_modules
from .views import (
AddSavedReportConfigView,
CaseAttachmentsView,
CaseDataView,
EditFormInstance,
FormDataView,
MySavedReportsView,
ScheduledReportsView,
archive_form,
case_form_data,
case_forms,
case_property_changes,
case_property_names,
case_xml,
close_case_view,
delete_config,
delete_scheduled_report,
download_case_history,
download_form,
edit_case_view,
edit_form,
email_report,
export_case_transactions,
export_report,
project_health_user_details,
rebuild_case_view,
resave_case_view,
resave_form_view,
restore_edit,
send_test_scheduled_report,
unarchive_form,
undo_close_case_view,
view_scheduled_report,
)
custom_report_urls = [
CustomProjectReportDispatcher.url_pattern(),
]
urlpatterns = [
ConfigurableReportView.url_pattern(),
CustomConfigurableReportDispatcher.url_pattern(),
# Report Builder
url(r'^builder/select_source/$', ReportBuilderDataSourceSelect.as_view(),
name=ReportBuilderDataSourceSelect.urlname),
url(r'^builder/configure/$', ConfigureReport.as_view(), name=ConfigureReport.urlname),
url(r'^builder/preview/(?P<data_source>[\w\-]+)/$', ReportPreview.as_view(), name=ReportPreview.urlname),
url(r'^builder/edit/(?P<report_id>[\w\-]+)/$', EditReportInBuilder.as_view(), name='edit_report_in_builder'),
url(r'builder/subscribe/pricing/$', ReportBuilderPaywallPricing.as_view(),
name=ReportBuilderPaywallPricing.urlname),
url(r'builder/subscribe/activating_subscription/$', ReportBuilderPaywallActivatingSubscription.as_view(),
name=ReportBuilderPaywallActivatingSubscription.urlname),
url(r'^$', MySavedReportsView.as_view(), name="reports_home"),
url(r'^saved/', MySavedReportsView.as_view(), name=MySavedReportsView.urlname),
url(r'^saved_reports', MySavedReportsView.as_view(), name="old_saved_reports"),
url(r'^case_data/(?P<case_id>[\w\-]+)/$', CaseDataView.as_view(), name=CaseDataView.urlname),
url(r'^case_data/(?P<case_id>[\w\-]+)/forms/$', case_forms, name="single_case_forms"),
url(r'^case_data/(?P<case_id>[\w\-]+)/attachments/$',
CaseAttachmentsView.as_view(), name=CaseAttachmentsView.urlname),
url(r'^case_data/(?P<case_id>[\w\-]+)/view/xml/$', case_xml, name="single_case_xml"),
url(r'^case_data/(?P<case_id>[\w\-]+)/properties/$', case_property_names, name="case_property_names"),
url(r'^case_data/(?P<case_id>[\w\-]+)/history/$', download_case_history, name="download_case_history"),
url(r'^case_data/(?P<case_id>[\w\-]+)/edit/$', edit_case_view, name="edit_case"),
url(r'^case_data/(?P<case_id>[\w\-]+)/rebuild/$', rebuild_case_view, name="rebuild_case"),
url(r'^case_data/(?P<case_id>[\w\-]+)/resave/$', resave_case_view, name="resave_case"),
url(r'^case_data/(?P<case_id>[\w\-]+)/close/$', close_case_view, name="close_case"),
url(r'^case_data/(?P<case_id>[\w\-]+)/undo-close/(?P<xform_id>[\w\-:]+)/$',
undo_close_case_view, name="undo_close_case"),
url(r'^case_data/(?P<case_id>[\w\-]+)/export_transactions/$',
export_case_transactions, name="export_case_transactions"),
url(r'^case_data/(?P<case_id>[\w\-]+)/(?P<xform_id>[\w\-:]+)/$', case_form_data, name="case_form_data"),
url(r'^case_data/(?P<case_id>[\w\-]+)/case_property/(?P<case_property_name>[\w_\-.]+)/$',
case_property_changes, name="case_property_changes"),
# Download and view form data
url(r'^form_data/(?P<instance_id>[\w\-:]+)/$', FormDataView.as_view(), name=FormDataView.urlname),
url(r'^form_data/(?P<instance_id>[\w\-:]+)/download/$', download_form, name='download_form'),
url(r'^form_data/(?P<instance_id>[\w\-:]+)/edit/$', EditFormInstance.as_view(), name='edit_form_instance'),
url(r'^form_data/(?P<instance_id>[\w\-:]+)/restore_version/$', restore_edit, name='restore_edit'),
url(r'^form_data/(?P<instance_id>[\w\-:]+)/correct_data/$', edit_form, name='edit_form'),
url(r'^form_data/(?P<instance_id>[\w\-:]+)/archive/$', archive_form, name='archive_form'),
url(r'^form_data/(?P<instance_id>[\w\-:]+)/unarchive/$', unarchive_form, name='unarchive_form'),
url(r'^form_data/(?P<instance_id>[\w\-:]+)/rebuild/$', resave_form_view, name='resave_form'),
# project health ajax
url(r'^project_health/ajax/(?P<user_id>[\w\-]+)/$', project_health_user_details,
name='project_health_user_details'),
# Full Excel export
url(r'^full_excel_export/(?P<export_hash>[\w\-]+)/(?P<format>[\w\-]+)$', export_report, name="export_report"),
# once off email
url(r"^email_onceoff/(?P<report_slug>[\w_]+)/$", email_report, kwargs=dict(once=True), name='email_report'),
url(r"^custom/email_onceoff/(?P<report_slug>[\w_]+)/$", email_report,
kwargs=dict(report_type=CustomProjectReportDispatcher.prefix, once=True), name='email_onceoff'),
# Saved reports
url(r"^configs$", AddSavedReportConfigView.as_view(), name=AddSavedReportConfigView.name),
url(r"^configs/(?P<config_id>[\w-]+)$", delete_config,
name='delete_report_config'),
# Scheduled reports
url(r'^scheduled_reports/(?P<scheduled_report_id>[\w-]+)?$',
ScheduledReportsView.as_view(), name=ScheduledReportsView.urlname),
url(r'^scheduled_report/(?P<scheduled_report_id>[\w-]+)/delete$',
delete_scheduled_report, name='delete_scheduled_report'),
url(r'^send_test_scheduled_report/(?P<scheduled_report_id>[\w-]+)/$',
send_test_scheduled_report, name='send_test_scheduled_report'),
url(r'^view_scheduled_report/(?P<scheduled_report_id>[\w_]+)/$',
view_scheduled_report, name='view_scheduled_report'),
# V2 Reports
url(r'^v2/', include('corehq.apps.reports.v2.urls')),
# Internal Use
url(r'^reprocess_error_form/$', ReprocessXFormErrorView.as_view(),
name=ReprocessXFormErrorView.urlname),
url(r'^custom/', include(custom_report_urls)),
url(r'^filters/', include(filter_urls)),
ProjectReportDispatcher.url_pattern(),
]
for module in get_installed_custom_modules():
module_name = module.__name__.split('.')[-1]
try:
custom_report_urls += [
url(r"^%s/" % module_name, include('{0}.urls'.format(module.__name__))),
]
except ImproperlyConfigured:
logging.info("Module %s does not provide urls" % module_name)
| [
11748,
18931,
198,
198,
6738,
42625,
14208,
13,
10414,
13,
6371,
82,
1330,
2291,
11,
19016,
198,
6738,
42625,
14208,
13,
7295,
13,
1069,
11755,
1330,
12205,
525,
306,
16934,
1522,
198,
198,
6738,
4755,
71,
80,
13,
18211,
13,
48922,
13... | 2.452324 | 2,905 |
# !/usr/bin/env python
# -*- coding: utf-8 -*-
#########################################################################
# This code is an adaptation from Toni Heittola's code [task1 baseline dcase 2018](https://github.com/DCASE-REPO/dcase2018_baseline/tree/master/task1/)
# Copyright Nicolas Turpault, Romain Serizel, Hamid Eghbal-zadeh, Ankit Parag Shah, 2018, v1.0
# This software is distributed under the terms of the License MIT
#########################################################################
import dcase_util
import sys
import numpy
import os
import random
import pickle
import pandas
import tensorflow as tf
from keras import backend as K
import keras
#from evaluation_measures import get_f_measure_by_class, event_based_evaluation, segment_based_evaluation
from evaluation_measures import get_f_measure_by_class, event_based_evaluation, event_based_evaluation_df
from Dataset_dcase2018 import DCASE2018_Task4_DevelopmentSet
dcase_util.utils.setup_logging(logging_file='task4.log')
print(keras.__version__)
random.seed(10)
numpy.random.seed(42)
tf.set_random_seed(1234)
sess = tf.Session(graph=tf.get_default_graph())
K.set_session(sess)
def data_generator(items, feature_path, many_hot_encoder, feature_processing_chain, batch_size=1, shuffle=True, mode='weak'):
""" Transform MetaDataContainer into batches of data
Parameters
----------
items : MetaDataContainer, items to be generated
feature_path : String, base path where features are stored
many_hot_encoder : ManyHotEncoder, class to encode data
feature_processing_chain : ProcessingChain, chain to process data
batch_size : int, size of the batch to be returned
shuffle : bool, shuffle the items before creating the batch
mode : "weak" or "strong", indicate to return labels as tags (1/file) or event_labels (1/frame)
Return
------
(batch_X, batch_y): generator, arrays containing batches of data.
"""
while True:
batch_X = []
batch_y = []
if shuffle:
random.shuffle(items)
for item in items:
# Get feature filename
feature_filename = dcase_util.utils.Path(
path=item.filename
).modify(
path_base=feature_path,
filename_extension='.cpickle',
)
features = feature_processing_chain.process(
filename=feature_filename
)
input_data = features.data.reshape(features.shape[:-1]).T
# Target
targets = item.tags
targets = many_hot_encoder.encode(targets, length_frames=1).data.flatten()
if mode == "strong":
targets = numpy.repeat(targets.reshape((1,) + targets.shape), input_data.shape[0], axis=0)
if batch_size == 1:
batch_X = input_data.reshape((1,) + input_data.shape)
batch_y = targets.reshape((1,) + targets.shape)
yield batch_X, batch_y
else:
batch_X.append(input_data)
batch_y.append(targets)
if len(batch_X) == batch_size and len(batch_y) == batch_size:
yield numpy.array(batch_X), numpy.array(batch_y)
batch_X = []
batch_y = []
if __name__ == "__main__":
# Read parameters file
parameters = dcase_util.containers.DictContainer().load(
filename='task4_crnn.yaml'
)
try:
sys.exit(main(parameters))
except (ValueError, IOError) as e:
sys.exit(e)
| [
2,
5145,
14,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
29113,
29113,
7804,
2,
198,
2,
770,
2438,
318,
281,
16711,
422,
309,
14651,
679,
715,
5708,
338,
2438,
685,
35943,... | 2.469374 | 1,453 |
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import pickle as pkl
import sys
import torch
if __name__ == "__main__":
input = sys.argv[1]
obj = torch.load(input, map_location="cpu")
obj = obj["state_dict"]
newmodel = {}
for k, v in obj.items():
old_k = k
if "layer" not in k:
k = k.replace("backbone", "backbone.stem")
for t in [1, 2, 3, 4]:
k = k.replace("layer{}".format(t), "res{}".format(t + 1))
if k.startswith("backbone.res5"):
k = k.replace("backbone", "roi_heads")
for t in [1, 2, 3]:
k = k.replace("bn{}".format(t), "conv{}.norm".format(t))
k = k.replace("downsample.0", "shortcut")
k = k.replace("downsample.1", "shortcut.norm")
print(old_k, "->", k)
newmodel[k] = v.numpy()
res = {
"model": newmodel,
"__author__": "OpenSelfSup",
"matching_heuristics": True
}
assert sys.argv[2].endswith('.pkl')
with open(sys.argv[2], "wb") as f:
pkl.dump(res, f)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
15069,
357,
66,
8,
3203,
11,
3457,
13,
290,
663,
29116,
13,
1439,
6923,
33876,
198,
198,
11748,
2298,
293,
355,
279,
41582,
198,
11748,
25064,
198,
11748,
28034,
198,
198,
361,
11... | 2.036765 | 544 |
# No. 14: Removing duplicates in a list:
x = [1,1,2,2,3,3,4,4,1]
print(removeDuplicates2(x))
print(removeDuplicates1(x))
| [
2,
1400,
13,
1478,
25,
3982,
5165,
14184,
16856,
287,
257,
1351,
25,
198,
198,
87,
796,
685,
16,
11,
16,
11,
17,
11,
17,
11,
18,
11,
18,
11,
19,
11,
19,
11,
16,
60,
198,
4798,
7,
28956,
35660,
489,
16856,
17,
7,
87,
4008,
... | 2.067797 | 59 |
#!/usr/bin/env python
#
# Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
import plaidbench.cli
SUPPORTED_NETWORKS = {
'keras': [
'densenet121',
'densenet169',
'densenet201',
'imdb_lstm',
'inception_resnet_v2',
'inception_v3',
'mobilenet',
'mobilenet_v2',
'nasnet_large',
'nasnet_mobile',
'resnet50',
'vgg16',
'vgg19',
'xception',
],
'onnx': [
'bvlc_alexnet',
'densenet121',
'inception_v1',
'inception_v2',
'resnet50',
'shufflenet',
'squeezenet', # TODO: Fix inputs/outputs (only available as *.pb)
'vgg16',
'vgg19',
],
}
if __name__ == '__main__':
main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
198,
2,
15069,
2864,
8180,
10501,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
284... | 2.281034 | 580 |
#!/user/bin/python
# -*- coding: utf-8 -*-
# Author : (DEK) Devendra Kavthekar
# program010 :
# Write a program that accepts a sequence of whitespace
# separated words as input and prints the words after
# removing all duplicate words and sorting
# them alphanumerically.
# Suppose the following input is supplied to the
# program:
# hello world and practice makes perfect and hello world again
# Then, the output should be:
# again and hello makes perfect practice world
# Hints:
# In case of input data being supplied to the question,
# it should be assumed to be a console input.
# We use set container to remove duplicated data automatically and
# then use sorted() to sort the data.
if __name__ == '__main__':
main()
# checked
| [
2,
48443,
7220,
14,
8800,
14,
29412,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
6434,
1058,
357,
7206,
42,
8,
6245,
48286,
509,
615,
1169,
21070,
198,
198,
2,
1430,
20943,
1058,
198,
2,
19430,
257,
14... | 3.663366 | 202 |
from django.apps import AppConfig
| [
6738,
42625,
14208,
13,
18211,
1330,
2034,
16934,
628
] | 3.888889 | 9 |
# http://stackoverflow.com/questions/23864341/equivalent-of-asyncio-queues-with-worker-threads
import asyncio, random
q = asyncio.Queue()
@asyncio.coroutine
@asyncio.coroutine
loop = asyncio.get_event_loop()
loop.create_task(produce())
loop.create_task(consume())
loop.run_forever()
| [
198,
2,
2638,
1378,
25558,
2502,
11125,
13,
785,
14,
6138,
507,
14,
23721,
2414,
33660,
14,
4853,
29540,
12,
1659,
12,
292,
13361,
952,
12,
4188,
947,
12,
4480,
12,
28816,
12,
16663,
82,
198,
198,
11748,
30351,
952,
11,
4738,
198,
... | 2.621622 | 111 |
import numpy as np
from copy import deepcopy
from gbvision.constants.types import Number
class CameraData:
"""
describes constant about a camera in it's default state used to approximate distance
between the camera and an object seen in a frame
:param focal_length: the focal length of the camera at it's default state, in units of pixels
can be described as the square root of the amount of pixels an object takes on a frame, multiplied by it's
distance from the camera and divided by the square root of it's surface
FOCAL_LENGTH = :math:' sqrt(P) * D / sqrt(S)'
where P is the amount of pixels in the frame representing the object,
D is the real life distance between the object and the camera
S is the real life surface area (in 2d projection) of the object
note that this is a constant, whatever object you choose to use, this formula will yield the same result
:param fov_width:
half the viewing angle of the camera (field of view) in radians, can be calculated by placing an object in front
of the camera, so that the entire object is captured and it's center is at the frame's center.
the tangent of the angle can be described as the width of the object in real life, divided by the
product of the object's distance from the camera in real life and the ratio between the width of the frame
in pixels and the width of the object in the frame, also in pixels
math:: tan(FOV) = (Wm) / (D * (Wp/Wf))
where Wm is the real life width of the object
D is the real life distance between the object and the camera
Wp is the width of the object in the frame (pixels unit)
Wf is the width of the frame (pixels unit)
to calculate the FOV just apply the inverse tangent
FOV = math:: arctan(tan(FOV))
:param fov_height:
same as fov_width but on the height/y axis
:param yaw_angle:
the clockwise yaw angle (in radians) in which the camera is rotated, the yaw angle is the angle around the y axis,
it's output only affects the x and z axises.
set this variable when the camera is rotated around the y axis and you want the output of finder functions
to represent the original space, rather then the rotated one.
:param pitch_angle:
the clockwise pitch angle (in radians) in which the camera is rotated, the pitch angle is the angle around the x axis,
it's output only affects the y and z axises.
set this variable when the camera is rotated around the x axis and you want the output of finder functions
to represent the original space, rather then the rotated one.
:param roll_angle:
the clockwise roll angle (in radians) in which the camera is rotated, the roll angle is the angle around the z axis,
it's output only affects the x and y axises.
set this variable when the camera is rotated around the z axis and you want the output of finder functions
to represent the original space, rather then the rotated one.
:param x_offset:
the x offset in which the camera is placed
the distance from the measuring point (usually the center of the robot) to the camera on the x axis (left/right),
if the camera is to the right this should be positive and if it is left this should be negative
:param y_offset:
the y offset in which the camera is placed
the distance from the measuring point to the camera on the y axis (up/down), if the camera is above the measuring point
this variable should be positive and if it is below this should be negative
:param z_offset:
the z offset in which the camera is placed
the distance from the measuring point to the camera on the z axis (depth), if the camera is placed outer then the measuring point
this variable should be positive and if it is inner this should be negative
:param is_immutable: determines whether the camera data object's values are immutable (True) or mutable (False)
"""
def rotate_pitch(self, angle: float) -> 'CameraData':
"""
rotates the camera's angle around the pitch axis (the x axis)
:param angle: the rotation angle
:return: a camera data instance with the same params as this but with the pitch angle rotated \
if this is immutable it will return a copy of this, otherwise it will modify this instance and return it
"""
data = self.__get_data()
data.rotation_angles[0] += angle
sin, cos = np.sin(angle), np.cos(angle)
data.rotation_matrix = data.rotation_matrix.dot(np.array([[1, 0, 0],
[0, cos, -sin],
[0, sin, cos]]))
return data
def rotate_yaw(self, angle: float) -> 'CameraData':
"""
rotates the camera's angle around the yaw axis (the y axis)
:param angle: the rotation angle
:return: a camera data instance with the same params as this but with the yaw angle rotated \
if this is immutable it will return a copy of this, otherwise it will modify this instance and return it
"""
data = self.__get_data()
data.rotation_angles[1] += angle
sin, cos = np.sin(angle), np.cos(angle)
data.rotation_matrix = data.rotation_matrix.dot(np.array([[cos, 0, sin],
[0, 1, 0],
[-sin, 0, cos]]))
return data
def rotate_roll(self, angle: float) -> 'CameraData':
"""
rotates the camera's angle around the roll axis (the z axis)
:param angle: the rotation angle
:return: a camera data instance with the same params as this but with the roll angle rotated \
if this is immutable it will return a copy of this, otherwise it will modify this instance and return it
"""
data = self.__get_data()
data.rotation_angles[2] += angle
sin, cos = np.sin(angle), np.cos(angle)
data.rotation_matrix = data.rotation_matrix.dot(np.array([[cos, -sin, 0],
[sin, cos, 0],
[0, 0, 1]]))
return data
def set_pitch_angle(self, angle: float) -> 'CameraData':
"""
sets the camera's angle around the pitch axis (the x axis)
:param angle: the rotation angle
:return: a camera data instance with the same params as this but with the pitch angle changed \
if this is immutable it will return a copy of this, otherwise it will modify this instance and return it
"""
data = self.__get_data()
data.rotation_angles[0] = angle
data.rotation_matrix = data.__calculate_rotation_matrix()
return data
def set_yaw_angle(self, angle: float) -> 'CameraData':
"""
sets the camera's angle around the yaw axis (the y axis)
:param angle: the rotation angle
:return: a camera data instance with the same params as this but with the yaw angle changed \
if this is immutable it will return a copy of this, otherwise it will modify this instance and return it
"""
data = self.__get_data()
data.rotation_angles[1] = angle
data.rotation_matrix = data.__calculate_rotation_matrix()
return data
def set_roll_angle(self, angle: float) -> 'CameraData':
"""
sets the camera's angle around the roll axis (the z axis)
:param angle: the rotation angle
:return: a camera data instance with the same params as this but with the roll angle changed \
if this is immutable it will return a copy of this, otherwise it will modify this instance and return it
"""
data = self.__get_data()
data.rotation_angles[2] = angle
data.rotation_matrix = data.__calculate_rotation_matrix()
return data
def move_x(self, x: Number) -> 'CameraData':
"""
moves this camera data's x axis offset
:param x: the x offset to move by
:return: a camera data instance with the same params as this but with the x axis moved \
if this is immutable it will return a copy of this, otherwise it will modify this instance and return it
"""
data = self.__get_data()
data.offset[0] += x
return data
def move_y(self, y: Number) -> 'CameraData':
"""
moves this camera data's y axis offset
:param y: the y offset to move by
:return: a camera data instance with the same params as this but with the y axis moved \
if this is immutable it will return a copy of this, otherwise it will modify this instance and return it
"""
data = self.__get_data()
data.offset[1] += y
return data
def move_z(self, z: Number) -> 'CameraData':
"""
moves this camera data's z axis offset
:param z: the z offset to move by
:return: a camera data instance with the same params as this but with the z axis moved \
if this is immutable it will return a copy of this, otherwise it will modify this instance and return it
"""
data = self.__get_data()
data.offset[2] += z
return data
def set_x_offset(self, x: Number) -> 'CameraData':
"""
sets this camera data's x axis offset
:param x: the new x offset
:return: a camera data instance with the same params as this but with the x axis changed \
if this is immutable it will return a copy of this, otherwise it will modify this instance and return it
"""
data = self.__get_data()
data.offset[0] = x
return data
def set_y_offset(self, y: Number) -> 'CameraData':
"""
sets this camera data's y axis offset
:param y: the new y offset
:return: a camera data instance with the same params as this but with the y axis changed \
if this is immutable it will return a copy of this, otherwise it will modify this instance and return it
"""
data = self.__get_data()
data.offset[1] = y
return data
def set_z_offset(self, z: Number) -> 'CameraData':
"""
sets this camera data's z axis offset
:param z: the new z offset
:return: a camera data instance with the same params as this but with the z axis changed \
if this is immutable it will return a copy of this, otherwise it will modify this instance and return it
"""
data = self.__get_data()
data.offset[2] = z
return data
def copy(self) -> 'CameraData':
"""
creates a mutable copy of this and returns it
:return:
"""
copy = deepcopy(self)
copy.__is_immutable = False
return copy
def is_immutable(self) -> bool:
"""
checks if this camera data instance is immutable
:return: True if this is immutable, False otherwise
"""
return self.__is_immutable
def as_immutable(self) -> 'CameraData':
"""
creates and returns an immutable copy of this camera data
if this instance is already immutable it will return this instance
:return: an instance of CameraData, with the same values as this instance but immutable
"""
if self.__is_immutable:
return self
copy = self.copy()
copy.__is_immutable = True
return copy
| [
11748,
299,
32152,
355,
45941,
198,
6738,
4866,
1330,
2769,
30073,
198,
198,
6738,
308,
65,
10178,
13,
9979,
1187,
13,
19199,
1330,
7913,
628,
198,
4871,
20432,
6601,
25,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
8477,
6937,
546,... | 2.637639 | 4,490 |
"""Tests for the notebook kernel and session manager"""
from subprocess import PIPE
import time
from unittest import TestCase
from IPython.testing import decorators as dec
from IPython.config.loader import Config
from IPython.kernel import KernelManager
| [
37811,
51,
3558,
329,
262,
20922,
9720,
290,
6246,
4706,
37811,
198,
198,
6738,
850,
14681,
1330,
350,
4061,
36,
198,
11748,
640,
198,
6738,
555,
715,
395,
1330,
6208,
20448,
198,
198,
6738,
6101,
7535,
13,
33407,
1330,
11705,
2024,
3... | 4.03125 | 64 |
# Copyright (c) 2020 Graphcore Ltd. All rights reserved.
import os
import unittest
import re
import pytest
# NOTE: The import below is dependent on 'pytest.ini' in the root of
# the repository
from examples_tests.test_util import run_python_script_helper, run_test_helper
def run_sparse_softmax_subblock(**kwargs):
"""Helper function to run popart sparse softmax with subblock model python script with
command line arguments"""
out = run_python_script_helper(os.path.dirname(__file__), "sparse_softmax_subblock_demo.py",
**kwargs)
return out
| [
2,
15069,
357,
66,
8,
12131,
29681,
7295,
12052,
13,
1439,
2489,
10395,
13,
198,
11748,
28686,
198,
11748,
555,
715,
395,
198,
11748,
302,
198,
198,
11748,
12972,
9288,
198,
2,
24550,
25,
383,
1330,
2174,
318,
10795,
319,
705,
9078,
... | 2.777778 | 216 |
import os, sys, re, csv
from bs4 import BeautifulSoup
import requests, time
from tqdm import tqdm
url = 'http://web.phonetik.uni-frankfurt.de/S/S00002.html'
if __name__ == '__main__':
main()
| [
11748,
28686,
11,
25064,
11,
302,
11,
269,
21370,
198,
6738,
275,
82,
19,
1330,
23762,
50,
10486,
198,
11748,
7007,
11,
640,
198,
6738,
256,
80,
36020,
1330,
256,
80,
36020,
198,
6371,
796,
705,
4023,
1378,
12384,
13,
746,
36823,
11... | 2.432099 | 81 |
import argparse
import glob
# noinspection PyTypeChecker
from dataclasses import dataclass
from operator import add
from statistics import median, mean
from termcolor import colored
from forest.parse_examples import parse_resnax, parse_file
exclude_instances = ["datetime2"] # , "color", "date", "date7", "id1", "date3"]
@dataclass
if __name__ == '__main__':
main()
| [
11748,
1822,
29572,
198,
11748,
15095,
198,
2,
645,
1040,
14978,
9485,
6030,
9787,
263,
198,
6738,
4818,
330,
28958,
1330,
4818,
330,
31172,
198,
6738,
10088,
1330,
751,
198,
6738,
7869,
1330,
14288,
11,
1612,
198,
198,
6738,
3381,
8043... | 3.114754 | 122 |
main()
| [
12417,
3419,
201,
198
] | 2 | 4 |
#%%
"""
- Detect Capital
- https://leetcode.com/problems/detect-capital/
- Easy
Given a word, you need to judge whether the usage of capitals in it is right or not.
We define the usage of capitals in a word to be right when one of the following cases holds:
All letters in this word are capitals, like "USA".
All letters in this word are not capitals, like "leetcode".
Only the first letter in this word is capital, like "Google".
Otherwise, we define that this word doesn't use capitals in a right way.
Example 1:
Input: "USA"
Output: True
Example 2:
Input: "FlaG"
Output: False
Note: The input will be a non-empty word consisting of uppercase and lowercase latin letters.
"""
#%%
#%%
| [
2,
16626,
198,
37811,
198,
12,
35874,
9747,
198,
12,
3740,
1378,
293,
316,
8189,
13,
785,
14,
1676,
22143,
14,
15255,
478,
12,
27544,
14,
198,
12,
16789,
198,
198,
15056,
257,
1573,
11,
345,
761,
284,
5052,
1771,
262,
8748,
286,
4... | 3.261468 | 218 |
from workwork.errors import InstanceIdNotFound
| [
6738,
670,
1818,
13,
48277,
1330,
2262,
590,
7390,
3673,
21077,
628,
198
] | 3.769231 | 13 |
from django.contrib import admin
from .models import Tag
# Register your models here.
@admin.register(Tag) | [
6738,
42625,
14208,
13,
3642,
822,
1330,
13169,
198,
6738,
764,
27530,
1330,
17467,
198,
2,
17296,
534,
4981,
994,
13,
198,
198,
31,
28482,
13,
30238,
7,
24835,
8
] | 3.566667 | 30 |
from distutils.core import setup
setup(name='tableize',
version='v1.0',
description='Turn lists into tables with ease',
author='Anthony Federico',
author_email='dephoona@gmail.com',
url='https://github.com/anfederico/Tableize',
packages=['tableize']
) | [
6738,
1233,
26791,
13,
7295,
1330,
9058,
198,
198,
40406,
7,
3672,
11639,
11487,
1096,
3256,
198,
220,
220,
220,
220,
220,
2196,
11639,
85,
16,
13,
15,
3256,
198,
220,
220,
220,
220,
220,
6764,
11639,
17278,
8341,
656,
8893,
351,
10... | 2.547826 | 115 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Han Xiao <artex.xh@gmail.com> <https://hanxiao.github.io>
import multiprocessing
import os
import random
import sys
import threading
import time
from collections import defaultdict
from datetime import datetime
from itertools import chain
from multiprocessing import Process
from multiprocessing.pool import Pool
import numpy as np
import zmq
import zmq.decorators as zmqd
from termcolor import colored
from zmq.utils import jsonapi
from .helper import *
from .protocol import *
from .zmq_decor import multi_socket
from .postsink import WKRSink
from .hard_worker import WKRHardWorker
from .statistic import ServerStatistic
__all__ = ['__version__', 'WKRServer', 'WKRHardWorker']
__version__ = '2.1.0' | [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
9530,
28249,
1279,
433,
1069,
13,
87,
71,
31,
14816,
13,
785,
29,
1279,
5450,
1378,
7637,
87,
13481,
13,
12567,
... | 3.056911 | 246 |
# Code Snippets for Flow Solver
# Below are a few code snippets, intended to save you the time and tedium
# of typing in file name lists. Copy and paste these into your program as needed.
# Snippet number 1: List of colors used. Not sure if you need this or not.
# By the way, the "white" files are really gray.
colorList = ["blue", "green", "red", "yellow", "cyan", "orange", "magenta", "pink", "white"];
# Snippet number 2: a list of the EndPoint tile file names
endPointFileList = ["blueEndpoint.gif",
"greenEndpoint.gif",
"redEndpoint.gif",
"yellowEndpoint.gif",
"cyanEndpoint.gif",
"orangeEndpoint.gif",
"magentaEndpoint.gif",
"pinkEndpoint.gif",
"whiteEndpoint.gif" ]
# Snippet number 3: a list of the playable tile files names
fileList = ["blueHorizontal.gif", "blueQ1.gif", "blueQ2.gif", "blueQ3.gif", "blueQ4.gif", "blueVertical.gif",
"greenHorizontal.gif", "greenQ1.gif", "greenQ2.gif", "greenQ3.gif", "greenQ4.gif", "greenVertical.gif",
"redHorizontal.gif", "redQ1.gif", "redQ2.gif", "redQ3.gif", "redQ4.gif", "redVertical.gif",
"yellowHorizontal.gif", "yellowQ1.gif", "yellowQ2.gif", "yellowQ3.gif", "yellowQ4.gif", "yellowVertical.gif",
"cyanHorizontal.gif", "cyanQ1.gif", "cyanQ2.gif", "cyanQ3.gif", "cyanQ4.gif", "cyanVertical.gif",
"orangeHorizontal.gif", "orangeQ1.gif", "orangeQ2.gif", "orangeQ3.gif", "orangeQ4.gif", "orangeVertical.gif",
"magentaHorizontal.gif", "magentaQ1.gif", "magentaQ2.gif", "magentaQ3.gif", "magentaQ4.gif", "magentaVertical.gif",
"pinkHorizontal.gif", "pinkQ1.gif", "pinkQ2.gif", "pinkQ3.gif", "pinkQ4.gif", "pinkVertical.gif",
"whiteHorizontal.gif", "whiteQ1.gif", "whiteQ2.gif", "whiteQ3.gif", "whiteQ4.gif", "whiteVertical.gif" ]
# Snippet number 4: the isAllowedRight() function that we developed in class
# curr and right are PhotoImages. All photoimage should have been annotated with goesRight, color, etc when created.
# this one I'm giving to the students
My Notes
empty = PhotoImage(file = "Empty.gif")
empty.goesLeft = False
empty.goesRight = False
#Green Horizontal
greenHoriz = PhotoImage(file = "greenHorizontal.gif")
greenHoriz.goesLeft = True
greenHoriz.goesRight = True
greenHoriz.goesUp = False
greenHoriz.color = "green"
#def take(row, cell)
# for examine all playable images
# put images into cells[row][col]
# check allowed left/right/up/down
# and solve()
# return True
# otherwise continue loop
# if none work, return false cause no images work in that cell
#def solve():
# loop through all rows and cols
# if cell[row][col] is empty:
# if take (row, col)
# return true
# else
# return false
# continue row/cel loop
# if loop finishes return true
| [
2,
6127,
5489,
3974,
1039,
329,
27782,
4294,
332,
201,
198,
2,
10383,
389,
257,
1178,
2438,
45114,
11,
5292,
284,
3613,
345,
262,
640,
290,
28501,
1505,
201,
198,
2,
286,
19720,
287,
2393,
1438,
8341,
13,
220,
17393,
290,
17008,
777... | 2.163376 | 1,469 |
"""Implementation of the locatebiome command."""
from mcipc.rcon.client import Client
from mcipc.rcon.functions import parsed
from mcipc.rcon.je.types import Biome
from mcipc.rcon.response_types.location import parse
__all__ = ['locatebiome']
@parsed(parse)
def locatebiome(self: Client, biome: Biome) -> str:
"""Locates the given biome."""
return self.run('locatebiome', biome)
| [
37811,
3546,
32851,
286,
262,
17276,
8482,
462,
3141,
526,
15931,
198,
198,
6738,
36650,
541,
66,
13,
81,
1102,
13,
16366,
1330,
20985,
198,
6738,
36650,
541,
66,
13,
81,
1102,
13,
12543,
2733,
1330,
44267,
198,
6738,
36650,
541,
66,
... | 2.889706 | 136 |
import tensorflow as tf
from config import *
batch_norm = tf.contrib.layers.batch_norm
w_init = tf.contrib.layers.variance_scaling_initializer()
b_init = tf.constant_initializer(0.0)
class Generator(object):
""" Generator """
class Discriminator(object):
""" Discriminator """
| [
11748,
11192,
273,
11125,
355,
48700,
198,
6738,
4566,
1330,
1635,
198,
198,
43501,
62,
27237,
796,
48700,
13,
3642,
822,
13,
75,
6962,
13,
43501,
62,
27237,
198,
86,
62,
15003,
796,
48700,
13,
3642,
822,
13,
75,
6962,
13,
25641,
59... | 2.958333 | 96 |
# -*- coding: utf-8 -*-
import pandas as pd
pd.set_option("display.max_colwidth", 10000)
pd.options.mode.chained_assignment = None # to not make too many copies
import os
import numpy as np
import glob
rootdir_glob = 'C:\\conda\\remote_works\\remote_works\\remote_works\\**/*' # Note the added asterisks
# This will return absolute paths
file_list = [f for f in glob.iglob(rootdir_glob, recursive=True) if os.path.isfile(f) == False]
for f in file_list:
if 'migrations' in str(f):
os.chdir(f)
for file in glob.glob("*.py"):
if str(file) != '__init__.py':
if 'skill' in str(file):
file_renamed = str(file).replace("skill","skill")
os.rename(os.path.join(f,file),os.path.join(f,file_renamed))
file = file_renamed
file1 = os.path.join(f,file)
print(file1)
#replace_project_in(file1)
file_list = [f for f in glob.iglob(rootdir_glob, recursive=True) if os.path.isfile(f) == False]
for f in file_list:
if 'migrations' in str(f):
os.chdir(f)
for file in glob.glob("*.pyc"):
os.remove(file)
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
11748,
19798,
292,
355,
279,
67,
198,
30094,
13,
2617,
62,
18076,
7203,
13812,
13,
9806,
62,
4033,
10394,
1600,
33028,
8,
198,
30094,
13,
25811,
13,
14171,
13,
35... | 2.075439 | 570 |
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function
import sys
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
import spack.cmd.common.arguments as arguments
import spack.repo
from spack.version import infinity_versions, ver
description = "list available versions of a package"
section = "packaging"
level = "long"
| [
2,
15069,
2211,
12,
1238,
1828,
13914,
45036,
3549,
2351,
4765,
11,
11419,
290,
584,
198,
2,
1338,
441,
4935,
34152,
13,
4091,
262,
1353,
12,
5715,
27975,
38162,
9947,
2393,
329,
3307,
13,
198,
2,
198,
2,
30628,
55,
12,
34156,
12,
... | 3.365385 | 156 |
import logging
import shlex
import os as _os
import tempfile
from pathlib import Path
import pytest
import alexber.utils.processinvokes as processinvokes
from alexber.utils.processinvokes import LogPipe, LogSubProcessCall
logger = logging.getLogger(__name__)
process_invokes_logger = None
_process_invokes_logger_log = None
@pytest.fixture
@pytest.fixture
@pytest.fixture
if __name__ == "__main__":
pytest.main([__file__])
| [
11748,
18931,
198,
11748,
427,
2588,
198,
11748,
28686,
355,
4808,
418,
198,
11748,
20218,
7753,
198,
6738,
3108,
8019,
1330,
10644,
198,
11748,
12972,
9288,
198,
11748,
257,
2588,
527,
13,
26791,
13,
14681,
16340,
3369,
355,
1429,
16340,... | 2.907285 | 151 |
from .. import bech32, ec, script, base58
from . import blech32
import hmac
from .networks import NETWORKS
# TODO: refactor with network
| [
6738,
11485,
1330,
307,
354,
2624,
11,
9940,
11,
4226,
11,
2779,
3365,
198,
6738,
764,
1330,
7245,
354,
2624,
198,
11748,
289,
20285,
198,
6738,
764,
3262,
5225,
1330,
49791,
50,
198,
198,
2,
16926,
46,
25,
1006,
11218,
351,
3127,
6... | 3.232558 | 43 |
#! /usr/bin/env python3
import rospy
from session_one.srv import laser_service,laser_serviceResponse
from sensor_msgs.msg import LaserScan
laser_max = 0.00
laser_min = 0.00
rospy.init_node('arda_service_server_node')
my_service = rospy.Service('/Arda',laser_service,service_callback_function)
rospy.spin()
| [
2,
0,
1220,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
11748,
686,
2777,
88,
198,
6738,
6246,
62,
505,
13,
27891,
85,
1330,
12855,
62,
15271,
11,
75,
6005,
62,
15271,
31077,
198,
6738,
12694,
62,
907,
14542,
13,
19662,
1330,
... | 2.65812 | 117 |
"""Tools to implement caching."""
# pylint: disable=too-few-public-methods
import functools
def _make_key(args, kwargs):
"""Creates a hashable key. A simplified version of functools._make_key."""
# create a key for the memo from args and kwargs
key = args
if kwargs:
# marks the start of the keyword arguments in key
key += (object(),)
for item in kwargs.items():
key += item
return key
class MemoizedFunction:
"""Takes a function and returns a callable that is a memoized version of that function."""
def memoize(func):
"""Decorates a function to implement a memo.
A simpler, less optimized version of functools.cache."""
memo = {}
@functools.wraps(func)
return memorize_closure
| [
37811,
33637,
284,
3494,
40918,
526,
15931,
198,
198,
2,
279,
2645,
600,
25,
15560,
28,
18820,
12,
32146,
12,
11377,
12,
24396,
82,
198,
198,
11748,
1257,
310,
10141,
628,
198,
4299,
4808,
15883,
62,
2539,
7,
22046,
11,
479,
86,
220... | 2.794224 | 277 |
from rest_framework.response import Response
from rest_framework import viewsets, permissions, generics
from tournaments.api.serializers import TournamentSerializer
from tournaments.models import Tournament, TeamInvite
class TournamentViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows groups to be viewed or edited.
"""
queryset = Tournament.objects.all()
serializer_class = TournamentSerializer
permission_classes = [permissions.IsAuthenticated]
| [
6738,
1334,
62,
30604,
13,
26209,
1330,
18261,
198,
6738,
1334,
62,
30604,
1330,
5009,
1039,
11,
21627,
11,
1152,
873,
198,
198,
6738,
18130,
13,
15042,
13,
46911,
11341,
1330,
9595,
32634,
7509,
198,
6738,
18130,
13,
27530,
1330,
9595,... | 3.818898 | 127 |
from paysage import backends as be
from paysage import layers
from paysage.models import BoltzmannMachine
from paysage.models import gradient_util as gu
from paysage.models.state import StateTAP
import pytest
from copy import deepcopy
from cytoolz import partial
import math
# ----- Functional Programs with Gradients ----- #
# ----- Layer Methods ----- #
if __name__ == "__main__":
pytest.main([__file__])
| [
6738,
13831,
496,
1330,
736,
2412,
355,
307,
198,
6738,
13831,
496,
1330,
11685,
198,
6738,
13831,
496,
13,
27530,
1330,
21764,
89,
9038,
37573,
198,
6738,
13831,
496,
13,
27530,
1330,
31312,
62,
22602,
355,
915,
198,
6738,
13831,
496,
... | 3.598291 | 117 |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['GroupMemberEntityIdsArgs', 'GroupMemberEntityIds']
@pulumi.input_type
@pulumi.input_type
| [
2,
19617,
28,
40477,
12,
23,
198,
2,
17202,
39410,
25,
428,
2393,
373,
7560,
416,
262,
21624,
12994,
24118,
687,
10290,
357,
27110,
5235,
8,
16984,
13,
17202,
198,
2,
17202,
2141,
407,
4370,
416,
1021,
4556,
345,
821,
1728,
345,
760... | 3.403101 | 129 |
import fontforge
import os
import md5
import subprocess
import tempfile
import json
import copy
SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__))
SQUARE_PATH = os.path.join(SCRIPT_PATH, 'square.svg')
OUTPUT_FONT_DIR = os.path.join(SCRIPT_PATH, '..', '..', 'src/fonts')
AUTO_WIDTH = False
KERNING = 0
m = md5.new()
f = fontforge.font()
f.encoding = 'UnicodeFull'
f.design_size = 28
f.em = 512
f.ascent = 448
f.descent = 64
# Import base characters
for char in "0123456789abcdefghijklmnopqrstuvwzxyzABCDEFGHIJKLMNOPQRSTUVWZXYZ_- .,:;/\!/*&'\"|(){}[]":
glyph = f.createChar(ord(char))
glyph.importOutlines(SQUARE_PATH)
glyph.width = 256
font_name = 'skeleton';
m.update(font_name + ';')
fontfile = '%s/skeleton' % (OUTPUT_FONT_DIR)
print fontfile;
build_hash = m.hexdigest()
f.fontname = font_name
f.familyname = font_name
f.fullname = font_name
f.generate(fontfile + '.ttf')
# # Hint the TTF file
subprocess.call('ttfautohint -s -f -n ' + fontfile + '.ttf ' + fontfile + '-hinted.ttf > /dev/null 2>&1 && mv ' + fontfile + '-hinted.ttf ' + fontfile + '.ttf', shell=True)
# WOFF2 Font
subprocess.call('woff2_compress ' + fontfile + '.ttf', shell=True)
| [
11748,
10369,
30293,
198,
11748,
28686,
198,
11748,
45243,
20,
198,
11748,
850,
14681,
198,
11748,
20218,
7753,
198,
11748,
33918,
198,
11748,
4866,
198,
198,
6173,
46023,
62,
34219,
796,
28686,
13,
6978,
13,
15908,
3672,
7,
418,
13,
69... | 2.346693 | 499 |
"""
@Author :Furqan Khan
@Email :furqankhan08@gmail.com
@Date :12/30/2016
Objective :
The purpose of this file /module /Class is to actually execute the external scripts for vulnerability assessment and scanning.It runs metasploit modules ,external python ,ruby,bash,shell,java class files and some perl as well as NSE scripts.This file is invoked from driver_meta.py and it invokes appropriate method at 1 time depending upon the type of service and checks to be carried.
Thus there are various method catagories like :
(1) Single line :For the scripts that only require to be invoked and they return data after completion of execution .
(2) Interactive :This catagory of methods create a virtual terminal to execute the commands in interactive manner ,where the next input given to virtual terminal depends upon the output produced from the preceeding command.
(3) Sniffing :This catagory generates triffic sniffing files for the service /port being testes
(4) Metasploit :This catagory will invoke the metasploit modules and would execute them and collect data from the execution
(5) Single line timeout :THis catagory comes with a timeout parameter which will wait for external script to finish its execution for some specified time
"""
import shlex
import sys
#import msfrpc
import time
import pyshark
import pexpect
from subprocess import Popen, PIPE, STDOUT
import commands
import urllib2
import requests
import threading
import subprocess
import psutil
import logging
import logging.handlers
import threading
import Auto_logger
import IPexploits
import time
import unicodedata
import chardet
import os
import json
"""
Objective :
The following class Commands has got all the catagories of methods discussed above
"""
class Commands:
"""
Objective :
The following class Commands has got all the catagories of methods discussed above
"""
def set_log_file(self):
"""
Objective :
When invoked from the GUI the whole code executes in the background and in order to track the
details of execution we log the debugging messages that help us track the execution flow.This method initilizes the logger class and sets a logger file for the current project (scanning phase only)
"""
#print "Inside set Log file "
self.Log_file=str(self.project_id) +str("_Log_file")
self.Log_file_path = os.path.join(self.data_path, self.Log_file)
#self.Log_file_info=str(self.project_id) +str("_Log_file_info")
print "Log file is : " +str(self.Log_file)
self.logger=self.Auto_logger.configureLogger(self.method_id,self.Log_file_path)
#self.logger_info=self.Auto_logger.configureLoggerInfo(self.method_id,self.Log_file_info)
time.sleep(3)
def init_connection(self):
"""
Objective :
This method would initialize the database connection
"""
try:
self.con=MySQLdb.connect("localhost","<USER>","<PASSWORD>","nmapscan")
self.cursor = self.con.cursor()
except Exception, e:
self.print_Error("EXception in connection-->"+str(e))
def print_Log(self,message):
"""
Objective :
This method would log the command id and message (obtained results) to the log file
"""
message="Command Logger --->Command id --> "+str(self.command_id) +" Message --> :" +str(message)
try:
self.lock.acquire()
self.logger.debug(message)
self.lock.release()
except Exception ,e:
self.lock.acquire()
self.logger.critical(message +"--Exception : --"+str(e))
self.lock.release()
print "Log exception :"+str(e)
print message+"\n"
def print_Error(self,message):
"""
Objective :
This method log any exception or error to the logger with the flag info as ERROR
"""
print "Error Logger Command fro file -->"+str(self.Log_file)
#self.logger=self.Auto_logger.configureLogger(self.method_id,self.Log_file)
message="Error -->Command id --> "+str(self.command_id) +" Message --> :" +str(message)
print message
try:
self.lock.acquire()
self.logger.error(message)
self.lock.release()
except Exception ,e:
self.lock.acquire()
self.logger.error(message +"--Exception : --"+str(e))
self.lock.release()
def print_Log_info(self,message):
"""
Objective :
This method would log the execution flow to the different log file (info) .The purpose of this log is to do debugging and track execution flow of commands
"""
message="Command id --> "+str(self.command_id) +" Message --> :" +str(message)
message=message.replace("\n","")
message=message.replace("\\n","")
"""print "-----------------------------------------------------------------------------------------"
print "Logger Info for file -->"+str(self.Log_file_info)
#print "Inside print log !!--Log file is "+str(self.Log_file)
print "Message is " +str(message)
print "-----------------------------------------------------------------------------------------"
"""
#self.logger_info=self.Auto_logger.configureLoggerInfo(self.method_id,self.Log_file_info)
#print "\n\n\n" #print "logger is -->"+str(self.logger)
try:
self.lock.acquire()
self.logger_info.debug(message)
self.lock.release()
except Exception ,e:
self.lock.acquire()
self.logger_info.critical(message +"--Exception : --"+str(e))
self.lock.release()
print "Log exception :"+str(e)
print message+"\n"
def print_Error_info(self,message):
"""
Objective :
This method would log errors pertaining to execution flow with flag set as 'Error'
"""
#self.logger_info=self.Auto_logger.configureLoggerInfo(self.method_id,self.Log_file_info)
message="Command id --> "+str(self.command_id) +" Message --> :" +str(message)
print message
try:
self.lock.acquire()
self.logger_info.error(message)
self.lock.release()
except Exception ,e:
self.lock.acquire()
self.logger_info.error(message +"--Exception : --"+str(e))
self.lock.release()
def cleanUp(self):
"""
Objective :
This method would clean up the virtual console
"""
#a = client.call('console.write', [console_id, "workspace\n"])
#time.sleep(1)
#self.print_Log( "\n\n"+str(a)+"--->Written<----\n\n"
cleanup = self.client.call('console.destroy',[self.console_id])
time.sleep(1)
self.print_Log( "Clean up :"+str(cleanup))
self.print_Log( "Cleanup result: %s" %cleanup['result'])
def exit_child(self,child):
"""
Objective :
This method would gracefully exit the msfconsole when all metasploit commands are ececuted
"""
try:
self.print_Log_info("\nExiting from msfconsole !!!\n")
self.print_Log("\nExiting from msfconsole !!!\n")
child.sendline('exit')
time.sleep(2)
j=child.expect(['[$/#]',pexpect.EOF,pexpect.TIMEOUT],timeout=60)
print "j is "+str(j)
if(j==1):
self.print_Log("Exited from msfconsole !!!")
self.Display_msg(child)
else :
self.print_Log("\n\nSome Error Occured while Exiting\n\n")
self.Display_msg(child)
except Exception ,e:
self.print_Error_info("\n\nException in Exit Child "+str(e))
self.print_Error("\n\nException in Exit Child "+str(e))
self.Display_msg(child)
def SaveDetails(self,commands,result):
"""
Objective :
This method is commonly shared amongst all catagories of methods
(singleline ,interactive ,metasploit ,sniffing and etc) which are responsible for invoking
the external scripts as well as the metasploit modules .Actually when ever the methods execute the
external scripts teh recived data from external scripts is passed on to this method and it saves the
findings inside the databse table
"""
#print "\n\n\n\n"
self.print_Log("Saving details :")
self.print_Log_info("Saving details :")
print "\n\n Here :Commands Executed for Record id -> " +str(self.current_record_id) +" and Command Id : -->"+str(self.command_id )+" and Method id -->"+self.method_id
print str(commands)
print ("\n\n\n\n")
print "\n\nResults for Record id -> " +str(self.current_record_id) +" and Command Id : -->"+str(self.command_id) +" and Method id -->"+self.method_id
print str(result)
#print str(result)
status=1
self.IPexploitObj.logger=self.logger
status=self.IPexploitObj.Update(self.project_id,self.current_record_id,self.command_id,commands,result,False)
if (status==1):
self.print_Log_info( "Details Updated successfully")
#self.print_Log( "Details Update Failed")
print "Details Updated successfully"
else:
self.print_Log_info( "Details Update Failed")
self.print_Log( "Details Update Failed")
print "Details Update Failed"
#print str(result)+"\n\n\n"
x=1
def custom_meta(self,commands):
"""
Objective :
This method would take the list of commands as aurgument and would invoke metasploit as a subprocess
in virtual console and would execute the commands and would collect the resukts and finally would
pass the findings to savedetails method to save the results
"""
try:
exploit_result=''
commands_launched=[]
self.method_id="Custom meta"
self.print_Log_info("Inside command_meta")
self.print_Log("Inside command_meta")
#child=pexpect.spawn("msfconsole")
child = pexpect.spawn('msfconsole -q')
commands_launched.append('>msfconsole \n')
print "Console created "
#print str(child)
#child = pexpect.spawn(args[0])
i=child.expect(['.*> ',pexpect.EOF,pexpect.TIMEOUT],timeout=480)
run=True
if (i==0):
self.print_Log(str(child.after))
commands_launched.append(str(child.after))
self.print_Log(str(i))
for command in commands:
command=command.replace("\n","")
child.sendline(command)
#commands_launched.append(command+"\n")
time.sleep(3)
j=child.expect(['.*> ',pexpect.EOF,pexpect.TIMEOUT],timeout=280)
if(j==0):
self.print_Log(str(child.after))
commands_launched.append(str(child.after)+"\n")
continue
elif(j==1):
self.print_Log("EOF reached-->Not launching the run command")
self.Display_msg(child)
commands_launched.append(str(child.after)+"\n")
run=False
break
else:
self.print_Log("Time out exceeded in child check ->Not launching the run command")
self.Display_msg(child)
commands_launched.append(str(child.after)+"\n")
run=False
break
elif(i==1):
print "Reache1"
self.print_Log("EOF reached Outer Expect-->Not launching the run command")
run=False
self.Display_msg(child)
commands_launched.append("EOF->"+str(child.after)+"\n")
else:
print "Reache2"
self.print_Log("Time out exceeded in parent check ->Not launching the run command")
run=False
self.Display_msg(child)
commands_launched.append("Time out exceeded "+str(child.after)+"")
if(run==True):
print "Reache3"
self.print_Log("\n\nEverything Fine till now-->Launching run command\n\n")
self.print_Log_info("\nEverything Fine till now-->Launching run command\n")
child.sendline('run')
#commands_launched.append('>run')
time.sleep(3)
k=child.expect(['.*>.*',pexpect.EOF,pexpect.TIMEOUT],timeout=1500)
time.sleep(2)
if(k==0):
self.print_Log("\n\nModule Execution completed\n\n")
self.print_Log_info("\nModule Execution completed\n")
self.Display_msg(child)
commands_launched.append(''+str(child.after)+'\n')
exploit_result=exploit_result+"Command Executed :"+commands_launched[0]
exploit_result="\n"+exploit_result+"\nResult :\n"+str(child.after)
#exploit_result=str(child.after)
self.print_Log("\n\nNow exiting !!!\n\n")
self.exit_child(child)
self.print_Log("Closing the child pipe !!!")
child.close(force=True)
else:
self.print_Log("some error occured while running the aux module !!")
self.print_Log_info("some error occured while running the aux module !!")
self.print_Log_Error("some error occured while running the aux module !!")
self.print_Log("The value of expect here is :" +str(k))
self.Display_msg(child)
commands_launched.append('<Finished-T/O or EOF>'+str(child.after)+'')
exploit_result=exploit_result+"Command Executed :"+commands_launched[0]
exploit_result="\n"+exploit_result+"\nResult :\n"+str(child.after)
#exploit_result=str(child.after)
self.print_Log("\n\nNow exiting !!!\n\n")
self.exit_child(child)
self.print_Log("Closing the child pipe !!!")
child.close(force=True)
else:
self.print_Log("Run Flag is Not true !!")
self.print_Log_info("Run Flag is Not true !!")
self.print_Log("Closing the child pipe !!!")
child.sendline('exit')
child.close(force=True)
exploit_result="Command msf console failed to load the console or timeout occured "
exploit_result=exploit_result+"Command Executed :"+commands_launched[0]
exploit_result="\n"+exploit_result+"\nResult :\n"+commands_launched[len(commands_launched)-1]
#self.SaveDetails(''.join(commands_launched),exploit_result)
self.SaveDetails(str(commands_launched),exploit_result)
self.print_Log_info("Exiting custom_meta !!")
except Exception ,e:
self.print_Error(str(child.after))
self.print_Error("Custom MetaSploit module has exception :" +str(e))
self.print_Error_info("Custom MetaSploit module has exception :" +str(e))
#self.Display_msg("Closing the child pipe !!!")
child.close(force=True)
def meta_commands(self,commands):
"""
Objective :
This method is obselete and is not used in final draft of code.Its purpose was to use github
version of code for metasploit modules execution ,but due to its unstable nature we adapted
our own custom methodology using Python's pexpect
"""
try:
self.print_Log( "Console id is :"+str(self.console_id))
for command in commands:
a = self.client.call('console.write', [self.console_id, command])
time.sleep(1)
a = self.client.call('console.write', [self.console_id, "run\n"])
time.sleep(5)
self.print_Log( str(a))
while True:
self.res = self.client.call('console.read',[self.console_id])
if len(self.res['data']) > 1:
self.print_Log( "Result :" + self.res['data'])
if self.res['busy'] == True:
self.print_Log( "Console is busy :")
time.sleep(1)
continue
break
except Exception,e:
print "Exception meta_commands-->"+str(e)
self.print_Error( "EXception Meta "+str(e))
def start_wireshark(self,args):
"""
Objective :
This method would start python version of wireshark called as pyshark for traffic sniffing
"""
self.print_Log( "\n\nStarting wireshark for 50 sec\n\n")
self.print_Log_info( "\n\nStarting wireshark for 50 sec\n\n")
try:
capture = pyshark.LiveCapture(interface=args[0],bpf_filter=args[1])
capture.sniff(timeout=50)#will mae the pyshark to capture packets for next 50 seconds
for packet in capture.sniff_continuously(packet_count=5):
self.print_Log( 'Just arrived:'+str( packet))
except Exception ,ee:
self.print_Error( "EXception Wireshark-old "+str(ee))
self.print_Error_info( "EXception Wireshark-old "+str(ee))
return
def Display_msg(self,child):
"""
Objective :
This method would pass the debugging messages to print_Log method to keep track of execution flow
"""
try:
self.print_Log( "Before : \n"+ str(child.before) + "After : \n"+ str(child.after))
except Exception, ee:
self.print_Error("Error in Display_msg methos --> : "+str(ee))
self.print_Error_info("Error in Display_msg methos --> : "+str(ee))
def Nfs_Mount_intaractive(self,args):
"""
Objective :
Nfs mount is used for service NFS and this check will attempt to mount directories of remote system to local machine .The method is interacive and is customizd only for this purpose.
Todo :
For now we are hard coding-->assuming root permission -->later try to parse the directories which
have permission and mount them only.It assumes /temp/ directory is created already
"""
try: #For now we are hard coding-->assuming root permission -->later try to parse the directories which have permission and mount them only.It assumes /temp/ directory is created already
self.print_Log( "\n\nStarting Mount all retive\n\n")
self.print_Log_info( "\n\nStarting Mount all retive\n\n")
print ("Launching command--> "+str(args[0]))
commands_executed=[]
commands_executed.append(">"+str(args[0])+"\n")
exploit_result=''
child = pexpect.spawn(args[0])
print "Launched"
i=child.expect([pexpect.TIMEOUT, '[#\$]','.*access denied.*',pexpect.EOF],timeout=25)
if ((i==1)or(i==3)):
print "here"
self.print_Log(str(child.after))
commands_executed.append(">"+str(child.after))
self.print_Log( str(i))
for counter in range (1,len(args)):
child.sendline(args[counter])
commands_executed.append(args[counter]+"\n")
time.sleep(2)
j=child.expect([pexpect.TIMEOUT, '[#\$] ',pexpect.EOF],timeout=15)
time.sleep(2)
commands_executed.append(str(child.after))
if((j==1)or (j==2)):
self.print_Log(str(child.after))
continue
else :
self.print_Log("Some Error occured--During command launching")
self.print_Log_info("Some Error occured--During command launching")
self.Display_msg(child)
break
exploit_result="Command Executed :"+commands_executed[0]
exploit_result="\n"+exploit_result+"Result:\n"+commands_executed[len(commands_executed)-1]
self.print_Log("Closing Child now !!")
self.print_Log_info("Closing Child now !!")
child.close(force=True)
elif (i==2):
commands_executed.append(str(child.after))
exploit_result="Command Executed :"+commands_executed[0]
exploit_result="\n"+exploit_result+"Result:\n"+commands_executed[len(commands_executed)-1]
self.print_Log("Closing Child now !!")
self.print_Log_info("Closing Child now !!")
child.close(force=True)
else:
self.print_Log("Either timeout or End of file "+str(i))
self.print_Log_info("Either timeout or End of file "+str(i))
self.print_Log("Closing Child now !!")
exploit_result="Command Executed :"+commands_executed[0]
exploit_result="\n"+exploit_result+"\nResult:\n"+commands_executed[len(commands_executed)-1]
child.close(force=True)
#self.SaveDetails(''.join(commands_executed),exploit_result)
self.SaveDetails(str(commands_executed),exploit_result)
self.print_Log_info("Exiting mathos Nfs interactive now !!")
except Exception,e:
child.close(force=True)
self.print_Error("Exception in mount interactive "+str(e))
self.print_Error_info("Exception in mount interactive "+str(e))
#self.print_Error("Closing Child now !!")
def ftp_interactive(self,args): #Note never execute it as a sinle line command as the console gets stuck
"""
Objective :
The purpose of this method is to check weather anonymous login is allowed on ftp service
on the given remote host.Altough the same task could be achived by passing appropriate
aurguments to General_interactive catagory of methos ,but this method was developed before
developemt of General_interactive and thus is customized only for anonymous FTP check using
interactive mode with virtual console
"""
try :
commands_executed=[]
exploit_result=''
self.method_id="Ftp_interactive ()"
self.print_Log( "\n\nStarting FTP Login --Anonmous\n\n")
self.print_Log_info( "\n\nStarting FTP Login --Anonmous\n\n")
child = pexpect.spawn(args[0])
i=child.expect(['Permission denied', 'Name .*:','.* Connection refused',pexpect.TIMEOUT, '[#\$] '],timeout=25)
commands_executed.append(args[0]+"\n")
commands_executed.append(str(child.after)+"\n")
if (i==1):
self.print_Log(str(child.before) +" " +str(child.after))
commands_executed.append(str(child.after))
#self.print_Log( str(i))
child.sendline('anonymous')
commands_executed.append('anonymous'+"\n")
time.sleep(3)
j=child.expect(['.*Password:',pexpect.TIMEOUT],timeout=25)
if(j==0):
self.print_Log( "Before : "+ str(child.before) + "After : "+ str(child.after))
commands_executed.append(str(child.after)+"\n")
child.sendline('noah@example.com')
time.sleep(3)
commands_executed.append('noah@example.com'+"\n")
k=child.expect(['.*ftp> ',pexpect.TIMEOUT],15)
commands_executed.append(str(child.after)+"\n")
if(k==0):
exploit_result="Login SuccesFul --> "+str(child.after)
self.print_Log( "Login Successful")
self.print_Log_info( "Login Successful")
self.print_Log( "Before : "+ str(child.before) + "After : "+ str(child.after))
else:
exploit_result="Login Not SuccesFul --> "+str(child.after)
self.print_Log( "Login Not Successful")
self.print_Log_info( "Login Not Successful")
self.Display_msg(child)
else:
commands_executed.append(str(child.after)+"\n")
self.Display_msg(child)
elif ((i==2)or (i==3)):
self.print_Log( "Host seems to be down or service is turned off : ")
self.print_Log_info( "Host seems to be down or service is turned off- or connection Timed out : ")
self.Display_msg(child)
elif (i==4):
self.print_Log( "Host has very less security as it permits ftp login without any password: ")
self.print_Log_info( "Host has very less security as it permits ftp login without any password: ")
self.Display_msg(child)
else :
self.print_Log( "\n\nPermission Denied\n\n")
self.print_Log_info( "\n\nPermission Denied\n\n")
self.Display_msg(child)
self.print_Log("Closing Child now !!")
child.close(force=True)
exploit_result=exploit_result+"Command Executed :"+commands_executed[0]
exploit_result="\n"+exploit_result+"\nResult :\n"+str(child.after)
#self.SaveDetails(''.join(commands_executed),exploit_result)
self.SaveDetails(str(commands_executed),exploit_result)
self.print_Log_info("Exiting method Ftp interactive !!")
except Exception,e:
self.print_Error("Closing Child now !!")
child.close(force=True)
self.print_Error( "Exception ftp_intaractive "+str(e))
self.print_Error_info( "Exception ftp_intaractive "+str(e))
def ssh_check_execution(self,child,commands_executed):
    """Drive an already-spawned ssh pexpect session through a root-login check.

    Works in conjunction with ssh_interactive(): the caller spawns the ssh
    command and hands over the live `child` plus the running transcript list
    `commands_executed` (mutated in place).  This method supplies the default
    password 'root' when prompted, classifies the outcome, and persists the
    transcript via SaveDetails().

    :param child: live pexpect spawn object for the ssh session
    :param commands_executed: list collecting every console exchange (mutated)
    """
    exploit_result=""
    try:
        print "In here"
        # Index meaning: 0=Permission denied, 1=password prompt,
        # 2=connection refused, 3=timeout, 4=shell prompt (no password!), 5=EOF
        i=child.expect(['.*Permission denied.*', 'root@.* password:.*','.* Connection refused',pexpect.TIMEOUT,'[#\$]',pexpect.EOF],timeout=15)
        time.sleep(2)
        print "got something-->"+str(i)
        commands_executed.append(str(child.after)+"\n")
        print "i is -->"+str(i)
        if ((i==1)):
            # Password prompt seen -- try the weak default password 'root'
            self.print_Log( "Root is expecting a pssword--supplying default password")
            self.print_Log_info( "Root is expecting a pssword--supplying default password")
            #self.print_Log( str(i))
            child.sendline('root')
            commands_executed.append('root'+"\n")
            time.sleep(2)
            # j: 0=re-prompted (wrong password), 1=shell prompt (login ok), 2=denied
            j=child.expect(['root@.* password:.*' ,'[#\$] ','Permission denied'],timeout=15)
            commands_executed.append(str(child.after)+"\n")
            #commands_executed.append('root'+"\n")
            #time.sleep(2)
            exploit_result=str(child.after)+"\n"
            if(j==1):
                self.print_Log( "Login Successful with password root")
                self.print_Log_info( "Login Successful with password root")
                self.print_Log( "Before : "+ str(child.before) + "After : "+ str(child.after))
            else:
                #exploit_result ="Before -: "+str(child.before) + "After - :" +str(child.after)
                self.print_Log("No login with pw root-Cant guess weather root login is enabled.Need to brute force\n" +str(j))
                self.print_Log_info("No login with pw root-Cant guess weather root login is enabled.Need to brute force")
                self.Display_msg(child)
        elif (i==4):
            # Shell prompt appeared with no password at all -- weakest setup
            self.print_Log( "Login successful ..Root is set to weak privlages it permits login without password:")
            self.print_Log_info( "Login successful ..Root is set to weak privlages it permits login without password:")
            self.Display_msg(child)
        elif (i==2):
            self.print_Log( "Connection refused-->Service not running on host")
            self.print_Log_info( "Connection refused-->Service not running on host")
            self.Display_msg(child)
        elif (i==3) or (i==5):
            self.print_Log( "TimeOut occcured or EOF")
            self.print_Log_info( "Connection Timed out !!!")
            self.Display_msg(child)
        else :
            # i==0: permission denied straight away -- root login disabled (good)
            self.print_Log( "Permission Denied at inception for root--Good ")
            self.print_Log_info( "Permission Denied at inception for root--Good ")
            self.Display_msg(child)
        #exploit_result ="Before -: "+str(child.before) + "After - :" +str(child.after)
        # Summary: first entry of the transcript is the launching command,
        # last entry is the final console exchange.
        exploit_result="Command Executed :"+commands_executed[0]+"\n"
        exploit_result=exploit_result+"\nResult:\n"+str(commands_executed[len(commands_executed)-1])
        self.print_Log("Closing Child now !!")
        child.close(force=True)
        #self.SaveDetails(''.join(commands_executed),exploit_result)
        self.SaveDetails(str(commands_executed),exploit_result)
        self.print_Log_info("Exiting method ssh interactive!!")
    except Exception,e:
        self.print_Error("Closing Child now !!")
        child.close(force=True)
        self.print_Error( "Exception ssh_intaractive "+str(e))
        self.print_Error_info( "Exception ssh_intaractive "+str(e))
def ssh_interactive(self,args): #Note never execute it as a sinle line command as the console gets stuck
    """
    Objective :
        Check whether root login is allowed on the ssh service of the remote
        host, using an interactive virtual console (pexpect).  Although the
        same task could be achieved through the General_interactive category
        of methods, this method predates them and is customized for the SSH
        root-login check only.

    :param args: args[0] is the full ssh command line to spawn
                 (e.g. "ssh root@<host>").
    Delegates the actual prompt handling to ssh_check_execution().
    """
    try:
        print "In ssh interactive !!!!!"
        commands_executed=[]
        exploit_result=""
        self.method_id="ssh_interactive()"
        self.print_Log( "\n\nStarting ssh--INteractive\n\n")
        self.print_Log_info( "\n\nStarting ssh--INteractive\n\n")
        child = pexpect.spawn(args[0]) #root@192.168.179.136's password:
        commands_executed.append(args[0]+"\n")
        # check_list kept for reference only -- the expect() call below uses
        # its own (slightly different) pattern ordering.
        check_list=['.*Permission denied.*', 'root@.* password:.*','.* Connection refused','.*(yes/no).*','[#\$] ',pexpect.TIMEOUT,pexpect.EOF]
        # Index 3 is the first-connection host-key "(yes/no)" question.
        i=child.expect(['.*Permission denied.*', 'root@.* password:.*','.* Connection refused','.*(yes/no).*',pexpect.TIMEOUT,'[#\$]',pexpect.EOF],timeout=15)
        print "THe value oof i is "+str(i)
        if(i==3):
            # Accept the unknown host key, then continue the login check.
            print "Hre-yes/no"
            child.sendline('yes')
            time.sleep(3)
            self.ssh_check_execution(child,commands_executed)
        else:
            # Any other state: try the default password path immediately.
            print "here -->other--->" +str(i)
            self.print_Log_info( "Root is expecting a pssword--supplying default password")
            #self.print_Log( str(i))
            commands_executed.append(str(child.after)+"\n")
            child.sendline('root')
            commands_executed.append('root'+"\n")
            self.ssh_check_execution(child,commands_executed)
    except Exception,e:
        self.print_Error("Closing Child now !!")
        child.close(force=True)
        self.print_Error( "Exception ssh_intaractive "+str(e))
        self.print_Error_info( "Exception ssh_intaractive "+str(e))
def domain_interactive(self,args):
"""
Objective :
The purpose of this method is to check /conduct various checks related to domain server /DNS
service like (zone transfers ,guessing subdomains and etc).Altough the same task could be
achived by passing appropriate aurguments to General_interactive catagory of methods ,but this
method was developed before developemt of General_interactive and thus is customized only for
Domain server checks using interactive mode with virtual console
"""
try:
self.method_id="Domain_interactive()"
self.print_Log("Launching Domain Interactive ::<--- Command :--->"+str(args[0]))
self.print_Log_info("Launching Domain Interactive ::<--- Command :--->"+str(args[0]))
child = pexpect.spawn(args[0]) #root@192.168.179.136's password:
commands_executed=[]
exploits_result=''
commands_executed.append(args[0]+"\n")
i=child.expect(['.*>.*'],timeout=55)#note > is kept with purposefully here,* is not there as it does something like xx->
time.sleep(2)
if (i==0):
self.print_Log( "$"+str(args[1])+"\n" )
#self.print_Log( str(i))
self.Display_msg(child)
child.sendline(args[1])
#commands_executed.append(args[1])
time.sleep(2)
j=child.expect(['Address: .*#.*> ' ,"nslookup: couldn't get.*",".*>.*"],timeout=35) #note this case will work only when the given <host> is in 192.x.x.x notation
commands_executed.append(str(child.after)+"\n")
if(j==0) or (j==2):
#self.print_Log( "Dns lookup Address changed successfully-->"+str(child.before)+str(child.after))
self.Display_msg(child)
commands_executed.append(str(child.before) +" " +str(child.after))
child.sendline(str(args[2]))
commands_executed.append(args[2]+"\n")
time.sleep(2)
k=child.expect(['.*>.*' ,".* SERVFAIL",".*no servers could be reached.*"],timeout=20)
commands_executed.append(str(child.after)+"\n")
exploit_result=str(child.after)+"\n"
else:
exploit_result=str(child.after)+"\n"
self.print_Log("Invalid host address given \n" +str(j))
self.print_Log_info("Invalid host address given \n" +str(args[2])+" J is --> " +str(j))
self.Display_msg(child)
exploit_result=exploit_result+"\n\n""Command Executed :"+commands_executed[0]+"\n"
exploit_result=exploit_result+"Result:\n"+str(commands_executed[len(commands_executed)-1])
self.print_Log("Closing Child now !!")
child.close(force=True)
#self.SaveDetails(''.join(commands_executed),exploit_result)
self.SaveDetails(str(commands_executed),exploit_result)
self.print_Log("Closing Child now !!")
child.close(force=True)
self.print_Log("Exiting Domain interactive !!")
except Exception ,e:
self.print_Error( "Exception Domain Intaractive " +str(e))
self.print_Error_info( "Exception Domain Intaractive " +str(e))
self.print_Error(self.Display_msg(child))
self.print_Error("Closing Child now !!")
child.close(force=True)
def imap_interactive(self,args):
    """
    Objective :
        Conduct login checks against the Imap service by telnetting to its
        port and attempting default credentials (msfadmin/msfadmin).
    To do :
        Right now the username and password passed to this module are
        hard-coded (msfadmin,msfadmin).  Add additional arguments to the
        master json file for this service to take username/password from
        master json, or a name list to brute force login attempts.

    :param args: args[0] = command to spawn (e.g. "telnet <IP> 143")
    """
    try:
        commands_executed=[]
        self.method_id="Imap_interactive"
        exploit_result=''
        self.print_Log("Launching Imap Interactive ::Command-->" +str(args[0]))
        self.print_Log_info("Launching Imap Interactive ::Command-->" +str(args[0]))
        child = pexpect.spawn(args[0]) #Telnet <IP> 143: Connection refused
        commands_executed.append(args[0])
        # i: 0=no route, 1=login prompt, 2=refused, 3=shell prompt (no auth!), 4=timeout
        i=child.expect(['.*: No route to host', '.* login:','.*: Connection refused', '[#\$] ',pexpect.TIMEOUT],timeout=25)
        #self.print_Log(str(i))
        commands_executed.append(str(child.after)+"\n")
        if (i==1):
            self.print_Log( "Telnet is expecting Username -- supplying default Username")
            self.print_Log_info( "Telnet is expecting Username -- supplying default Username")
            #self.print_Log( str(i)
            child.sendline('msfadmin')
            commands_executed.append('msfadmin'+"\n")
            time.sleep(2)
            # j: 0=password prompt, 1=shell prompt, 2='Last login' banner, 3=timeout
            j=child.expect(['.*Password:' ,'[#\$] ','Last login',pexpect.TIMEOUT],timeout=15)
            commands_executed.append(str(child.after))
            if(j==0):
                self.print_Log( "Telnet is expecting Password-- supplying default Password")
                self.print_Log_info( "Telnet is expecting Password-- supplying default Password")
                child.sendline('msfadmin')
                commands_executed.append('msfadmin'+"\n")
                time.sleep(2)
                # k==2 ('Last login:') is the success marker after both credentials
                k=child.expect(['.* login:' ,'[#\$] ','Last login:',pexpect.TIMEOUT],timeout=15)
                commands_executed.append(str(child.after)+"\n")
                if(k==2):
                    self.print_Log( "Login Successful with password root "+str(k))
                    self.print_Log_info( "Login Successful with password root "+str(k))
                    self.Display_msg(child)
                else:
                    self.print_Log( "Login Failed with default username and password "+str(k))
                    self.print_Log_info( "Login Failed with default username and password "+str(k))
                    self.Display_msg(child)
            else:
                self.print_Log( "Weak login -->Only default username was sufficient -- \n" +str(j) )
                self.print_Log_info( "Weak login -->Only default username was sufficient -- \n" +str(j) )
                self.Display_msg(child)
        elif(i==0):
            self.print_Log( "There is no route to host--The host is not up and running !!")
            self.print_Log_info( "There is no route to host--The host is not up and running !!")
            self.Display_msg(child)
        elif(i==2):
            self.print_Log( "The remote host has no service running on the supplied port :"+str(args[0]))
            self.print_Log_info( "The remote host has no service running on the supplied port :"+str(args[0]))
            self.Display_msg(child)
        else:
            self.print_Log( "Week security !!--Telnet can be logged in without any username and password -command :"+str(args[0]))
            self.print_Log_info( "Week security !!--Telnet can be logged in without any username and password -command :"+str(args[0]))
        # Summary: first transcript entry is the launch command, last is the
        # final console exchange.
        exploit_result="Command Executed :"+commands_executed[0]+"\n"
        exploit_result=exploit_result+"\nResult:\n"+str(commands_executed[len(commands_executed)-1])
        self.print_Log("Closing Child now !!")
        child.close(force=True)
        #self.SaveDetails(''.join(commands_executed),exploit_result)
        self.SaveDetails(str(commands_executed),exploit_result)
        self.print_Log("Closing Child now !!")
        self.print_Log_info("Exiting Imap interactive!!")
        # NOTE(review): child.close(force=True) is invoked a second time below
        # on the success path -- appears redundant; confirm before removing.
        child.close(force=True)
    except Exception ,e:
        #
        self.print_Error( "Exception Imap_intaractive " +str(e))
        self.print_Error_info( "Exception Imap_intaractive " +str(e))
        self.print_Error(self.Display_msg(child))
        self.print_Error("Closing Child now !!")
        child.close(force=True)
def time_out_command(self,arg,timeout):
"""
Objective :
The purpose of this method is to accomdate execution of all scripts that can be invoked with a
single command along with host and port info and do not require any further interaction with
the user.The timeout parameter asks the controllor thread to wait for "n" units of time and
even then if the external script does not finish execution then abort the execution of external
script such that other services and scriptsare not starved for execution.
"""
try:
print "Command is---> ::" +str(cmd)
print "hello world !!1"
#cmd ="nslookup google.com"
commands_executed=[]
exploit_result=''
self.print_Log( 'Thread started --with command '+str(cmd))
self.print_Log_info( 'Thread started --with command '+str(cmd))
commands_executed.append(cmd+"\n")
self.process=subprocess.Popen(cmd,shell=True,stderr=subprocess.PIPE,stdout=subprocess.PIPE)#best way to implement -->gives o/p in variable
(output, err)=self.process.communicate() #seunicode characters.sends ouput continuesly.Thus we may not know in which chunk of o/p we would recieve unicode character.Its better to convert all output into utf-8 and then back to ascii with ignoring special characters/unicode characters
result = chardet.detect(output)
charenc = result['encoding']
print "Encoding used is --> : "+str(charenc)
if (charenc is not None):
output=output.decode(charenc).encode('ascii','replace')
err=err.decode(charenc).encode('ascii','replace')
self.print_Log_info( 'Thread finished')
self.general_op=(str(output)+"\n"+str(err)+"\n")
#return (str(output)+"\n"+str(err)+"\n")
except Exception ,ee:
self.print_Error("Exception in gneral :"+str(ee))
self.general_op= "0" +str(ee)
def threadControllor(self,cmd,timeout=100):
"""
Objective :
The purpose of this method is to start a new thread which will inturn launch a new subprocess
and that subprocess will actually execute the external script.Further more the thread will wait
for the subprocess to complete its execution for the time specified in timeout parameter
(in secconds) ,and if teh sub process dors not finish within that time ,the thread kills the
subprocess and recursively kills all its children processes(external scripts)
"""
thread = threading.Thread(target=self.execute_singleLine,args=(cmd,True))
thread.start()
timeout=100
timeout_=int(timeout)
print "Joined and waiting !!!\n\n"
thread.join(timeout_)
print "Timeout\n\n\n"
#self.method_id="Dns_Ferice_Check()"
if thread.is_alive():
self.print_Log( 'Terminating process')
self.print_Log_info( 'Terminating process')
try:
process = psutil.Process(self.process.pid)
for proc in process.children(recursive=True):
self.print_Log_info( "Killing Process with id -->"+str(proc))
try:
proc.kill()
except Exception ,ew:
self.print_Error("Exception while killing :"+str(ew))
self.print_Log_info("Killed Process with id -->"+str(proc))
try:
process = psutil.Process(self.process.pid)
if process:
self.process.kill()
thread.join(60)
#commands_executed.append('Process killed--.timeout')
except:
self.print_Log("Parent Process already KIlled")
except Exception ,ee:
self.print_Error("Exception caught in th-controllor"+str(ee))
def Dns_FierceCheck(self,args):#Aur are send seperately cuz we need to do a reverse dns lookup also
    """
    Objective :
        Run the 'fierce' DNS reconnaissance check for the given host and,
        when the host resolves in reverse, run it again against the reverse
        DNS name.  Arguments are passed separately (not pre-joined) because
        args[1] must also feed the reverse-DNS lookup.

    :param args: args[0]=command prefix, args[1]=host/IP, args[2]=command suffix;
                 the executed command is the straight concatenation of the three.
    Results are persisted through SaveDetails().
    """
    try:
        commands_executed=[]
        exploit_result=''
        self.method_id="Dns_Ferice_Check()"
        self.print_Log("Launching FierceCheck with the given host --> "+str(args[1]))
        self.print_Log_info("Launching FierceCheck with the given host --> "+str(args[1]))
        cmd=str(args[0])+str(args[1])+str(args[2])
        print "command is " +cmd
        commands_executed.append(cmd+"\n")
        # Delegate execution + kill-on-timeout to the thread controller;
        # output lands in self.general_op.
        self.threadControllor(cmd,100)
        # NOTE(review): extra fixed wait on top of threadControllor's own
        # join -- presumably to let output settle; confirm it is needed.
        time.sleep(50)
        print "Not executed till thread is killed"
        #p = commands.getoutput(cmd)
        p=self.general_op
        print "Output ### is -->" +str(p)+"\n\n\n"
        self.print_Log(str(p))
        commands_executed.append(str(p) +"\n")
        host=self.getReverseDns(str(args[1]))
        self.method_id="Dns_Ferice_Check()"
        commands_executed.append("Result --> "+str(host))
        if(host!=-1):
            # Reverse resolution succeeded: repeat fierce against that name.
            self.print_Log("Launching reverse DNS FierceCheck")
            self.print_Log_info("Launching reverse DNS FierceCheck")
            cmd=str(args[0])+str(host)+str(args[2])
            commands_executed.append(cmd)
            self.threadControllor(cmd,100)
            p=self.general_op
            #p = commands.getoutput(cmd)
            commands_executed.append(str(p))
            self.print_Log( str(p))
        else:
            self.print_Log("There is no reverse dns resolution for ip :"+args[1])
            self.print_Log_info("There is no reverse dns resolution for ip :"+args[1])
            commands_executed.append("No reverse dns for ip -->" +args[1])
        # Summary: first transcript entry is the command, last is its output.
        exploit_result="Command Executed :"+commands_executed[0]+"\n"
        exploit_result=exploit_result+"\nResult:\n"+str(commands_executed[len(commands_executed)-1])
        self.SaveDetails(str(commands_executed),exploit_result)
        self.print_Log_info("Exiting Dns_Ferice_check()")
    except Exception ,e:
        self.print_Error("Exception in Dns_FierceCheck "+str(e))
        self.print_Error_info("Exception in Dns_FierceCheck "+str(e))
def Dns_ReconCheck(self,args):
"""
Objective :
The purpose of this method is to check /conduct various checks related to DNS.
It does dns_recon check for the host and also then for reverse dns of the host.
"""
try:
commands_executed=[]
exploit_results=''
host=str(args[0])
self.method_id="DNS_Recon_check()"
self.print_Log("In Dns_recon check")
self.print_Log_info("In Dns_recon check")
commands_executed.append("Dns check : "+str(args[0]))
rev_host=self.getReverseDns(host)
commands_executed.append("Res:"+str(rev_host))
print "Length of args : "+str(len(args))
for i in range (1,len(args)):
#print args[i]
if (("<reversehost>" in args[i])):
self.print_Log_info( "Comamnd to be launched -->" +str(args[i]))
self.print_Log( "Comamnd to be launched -->" +str(args[i]))
if((rev_host !=-1)):
cmd=args[i].replace("<reversehost>",rev_host)
commands_executed.append(cmd+"\n")
print "Updated command --> " +str(cmd)
p = commands.getoutput(cmd)
commands_executed.append(str(p)+"\n")
self.print_Log( str(p)+"\n\n")
else:
cmd=args[i]
commands_executed.append(cmd+"\n")
self.print_Log("Launching Command --> :"+str(cmd))
p = commands.getoutput(cmd)
commands_executed.append(str(p)+"\n")
self.print_Log( str(p)+"\n\n")
exploit_result="Command Executed :"+commands_executed[0]+"\n"
exploit_result=exploit_result+"\nResult :\n"+str(commands_executed[len(commands_executed)-1])
#self.SaveDetails(''.join(commands_executed),exploit_result)
self.SaveDetails(str(commands_executed),exploit_result)
self.print_Log_info("Exiting Dns_recon check")
#
except Exception ,e:
#child.close(force=True)
self.print_Error("Exception dns recon " +str(e))
self.print_Error_info("Exception dns recon " +str(e))
#.print_Error("Closing Child now !!")
def start_sniffing(self,interface,timeout):
    """
    Objective :
        Sniff network packets for the service currently under test using the
        terminal version of wireshark (tshark).  The capture is filtered to
        self.current_port / self.current_host, runs for ``timeout`` seconds
        (tshark '-a duration:'), and is written to a .pcap file named after
        the project id, host and port under self.data_path.
    Todo:
        Right now the interface name is hardcoded to 'eth0' by callers; add
        additional arguments to the master json to accommodate this
        flexibility.

    :param interface: capture interface name passed to tshark -i
    :param timeout: capture duration in seconds
    """
    try:
        self.print_Log("IN Start_sniffing() method")
        self.print_Log_info("IN Start_sniffing() method")
        cmd="tshark -f 'port "+ str(self.current_port) +" and host "+ str(self.current_host) + "' -i "+str(interface)+" -a duration:"+str(timeout)+" -w "+ os.path.join(self.data_path,str(self.project_id)+"_"+str(self.current_host)+"_"+str(self.current_port)+"_capture-output.pcap")
        commands_executed=[]
        exploit_result=''
        commands_executed.append(cmd+"\n")
        self.print_Log("sniffing command is --> "+str(cmd))
        # communicate() blocks until tshark stops on its own (-a duration).
        self.process_sniff=subprocess.Popen(cmd,shell=True,stderr=subprocess.PIPE,stdout=subprocess.PIPE)
        (output, err)=self.process_sniff.communicate()
        commands_executed.append(str(output)+"\n"+str(err)+"\n")
        #commands_executed.append()
        exploit_result="Command Executed :"+commands_executed[0]+"\n"
        exploit_result=exploit_result+"\nResult:\n"+str(commands_executed[len(commands_executed)-1])
        #self.SaveDetails(''.join(commands_executed),exploit_result)
        print "output is " +str(output) + "Error is " +str(err)
        self.print_Log_info("Exiting Start_sniffing() method")
    except Exception ,e:
        self.print_Log("Exception while sniffing !!"+str(e))
        self.print_Log_info("Exception while sniffing !!"+str(e))
def execute_singleLine(self,cmd,result_=False,grep_commands=None):#A good thing is that even when a process is killed the thread resumes and details are saved
    """
    Objective :
        Execute a script that can be invoked by a single command line; no
        timeout is applied here (threadControllor handles that).

    :param cmd: full command line (run with shell=True)
    :param result_: when False, persist results via SaveDetails() and apply
        grep_commands post-processing; when True, just stash the raw
        stdout+stderr in self.general_op for the caller.
    :param grep_commands: optional list of dicts with keys "id",
        "grep_string", "before", "after" -- emulates ``grep -B/-A`` over the
        captured output and saves the filtered result per id.

    Commands whose self.command_id appears in polling_TC.json are run with
    their streams redirected to temp files (so they can be polled while
    running); all others use in-memory pipes.  self.Kill signals that the
    process was killed externally and tags the error text with "Killed@".
    """
    try:
        print "Command is---> ::" +str(cmd)
        print "hello world !!1"
        #cmd ="nslookup google.com"
        commands_executed=[]
        exploit_result=''
        self.print_Log( 'Thread started --with command '+str(cmd))
        self.print_Log_info( 'Thread started --with command '+str(cmd))
        commands_executed.append(cmd+"")
        polling_elements=[]
        elements={}
        output=''
        # Load the list of command ids that must be executed in "polling"
        # mode (file-backed streams) from polling_TC.json next to this file.
        file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)) ,"polling_TC.json")
        with open(file_path,"r+") as f:
            elements=json.loads(f.read())
            e=elements["entries"]
            for item in e:
                polling_elements.append(item)
        print "Entries Read :" +str(polling_elements)
        if self.command_id in polling_elements:
            # Polling mode: stdout/stderr/stdin go through project-scoped
            # temp files so an observer can read partial output.
            out_file=open(str(self.project_id)+"_"+str(self.command_id)+"_output.txt","w")
            err_file=open(str(self.project_id)+"_"+str(self.command_id)+"_error.txt","w")
            in_file=open(str(self.project_id)+"_"+str(self.command_id)+"_input.txt","w")
            in_file.write('n')
            self.process=subprocess.Popen(cmd,shell=True,stderr=err_file,stdout=out_file,stdin=in_file)
            err=''
            try:
                (output, err)=self.process.communicate()
                out_file.close()
                err_file.close()
                in_file.close()
                # Re-read the captured streams from disk, then clean up.
                f=open(str(self.project_id)+"_"+str(self.command_id)+"_output.txt","r")
                e=open(str(self.project_id)+"_"+str(self.command_id)+"_error.txt","r")
                output=f.read()
                err=e.read()
                f.close()
                e.close()
                os.remove(str(self.project_id)+"_"+str(self.command_id)+"_error.txt")
                os.remove(str(self.project_id)+"_"+str(self.command_id)+"_output.txt")
                os.remove(str(self.project_id)+"_"+str(self.command_id)+"_input.txt")
                if self.Kill:
                    self.Kill=False
                    err=err+"Killed@"
            except Exception ,exx:
                # Process was likely killed mid-run; still record the kill tag.
                if self.Kill:
                    self.Kill=False
                    err=err+"Killed@"
        else:
            # In-memory mode: stderr merged into stdout.
            self.process=subprocess.Popen(cmd,shell=True,stderr=subprocess.STDOUT,stdout=subprocess.PIPE)#best way to implement -->gives o/p in variable
            (output, err)=self.process.communicate() #seunicode characters.sends ouput continuesly.Thus we may not know in which chunk of o/p we would recieve unicode character.Its better to convert all output into utf-8 and then back to ascii with ignoring special characters/unicode characters
            # Normalize whatever encoding the tool emitted down to ascii.
            result = chardet.detect(output)
            charenc = result['encoding']
            #print "Encoding used is --> : "+str(charenc)
            if (charenc is not None):
                output=output.decode(charenc).encode('ascii','replace')
                if err is not None:
                    err=err.decode(charenc).encode('ascii','replace')
                else:
                    err=''
            if self.Kill:
                self.Kill=False
                err=err+"Killed@"
        commands_executed.append(str(output)+"\n"+str(err)+"\n")
        parent_output=str(output)
        exploit_result="Command Executed :"+commands_executed[0]+"\n"
        exploit_result=exploit_result+"\nResult"+str(commands_executed[len(commands_executed)-1])
        commands_executed[len(commands_executed)-1]="\nEnd"
        self.print_Log( 'Thread finished')
        self.print_Log_info( 'Thread finished')
        if(result_==False):
            #print "Execution result : "+str(exploit_result)
            #print "Hello"
            self.SaveDetails((str(commands_executed)),exploit_result)
            var=0
            if grep_commands !=None :
                # Post-process the captured output once per grep spec
                # (var guard currently lets every dict through since var
                # is never incremented).
                for dic in grep_commands:
                    if var <1:
                        #var =1
                        try:#for kk ,vv in dic.iteritems():
                            #var=1
                            kk=dic["id"]
                            vv=dic["grep_string"]
                            commands_executed=[]
                            self.command_id=kk
                            command=cmd +" | "+ str(vv)
                            commands_executed.append(command+"\n")
                            to_execute="echo "+"'"+str(parent_output) +"'" +"|" +str(vv)
                            split_output=parent_output.split("\n")
                            counter_var=0
                            result="None"
                            output=''
                            # Emulate grep -B<before>/-A<after>: find the first
                            # line containing the pattern and keep context.
                            for op in split_output:
                                if vv in op:
                                    #print "Found at counter val :"+str(counter_var)
                                    output=op
                                    after_val=int(dic["after"])
                                    before_val=int(dic["before"])
                                    grep_before=''
                                    if before_val !=0:
                                        before_counter=counter_var-before_val
                                        #print "Before counter is : "+str(before_counter)
                                        for i in range(before_counter,counter_var):
                                            grep_before=grep_before +"\n"+split_output[i]
                                    grep_after=''
                                    if after_val !=0:
                                        after_counter=counter_var+after_val
                                        for i in range(counter_var +1 ,after_counter):
                                            grep_after=grep_after +"\n"+split_output[i]
                                    output=grep_before +"\n"+ output +"\n"+ grep_after
                                    break
                                counter_var=counter_var + 1
                            result=chardet.detect(output)
                            charenc = result['encoding']
                            if (charenc is not None):
                                output=output.decode(charenc).encode('ascii','replace')
                                if err is not None:
                                    err=err.decode(charenc).encode('ascii','replace')
                                else:
                                    err=''
                            if self.Kill:
                                self.Kill=False
                                err=err+"Killed@"
                            if err==None:
                                err=""
                            commands_executed.append(str(output)+"\n"+str(err)+"\n")
                            exploit_result="Command Executed :"+commands_executed[0]+"\n"
                            exploit_result=exploit_result+"\nResult\n"+str(commands_executed[len(commands_executed)-1])
                            commands_executed[len(commands_executed)-1]="\nEnd"
                            self.SaveDetails((str(commands_executed)),exploit_result)
                        except Exception ,exc:
                            print "INternal Exception " +str(exc)
                            self.print_Error( "Inner Exception - " +str(exc))
                            self.print_Error_info( "Inner Exception" +str(exc))
        else:
            # Caller wants the raw output back (threadControllor path).
            self.general_op=(str(output)+"\n"+str(err)+"\n")
            #return str(str(output)+"\n"+str(err)+"\n")
    except Exception ,e :
        print "EXception " +str(e)
        self.print_Error( "Exception in thread " +str(e))
        self.print_Error_info( "Exception in thread " +str(e))
def test_ssl(self,args):
    """
    Objective :
        Execute the test_ssl.py script, which detects the various ciphers in
        use.  The script's interactive "Proceed ?" prompts could not be
        accommodated by the General_interactive category of methods, hence
        this custom driver.

    :param args: args[0] = expect timeout in seconds, args[1] = command to spawn.
    Results are persisted through SaveDetails().
    """
    try:
        self.method_id="Test_ssl"
        self.print_Log_info("Starting Test ssl")
        cmd=args[1]
        to=args[0]
        print( 'Thread started --with command '+str(cmd))
        commands_executed=[]
        exploit_result=''
        commands_executed.append(cmd+"\n")
        child = pexpect.spawn(cmd)
        # i: 0='Proceed ?' prompt, 1=socket failure, 2=timeout, 3=EOF
        i=child.expect(['.*Proceed ?.*','.* Unable to open a socket to .*',pexpect.TIMEOUT,pexpect.EOF],timeout=int(to))
        if (i==0):
            # Confirm the prompt and wait for the next state.
            print "\n\nReached at here"+str(child.after)
            child.sendline('yes')
            #commands_executed.append('yes')
            j=child.expect(['.*Proceed ?.*','.* Unable to open a socket to .*',pexpect.TIMEOUT,pexpect.EOF],timeout=int(to))
            print "J is --" +str(j)+"\n\n\n\n"+str(child.before)+" "+str(child.after)+"\n\n\n\n\n"
            commands_executed.append(str(child.before)+str(child.after))
        if(i==2):
            commands_executed.append(str(child.after)+"Time out -It seems host is down")
        if(i==3):
            commands_executed.append(str(child.before)+str(child.after)+"End of file -")
        exploit_result="Command Executed :"+commands_executed[0]+"\n"
        exploit_result=exploit_result+"\nResult\n"+str(commands_executed[len(commands_executed)-1])
        self.SaveDetails(str(commands_executed),exploit_result)
        self.print_Log_info("Stopping Test ssl")
        child.close(force=True)
    except Exception ,e :
        self.print_Error("Exception general interactive " +str(e))
        self.print_Error_info("Exception general interactive " +str(e))
        self.print_Error("Closing Child now !!")
        child.close(force=True)
def general_interactive_special_char(self,args):
    """
    Objective :
        Execute interactive scripts or kali tools that emit special
        characters which must be decoded before the next interaction command
        is sent.  Variant of general_interactive() that uses raw send()+'\\r'
        instead of sendline().

    :param args: args[0]=timeout (recorded but the expect below uses 120),
        args[1]=command to spawn; then repeating pairs:
        args[i]   = list whose first element is a comma-separated string of
                    "acceptable" pattern indices, followed by the expect
                    patterns themselves;
        args[i+1] = the text to send when the matched index is acceptable.
    Results are persisted through SaveDetails().
    """
    try:
        self.method_id="general_interactive_special_char()"
        self.print_Log("Starting Special char-Interactive Session with command --> "+str(args[1]) +" and timeout " +str(args[0]))
        cmd=args[1]
        timeout=args[0]
        child=pexpect.spawn(cmd)
        commands_executed=[]
        commands_executed.append(cmd+"\n")
        exploit_result=''
        self.print_Log_info("Starting Special char-Interactive Session with command --> "+str(args[1]) +" and timeout " +str(args[0]))
        for i in range(2,len(args),2):
            #print "Commands are --" +str(args[i]) + " " +str(args[i+1])
            #child.sendline(args[i])
            #time.sleep(2)
            arg_list=[]
            check_list=[]
            #<<<<<<< HEAD
            arg_list=list(args[i])
            #=======
            #arg_list=args[i]
            #>>>>>>> b6b8e9ee72399e3d683c7808a85d7f1c8ce3cbf6
            # First element carries the acceptable indices; the rest are the
            # expect patterns.  TIMEOUT/EOF are appended as sentinel patterns
            # whose indices are deliberately NOT in check_list.
            check_list=arg_list.pop(0).split(',')
            count=len(arg_list)-1
            arg_list.append(pexpect.TIMEOUT)
            check_list.append(str(count+1))
            arg_list.append(pexpect.EOF)
            check_list.append(str(count+2))
            self.print_Log("Arg list is --> "+str(arg_list))
            commands_executed.append(str(arg_list))
            self.print_Log("check list is --> "+str(check_list))
            print "Waiting for 60 sec"
            #<<<<<<< HEAD
            j=child.expect(arg_list,120)
            #=======
            #j=child.expect(arg_list,60)
            #>>>>>>> b6b8e9ee72399e3d683c7808a85d7f1c8ce3cbf6
            print "The value of j is :"+str(j)
            print str(child.after)+"\n\n"+str(child.before)
            #commands_executed.append(str(child.after)+"\n")
            commands_executed.append("\n"+str(child.before)+"\n"+str(child.after))
            time.sleep(2)
            print "J is "+str(j) +"\n and i is " +str(i)
            if(str(j) in check_list):
                self.print_Log("Before :"+str(child.before) + "\n" + "After : "+str(child.after)+" j is "+str(j) )
                if((i+1)<len(args)): # i can never be == len (args) as i is an even number and len args wil always be odd
                    # send()+'\r' instead of sendline(): keeps control over
                    # the exact bytes written when special chars are in play.
                    child.send(args[i+1])
                    child.send('\r')
                    commands_executed.append(args[i+1]+"\n")
                    self.print_Log("Just sent command --> "+str(args[i+1]))
                    time.sleep(2)
                    continue;
            else:
                self.print_Log("Results not as expected --> see aurguments " +str(j) +"\n"+str(child.before) + " " + str(child.after))
                self.print_Log_info("Results not as expected --> see aurguments ")
                break
        #self.print_Log("Closing Child !")
        exploit_result="Command Executed :"+commands_executed[0]+"\n"
        exploit_result=exploit_result+"\nResult:\n"+str(commands_executed[len(commands_executed)-1])
        #self.SaveDetails(''.join(commands_executed),exploit_result)
        self.SaveDetails(str(commands_executed),exploit_result)
        self.print_Log("Before : "+str(child.before)+"After : "+str(child.after))
        self.print_Log("Closing Child now !!")
        child.sendcontrol('z')
        child.sendcontrol('c')
        child.close(force=True)
        self.print_Log_info("Exiting general Interactive with special char()")
    except Exception ,e:
        self.print_Error("Exception general interactive " +str(e))
        self.print_Error_info("Exception general interactive " +str(e))
        self.print_Error("Closing Child now !!")
        child.close(force=True)
def general_interactive(self,args):
"""
Objective :
This is a generic and important method and the purpose of this method is to accomdate most of
the interactive commands /tools that require user interaction.It automates the whole process
and there are various scripts that are executed using this method by passing appripriate
script aurguments to this method.
"""
try:
print "Inside general interactive"
self.method_id="General_Interactive()"
self.print_Log("Starting Interactive Session with command --> "+str(args[1]) +" and timeout " +str(args[0]))
self.print_Log_info("Starting Interactive Session with command --> "+str(args[1]) +" and timeout " +str(args[0]))
cmd=args[1]
timeout=args[0]
child=pexpect.spawn(cmd)
commands_executed=[]
commands_executed.append(cmd+"\n")
exploit_result=''
print "here"
for i in range(2,len(args),2):
#print "Commands are --" +str(args[i]) + " " +str(args[i+1])
#child.sendline(args[i])
#time.sleep(2)
arg_list=[]
check_list=[]
#<<<<<<< HEAD
arg_list=list(args[i])
#print "Initial arg list is ;"+str(arg_list)
check_list=arg_list.pop(0).split(',')
#print "Initial check list is :::: " +str(check_list)
#=======
#arg_list=args[i]
#check_list=arg_list.pop(0).split(',')
#>>>>>>> b6b8e9ee72399e3d683c7808a85d7f1c8ce3cbf6
count=len(arg_list)-1
arg_list.append(pexpect.TIMEOUT)
check_list.append(str(count+1))
arg_list.append(pexpect.EOF)
check_list.append(str(count+2))
self.print_Log("Arg list is --> "+str(arg_list))
#<<<<<<< HEAD
#commands_executed.append("\nThe console would produce a pattern similar to following :\n "+str(arg_list)+"\n")
self.print_Log("check list is --> "+str(check_list))
print "Waiting for 60 sec"
j=child.expect(arg_list,120)
commands_executed.append(str(str(child.before)+"\n\nConsole is Now expecting :"+str(arg_list[j])+"\n\n\nActual Output by console \n:"+str(child.after)+"\n\n").replace("<class 'pexpect.EOF'>","Console Ended").replace("<class 'pexpect.exceptions.TIMEOUT'>","Time out").replace("<class 'pexpect.exceptions.EOF'>","Console Ended"))#
#=======
#commands_executed.append("\nThe console would produce a pattern similar to following :\n "+str(arg_list)+"\n")
#self.print_Log("check list is --> "+str(check_list))
#print "Waiting for 60 sec"
#j=child.expect(arg_list,120)
#commands_executed.append(str("\nThe index of item that console produced is :"+str(j)+"\n\n"+str(child.before)+"\n:"+str(child.after)+"\n\n").replace("<class 'pexpect.EOF'>","Console Ended").replace("<class 'pexpect.TIMEOUT'>","Time out"))
#>>>>>>> b6b8e9ee72399e3d683c7808a85d7f1c8ce3cbf6
time.sleep(4)
print "J is "+str(j) +"\n and i is " +str(i)
if(str(j) in check_list):
self.print_Log("Before :"+str(child.before) + "\n" + "After : "+str(child.after)+" j is "+str(j) )
if((i+1)<len(args)): # i can never be == len (args) as i is an even number and len args wil always be odd
child.sendline(args[i+1])
#<<<<<<< HEAD
commands_executed.append("Writing on console : "+args[i+1]+"\n")
#=======
#commands_executed.append(args[i+1]+"\n")
#>>>>>>> b6b8e9ee72399e3d683c7808a85d7f1c8ce3cbf6
self.print_Log("Just sent command --> "+str(args[i+1]))
time.sleep(2)
continue;
else:
#<<<<<<< HEAD
self.print_Log("Results not as expected --> see aurguments " +str(j) +str(arg_list[j]) +"\n"+str(child.before) + " " + str(child.after))
#=======
#self.print_Log("Results not as expected --> see aurguments " +str(j) +"\n"+str(child.before) + " " + str(child.after))
#>>>>>>> b6b8e9ee72399e3d683c7808a85d7f1c8ce3cbf6
self.print_Log_info("Results not as expected --> see aurguments ")
break
#self.print_Log("Closing Child !")
exploit_result="Command Executed :"+commands_executed[0]+"\n"
exploit_result=exploit_result+"\nOutput\n"+str(commands_executed[len(commands_executed)-1])
#self.SaveDetails(''.join(commands_executed),exploit_result)
self.SaveDetails(str(commands_executed),exploit_result)
self.print_Log("Before : "+str(child.before)+"After : "+str(child.after))
self.print_Log("Closing Child now !!")
child.sendcontrol('z')
child.sendcontrol('c')
child.close(force=True)
self.print_Log_info("Exiting General_interactive()")
except Exception ,e:
self.print_Error("Exception general interactive " +str(e))
self.print_Error_info("Exception general interactive " +str(e))
self.print_Error("Closing Child now !!")
child.close(force=True)
    def generalCommands_Tout_Sniff(self,arg,interactive=False): #note see the methods which inoke other methods
        """
        Objective :
        The purpose of this method is to start a traffic sniffer and immidiately after that execute the
        external script which would do vulnerability scanning on given service and the sniffer would
        capture traffic in the background.The moment the external script would finish execution the
        method would stop the sniffer.Important point is thet ,the external script is executed with
        timeout value.If the script does not complete within given time frame both sniffer and external process would be stopped

        :param arg: argument bundle forwarded unchanged to the scan runner
            (for the non-interactive path arg[0] is the timeout and arg[1]
            the command -- see singleLineCommands_Timeout).
        :param interactive: when True the scan is driven through the
            interactive pexpect runner (general_interactive) instead of the
            one-shot runner.
        """
        try:
            commands_executed=[]
            exploit_result=''
            self.method_id="General_Commands_Timeout_sniff()"
            self.print_Log("Starting single line + Sniff")
            self.print_Log_info("Starting single line + Sniff")
            commands_executed.append('starting sniffing')
            # Sniffer runs on a separate thread so the scan below can proceed
            # while traffic is captured. NOTE(review): the commented-out
            # conflict markers below are leftovers of an unresolved merge.
            # Args ("eth0","200") are consumed by start_sniffing -- presumably
            # interface name and a capture limit; TODO confirm.
            #<<<<<<< HEAD
            thread = threading.Thread(target=self.start_sniffing,args=("eth0","200",))
            #=======
            #thread = threading.Thread(target=self.start_sniffing,args=("eth0","100",))
            #>>>>>>> b6b8e9ee72399e3d683c7808a85d7f1c8ce3cbf6
            thread.start()
            # Give the sniffer a head start before firing the scan.
            time.sleep(3)
            if (interactive==False):
                self.singleLineCommands_Timeout(arg) #this will act as join here and next line will execute after packets are sent
            else:
                self.general_interactive(arg)
            # The callee overwrote method_id; restore it for log attribution.
            self.method_id="General_Commands_Timeout_sniff()"
            if thread.is_alive():
                # Scan finished but the sniffer is still capturing: tear the
                # sniffer process tree down (children first, then the parent).
                self.print_Log('Terminating Sniffing process')
                self.print_Log_info('Terminating Sniffing process')
                try:
                    process = psutil.Process(self.process_sniff.pid)
                    for proc in process.children(recursive=True):
                        print "Killing Process with id -->"+str(proc)
                        try:
                            proc.kill()
                        except Exception ,ew:
                            print "Exception while killing :"+str(ew)
                    #self.process.terminate()
                    try:
                        # Re-query: the parent may already be gone by now.
                        process = psutil.Process(self.process_sniff.pid)
                        if process:
                            self.process_sniff.kill()
                            thread.join(60) #wait only for 1 minute
                            print "Kill result is --> "+str(self.process_sniff.returncode)
                    except:
                        self.print_Log("Parent process already killed:")
                    commands_executed.append('Finished sniffing-->Details are in pcap file')
                    exploit_result="Command Executed :"+commands_executed[0]+"\n"
                    exploit_result=exploit_result+"\nResult:\n"+str(commands_executed[len(commands_executed)-1])
                    #self.SaveDetails(''.join(commands_executed),exploit_result)
                except Exception ,ee:
                    self.print_Error("Exception in killing process --> "+str(self.process_sniff.returncode) +str(ee))
                    self.print_Error_info( "Exception in killing process --> "+str(self.process_sniff.returncode) +str(ee))
            self.print_Log_info("Exiting general_commands_tout_sniff()")
        except Exception ,e:
            self.print_Error("Exception in SingleLineCommands_Tout" +str(e))
            self.print_Error_info("Exception in SingleLineCommands_Tout" +str(e))
    def singleLineCommands_Timeout(self,arg,grep_commands=None): #see in this case its not necessaer to update result since it would be uodated by the other mrth
        """
        Objective :
        The purpose of this method is to execute scripts that can be invoked with single line command
        and it internally invokes the earlier discussed single_line command method.

        :param arg: arg[0] is the timeout in seconds, arg[1] the command line
            handed to execute_singleLine.
        :param grep_commands: optional filter argument forwarded to
            execute_singleLine; None means run the command unfiltered.
        """
        self.method_id="Execute_Single_line_timeout()"
        self.print_Log("In method SingleLineCommands_Timeout()")
        self.print_Log_info("In method SingleLineCommands_Timeout()")
        commands_executed=[]
        commands_executed.append(arg[1])
        # Run the command on a worker thread so we can enforce the timeout
        # with thread.join() below.
        if grep_commands ==None:
            thread = threading.Thread(target=self.execute_singleLine,args=(arg[1],))
        else:
            print "In else with grep as true "
            thread = threading.Thread(target=self.execute_singleLine,args=(arg[1],False,grep_commands))
        thread.start()
        timeout=int(arg[0])
        thread.join(timeout)
        # The callee overwrote method_id; restore it for log attribution.
        self.method_id="Execute_Single_line_timeout()"
        if thread.is_alive():
            # Timed out: kill the spawned process tree (children first), then
            # the parent the worker thread is blocked on.
            self.print_Log( 'Terminating process')
            self.print_Log_info( 'Terminating process')
            try:
                process = psutil.Process(self.process.pid)
                for proc in process.children(recursive=True):
                    self.print_Log_info( "Killing Process with id -->"+str(proc))
                    try:
                        # self.Kill signals the worker that this shutdown is
                        # deliberate -- presumably checked by execute_singleLine.
                        self.Kill=True
                        proc.kill()
                        time.sleep(1)
                    except Exception ,ew:
                        print "Exception while killing :"+str(ew)
                    self.print_Log_info( "Killed Process with id -->"+str(proc))
                #self.process.terminate()
                try:
                    # Re-query: the parent may already be gone by now.
                    process = psutil.Process(self.process.pid)
                    if process:
                        self.Kill=True
                        self.process.kill()
                        thread.join(60)#wait for 1 minute ,if we dont set limit here the remaining code would halt
                        commands_executed.append('Process killed--.timeout')
                except:
                    self.print_Log("Parent Process already KIlled")
                self.print_Log( "Kill result is --> "+str(self.process.returncode))
                self.print_Log_info( "Kill result is --> "+str(self.process.returncode))
                exploit_result="Command Executed :"+commands_executed[0]+"\n"
                exploit_result=exploit_result+"\nResult:\n"+str(commands_executed[len(commands_executed)-1])
                #self.SaveDetails(''.join(commands_executed),exploit_result)
            except Exception ,ee:
                self.print_Error( "Exception in killing process --> "+str(self.process.returncode) +str(ee))
                self.print_Error_info( "Exception in killing process --> "+str(self.process.returncode) +str(ee))
def getHost(self,result): #no need to put results here--its intermediate results
"""
Objective :
The purpose of this method is to parse the output from nslookup and return host from ip.
"""
index=result.find("name =")
if(index !=-1):
index=index+6
actual_host=result[index:]
actual_host=actual_host.lstrip()
index_last=actual_host.find("\n")
if(index_last!=-1):
actual_host=actual_host.replace("\n","")
actual_host=actual_host[:index_last-2]
actual_host.rstrip()
print "Actual host is "+actual_host
return actual_host
else:
print "Actual host is "+actual_host
return actual_host
else:
print "Name not found !!"
print str(result)
return -1
    def getReverseDns(self,host):#ret again intermediate results
        """
        Objective :
        The purpose of this method is to use nslookup to transform ip->domain name

        :param host: the ip (or name) passed verbatim to ``nslookup``.
        :return: the resolved host name string on success, -1 on failure.
        """
        try:
            #host='google.com'
            self.method_id="getReverseDns()"
            self.print_Log( "Dns reverse lookup")
            self.print_Log_info( "Dns reverse lookup")
            commands_executed=[]
            exploit_result=''
            self.print_Log_info("Recieved host is : "+str(host))
            # Drive nslookup via pexpect; expect() returns the index of the
            # first pattern that matches, which selects the branch below.
            child = pexpect.spawn("nslookup "+str(host))
            commands_executed.append('nslookup ' +str(host))
            i=child.expect(['Address: .*',".* server can't find .*",".* name = .*",pexpect.EOF,pexpect.TIMEOUT],timeout=15)
            commands_executed.append(str(child.after))
            self.print_Log(str(i))
            if (i==0):
                # Matched 'Address: .*' -- clean answer; host follows ':'.
                self.print_Log( "Reverse dns successful")
                self.print_Log_info( "Reverse dns successful")
                self.Display_msg(child)
                result=str(child.after)
                index=result.find(":")
                index=index+1
                actual_host=result[index:]
                actual_host=actual_host.lstrip()
                self.print_Log("Actual host is "+actual_host)
                self.print_Log_info("Actual host is "+actual_host)
                self.print_Log("Closing Child now !!")
                self.print_Log_info( "Exiting getReverseDns()")
                child.close(force=True)
                return actual_host
                #self.print_Log( str(i)
            elif (i==2):
                # Matched '.* name = .*' -- delegate line parsing to getHost().
                self.print_Log( "Reverse dns partially successful")
                self.print_Log_info( "Reverse dns partially successful")
                self.print_Log_info( "Exiting getReverseDns()")
                result=str(child.after)
                actual_host=self.getHost(result)
                self.print_Log("Closing Child now !!")
                child.close(force=True)
                return actual_host
            elif(i==3):
                # EOF before any pattern matched: the accumulated output is
                # in child.before; try to salvage a host name from it.
                self.print_Log( " (2)-->Reverse dns Timed out")
                self.print_Log_info( " (2)-->Reverse dns Timed out")
                result=str(child.before)
                actual_host=self.getHost(result)
                self.print_Log_info( "Exiting getReverseDns()")
                self.print_Log("Closing Child now !!")
                child.close(force=True)
                return actual_host
            else:
                # i==1 (server can't find) or i==4 (TIMEOUT): lookup failed.
                self.print_Log( "Reverse dns Failed")
                self.print_Log_info( "Reverse dns Failed")
                self.print_Log_info( "Exiting getReverseDns()")
                self.Display_msg(child)
                self.print_Log("Closing Child now !!")
                child.close(force=True)
                return -1
        except pexpect.TIMEOUT,e:
            self.print_Error("Time out exception in pexpect !!"+str(e))
            self.print_Error_info("Time out exception in pexpect !!"+str(e))
            self.print_Error("Closing Child now !!")
            child.close(force=True)
            return -1
            #pass
        except pexpect.EOF,e:
            self.print_Error("EOF exception in pexpect !!" +str(e))
            self.print_Error_info("EOF exception in pexpect !!" +str(e))
            self.print_Error("Closing Child now !!")
            child.close(force=True)
            return -1
        except Exception ,e:
            self.print_Error("Exception in Reverse Dns !!"+str(e))
            self.print_Error_info("Exception in Reverse Dns !!"+str(e))
            self.print_Error(self.Display_msg(child))
            self.print_Log("Closing Child now !!")
            child.close(force=True)
            return -1
    def singleLineCommands(self,args):
        """
        Objective :
        The purpose of this method is to execute scripts that can be invoked with single line command
        and it internally invokes the earlier discussed single_line command method without timeout.

        :param args: args[0] is the shell command line to run; it is executed
            synchronously with no timeout via commands.getoutput (Python 2).
        """
        try:
            commands_executed=[]
            exploit_result=''
            self.method_id="SingleLineCommands()"
            self.print_Log( "\nInvoking Single line command -->Title-->" +str(args[0])+"\n")
            self.print_Log_info( "\nInvoking Single line command -->Title-->" +str(args[0])+"\n")
            cmd=args[0]
            commands_executed.append(cmd+"\n")
            # Blocking shell execution; captures combined output as a string.
            p = commands.getoutput(cmd)
            commands_executed.append(str(p))
            exploit_result="Command Executed :"+commands_executed[0]+"\n"
            exploit_result=exploit_result+"\nResult\n"+str(commands_executed[len(commands_executed)-1])
            #self.SaveDetails(''.join(commands_executed),exploit_result)
            # Persist the command transcript and its result.
            self.SaveDetails(str(commands_executed),exploit_result)
            self.print_Log( str(p))
            self.print_Log( "\nExiting Single line command -->Title-->" +str(args[0])+"\n")
        except Exception ,e:
            self.print_Error( "Exception single Line "+ str(e))
            self.print_Error_info( "Exception single Line "+ str(e))
    def http_based(self,args):
        """
        Objective :
        The purpose of this method is to execute http based checks that request external urls using
        python get request to fetch data.

        :param args: args[0] is the URL fetched with requests.get; the
            response body is written to ``response.html`` in the working
            directory and the transcript is persisted via SaveDetails.
        """
        try:
            commands_executed=[]
            exploit_result=''
            self.method_id="Http_based()"
            self.print_Log("Inside HttpBased()")
            self.print_Log_info("Inside HttpBased()")
            self.print_Log("Args are : "+str(args[0]))
            commands_executed.append('requests.get('+str(args[0])+')')
            response = requests.get(str(args[0]))
            self.print_Log( "Status code is : "+str(response.status_code))
            self.print_Log_info( "Status code is : "+str(response.status_code))
            html = response.text
            commands_executed.append("http-response" +str(html))
            # Dump the raw page for offline inspection (overwrites any
            # previous run's file).
            file_ = open('response.html', 'w+')
            file_.write(html.encode('utf8'))
            file_.close()
            exploit_result="Command Executed :"+commands_executed[0]+"\n"
            exploit_result=exploit_result+"\nResult\n"+str(commands_executed[len(commands_executed)-1])
            #exploit_result="\nResult"+exploit_result+str(commands_executed[len(commands_executed)-1])
            #self.SaveDetails(''.join(commands_executed),exploit_result)
            self.SaveDetails(str(commands_executed),exploit_result)
            self.print_Log_info("Exiting HttpBased()")
        except Exception ,ee:
            self.print_Error( "Exception Http_based " +str(ee))
            self.print_Error_info( "Exception Http_based " +str(ee))
| [
37811,
198,
31,
13838,
197,
197,
25,
37,
333,
80,
272,
11356,
198,
31,
15333,
197,
197,
25,
38916,
80,
962,
7637,
2919,
31,
14816,
13,
785,
198,
31,
10430,
220,
197,
197,
25,
1065,
14,
1270,
14,
5304,
197,
197,
197,
198,
197,
19... | 2.441616 | 29,383 |
# coding: utf-8
"""
OpsGenie REST API
OpsGenie OpenAPI Specification # noqa: E501
OpenAPI spec version: 2.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from opsgenie_swagger.api_client import ApiClient
class PolicyApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def change_alert_policy_order(self, policy_id, body, **kwargs): # noqa: E501
"""Change Alert Policy Order # noqa: E501
Change execution order of the alert policy with given id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.change_alert_policy_order(policy_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str policy_id: Id of the requested policy (required)
:param ChangeAlertPolicyOrderPayload body: Change order payload (required)
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.change_alert_policy_order_with_http_info(policy_id, body, **kwargs) # noqa: E501
else:
(data) = self.change_alert_policy_order_with_http_info(policy_id, body, **kwargs) # noqa: E501
return data
def change_alert_policy_order_with_http_info(self, policy_id, body, **kwargs): # noqa: E501
"""Change Alert Policy Order # noqa: E501
Change execution order of the alert policy with given id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.change_alert_policy_order_with_http_info(policy_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str policy_id: Id of the requested policy (required)
:param ChangeAlertPolicyOrderPayload body: Change order payload (required)
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['policy_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method change_alert_policy_order" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'policy_id' is set
if ('policy_id' not in params or
params['policy_id'] is None):
raise ValueError("Missing the required parameter `policy_id` when calling `change_alert_policy_order`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `change_alert_policy_order`") # noqa: E501
collection_formats = {}
path_params = {}
if 'policy_id' in params:
path_params['policyId'] = params['policy_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v1/policies/{policyId}/change-order', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SuccessResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_alert_policy(self, body, **kwargs): # noqa: E501
"""Create Alert Policy # noqa: E501
Creates a new alert policy # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_alert_policy(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AlertPolicy body: Payload of created alert policy (required)
:return: CreateAlertPolicyResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_alert_policy_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.create_alert_policy_with_http_info(body, **kwargs) # noqa: E501
return data
def create_alert_policy_with_http_info(self, body, **kwargs): # noqa: E501
"""Create Alert Policy # noqa: E501
Creates a new alert policy # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_alert_policy_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AlertPolicy body: Payload of created alert policy (required)
:return: CreateAlertPolicyResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_alert_policy" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_alert_policy`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v1/policies', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateAlertPolicyResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_alert_policy(self, policy_id, **kwargs): # noqa: E501
"""Delete Alert Policy # noqa: E501
Delete alert policy with given id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_alert_policy(policy_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str policy_id: Id of the requested policy (required)
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_alert_policy_with_http_info(policy_id, **kwargs) # noqa: E501
else:
(data) = self.delete_alert_policy_with_http_info(policy_id, **kwargs) # noqa: E501
return data
def delete_alert_policy_with_http_info(self, policy_id, **kwargs): # noqa: E501
"""Delete Alert Policy # noqa: E501
Delete alert policy with given id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_alert_policy_with_http_info(policy_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str policy_id: Id of the requested policy (required)
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['policy_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_alert_policy" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'policy_id' is set
if ('policy_id' not in params or
params['policy_id'] is None):
raise ValueError("Missing the required parameter `policy_id` when calling `delete_alert_policy`") # noqa: E501
collection_formats = {}
path_params = {}
if 'policy_id' in params:
path_params['policyId'] = params['policy_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v1/policies/{policyId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SuccessResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def disable_alert_policy(self, policy_id, **kwargs): # noqa: E501
"""Disable Alert Policy # noqa: E501
Disable the alert policy with given id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.disable_alert_policy(policy_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str policy_id: Id of the requested policy (required)
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.disable_alert_policy_with_http_info(policy_id, **kwargs) # noqa: E501
else:
(data) = self.disable_alert_policy_with_http_info(policy_id, **kwargs) # noqa: E501
return data
def disable_alert_policy_with_http_info(self, policy_id, **kwargs): # noqa: E501
"""Disable Alert Policy # noqa: E501
Disable the alert policy with given id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.disable_alert_policy_with_http_info(policy_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str policy_id: Id of the requested policy (required)
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['policy_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method disable_alert_policy" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'policy_id' is set
if ('policy_id' not in params or
params['policy_id'] is None):
raise ValueError("Missing the required parameter `policy_id` when calling `disable_alert_policy`") # noqa: E501
collection_formats = {}
path_params = {}
if 'policy_id' in params:
path_params['policyId'] = params['policy_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v1/policies/{policyId}/disable', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SuccessResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def enable_alert_policy(self, policy_id, **kwargs): # noqa: E501
"""Enable Alert Policy # noqa: E501
Enable the alert policy with given id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.enable_alert_policy(policy_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str policy_id: Id of the requested policy (required)
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.enable_alert_policy_with_http_info(policy_id, **kwargs) # noqa: E501
else:
(data) = self.enable_alert_policy_with_http_info(policy_id, **kwargs) # noqa: E501
return data
def enable_alert_policy_with_http_info(self, policy_id, **kwargs): # noqa: E501
"""Enable Alert Policy # noqa: E501
Enable the alert policy with given id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.enable_alert_policy_with_http_info(policy_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str policy_id: Id of the requested policy (required)
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['policy_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method enable_alert_policy" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'policy_id' is set
if ('policy_id' not in params or
params['policy_id'] is None):
raise ValueError("Missing the required parameter `policy_id` when calling `enable_alert_policy`") # noqa: E501
collection_formats = {}
path_params = {}
if 'policy_id' in params:
path_params['policyId'] = params['policy_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v1/policies/{policyId}/enable', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SuccessResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_alert_policy(self, policy_id, **kwargs): # noqa: E501
"""Get Alert Policy # noqa: E501
Used to get details of a single policy with id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_alert_policy(policy_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str policy_id: Id of the requested policy (required)
:return: GetAlertPolicyResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_alert_policy_with_http_info(policy_id, **kwargs) # noqa: E501
else:
(data) = self.get_alert_policy_with_http_info(policy_id, **kwargs) # noqa: E501
return data
def get_alert_policy_with_http_info(self, policy_id, **kwargs): # noqa: E501
"""Get Alert Policy # noqa: E501
Used to get details of a single policy with id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_alert_policy_with_http_info(policy_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str policy_id: Id of the requested policy (required)
:return: GetAlertPolicyResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['policy_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_alert_policy" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'policy_id' is set
if ('policy_id' not in params or
params['policy_id'] is None):
raise ValueError("Missing the required parameter `policy_id` when calling `get_alert_policy`") # noqa: E501
collection_formats = {}
path_params = {}
if 'policy_id' in params:
path_params['policyId'] = params['policy_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v1/policies/{policyId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetAlertPolicyResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_alert_policies(self, **kwargs): # noqa: E501
"""List Alert Policies # noqa: E501
Returns list alert policies # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_alert_policies(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: ListAlertPoliciesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_alert_policies_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_alert_policies_with_http_info(**kwargs) # noqa: E501
return data
def list_alert_policies_with_http_info(self, **kwargs): # noqa: E501
"""List Alert Policies # noqa: E501
Returns list alert policies # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_alert_policies_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: ListAlertPoliciesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_alert_policies" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v1/policies', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListAlertPoliciesResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_alert_policy(self, policy_id, body, **kwargs): # noqa: E501
"""Update Alert Policy # noqa: E501
Update alert policy with given id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_alert_policy(policy_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str policy_id: Id of the requested policy (required)
:param AlertPolicy body: Payload of updated alert policy (required)
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_alert_policy_with_http_info(policy_id, body, **kwargs) # noqa: E501
else:
(data) = self.update_alert_policy_with_http_info(policy_id, body, **kwargs) # noqa: E501
return data
def update_alert_policy_with_http_info(self, policy_id, body, **kwargs): # noqa: E501
"""Update Alert Policy # noqa: E501
Update alert policy with given id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_alert_policy_with_http_info(policy_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str policy_id: Id of the requested policy (required)
:param AlertPolicy body: Payload of updated alert policy (required)
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['policy_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_alert_policy" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'policy_id' is set
if ('policy_id' not in params or
params['policy_id'] is None):
raise ValueError("Missing the required parameter `policy_id` when calling `update_alert_policy`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_alert_policy`") # noqa: E501
collection_formats = {}
path_params = {}
if 'policy_id' in params:
path_params['policyId'] = params['policy_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v1/policies/{policyId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SuccessResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| [
2,
19617,
25,
3384,
69,
12,
23,
198,
198,
37811,
198,
220,
220,
220,
26123,
13746,
494,
30617,
7824,
628,
220,
220,
220,
26123,
13746,
494,
4946,
17614,
18291,
2649,
220,
1303,
645,
20402,
25,
412,
33548,
628,
220,
220,
220,
4946,
1... | 2.26165 | 13,713 |
from .module import AutomatiaModule
import automatia.const.priority as priority
from automatia.const.state import *
from .main import Debug, Inform, Warn, Error, \
setdebug, setcli, setauto, isauto, \
FinishResult, FinishFinal, FinishNow
from .internal.util import import_exists
| [
6738,
764,
21412,
1330,
17406,
265,
544,
26796,
198,
11748,
3557,
265,
544,
13,
9979,
13,
49336,
355,
8475,
198,
6738,
3557,
265,
544,
13,
9979,
13,
5219,
1330,
1635,
198,
6738,
764,
12417,
1330,
31687,
11,
45255,
11,
39567,
11,
13047... | 3.376471 | 85 |
#!/usr/bin/env python
#coding:utf-8
# Purpose: text objects
# Created: 03.01.2011
# Copyright (C) 2011, Manfred Moitzi
# License: MIT
from __future__ import unicode_literals, print_function, division
__author__ = "mozman <mozman@gmx.at>"
from.compatibility import is_string
from .xmlns import CN, register_class, subelement, wrap
from .base import GenericWrapper, safelen
from .whitespaces import encode_whitespaces
from .protection import random_protection_key
from .propertymixins import StringProperty, TextNumberingMixin, BooleanProperty
from .propertymixins import IntegerWithLowerLimitProperty
@register_class
@register_class
@register_class
@register_class
@register_class
@register_class
@register_class
@register_class
@register_class
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
201,
198,
2,
66,
7656,
25,
40477,
12,
23,
201,
198,
2,
32039,
25,
2420,
5563,
201,
198,
2,
15622,
25,
7643,
13,
486,
13,
9804,
201,
198,
2,
15069,
357,
34,
8,
2813,
11,
1869,
39193,
... | 2.919708 | 274 |
#!/usr/bin/env python3
import os
import cv2
import json
import math
import time
import argparse
import numpy as np
import pandas as pd
# sudo apt install libfreetype6-dev
# sudo apt install libglfw3
from thirdparty.bop_toolkit.bop_toolkit_lib.renderer import create_renderer
from lib.utils.utils import euler2R
from lib.labeling import kp_config
np.random.seed(666)
# Rendering image size.
IMG_WH = 420
if __name__ == '__main__':
parser = argparse.ArgumentParser("./manual_keypoints.py")
parser.add_argument(
'--ply_file',
type=str,
default="/media/nate/Elements/bop/bop_datasets/ycbv/models_fine/obj_000015.ply",
help='Path to the input PLY mesh file.'
)
parser.add_argument(
'--renderer',
type=str,
default="python", choices=["python", "cpp"],
help='Which type of renderer from BOP to use. See bop_toolkit for details.'
)
parser.add_argument(
'--dataset',
type=str,
default="ycbv", choices=["ycbv", "tless"],
help='"ycbv" or "tless"'
)
parser.add_argument('--inspect', dest='inspect', action='store_true',
help='If set, inspect the keypoints already made for this ply file . '
'Obviously, the keypoint file must exist '
'(dirname(/path/to/blah.ply)/../kp_info/blah_kp_info.json). '
'Note that if you save, it will overwrite the view pose.'
)
parser.add_argument(
'--viz',
type=str,
default=None,
help='Path to a directory containing ply files'
' instances in kp_config_file. Program will write a visualization of them all.'
)
parser.add_argument(
'-r', type=int, default=8,
help='Radius size in pixels of the rendered circles.'
)
args = parser.parse_args()
setattr(args, "kp_config_file", f"./kp_configs/{args.dataset}_kp_config.csv")
if args.viz is not None:
config_data = pd.read_csv(args.kp_config_file)
num_objects = config_data.shape[0]
if args.dataset == "ycbv":
rows, cols = 3, 7
assert num_objects == 21
else:
rows, cols = 3, 10
assert num_objects == 30
img_combined = np.zeros((rows*IMG_WH, cols*IMG_WH, 3), dtype=np.uint8)
for i in range(rows):
for j in range(cols):
# TODO Why does this memory leak and fill up RAM?!
object_idx = i*cols + j
file_stem = "obj_" + str(object_idx+1).zfill(6)
ply_file = os.path.join(args.viz, file_stem + ".ply")
gui = SelectionGui(ply_file=ply_file,
kp_config_file=args.kp_config_file, r=args.r,
renderer=args.renderer)
img = gui.inspect_from_file(once=True)
img_combined[i*IMG_WH:(i+1)*IMG_WH, j*IMG_WH:(j+1)*IMG_WH, :] = img
cv2.imshow("kp_viz.png", img_combined)
cv2.waitKey(1)
print(f"Writing visualization image to ./assets/{args.dataset}_kp_viz.png")
cv2.imwrite(f"./assets/{args.dataset}_kp_viz.png", img_combined)
cv2.imshow(f"{args.dataset}_kp_viz.png", img_combined)
cv2.waitKey(0)
else:
gui = SelectionGui(ply_file=args.ply_file,
kp_config_file=args.kp_config_file, r=args.r, renderer=args.renderer)
if args.inspect:
gui.inspect_from_file()
else:
gui.run()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
11748,
28686,
198,
11748,
269,
85,
17,
198,
11748,
33918,
198,
11748,
10688,
198,
11748,
640,
198,
11748,
1822,
29572,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
19798,
292... | 2.005682 | 1,760 |
import os
import hashlib
from getpass import getpass
print('Username: ' )
passwd = getpass('Password: ')
h = hashlib.md5()
h.update(passwd.encode())
passwd_encrypt = h.hexdigest() | [
11748,
28686,
201,
198,
11748,
12234,
8019,
201,
198,
6738,
651,
6603,
1330,
651,
6603,
201,
198,
201,
198,
4798,
10786,
5842,
13292,
25,
705,
1267,
201,
198,
6603,
16993,
796,
651,
6603,
10786,
35215,
25,
705,
8,
201,
198,
71,
796,
... | 2.540541 | 74 |
from tortoise.models import Model
from tortoise import fields
from pippin.model.secrets import SeedStorage | [
6738,
7619,
25678,
13,
27530,
1330,
9104,
198,
6738,
7619,
25678,
1330,
7032,
198,
6738,
279,
3974,
259,
13,
19849,
13,
2363,
8004,
1330,
23262,
31425
] | 4.076923 | 26 |
count_num = int(input())
odd = 0
even = 0
for i in range(1, count_num + 1):
curr_num = int(input())
if i % 2 == 0:
odd += curr_num
else:
even += curr_num
if odd == even:
print(f'Yes, sum = {odd}')
elif odd > even:
print(f'No, diff = {odd-even}')
else:
print(f'No, diff = {even-odd}')
| [
9127,
62,
22510,
796,
493,
7,
15414,
28955,
198,
5088,
796,
657,
198,
10197,
796,
657,
198,
198,
1640,
1312,
287,
2837,
7,
16,
11,
954,
62,
22510,
1343,
352,
2599,
198,
220,
220,
220,
1090,
81,
62,
22510,
796,
493,
7,
15414,
28955... | 2.050314 | 159 |
# python3
# pylint: disable=g-bad-file-header
# Copyright 2021 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Helpful functions relating to losses."""
from typing import Any, Dict, Callable, List, Optional, Sequence, Tuple, Union
import dataclasses
from enn import base
from enn.losses import single_index
import haiku as hk
import jax
import jax.numpy as jnp
# Maps Haiku params (module_name, name, value) -> include or not
PredicateFn = Callable[[str, str, Any], bool]
def l2_weights_with_predicate(
params: hk.Params,
predicate: Optional[PredicateFn] = None) -> jnp.DeviceArray:
"""Sum of squares of parameter weights that passes predicate_fn."""
if predicate is not None:
params = hk.data_structures.filter(predicate, params)
return sum(jnp.sum(jnp.square(p)) for p in jax.tree_leaves(params))
def add_l2_weight_decay(loss_fn: base.LossFn,
scale: Union[float, Callable[[hk.Params], hk.Params]],
predicate: Optional[PredicateFn] = None) -> base.LossFn:
"""Adds scale * l2 weight decay to an existing loss function."""
try: # Scale is numeric.
scale = jnp.sqrt(scale)
scale_fn = lambda ps: jax.tree_map(lambda p: scale * p, ps)
except TypeError:
scale_fn = scale # Assuming scale is a Callable.
return new_loss
def combine_single_index_losses_as_metric(
train_loss: single_index.SingleIndexLossFn,
extra_losses: Dict[str, single_index.SingleIndexLossFn],
) -> single_index.SingleIndexLossFn:
"""Combines train_loss for training with extra_losses in metrics."""
return combined_loss
def combine_losses_as_metric(
train_loss: base.LossFn,
extra_losses: Dict[str, base.LossFn],
) -> base.LossFn:
"""Combines train_loss for training with extra_losses in metrics."""
return combined_loss
@dataclasses.dataclass
def combine_losses(
losses: Sequence[Union[CombineLossConfig, base.LossFn]]) -> base.LossFn:
"""Combines multiple losses into a single loss."""
clean_losses: List[CombineLossConfig] = []
for i, loss in enumerate(losses):
if not isinstance(loss, CombineLossConfig):
loss = CombineLossConfig(loss, name=f'loss_{i}')
clean_losses.append(loss)
return loss_fn
| [
2,
21015,
18,
198,
2,
279,
2645,
600,
25,
15560,
28,
70,
12,
14774,
12,
7753,
12,
25677,
198,
2,
15069,
33448,
10766,
28478,
21852,
15302,
13,
1439,
6923,
33876,
13,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
1... | 2.959544 | 964 |
from aerosandbox.geometry import *
from aerosandbox import Atmosphere
import aerosandbox.numpy as np
from typing import Tuple, Union
if __name__ == '__main__':
op_point = OperatingPoint()
| [
6738,
41376,
392,
3524,
13,
469,
15748,
1330,
1635,
198,
6738,
41376,
392,
3524,
1330,
33276,
1456,
198,
11748,
41376,
392,
3524,
13,
77,
32152,
355,
45941,
198,
6738,
19720,
1330,
309,
29291,
11,
4479,
628,
198,
198,
361,
11593,
3672,
... | 3.25 | 60 |
import FWCore.ParameterSet.Config as cms
| [
11748,
48849,
14055,
13,
36301,
7248,
13,
16934,
355,
269,
907,
628
] | 3.5 | 12 |
from math import gcd
#Equality test | [
6738,
10688,
1330,
308,
10210,
198,
220,
220,
220,
220,
198,
220,
220,
220,
1303,
36,
13237,
1332
] | 2.444444 | 18 |
import django_heroku
import dj_database_url
HOST = 'https://partygwam-staging.herokuapp.com'
DEBUG = True
ALLOWED_HOSTS = [
'herokuapp.com',
]
DATABASES = {
'default': dj_database_url.config(ssl_require=True)
}
django_heroku.settings(locals())
| [
11748,
42625,
14208,
62,
11718,
23063,
198,
11748,
42625,
62,
48806,
62,
6371,
198,
198,
39,
10892,
796,
705,
5450,
1378,
10608,
70,
86,
321,
12,
301,
3039,
13,
11718,
23063,
1324,
13,
785,
6,
198,
198,
30531,
796,
6407,
198,
7036,
... | 2.348624 | 109 |
from __future__ import division
import tensorflow as tf
from nets import * | [
6738,
11593,
37443,
834,
1330,
7297,
198,
11748,
11192,
273,
11125,
355,
48700,
198,
6738,
31720,
1330,
1635
] | 4.111111 | 18 |
import networkx as nx
import json
if __name__=='__main__':
main()
| [
11748,
3127,
87,
355,
299,
87,
198,
11748,
33918,
198,
198,
361,
11593,
3672,
834,
855,
6,
834,
12417,
834,
10354,
198,
220,
220,
220,
1388,
3419,
198
] | 2.535714 | 28 |
#!pip install pymysql
import pandas as pd
from sqlalchemy import create_engine
from sqlalchemy import insert
import pymysql
import mysql.connector as msql
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import linear_kernel
#predictPlot()
| [
2,
0,
79,
541,
2721,
279,
4948,
893,
13976,
198,
11748,
19798,
292,
355,
279,
67,
198,
6738,
44161,
282,
26599,
1330,
2251,
62,
18392,
198,
6738,
44161,
282,
26599,
1330,
7550,
198,
11748,
279,
4948,
893,
13976,
198,
11748,
48761,
13,... | 3.227273 | 88 |
# -*- coding: utf-8 -*-
from django.db import models, migrations
from django.conf import settings
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
628,
198,
6738,
42625,
14208,
13,
9945,
1330,
4981,
11,
15720,
602,
198,
6738,
42625,
14208,
13,
10414,
1330,
6460,
628
] | 2.970588 | 34 |
from urllib.request import urlopen, urljoin
import re
if __name__ == '__main__':
target_url = 'http://www.apress.com/'
# target_url = 'http://www.sainsbury.co.uk'
apress = download_page(target_url)
image_locations = extract_image_locations(apress)
for src in image_locations:
print(urljoin(target_url, src)) | [
6738,
2956,
297,
571,
13,
25927,
1330,
19016,
9654,
11,
19016,
22179,
198,
11748,
302,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
2496,
62,
6371,
796,
705,
4023,
1378,
2503,
13,
499,
601,
13,
... | 2.616 | 125 |
from Warehouse.Inventory import Inventory
from builtins import property
class Order:
"""
Order contains order_lines which contain order_items
Assumptions:
It is permissable to add items to order that are not in Inventory
"""
class OrderItem:
'''
OrderItem is class for contents of order line item
'''
@property
@property
@property
@property
@status.setter
##############################
# Order class variabiles
__last_order_no = 0
__inventory = Inventory()
@classmethod
def clear(cls):
'''
clear needed to keep unit tests independent
'''
cls.__last_order_no = 0
pass # for debugging
@classmethod
@property
@property
| [
6738,
45927,
13,
818,
17158,
1330,
35772,
198,
6738,
3170,
1040,
1330,
3119,
198,
198,
4871,
8284,
25,
198,
197,
37811,
198,
197,
18743,
4909,
1502,
62,
6615,
543,
3994,
1502,
62,
23814,
198,
197,
198,
197,
8021,
388,
8544,
25,
198,
... | 2.814516 | 248 |
class energyUnit(object):
"""Represents an energy unit."""
| [
4871,
2568,
26453,
7,
15252,
2599,
198,
220,
220,
220,
37227,
6207,
6629,
281,
2568,
4326,
526,
15931,
198
] | 3.315789 | 19 |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from common import constants
from common.waterfall import failure_type
from gae_libs.pipeline_wrapper import pipeline_handlers
from libs import analysis_status
from model.wf_analysis import WfAnalysis
from waterfall import analyze_build_failure_pipeline
from waterfall import buildbot
from waterfall import lock_util
from waterfall.analyze_build_failure_pipeline import AnalyzeBuildFailurePipeline
from waterfall.test import wf_testcase
| [
2,
15069,
1853,
383,
18255,
1505,
46665,
13,
1439,
2489,
10395,
13,
198,
2,
5765,
286,
428,
2723,
2438,
318,
21825,
416,
257,
347,
10305,
12,
7635,
5964,
326,
460,
307,
198,
2,
1043,
287,
262,
38559,
24290,
2393,
13,
198,
198,
11748... | 3.825 | 160 |
# coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Interface Class
# this is a auto generated file generated by Cheetah
# Libre Office Version: 7.3
# Namespace: com.sun.star.accessibility
import typing
from abc import abstractmethod, ABC
if typing.TYPE_CHECKING:
from ..beans.property_value import PropertyValue as PropertyValue_c9610c73
class XAccessibleTextAttributes(ABC):
"""
Implement this interface to give access to the attributes of a text.
**since**
OOo 2.0.4
See Also:
`API XAccessibleTextAttributes <https://api.libreoffice.org/docs/idl/ref/interfacecom_1_1sun_1_1star_1_1accessibility_1_1XAccessibleTextAttributes.html>`_
"""
__ooo_ns__: str = 'com.sun.star.accessibility'
__ooo_full_ns__: str = 'com.sun.star.accessibility.XAccessibleTextAttributes'
__ooo_type_name__: str = 'interface'
__pyunointerface__: str = 'com.sun.star.accessibility.XAccessibleTextAttributes'
@abstractmethod
def getDefaultAttributes(self, RequestedAttributes: 'typing.Tuple[str, ...]') -> 'typing.Tuple[PropertyValue_c9610c73, ...]':
"""
Get the default attribute set for the text.
Returns a set of all default paragraph and default character attributes that are associated for the text. To prevent the method from returning possibly large sets of attributes that the caller is not interested in the caller can provide a list of attributes that he wants to be returned.
When the sequence is empty all attributes are returned.
Raises:
com.sun.star.beans.UnknownPropertyException: ``UnknownPropertyException``
"""
@abstractmethod
def getRunAttributes(self, Index: int, RequestedAttributes: 'typing.Tuple[str, ...]') -> 'typing.Tuple[PropertyValue_c9610c73, ...]':
"""
Get the run attribute set for the specified position.
Returns a set of character attributes that are associated for the character at the given index and are directly set or are set via a character style. To prevent the method from returning all of these attributes the caller can provide a list of attributes that he wants to be returned.
When the sequence is empty all attributes are returned.
Raises:
com.sun.star.beans.UnknownPropertyException: ``UnknownPropertyException``
com.sun.star.lang.IndexOutOfBoundsException: ``IndexOutOfBoundsException``
"""
__all__ = ['XAccessibleTextAttributes']
| [
2,
19617,
25,
3384,
69,
12,
23,
198,
2,
198,
2,
15069,
33160,
1058,
33,
6532,
12,
22405,
12,
12041,
25,
19935,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
4943,
198,
2,
345,
743,... | 3.080321 | 996 |
# Import required modules
import torch
import ogb
from ogb.graphproppred import PygGraphPropPredDataset
from WEGL.WEGL import WEGL
# Set the random seed
random_seed = 55
# Load the dataset
dataset = PygGraphPropPredDataset(name="ogbg-molhiv")
print('# of graphs = {0}\n# of classes = {1}\n# of node features = {2}\n# of edge features = {3}'.\
format(len(dataset), dataset.num_classes, dataset.num_node_features, dataset.num_edge_features))
if isinstance(dataset, PygGraphPropPredDataset):
print('# of tasks = {}'.format(dataset.num_tasks))
# Specify the parameters
# num_hidden_layers = range(3, 9)
num_hidden_layers = [4]
# node_embedding_sizes = [100, 300, 500]
node_embedding_sizes = [300]
# final_node_embeddings = ['concat', 'avg', 'final']
final_node_embeddings = ['final']
num_pca_components = -1
num_experiments = 10
classifiers = ['RF']
# device = 'cpu'
device = 'cuda' if torch.cuda.is_available() else 'cpu'
print('Device: {}'.format(device))
# Run the algorithm
for final_node_embedding in final_node_embeddings:
WEGL(dataset=dataset,
num_hidden_layers=num_hidden_layers,
node_embedding_sizes=node_embedding_sizes,
final_node_embedding=final_node_embedding,
num_pca_components=num_pca_components,
num_experiments=num_experiments,
classifiers=classifiers,
random_seed=random_seed,
device=device)
| [
2,
17267,
2672,
13103,
201,
198,
11748,
28034,
201,
198,
11748,
267,
22296,
201,
198,
6738,
267,
22296,
13,
34960,
1676,
381,
445,
1330,
9485,
70,
37065,
24331,
39156,
27354,
292,
316,
201,
198,
6738,
370,
7156,
43,
13,
54,
7156,
43,
... | 2.393688 | 602 |
from vsip import *
#Spline, Sort, Permute; Interpolate
#spline is a class in pyJvsip.py
# pyJvsip Functions
| [
6738,
3691,
541,
1330,
1635,
198,
2,
26568,
500,
11,
33947,
11,
2448,
76,
1133,
26,
4225,
16104,
378,
198,
2,
22018,
500,
318,
257,
1398,
287,
12972,
41,
14259,
541,
13,
9078,
198,
198,
2,
12972,
41,
14259,
541,
40480,
198,
220,
2... | 2.456522 | 46 |
import re
import base64
from io import BytesIO
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from dog_breed_classifier import DogBreedPrediction
# keep model as global variable so we don't have to reload
# it in case of warm invocations
model = None
BASE64_IMAGE_PATTERN = '^data:image/.+;base64,'
app = FastAPI()
# cors setting
app.add_middleware(
CORSMiddleware,
allow_origins=['http://localhost:8080',
'https://dev.doggie-wiki.com',
'https://doggie-wiki.com',
'https://www.doggie-wiki.com'],
allow_credentials=False,
allow_methods=['*'],
allow_headers=['*'],
)
@app.get('/healthcheck')
@app.post('/classify-dog-breeds')
| [
11748,
302,
198,
11748,
2779,
2414,
198,
6738,
33245,
1330,
2750,
4879,
9399,
198,
198,
6738,
3049,
15042,
1330,
12549,
17614,
11,
14626,
16922,
198,
6738,
3049,
15042,
13,
27171,
1574,
13,
66,
669,
1330,
23929,
12310,
2509,
1574,
198,
... | 2.453416 | 322 |
from matplotlib import pyplot as plt
from os import path, system
from datetime import datetime
| [
6738,
2603,
29487,
8019,
1330,
12972,
29487,
355,
458,
83,
201,
198,
6738,
28686,
1330,
3108,
11,
1080,
201,
198,
6738,
4818,
8079,
1330,
4818,
8079,
201,
198,
201,
198
] | 3.333333 | 30 |
from app import db
from app.utils.base_model import BaseEntity
from werkzeug.security import generate_password_hash, check_password_hash
| [
6738,
598,
1330,
20613,
198,
6738,
598,
13,
26791,
13,
8692,
62,
19849,
1330,
7308,
32398,
198,
6738,
266,
9587,
2736,
1018,
13,
12961,
1330,
7716,
62,
28712,
62,
17831,
11,
2198,
62,
28712,
62,
17831,
628
] | 3.72973 | 37 |
import pickle
from gcm import *
import matplotlib.pyplot as plt
#infection
nu = 1.5
beta = lambda n,i,trate,nu: trate*i**nu
#structure
mmax = 10
gm = np.zeros(mmax+1)
gm[mmax] += 1
nmax = 50
gamma_n = 3
pn = np.zeros(nmax+1)
pn[2:] = (np.arange(2,nmax+1)*1.)**(-gamma_n)
pn /= np.sum(pn)
state_meta = get_state_meta(mmax, nmax, gm, pn)
# nu_c = bistability_threshold_safe(beta, gm, pn, min_params=(10**(-14),1),
# max_params=(1,7))
param_c = invasion_threshold(beta,gm,pn,fixed_args=(nu,))
print(param_c)
#prepare result dict
results = {'stable':dict(), 'unstable':dict()}
#compute stable solutions
param_init = 1.03*param_c
param_var = -0.01*param_c #backward direction
Jtol = 0.0001
stable_param,stable_fixed_point,stable_infected_fraction = \
stable_branch(beta,state_meta,param_init,param_var,
rtol=10**(-10), max_iter=3000,
fixed_args=(nu,),Jtol=Jtol,verbose=True)
#get unstable solution
param_var = abs(stable_param[-2]-stable_param[-1])
fni = stable_fixed_point[-1][1]
param_init = stable_param[-1]
unstable_param,unstable_fixed_point,unstable_infected_fraction = \
unstable_branch(fni,beta,state_meta,param_init,
param_var,fixed_args=(nu,),init_iter=100,
h=10**(-2),
rtol=10**(-12),Jtol=Jtol,
max_iter=10000, verbose=True)
#format fni to In per group
@jit(nopython=True)
fni_list_stable = np.array([fni for _,fni in stable_fixed_point])
sm_list_stable = np.array([sm for sm,_ in stable_fixed_point])
In_list_stable = get_In_list(fni_list_stable,nmax)
fni_list_unstable = np.array([fni for _,fni in unstable_fixed_point])
sm_list_unstable = np.array([sm for sm,_ in unstable_fixed_point])
In_list_unstable = get_In_list(fni_list_unstable,nmax)
results['param_c'] = param_c
results['stable']['param_list'] = np.array(stable_param)
# results['stable']['fni_list'] = np.array(fni_list_stable)
results['stable']['I_list'] = np.array(stable_infected_fraction)
results['stable']['In_list'] = np.array(In_list_stable)
results['unstable']['param_list'] = np.array(unstable_param)
# results['unstable']['fni_list'] = np.array(fni_list_unstable)
results['unstable']['I_list'] = np.array(unstable_infected_fraction)
results['unstable']['In_list'] = np.array(In_list_unstable)
for n in [2,10,20,30,50]:
plt.plot(results['stable']['param_list'],results['stable']['In_list'][:,n])
plt.plot(results['unstable']['param_list'],results['unstable']['In_list'][:,n])
plt.axvline(param_c,0,1,color='k',ls=':')
plt.show()
with open('./dat/FigX_bif3.pk', 'wb') as outfile:
pickle.dump(results,outfile)
| [
11748,
2298,
293,
198,
6738,
308,
11215,
1330,
1635,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
628,
198,
2,
27816,
295,
198,
28803,
796,
352,
13,
20,
198,
31361,
796,
37456,
299,
11,
72,
11,
2213,
378,
11,
28803,... | 2.169614 | 1,244 |
#!/usr/bin/env python
# coding:utf-8
""":mod:`shapeFromGeo`
===================================
.. module:: moduleName
:platform: Unix
:synopsis: module idea
:author: viba
:date: 2016.10
"""
import maya.cmds as mc
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
19617,
25,
40477,
12,
23,
198,
15931,
1298,
4666,
25,
63,
43358,
4863,
10082,
78,
63,
198,
10052,
18604,
198,
492,
8265,
3712,
8265,
5376,
198,
220,
220,
1058,
24254,
25,
33501,
1... | 2.783133 | 83 |
from openprocurement.auctions.geb.tests.fixtures.active_auction import (
ACTIVE_AUCTION_DEFAULT_FIXTURE,
ACTIVE_AUCTION_DEFAULT_FIXTURE_WITH_URLS
)
| [
198,
6738,
1280,
36942,
495,
434,
13,
559,
2733,
13,
469,
65,
13,
41989,
13,
69,
25506,
13,
5275,
62,
559,
596,
1330,
357,
198,
220,
220,
220,
11741,
9306,
62,
32,
18415,
2849,
62,
7206,
38865,
62,
47084,
51,
11335,
11,
198,
220,
... | 2.295775 | 71 |
from .models import Experiment, Profile
from .serializers import ExperimentSerializer, UserSerializer, LoginSerializer
from django.contrib.auth.models import User
from django.http import Http404
from rest_framework.views import APIView,Response,status
from django.contrib.auth import (
login as django_login,
logout as django_logout
)
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from rest_framework import generics
class ListExperiments(APIView):
"""
List all experiments, or create a new experiment.
"""
class UpdateExperiment(APIView):
"""
Retrieve, update or delete a experiment instance.
"""
class UserCreate(generics.CreateAPIView):
"""
Create a User
"""
serializer_class = UserSerializer
authentication_classes = ()
permission_classes = ()
class UserDetail(APIView):
"""
Retrieve a User
"""
#queryset = User.objects.all()
#serializer_class = UserSerializer
"""
Retrieve, update or delete a experiment instance.
""" | [
6738,
764,
27530,
1330,
29544,
11,
13118,
198,
6738,
764,
46911,
11341,
1330,
29544,
32634,
7509,
11,
11787,
32634,
7509,
11,
23093,
32634,
7509,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
18439,
13,
27530,
1330,
11787,
198,
6738,
42625,... | 3.090116 | 344 |
"""File with only sanitizeStr function."""
def sanitizeStr(data):
"""
Escape all char that will trigger an error.
Parameters
----------
data: str
the str to sanitize
Returns
-------
str
The sanitized data.
"""
data = " ".join(data.split())
new_msg = []
for letter in data:
if letter in ['"',"\\"]:
new_msg.append("\\")
new_msg.append(letter)
return "".join(new_msg) | [
37811,
8979,
351,
691,
5336,
270,
1096,
13290,
2163,
526,
15931,
201,
198,
201,
198,
4299,
5336,
270,
1096,
13290,
7,
7890,
2599,
201,
198,
220,
220,
220,
37227,
201,
198,
220,
220,
220,
14473,
477,
1149,
326,
481,
7616,
281,
4049,
... | 2.150442 | 226 |
# Alex Goudine
# GEOG 490 - Webscraping and Database Design
# Scrapes weather data from forecast.io and returns a dict of the relevant information
# Modified by Taylor Denouden
# Shortened script and made into a simple function in which geom and date data can be passed
# Added more efficient and robust cardinal direction lookup
# Removed test for unlimited visibility to maintain data type consistency in database
# Removed test to see if rider was travelling in the same direction in favor of storing the cardinal wind direction only
# Updated docstring
import urllib2
import json
from datetime import datetime
import time
from django.conf import settings
def get_weather(coords, date):
""" Generate a dict of weather data for a location at a given time
Keyword arguments:
coords -- decimal degree coordinates of location. Format is [longitude, latitude]
date -- a python datetime object
"""
(lng, lat) = coords
DIRECTIONS = ["N", "NE", "E", "SE", "S", "SW", "W", "NW"]
# A call is made to the API using the provided key
APIkey = settings.FORECAST_IO_API_KEY
physicalURL = "https://api.forecast.io/forecast/"+APIkey+"/"+str(lat)+","+str(lng)+","+datetime.isoformat(date)+"?units=ca"
response = json.loads( urllib2.urlopen(physicalURL).read() )
c = response['currently']
d = response['daily']['data'][0]
sunrise = d.get('sunriseTime', None)
sunset = d.get('sunsetTime', None)
return {
'summary': c.get('summary', ''),
'sunrise_time': datetime.utcfromtimestamp(sunrise + response['offset']*60*60) if sunrise else None,
'sunset_time': datetime.utcfromtimestamp(sunset + response['offset']*60*60) if sunset else None,
'dawn': (sunrise-30*60 <= time.mktime(date.timetuple()) <= sunrise) if sunrise else False,
'dusk': (sunset <= time.mktime(date.timetuple()) <= sunrise+30*60) if sunrise else False,
'precip_intensity': c.get('precipIntensity', -1),
'precip_probability': c.get('precipProbability', -1),
'precip_type': c.get('precipType', ""),
'temperature': c.get('temperature', -1),
'black_ice_risk': c.get('temperature', 100) <= -18 or (c.get('precipIntensity', -1) > 0 and c.get('temperature', 100) <= 0),
'wind_speed': c.get('windSpeed', -1),
'wind_bearing': c.get('windBearing', -1),
'wind_bearing_str': DIRECTIONS[int((c.get('windBearing') + 22.5) // 45 % 8)] if c.get('windBearing') else "",
'visibility_km': c.get('visibility', -1), # if visibilityKM == 16.09 it is unlimited
}
| [
2,
4422,
402,
2778,
500,
198,
2,
402,
4720,
38,
45601,
532,
5313,
1416,
2416,
278,
290,
24047,
8495,
198,
2,
1446,
2416,
274,
6193,
1366,
422,
11092,
13,
952,
290,
5860,
257,
8633,
286,
262,
5981,
1321,
198,
198,
2,
40499,
416,
81... | 2.784632 | 924 |
from test_utils import run_query, redshift_connector
import pytest
| [
6738,
1332,
62,
26791,
1330,
1057,
62,
22766,
11,
2266,
30846,
62,
8443,
273,
198,
11748,
12972,
9288,
628,
198
] | 3.45 | 20 |
from sepal_ui import model
from traitlets import Any
| [
6738,
384,
18596,
62,
9019,
1330,
2746,
198,
6738,
1291,
2578,
912,
1330,
4377,
628
] | 3.6 | 15 |
import cairo
import os
from libqtile import bar
import base
BACKLIGHT_DIR = '/sys/class/backlight'
FORMAT = '{percent: 2.0%}'
class Backlight(base._TextBox):
"""
A simple widget to show the current brightness of a monitor.
"""
filenames = {}
defaults = [
('backlight_name', 'acpi_video0', 'ACPI name of a backlight device'),
(
'brightness_file',
'brightness',
'Name of file with the '
'current brightness in /sys/class/backlight/backlight_name'
),
(
'max_brightness_file',
'max_brightness',
'Name of file with the '
'maximum brightness in /sys/class/backlight/backlight_name'
),
('update_delay', .2, 'The delay in seconds between updates'),
]
| [
11748,
1275,
7058,
198,
11748,
28686,
198,
6738,
9195,
39568,
576,
1330,
2318,
198,
11748,
2779,
198,
198,
31098,
43,
9947,
62,
34720,
796,
31051,
17597,
14,
4871,
14,
1891,
2971,
6,
198,
198,
21389,
1404,
796,
705,
90,
25067,
25,
362... | 2.224932 | 369 |
import maya.cmds as cmds
import mop.vendor.node_calculator.core as noca
from mop.modules.leaf import Leaf
from mop.core.fields import IntField, ObjectField
import mop.metadata
import mop.dag
import mop.attributes
exported_rig_modules = [Corrective]
| [
11748,
743,
64,
13,
28758,
82,
355,
23991,
82,
198,
11748,
285,
404,
13,
85,
18738,
13,
17440,
62,
9948,
3129,
1352,
13,
7295,
355,
299,
11216,
198,
198,
6738,
285,
404,
13,
18170,
13,
33201,
1330,
14697,
198,
6738,
285,
404,
13,
... | 2.908046 | 87 |
import numpy as np
from numpy import log2, sqrt
from numpy.linalg import matrix_power
import scipy as sp, scipy.linalg, matrix as mx
from math import pi
| [
11748,
299,
32152,
355,
45941,
198,
6738,
299,
32152,
1330,
2604,
17,
11,
19862,
17034,
198,
6738,
299,
32152,
13,
75,
1292,
70,
1330,
17593,
62,
6477,
198,
11748,
629,
541,
88,
355,
599,
11,
629,
541,
88,
13,
75,
1292,
70,
11,
17... | 2.681159 | 69 |
# -*- coding: utf-8 -*-
"""
:copyright: Copyright 2017-2020 Sphinx Confluence Builder Contributors (AUTHORS)
:license: BSD-2-Clause (LICENSE)
"""
from sphinxcontrib.confluencebuilder.std.confluence import SUPPORTED_IMAGE_TYPES
from tests.lib import build_sphinx
from tests.lib import parse
from tests.lib import prepare_conf
from tests.lib import prepare_dirs
import mimetypes
import os
import shutil
import sys
import unittest
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
25,
22163,
4766,
25,
15069,
2177,
12,
42334,
45368,
28413,
7326,
23079,
35869,
25767,
669,
357,
32,
24318,
20673,
8,
198,
25,
43085,
25,
347,
10305,
12,
17,
... | 3.086331 | 139 |
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Sailfish(CMakePackage):
"""Sailfish is a tool for transcript quantification from RNA-seq data."""
homepage = "https://www.cs.cmu.edu/~ckingsf/software/sailfish"
url = "https://github.com/kingsfordgroup/sailfish/archive/v0.10.1.tar.gz"
version('0.10.1', sha256='a0d6d944382f2e07ffbfd0371132588e2f22bb846ecfc3d3435ff3d81b30d6c6')
depends_on('boost@1.55:')
depends_on('tbb')
| [
2,
15069,
2211,
12,
1238,
1828,
13914,
45036,
3549,
2351,
4765,
11,
11419,
290,
584,
198,
2,
1338,
441,
4935,
34152,
13,
4091,
262,
1353,
12,
5715,
27975,
38162,
9947,
2393,
329,
3307,
13,
198,
2,
198,
2,
30628,
55,
12,
34156,
12,
... | 2.50996 | 251 |
import hmac
import hashlib
import urllib
from optparse import OptionParser
parse=OptionParser("""
__ __ ______
/ | / | / \
$$ |____ ______ __ __ _$$ |_ ______ /$$$$$$ |______ ______ _______ ______
$$ \ / \ / | / |/ $$ | / \ $$ |_ $$// \ / \ / | / \
$$$$$$$ |/$$$$$$ |$$ | $$ |$$$$$$/ /$$$$$$ |$$ | /$$$$$$ |/$$$$$$ |/$$$$$$$/ /$$$$$$ |
$$ | $$ |$$ | $$/ $$ | $$ | $$ | __ $$ $$ |$$$$/ $$ | $$ |$$ | $$/ $$ | $$ $$ |
$$ |__$$ |$$ | $$ \__$$ | $$ |/ |$$$$$$$$/ $$ | $$ \__$$ |$$ | $$ \_____ $$$$$$$$/
$$ $$/ $$ | $$ $$/ $$ $$/ $$ |$$ | $$ $$/ $$ | $$ |$$ |
$$$$$$$/ $$/ $$$$$$/ $$$$/ $$$$$$$/ $$/ $$$$$$/ $$/ $$$$$$$/ $$$$$$$/
__ __
/ | / |
_______ ______ _______ ______ ______ _$$ |_ $$ | __ ______ __ __
/ | / \ / | / \ / \ / $$ | $$ | / | / \ / | / |
/$$$$$$$/ /$$$$$$ |/$$$$$$$/ /$$$$$$ |/$$$$$$ |$$$$$$/ $$ |_/$$/ /$$$$$$ |$$ | $$ |
$$ \ $$ $$ |$$ | $$ | $$/ $$ $$ | $$ | __ $$ $$< $$ $$ |$$ | $$ |
$$$$$$ |$$$$$$$$/ $$ \_____ $$ | $$$$$$$$/ $$ |/ | $$$$$$ \ $$$$$$$$/ $$ \__$$ |
/ $$/ $$ |$$ |$$ | $$ | $$ $$/ $$ | $$ |$$ |$$ $$ |
$$$$$$$/ $$$$$$$/ $$$$$$$/ $$/ $$$$$$$/ $$$$/ $$/ $$/ $$$$$$$/ $$$$$$$ |
/ \__$$ |
$$ $$/
$$$$$$/
[ @intx0x80 ]
./brutekey.py -c BAh7B0kiD3Nlc3Npb25faWQGOgZFRiJFNjYzYjQ1YTQxZDk1ZGZiMTBiZTA1%0A..... -f word.txt
./brutekey.py --cookie BAh7B0kiD3Nlc3Npb25faWQGOgZFRiJFNjYzYjQ1YTQxZDk1ZGZiMTBiZTA1%0A..... --file word.txt
"""
)
parse.add_option("-c","--cookie",dest="C",type="string",help="Cookies")
parse.add_option("-f","--file",dest="F",type="string",help="wordlist")
(opt,args)=parse.parse_args()
if opt.C==None and opt.F==None:
print(parse.usage)
exit(0)
else:
msg=str(opt.C)
file=str(opt.F)
m=msg.split("--")
decodeurl=urllib.unquote(m[0])
f=open("word.txt","r")
print "\n\nPlease Wait ... \n\n"
for i in f.readlines():
i=i.strip("\n")
if brutekey(i.rstrip("\n"),decodeurl)==m[1]:
print ("Found Secret Key [{}]".format(i))
| [
11748,
289,
20285,
201,
198,
11748,
12234,
8019,
201,
198,
11748,
2956,
297,
571,
201,
198,
6738,
2172,
29572,
1330,
16018,
46677,
201,
198,
201,
198,
201,
198,
29572,
28,
19722,
46677,
7203,
15931,
201,
198,
201,
198,
201,
198,
201,
... | 1.449696 | 2,306 |
# Copyright (c) 2016 Orange.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.urls import reverse
from django.utils import html
from django.utils.http import urlencode
from django.utils import safestring
from django.utils.translation import ugettext_lazy as _
from horizon import tables
| [
2,
15069,
357,
66,
8,
1584,
11942,
13,
198,
2,
1439,
6923,
33876,
13,
198,
2,
198,
2,
220,
220,
220,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
345,
743,
198,
2,
220,
220,
220,
407,
779,... | 3.424 | 250 |
# encoding: utf-8
import glob
import os
from shutil import copyfile
from mapdict import FarsiMapenDict
mypath = "./Plate_farsi/"
destPath ="./Plate_en/"
plate_list = glob.glob(mypath + "*")
print(len(plate_list))
for plate_path in plate_list:
plate_name = os.path.basename(plate_path)
plate_name = plate_name_modify(plate_name, FarsiMapenDict)
print(plate_name)
src = os.path.abspath(plate_path)
dst = os.path.abspath(destPath) + "/" + plate_name + ".jpg"
print(src, dst)
copyfile(src, dst) | [
2,
21004,
25,
3384,
69,
12,
23,
198,
11748,
15095,
198,
11748,
28686,
198,
6738,
4423,
346,
1330,
4866,
7753,
198,
6738,
3975,
11600,
1330,
376,
945,
72,
13912,
268,
35,
713,
198,
198,
1820,
6978,
796,
366,
19571,
3646,
378,
62,
69,... | 2.413953 | 215 |
from datetime import date, timedelta
import pytest
from .resolvers import (
resolve_all_plants,
resolve_plants_to_care,
resolve_water_plant,
)
from .test_factories import PlantFactory, WateringLogFactory
pytestmark = pytest.mark.django_db
| [
6738,
4818,
8079,
1330,
3128,
11,
28805,
12514,
198,
11748,
12972,
9288,
198,
198,
6738,
764,
411,
349,
690,
1330,
357,
198,
220,
220,
220,
10568,
62,
439,
62,
489,
1187,
11,
198,
220,
220,
220,
10568,
62,
489,
1187,
62,
1462,
62,
... | 2.866667 | 90 |
from aiogram.utils import exceptions
from loguru import logger
from app.loader import dp
@dp.errors_handler(exception=exceptions.RetryAfter)
| [
6738,
257,
72,
21857,
13,
26791,
1330,
13269,
198,
6738,
2604,
14717,
1330,
49706,
198,
198,
6738,
598,
13,
29356,
1330,
288,
79,
628,
198,
31,
26059,
13,
48277,
62,
30281,
7,
1069,
4516,
28,
1069,
11755,
13,
9781,
563,
3260,
8,
198... | 3.348837 | 43 |
from qiskit import QuantumCircuit
from qiskit.converters import dag_to_circuit, circuit_to_dag
from numpy import zeros, uint16
# make the global DP array
LCS_DP = zeros((2000, 2000), dtype=uint16)
| [
6738,
10662,
1984,
270,
1330,
29082,
31560,
5013,
201,
198,
6738,
10662,
1984,
270,
13,
1102,
332,
1010,
1330,
48924,
62,
1462,
62,
21170,
5013,
11,
10349,
62,
1462,
62,
67,
363,
201,
198,
6738,
299,
32152,
1330,
1976,
27498,
11,
2039... | 2.710526 | 76 |
"""Package for dealing with the todo.txt format."""
import fileinput
import os
from typing import cast, List, Sequence
from .task import Task
from .tasks import Tasks
def uncompleted_task_on(line: str) -> bool:
"""Return whether there's an uncompleted task on the line."""
return bool(line.strip() and not line.startswith("x "))
def unblocked_tasks(tasks: Sequence[Task]) -> Sequence[Task]:
"""Set the blocked status of the tasks and return only the unblocked tasks."""
task_by_id = {task.task_id(): task for task in tasks if task.task_id()}
for task in tasks:
for parent_id in task.parent_ids():
parent_task = task_by_id.get(parent_id)
if parent_task:
parent_task.set_is_blocked()
task.add_blocked_task(parent_task)
for child_id in task.child_ids():
child_task = task_by_id.get(child_id)
if child_task:
task.set_is_blocked()
child_task.add_blocked_task(task)
return [task for task in tasks if not task.is_blocked()]
def read_todotxt_files(filenames: List[str]) -> Tasks:
"""Read tasks from the Todo.txt files."""
filenames = [os.path.expanduser(filename) for filename in filenames]
with cast(fileinput.FileInput, fileinput.input(filenames)) as todotxt_file:
tasks = [Task(line.strip(), todotxt_file.filename(), index + 1) for index, line in enumerate(todotxt_file)
if uncompleted_task_on(line)]
return Tasks(unblocked_tasks(tasks))
| [
37811,
27813,
329,
7219,
351,
262,
284,
4598,
13,
14116,
5794,
526,
15931,
198,
198,
11748,
2393,
15414,
198,
11748,
28686,
198,
6738,
19720,
1330,
3350,
11,
7343,
11,
45835,
198,
198,
6738,
764,
35943,
1330,
15941,
198,
6738,
764,
83,
... | 2.438792 | 629 |
import pickle
import random
from dataclasses import dataclass
from typing import Any, Tuple, cast
import numpy as np
import tensorflow as tf
import tensorflow.keras as keras
from srl.base.define import RLObservationType
from srl.base.env.base import EnvRun
from srl.base.rl.algorithms.discrete_action import DiscreteActionConfig
from srl.base.rl.base import RLParameter, RLTrainer, RLWorker
from srl.base.rl.registration import register
from srl.base.rl.remote_memory.sequence_memory import SequenceRemoteMemory
from srl.rl.functions.common import random_choice_by_probs, render_discrete_action, to_str_observation
from srl.rl.functions.model import ImageLayerType, create_input_layers_one_sequence
from tensorflow.keras import layers as kl
# ------------------------------------------------------
# config
# ------------------------------------------------------
@dataclass
register(
Config,
__name__ + ":RemoteMemory",
__name__ + ":Parameter",
__name__ + ":Trainer",
__name__ + ":Worker",
)
# ------------------------------------------------------
# RemoteMemory
# ------------------------------------------------------
# ------------------------------------------------------
# network
# ------------------------------------------------------
# ------------------------------------------------------
# Parameter
# ------------------------------------------------------
# ------------------------
# ------------------------------------------------------
# Trainer
# ------------------------------------------------------
# ------------------------------------------------------
# Worker
# ------------------------------------------------------
| [
11748,
2298,
293,
198,
11748,
4738,
198,
6738,
4818,
330,
28958,
1330,
4818,
330,
31172,
198,
6738,
19720,
1330,
4377,
11,
309,
29291,
11,
3350,
198,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
11192,
273,
11125,
355,
48700,
198,
11... | 4.323077 | 390 |
from django.views.decorators.csrf import csrf_protect
from django.shortcuts import render
from search.forms import BasicSearchForm, AdvancedSearchForm
from frontend.models import BaseUser
from django.http import HttpResponseRedirect
from django.core.exceptions import ValidationError
# from django.contrib.auth.models import User
# Create your views here.
@csrf_protect
@csrf_protect
| [
6738,
42625,
14208,
13,
33571,
13,
12501,
273,
2024,
13,
6359,
41871,
1330,
269,
27891,
69,
62,
35499,
198,
6738,
42625,
14208,
13,
19509,
23779,
1330,
8543,
198,
6738,
2989,
13,
23914,
1330,
14392,
18243,
8479,
11,
13435,
18243,
8479,
... | 3.5 | 112 |
import geopandas as gpd
import pandas as pd
import numpy as np
# import scipy.io as spio
# import scipy.stats as stats
# import ogr
# import os
# import fnmatch
import pyproj
import rasterio.features
from rasterio.vrt import WarpedVRT
import rioxarray
from shapely.geometry import box
from shapely.geometry import Polygon as shpPolygon
from shapely.ops import unary_union
import xarray as xr
from icebath.utils import raster_ops
from icebath.core import fl_ice_calcs as icalcs
from icebath.core import fjord_props
@xr.register_dataset_accessor("bergxr")
class BergXR:
"""
An extension for an XArray dataset that will calculate tidal offsets and delineate
icebergs in DEMs brought in from a GeoTiff.
"""
# ----------------------------------------------------------------------
# Constructors
# ----------------------------------------------------------------------
# Properties
# ----------------------------------------------------------------------
# Methods
@staticmethod
def _validate(self, req_dim=None, req_vars=None):
'''
Make sure the xarray dataset (or dataarray) has the correct coordinates and variables
'''
# if type(xrds) == dataarray
if req_dim is not None:
if all([dim not in list(self._xrds.dims) for dim in req_dim]):
raise AttributeError("Required dimensions are missing")
if req_vars is not None:
if all([var not in self._xrds.variables for var in req_vars.keys()]):
raise AttributeError("Required variables are missing")
#if type(xrds) == dataset
# for a dataset rather than a datarray
# if all([dim not in list(xrds.dims.keys()) for dim in req_dim]):
# raise AttributeError("Required dimensions are missing")
# if all ([var not in list(xrds.keys()) for var in req_vars.keys()]):
# raise AttributeError("Required variables are missing")
# for key in req_vars.keys():
# if all([dim not in list(xrds[key].dims) for dim in req_vars[key]]):
# raise AttributeError("Variables do not have all required coordinates")
# def calc_medmaxmad(self, column=''):
# """
# Compute median, maximum, and median absolute devation from an array of values
# specified by the string of the input column name and add columns to hold the results.
# Input values might be from a filtered raster of iceberg pixel drafts or a series of measurements.
# Parameters
# ---------
# column: str, default ''
# Column name on which to compute median, maximum, and median absolute deviation
# """
# req_cols = [column] # e.g. 'draft' for iceberg water depths, 'depth' for measured depths
# self._validate(self._gdf, req_cols)
def _calc_allpoints(self, function, req_dim=None, req_vars=None):
"""
Helper function to do a pixel-wise calculation that requires using x and y dimension values
as inputs. This version does the computation over all available timesteps as well.
Point-based iteration based on example code by Ryan Abernathy from:
https://gist.github.com/rabernat/bc4c6990eb20942246ce967e6c9c3dbe
"""
# note: the below code will need to be generalized for using this function outside of to_geoid
self._validate(self, req_dim, req_vars)
# stack x and y into a single dimension called allpoints
stacked = self._xrds.stack(allpoints=['x','y'])
# groupby time and apply the function over allpoints to calculate the trend at each point
newelev = stacked.groupby('allpoints', squeeze=False).apply(_time_wrapper)
# unstack back to x y coordinates
self._xrds = newelev.unstack('allpoints')
return self._xrds
def get_mask(self, req_dim=['x','y'], req_vars=None,
name=None,
shpfile='/home/jovyan/icebath/notebooks/supporting_docs/Land_region.shp'):
"""
Get a shapefile of land (or area of interest) boundaries and add to the dataset
as a mask layer that matches the extent and x/y coordinates.
"""
self._validate(self, req_dim)
#read in shapefile
shpfl = gpd.read_file(shpfile)
#confirm and correct projection if needed
shpfl = shpfl.to_crs(self._xrds.attrs['crs'])
# apply buffer since mask isn't exact
shpfl['geometry'] = shpfl.buffer(5)
mask = rasterio.features.geometry_mask(shpfl.geometry,
out_shape = (len(self._xrds.y), len(self._xrds.x)),
transform= self._xrds.attrs['transform'],
invert=False)
# check for coordinate monotony. If true, then flip along the appropriate x/y coordinates before putting into xarray dataset
flipax=[]
if pd.Series(self._xrds.x).is_monotonic_decreasing:
flipax.append(1)
if pd.Series(self._xrds.y).is_monotonic_increasing:
flipax.append(0)
mask = xr.DataArray(np.flip(mask, axis=flipax), coords={'y':self._xrds.y, 'x':self._xrds.x}, dims=['y','x'])
self._xrds.coords[name] = mask
# clip original shapefile to XArray extent plus a half-pixel buffer
# clipped_shpfl = gpd.clip(shpfl, box(self._xrds.x.min().item()-0.5*self._xrds.attrs['res'][0],
# self._xrds.y.min().item()-0.5*self._xrds.attrs['res'][1],
# self._xrds.x.max().item()+0.5*self._xrds.attrs['res'][0],
# self._xrds.y.max().item()+0.5*self._xrds.attrs['res'][1]))
# self._xrds.attrs[name] = unary_union(clipped_shpfl.geometry) #[shpfl.geometry.exterior[row_id].coords for row_id in range(shpfl.shape[0])])
def add_meas_to_ds(self, src_fl, vardict={}, nanval=None):
"""
Add new variables to an existing dataset, resampling if needed
Parameters
----------
dataset : XArray dataset
dataset containing the spatial area of interest with x and y dimensions
src_fl : source file, string
The full path of the measurement data source file
vardict : variable mapping, dictionary
Key-value pairs mapping the source dataset keys to their new variable names in the dataset
"""
# assert type(self._xrds)==xr.core.dataset.Dataset, "You must input an Xarray dataset from which to get measured values"
assert vardict != {}, "You must specify your origin variables and their dataset names"
# ToDo: add check to see if the layers are already there...
# Note: assumes compatible CRS systems
for key in vardict.keys():
self._xrds.bergxr.get_new_var_from_file(req_dim=['x','y'],
newfile=src_fl,
variable=key,
varname=vardict[key])
if nanval != None:
self._xrds[vardict[key]] = self._xrds[vardict[key]].where(self._xrds[vardict[key]] != nanval)
def get_new_var_from_file(self, req_dim=['x','y'], newfile=None, variable=None, varname=None):
"""
Get info from another dataset (NetCDF) and resample it and add it to the dataset.
Note: this requires you have a local copy of the NetCDF you are using.
Note: this also assumes reconciled crs between the existing and input variables.
Using groupby on a netcdf may be slow due to lazy loading: https://github.com/pydata/xarray/issues/659#issuecomment-334212532
However, at the moment it looks like you can't open the dataset (http://xarray.pydata.org/en/stable/io.html#netcdf)
with geospatial info, because rioxarray is not yet an engine.
"""
self._validate(self, req_dim)
print("Note that the new file is reprojected to have the same CRS as the dataset to which it is being added.\
However, if the two CRSs are compatible, the spatial properties of the new file may be added to or overwrite the ones of the existing dataset")
# add check for existing file?
assert newfile != None, "You must provide an input file of the dataset to add."
assert variable != None, "You must specify which variable you'd like to add"
if newfile.endswith(".nc"):
newfilestr = 'netcdf:'+newfile+':'+variable
elif newfile.endswith(".tif"):
newfilestr = newfile
# read this in as a virtual raster
with rasterio.open(newfilestr) as src:
# print('Source CRS:' +str(src.crs))
# !!!!! a terrible idea but need to get this to run for now...
try: crs = self._xrds.attrs['crs']
except KeyError: crs = src.crs
with WarpedVRT(src,resampling=1,src_crs=src.crs,crs=crs) as vrt:
# warp_mem_limit=12000,warp_extras={'NUM_THREADS':2}) as vrt:
# print('Destination CRS:' +str(vrt.crs))
newdset = rioxarray.open_rasterio(vrt).chunk({'x': 3072, 'y': 3072})
# ds = rioxarray.open_rasterio(vrt).chunk({'x':1500,'y':1500,'band':1}).to_dataset(name='HLS_Red')
# in an ideal world, we'd read this in chunked with dask. however, this means (in the case of Pangeo) that the file
# needs to be in cloud storage, since the cluster can't access your home directory
# https://pangeo.io/cloud.html#cloud-object-storage
# with rioxarray.open_rasterio(newfile) as newdset: #, chunks={'x': 500, 'y': 500}) as newdset:
try: newdset=newdset.squeeze().drop_vars('band')
except ValueError: pass
newdset = newdset.rename(variable)
# newdset = xr.open_dataset(newfile, chunks={'x': 500, 'y': 500})
# Improvement: actually check CRS matching
# apply the existing chunking to the new dataset
# newdset = newdset.rio.reproject(dst_crs=self._xrds.attrs['crs']).chunk({'x': 1000, 'y': 1000})
# newvar = newdset[variable].interp(x=self._xrds['x'], y=self._xrds['y']).chunk({key:self._xrds.chunks[key] for key in req_dim})
try:
newvar = newdset.interp(x=self._xrds['x'], y=self._xrds['y']).chunk({key:self._xrds.chunks[key] for key in req_dim})
except KeyError:
print("there was a key error, so no chunking")
newvar = newdset.interp(x=self._xrds['x'], y=self._xrds['y'])
self._xrds[varname] = newvar
del newvar
del newdset
return self._xrds
def to_geoid(self, req_dim=['dtime','x','y'], req_vars={'elevation':['x','y','dtime','geoid']},
source=None):
"""
Get geoid layer from BedMachine (you must have the NetCDF stored locally; filename is hardcoded in)
and apply to all elevation values.
Adds 'geoid_offset' keyword to "offsets" attribute
"""
try:
values = (self._xrds.attrs['offset_names'])
assert 'geoid_offset' not in values, "You've already applied the geoid offset!"
values = list([values])+ ['geoid_offset']
except KeyError:
values = ['geoid_offset']
self._validate(self, req_dim, req_vars)
self.get_new_var_from_file(newfile=source,
variable='geoid', varname='geoid')
self._xrds['elevation'] = self._xrds.elevation - self._xrds.geoid
self._xrds.attrs['offset_names'] = values
return self._xrds
def to_geoid_pixel(self, req_dim=['dtime','x','y'], req_vars={'elevation':['x','y','dtime']}, geoid=None):
"""
Change the elevation values to be relative to the geoid rather than the ellipsoid
(as ArcticDEM data comes) by iterating over each pixel (over time).
Gets a keyword added to the "offsets" attribute
Note: CRS codes are hard-coded in for EPSG:3413 (NSIDC Polar Stereo) and EPSG:3855 (EGM08 Geoid)
"""
try:
values = (self._xrds.attrs['offset_names'])
assert 'geoid_offset' not in values, "You've already applied the geoid offset!"
values = list([values])+ ['geoid_offset']
except KeyError:
values = ['geoid_offset']
self._validate(self, req_dim, req_vars)
# self._xrds['elevation_orig'] = self._xrds['elevation']
self._calc_allpoints(self._to_geoid_wrapper) #don't supply req_dim and req_vars since same as submitted to this fn
self._xrds.attrs['crs'] = pyproj.Proj("EPSG:3413+3855")
self._xrds.attrs['offset_names'] = values
return self._xrds
def _to_geoid_wrapper(self, gb):
"""
XArray wrapper for the raster_ops.crs2crs function to be able to use it with
`.groupby().apply()` to get geoid heights. It also checks that the x and y values
are not affected by computing the geoid offset.
Parameters
----------
gb : groupby object
Must contain the fields ...
"""
# print(gb)
x=gb.allpoints.x.values
y=gb.allpoints.y.values
z=gb.elevation.values[0]
nx, ny, nz = raster_ops.crs2crs(pyproj.Proj("EPSG:3413"), pyproj.Proj("EPSG:3413+3855"), x, y, z)
assert np.isclose(x, nx)==True, "x values have changed a bunch"
assert np.isclose(y, ny)==True, "y values have changed a bunch"
gb = gb.assign(elevation=('dtime', nz))
return gb
def tidal_corr(self, req_dim=['dtime'], req_vars={'elevation':['x','y','dtime']},
loc=None, **kwargs):
"""
Gets tidal predictions for the image date/time in the fjord of interest,
then applies the tidal correction to the elevation field. The dataset
gets a keyword added to the "offsets" attribute, and time dependent variables
for the tidal offset, tidal max, and tidal min are added. If you want to model
tides and see output plots, see fl_ice_calcs.predict_tides.
"""
print("Note that tide model and epsg are hard coded in!")
print("They can also be provided as keywords if the wrapper function is updated to handle them")
try:
values = (self._xrds.attrs['offset_names'])
assert 'tidal_corr' not in values, "You've already applied a tidal correction!"
values = list(values)+ ['tidal_corr']
except KeyError:
values = ['tidal_corr']
self._validate(self, req_dim, req_vars)
self._xrds = self._xrds.groupby('dtime', squeeze=False).apply(self._tidal_corr_wrapper, args=(loc), **kwargs)
self._xrds.attrs['offset_names'] = values
return self._xrds
def _tidal_corr_wrapper(self, gb, loc, **kwargs):
"""
XArray wrapper for the fl_ice_calcs.predict_tides function to be able to use it with
`.groupby().apply()` to get and apply tidal corrections
Parameters
----------
gb : groupby object
Must contain the fields ...
"""
try: model_path = kwargs['model_path']
except KeyError:
raise AssertionError("You must specify a model path")
time, tidal_ht, plots = icalcs.predict_tides(loc,
img_time=gb.dtime.values,
model_path=model_path,
model='AOTIM-5-2018',
epsg=3413)
tidx = list(time).index(np.timedelta64(12, 'h').item().total_seconds())
vals = [tidal_ht[tidx], np.min(tidal_ht), np.max(tidal_ht)]
gb['elevation'] = gb.elevation + vals[0]
dtimeones = np.ones(len(gb.dtime.values))
gb = gb.assign(
tidal_corr = ('dtime', dtimeones*vals[0]),
min_tidal_ht = ('dtime', dtimeones*vals[1]),
max_tidal_ht = ('dtime', dtimeones*vals[2])
)
return gb | [
11748,
30324,
392,
292,
355,
27809,
67,
198,
11748,
19798,
292,
355,
279,
67,
198,
11748,
299,
32152,
355,
45941,
198,
2,
1330,
629,
541,
88,
13,
952,
355,
599,
952,
198,
2,
1330,
629,
541,
88,
13,
34242,
355,
9756,
198,
2,
1330,
... | 2.178297 | 7,667 |
from __future__ import print_function
import argparse
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Function
import torch.optim as optim
from torchvision import datasets, transforms
from torch.autograd import Variable
import create_dic_fuc
from FCDecomp import FCDecomp
import time
| [
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
198,
11748,
1822,
29572,
198,
11748,
28034,
198,
11748,
28034,
13,
20471,
355,
299,
77,
198,
11748,
28034,
13,
20471,
13,
45124,
355,
376,
198,
6738,
28034,
13,
2306,
519,
6335,
1330,
15553... | 3.707865 | 89 |
from pydantic import BaseModel
| [
6738,
279,
5173,
5109,
1330,
7308,
17633,
628,
198
] | 3.666667 | 9 |
# Basic Auth User
username = ""
password = ""
basic_auth = "Basic "
# Application
api_token = ""
namespace = ""
| [
2,
14392,
26828,
11787,
198,
29460,
796,
13538,
198,
28712,
796,
13538,
198,
35487,
62,
18439,
796,
366,
26416,
366,
198,
198,
2,
15678,
198,
15042,
62,
30001,
796,
13538,
198,
14933,
10223,
796,
13538,
198
] | 3.138889 | 36 |
import pandas as pd
import numpy as np
import h5py
import scipy.signal
np.random.seed(1)
if __name__=="__main__": main() | [
11748,
19798,
292,
355,
279,
67,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
289,
20,
9078,
220,
198,
11748,
629,
541,
88,
13,
12683,
282,
198,
198,
37659,
13,
25120,
13,
28826,
7,
16,
8,
198,
220,
220,
220,
220,
628,
198,
361... | 2.345455 | 55 |
# import curses
# import datetime
#
# stdscr = curses.initscr()
# curses.noecho()
# stdscr.nodelay(1) # set getch() non-blocking
#
# stdscr.addstr(0,0,"Press \"p\" to show count, \"q\" to exit...")
# line = 1
# try:
# while 1:
# c = stdscr.getch()
# if c == ord('p'):
# stdscr.addstr(line,0,"Some text here")
# line += 1
# elif c == ord('q'): break
#
# """
# Do more things
# """
#
# finally:
# curses.endwin()
import numpy as np

# Training data: the XOR truth table (inputs X, targets y).
X = np.array([[0,0],[0,1],[1,0],[1,1]])
y = np.array([[0],[1],[1],[0]])

np.random.seed(1)  # fixed seed so every run starts from identical weights

# Layer widths: 2 inputs -> 4 hidden units -> 1 output.
nums = [2, 4, 1]
# One weight matrix per layer, entries in [-1, 1). The +1 row holds the bias
# weights for the ones-column appended to each layer's input below.
network = [2*np.random.random((nums[i]+1,nums[i+1]))-1 for i in range(len(nums)-1)]
print('network', network)

for j in range(100000):
    # ---- forward pass ----
    outputs = [X]
    for layer in network:
        # Append a bias column of ones before applying this layer's weights.
        outputs[-1] = np.c_[outputs[-1], np.ones(len(outputs[-1]))]
        # NOTE(review): nonlin() is defined elsewhere in this file; assumed to
        # be an activation taking a `deriv` flag (e.g. sigmoid) -- confirm.
        outputs.append(nonlin(np.dot(outputs[-1], layer)))
    print('outputs', outputs, '\n')

    # ---- backward pass ----
    # Error/delta at the output layer; use -1 instead of a hard-coded index
    # so the code survives changes to the number of layers.
    errors = [y - outputs[-1]]
    print('errors', errors)
    deltas = [errors[-1]*nonlin(outputs[-1], deriv=True)]
    print('deltas', deltas)

    # Propagate deltas through the hidden layers, last-to-first.
    # BUGFIX: the backpropagated error carries a spurious column for the bias
    # unit (network[i] and outputs[i] both include the bias slot); it must be
    # sliced off ([:, :-1]) or the weight update below has mismatched shapes
    # ((3,5) added into the (3,4) first-layer matrix). The original loop also
    # indexed the layers in the wrong order for networks deeper than two.
    for i in range(len(network)-1, 0, -1):
        err = deltas[0].dot(network[i].T)[:, :-1]
        errors.insert(0, err)
        print('layer', i, 'error', errors[0])
        deltas.insert(0, err * nonlin(outputs[i][:, :-1], deriv=True))
        print('layer', i, 'delta', deltas[0], '\n')

    # ---- weight update (no learning-rate term) ----
    for i in range(len(deltas)):
        delta = outputs[i].T.dot(deltas[i])
        print(delta,'\n', network[i])
        network[i] += delta

print("Output after training")
print(outputs[-1])
| [
2,
1330,
43878,
201,
198,
2,
1330,
4818,
8079,
201,
198,
2,
201,
198,
2,
14367,
1416,
81,
796,
43878,
13,
259,
896,
6098,
3419,
201,
198,
2,
43878,
13,
77,
2577,
6679,
3419,
201,
198,
2,
14367,
1416,
81,
13,
77,
375,
417,
323,
... | 2.093324 | 1,393 |
from abc import ABCMeta, abstractmethod
from datetime import datetime
from typing import Callable, Set, Union
from backends.software_package import SoftwarePackage
from backends.software_version import SoftwareVersion
class Provider(metaclass=ABCMeta):
    """
    Abstract base class that every concrete provider derives from.

    A provider knows how to enumerate the available versions of a software
    package and to access the code/files each version contains.
    """

    # Directory used by implementations to cache retrieved artifacts.
    cache_directory: str

    @abstractmethod
    def get_versions(self) -> Set[SoftwareVersion]:
        """Return the set of all versions this provider knows about."""

    @abstractmethod
    def list_files(self, version: SoftwareVersion):
        """Enumerate every file contained within the given version."""

    @abstractmethod
    def get_file_data(self, version: SoftwareVersion, path: str):
        """Return a stream over the contents of ``path`` inside ``version``."""

    def _get_software_version(
            self, internal_identifier: str, name: str,
            release_date: datetime) -> Union[SoftwareVersion, None]:
        """Build a SoftwareVersion from a provider-internal identifier."""
        # NOTE(review): ``version_name_derivator`` and ``software_package``
        # are expected to be provided by the concrete subclass -- confirm.
        derivator = self.version_name_derivator
        if derivator is not None:
            name = derivator(internal_identifier)
        return SoftwareVersion(
            software_package=self.software_package,
            name=name,
            internal_identifier=internal_identifier,
            release_date=release_date)
| [
6738,
450,
66,
1330,
9738,
48526,
11,
12531,
24396,
198,
6738,
4818,
8079,
1330,
4818,
8079,
198,
6738,
19720,
1330,
4889,
540,
11,
5345,
11,
4479,
198,
198,
6738,
736,
2412,
13,
43776,
62,
26495,
1330,
10442,
27813,
198,
6738,
736,
2... | 2.943038 | 474 |
# -*- coding: utf-8 -*-
# @Time : 2020/2/16 17:00
# @Author : MLee
# @File : forms.py
from django.forms import ModelForm
from .models import SubscriberInfoModel
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
2488,
7575,
220,
220,
220,
1058,
12131,
14,
17,
14,
1433,
1596,
25,
405,
198,
2,
2488,
13838,
220,
1058,
10373,
1453,
198,
2,
2488,
8979,
220,
220,
220,
1058,
510... | 2.463768 | 69 |
#!/usr/bin/python
#
# (C) 2019 Riad S. Wahby <rsw@cs.stanford.edu>
#
# pure Python implementation of curve ops for Ell2 on BLS12-381
import sys
from consts import p, q
from fields import Fq, Fq2
# Bail out early on Python 2: the code below relies on Python 3 semantics.
if sys.version_info[0] < 3:
    sys.exit("This script requires Python3 or PyPy3")
###
## generators for BLS signatures
###
# I'd rather have these in consts, but then we'd get an import cycle, consts <-> fields
# Standard generator point for G1 (subgroup of E(Fq)); affine x, y promoted
# to Jacobian form via Z = Fq.one(p).
g1gen = (Fq(p, 0x17f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb),
Fq(p, 0x08b3f481e3aaa0f1a09e30ed741d8ae4fcf5e095d5d00af600db18cb2c04b3edd03cc744a2888ae40caa232946c5e7e1),
Fq.one(p))
# Standard generator point for G2 (subgroup of the twist E'(Fq2)), same
# Jacobian representation with Fq2 coordinates.
g2gen = (Fq2(p, 0x024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8,
0x13e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e),
Fq2(p, 0x0ce5d527727d6e118cc9cdc6da2e351aadfd9baa8cbdd3a76d429a695160d12c923ac9cc3baca289e193548608b82801,
0x0606c4a02ea734cc32acd2b02bc28b99cb3e287e85a763af267492ab572e99ab3f370d275cec1da1aaa9075ff05f79be),
Fq2.one(p))
###
## Basic curve operations
###
# Jacobian coordinates
# point equality or co-z repr
# PEP 8 (E731): use def instead of assigning a lambda to a name -- gives the
# callables a real __name__ and room for a docstring. Interface unchanged.
def point_eq(P, Q):
    """Test whether Jacobian points P and Q are equal (no co-Z output)."""
    return _point_eq_coz(P, Q, False)

def to_coZ(P, Q):
    """Bring P and Q to a shared Z coordinate (co-Z representation)."""
    return _point_eq_coz(P, Q, True)
# http://www.hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#addition-add-2007-bl
# http://www.hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#doubling-dbl-2009-l
# negate the Y-coordinate
# Addition chain for multiplication by 0xd201000000010000 == -x, the BLS parameter
# addition chain for multiplication by (1 - x) // 3, for x the BLS parameter
# addition chain for multiplication by (x**2 - 1) // 3, for x the BLS parameter
###
## Point multiplication routines
###
# basic double-and-add
# NOTE: this routine is NOT constant time!
# ZDAU', Alg 23 (sans Z-coord) of
# Goundar, Joye, Miyaji, Rivain, Venelli, "Scalar multiplication on Weierstrass
# elliptic curves from co-Z arithmetic." J Crypt Eng 1(2):161-176, 2011.
# http://joye.site88.net/papers/GJMRV11regpm.pdf
# left-to-right signed digit co-Z point multiplication, from Algorithm 16 in
# Goundar, Joye, Miyaji, Rivain, Venelli, "Scalar multiplication on Weierstrass
# elliptic curves from co-Z arithmetic." J Crypt Eng 1(2):161-176, 2011.
# http://joye.site88.net/papers/GJMRV11regpm.pdf
# NOTE: this routine only works for P in the subgroup of order q!
###
## Fast cofactor clearing for Ell1
###
###
## Isogeny map evaluation specified by map_coeffs
###
# map_coeffs should be specified as (xnum, xden, ynum, yden)
#
# This function evaluates the isogeny over Jacobian projective coordinates.
# For details, see Section 4.3 of
# Wahby and Boneh, "Fast and simple constant-time hashing to the BLS12-381 elliptic curve."
# ePrint # 2019/403, https://ia.cr/2019/403.
###
## Fast cofactor clearing using the untwist-Frobenius-twist Endomorphism
###
# We use the version given in section 4.1 of
# Budroni and Pintore, "Efficient hash maps to G2 on BLS curves,"
# ePrint 2017/419 https://eprint.iacr.org/2017/419
# NOTE: this impl works for Jacobian projective coordinates without computing an inversion.
#
# constants for Psi, the untwist-Frobenius-twist endomorphism
# Scalar used when undoing the sextic twist (value per the Budroni-Pintore
# reference cited above) -- first as an int, then lifted into Fq2.
iwsc = 0xd0088f51cbff34d258dd3db21a5d66bb23ba5c279c2895fb39869507b587b120f55ffff58a9ffffdcff7fffffffd556
iwsc = Fq2(p, iwsc, iwsc - 1)
# Coefficients applied to the x- and y-coordinates by the shortcut Frobenius
# step of psi (presumably q-power Frobenius constants -- confirm vs. paper).
k_qi_x = Fq(p, 0x1a0111ea397fe699ec02408663d4de85aa0d857d89759ad4897d29650fb85f9b409427eb4f49fffd8bfd00000000aaad)
k_qi_y = Fq(p, 0x6af0e0437ff400b6831e36d6bd17ffe48395dabc2d3435e77f76e17009241c5ee67992f72ec05f4c81084fbede3cc09)
# Twist-side multipliers for the x- and y-coordinates of psi's output.
k_cx = Fq2(p, 0, 0x1a0111ea397fe699ec02408663d4de85aa0d857d89759ad4897d29650fb85f9b409427eb4f49fffd8bfd00000000aaad)
k_cy = Fq2(p, 0x135203e60180a68ee2e9c448d77a2cd91c3dedd930b1cf60ef396489f61eb45e304466cf3e67fa0af1ee7b04121bdea2,
0x6af0e0437ff400b6831e36d6bd17ffe48395dabc2d3435e77f76e17009241c5ee67992f72ec05f4c81084fbede3cc09)
# shortcut Frobenius evaluations that avoid going all the way to Fq12
###
## Fast subgroup checks via Bowe19
###
# PEP 8 (E731): use def instead of assigning lambdas to names.
def on_curve_g1(P):
    """Check P against the G1 curve equation (constant b = 4)."""
    return _on_curve(P, Fq(p, 4))

def on_curve_g2(P):
    """Check P against the twist curve equation (constant b = 4 + 4*u)."""
    return _on_curve(P, Fq2(p, 4, 4))

# fast subgroup check for G2: [z]psi^3(P) - psi^2(P) + P == infty
# Cube root of unity in Fq, used by the endomorphism-based subgroup checks.
_beta = Fq(p, 0x1a0111ea397fe699ec02408663d4de85aa0d857d89759ad4897d29650fb85f9b409427eb4f49fffd8bfd00000000aaac)
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
2,
198,
2,
357,
34,
8,
13130,
30385,
324,
311,
13,
35893,
1525,
1279,
81,
2032,
31,
6359,
13,
14192,
3841,
13,
15532,
29,
198,
2,
198,
2,
5899,
11361,
7822,
286,
12133,
39628,
329,
7122,
... | 2.215215 | 1,998 |
""" Wrappers for generated PB
"""
from .feature_pb2 import Feature, Features, Value, List, Struct
def feature(val, var_type=Feature.Variable.CATEGORICAL, var_mode=Feature.Modality.STATIC):
    """ Build a Feature protobuf message wrapping *val*.

    :param val: python value to embed, converted via :func:`to_value`
    :param var_type: variable type enum (default: CATEGORICAL)
    :param var_mode: variable modality enum (default: STATIC)
    :return: populated ``Feature`` message
    """
    msg = Feature()
    msg.type = var_type
    msg.mode = var_mode
    msg.value.CopyFrom(to_value(val))
    return msg
def to_value(val):
    """ Recursively convert a native python object into a ``Value`` message.

    :param val: list, dict, str, bool, int, or float
    :return: ``Value`` message holding *val*
    :raises TypeError: if *val* has an unsupported type
    """
    out = Value()
    # NOTE: bool is tested before int on purpose -- bool subclasses int.
    if isinstance(val, list):
        items = List()
        items.value.extend([to_value(elem) for elem in val])
        out.list_value.CopyFrom(items)
    elif isinstance(val, dict):
        nested = Struct()
        for key, sub in val.items():
            nested.value[key].CopyFrom(to_value(sub))
        out.struct_value.CopyFrom(nested)
    elif isinstance(val, str):
        out.string_value = val
    elif isinstance(val, bool):
        out.bool_value = val
    elif isinstance(val, int):
        out.int_value = val
    elif isinstance(val, float):
        out.double_value = val
    else:
        raise TypeError("unsupported type %s" % type(val))
    return out
def value(val):
    """ Convert a ``Value`` message back to a native python object.

    :param val: ``Value`` message
    :return: list, dict, string, number, bool, or none value
    """
    # Scalar fields first, in the same precedence order as the original
    # if/elif chain; then struct; anything else falls through to list.
    for field in ("string_value", "int_value", "bool_value", "double_value"):
        if val.HasField(field):
            return getattr(val, field)
    if val.HasField("struct_value"):
        return {k: value(val.struct_value.value[k]) for k in val.struct_value.value}
    return [value(item) for item in val.list_value.value]
| [
37811,
27323,
11799,
329,
7560,
30524,
198,
37811,
198,
6738,
764,
30053,
62,
40842,
17,
1330,
27018,
11,
17571,
11,
11052,
11,
7343,
11,
32112,
628,
198,
4299,
3895,
7,
2100,
11,
1401,
62,
4906,
28,
38816,
13,
43015,
13,
34,
6158,
... | 2.290026 | 762 |
import os
import io
import platform
import json
import nltk
import heapq
import time
from rich.progress import (
TextColumn,
BarColumn,
Progress,
TimeRemainingColumn,
TimeElapsedColumn
)
from rich import print
from rich import box
from rich.panel import Panel
from math import sqrt
from bs4 import BeautifulSoup
from nltk.stem import snowball
from nltk.corpus import stopwords
from collections import defaultdict
from nltk.tokenize import word_tokenize, sent_tokenize
# Stemmer shared by the indexing and query pipelines.
SNOWBALL_STEMMER = snowball.SnowballStemmer('english')
# Populated lazily elsewhere in this module (presumably by init() with nltk
# stopwords -- TODO confirm).
STOP_WORDS = None
# BUGFIX: the original chose '/' only on macOS ('darwin') and fell back to
# '\\' on every other platform, which is wrong on Linux. os.sep yields the
# correct separator everywhere ('/' on macOS/Linux, '\\' on Windows), so the
# value is unchanged on the platforms the original handled correctly.
PATH_SEP = os.sep
HITS_JSON = None
# Prints headers of sections
# Prints success msgs
# updates stopwords as well as punctuations for nltk
# Uses BeautifulSoup to parse html file and get strings from it's body
# Uses nltk to tokenize words and
# get unique (alphabetical) tokens as well
# Finds position of tokens in doc
# Task-1: Preprocessing the files in a sub directory passed as parameter
# Task-2: This generates in memory inverted index for one BLOCK
# Task-2: This function saves posting list to file, applying delta encoding
# Task-2: This function writes the inverted index for one BLOCK into a file
# This function generates complete inverted index
# This reads posting list from the previously stored files
# Task-3: Merges Sorted Indices
# Reading inverted index data
# This uses boolean retrieval to search query
# Task-5: Boolean Retrieval and searching
# Prints Menu
# Entry Point
# Entry point: interactive menu loop. Option 1 searches an existing index,
# option 2 (re)builds the index from a corpus directory then searches,
# option 3 exits. Helper functions (init, print_menu, print_header,
# gen_complete_inverted_index, merge_indices, search_query) are defined
# elsewhere in this file.
if __name__ == '__main__':
    init()
    while(True):
        print_menu()
        # Bracketed tags are `rich` markup for colored console output.
        print('[yellow bold italic]Enter Option[/yellow bold italic]: ', end='')
        opt = input()
        # Reject anything that is not an integer in [1, 3].
        if not opt.isdigit() or int(opt) > 3 or int(opt) < 1:
            print('[bold red]Enter a value between 1 and 3![/bold red]', end='\n\n')
        elif opt == '3':
            break
        else:
            # flag records whether index (re)building succeeded, so option 2
            # falls through to the search prompt afterwards.
            flag = False
            if opt == '2':
                # NOTE: 'Cropus' is a typo in the prompt string, kept verbatim.
                main_dir_path = input('Enter Cropus path: ')
                if os.path.exists(main_dir_path):
                    # Sorting since by default it uses the sequence
                    # by which files are indexed by the File System
                    # so may not be in expected order
                    sub_dirs = sorted([s_dir for s_dir in os.listdir(
                        main_dir_path) if not s_dir.startswith('.')])
                    # Turning into complete paths
                    sub_dirs = [os.path.join(main_dir_path, s_dir) for s_dir in sub_dirs]
                    gen_complete_inverted_index(sub_dirs)
                    merge_indices(sub_dirs)
                    flag = True
                else:
                    print(f"\nCorpus Path '{main_dir_path}' does not exist!")
            if flag or opt == '1':
                print_header("", "SEARCH QUERY")
                print('[yellow bold italic]Enter Query[/yellow bold italic]: ', end='')
                query = input()
                if len(query) > 0:
                    search_query(query)
                else:
                    print("[red bold]Query cannot be empty[/red bold]")
| [
11748,
28686,
198,
11748,
33245,
198,
11748,
3859,
198,
11748,
33918,
198,
11748,
299,
2528,
74,
198,
11748,
24575,
80,
198,
11748,
640,
198,
6738,
5527,
13,
33723,
1330,
357,
198,
220,
220,
220,
8255,
39470,
11,
198,
220,
220,
220,
2... | 2.389055 | 1,334 |
# This script can be used for 'manually' uploading CANedge log files from an SD to S3.
# The script includes S3 meta data such as firmware and SD timestamp and correctly derives S3 key.
import mdf_iter
import canedge_browser
from pathlib import Path
import boto3
from botocore.client import Config
from s3transfer import TransferConfig, S3Transfer
# specify devices to process from local disk
devices = ["LOG/958D2219"]
session_offset = 0 # optionally offset the session counter for the uploaded files
# specify target S3 bucket details (placeholders -- fill in before running)
key = "s3_key"
secret = "s3_secret"
endpoint = "s3_endpoint" # e.g. https://s3.eu-central-1.amazonaws.com
bucket = "s3_bucket"
# ----------------------------------
# load all log files from local folder
base_path = Path(__file__).parent
fs = canedge_browser.LocalFileSystem(base_path=base_path)
log_files = canedge_browser.get_log_files(fs, devices)
print(f"Found a total of {len(log_files)} log files")
s3 = boto3.client(
    "s3", endpoint_url=endpoint, aws_access_key_id=key, aws_secret_access_key=secret, config=Config(signature_version="s3v4"),
)
# Very large multipart threshold effectively forces single-part uploads.
transfer = S3Transfer(s3, TransferConfig(multipart_threshold=9999999999999999, max_concurrency=10, num_download_attempts=10,))
# for each log file, extract header information, create S3 key and upload
for log_file in log_files:
    with fs.open(log_file, "rb") as handle:
        mdf_file = mdf_iter.MdfFile(handle)
        # MDF metadata path prefix holding device id / session / split info.
        header = "HDComment.Device Information"
        device_id = mdf_file.get_metadata()[f"{header}.serial number"]["value_raw"]
        session = mdf_file.get_metadata()[f"{header}.File Information.session"]["value_raw"]
        # Zero-pad session (after applying the optional offset) and split to
        # 8 digits so S3 keys sort chronologically.
        session = f"{(int(session) + session_offset):08}"
        split = int(mdf_file.get_metadata()[f"{header}.File Information.split"]["value_raw"])
        split = f"{split:08}"
        ext = log_file.split(".")[-1]
        s3_meta_fw = mdf_file.get_metadata()[f"{header}.firmware version"]["value_raw"]
        # Earliest timestamp in the file, used as the S3 Timestamp meta field.
        s3_meta_timestamp = mdf_file.get_data_frame().index.min().strftime("%Y%m%dT%H%M%S")
    s3_key = f"{device_id}/{session}/{split}.{ext}"
    s3_meta = {"Metadata": {"Fw": s3_meta_fw, "Timestamp": s3_meta_timestamp}}
    # upload local file to S3 (log_file[1:] strips the leading path separator)
    transfer.upload_file(log_file[1:], key=s3_key, bucket=bucket, extra_args=s3_meta)
    print(f"Uploaded {log_file} as {s3_key}")
| [
2,
770,
4226,
460,
307,
973,
329,
705,
805,
935,
6,
33794,
15628,
14907,
2604,
3696,
422,
281,
9834,
284,
311,
18,
13,
198,
2,
383,
4226,
3407,
311,
18,
13634,
1366,
884,
355,
18779,
290,
9834,
41033,
290,
9380,
37453,
311,
18,
19... | 2.660978 | 879 |
from __future__ import absolute_import, division, print_function
from warnings import warn
import pandas as pd
import statsmodels.formula.api as smf
from ..doctools import document
from .stat import stat
# method_args are any of the keyword args (other than q) for
# statsmodels.regression.quantile_regression.QuantReg.fit
@document
class stat_quantile(stat):
"""
Compute quantile regression lines
{usage}
Parameters
----------
{common_parameters}
quatiles : tuple, optional (default: (0.25, 0.5, 0.75))
Quantiles of y to compute
formula : str, optional (default: 'y ~ x')
Formula relating y variables to x variables
method_args : dict, optional
Extra arguments passed on to the model fitting method,
:meth:`statsmodels.regression.quantile_regression.QuantReg.fit`.
See Also
--------
statsmodels.regression.quantile_regression.QuantReg
plotnine.geoms.geom_quantile
"""
_aesthetics_doc = """
{aesthetics_table}
.. rubric:: Options for computed aesthetics
::
'quantile' # quantile
'group' # group identifier
Calculated aesthetics are accessed using the `calc` function.
e.g. :py:`'stat(quantile)'`.
"""
REQUIRED_AES = {'x', 'y'}
DEFAULT_PARAMS = {'geom': 'quantile', 'position': 'identity',
'na_rm': False, 'quantiles': (0.25, 0.5, 0.75),
'formula': 'y ~ x', 'method_args': {}}
CREATES = {'quantile', 'group'}
@classmethod
| [
6738,
11593,
37443,
834,
1330,
4112,
62,
11748,
11,
7297,
11,
3601,
62,
8818,
198,
198,
6738,
14601,
1330,
9828,
198,
198,
11748,
19798,
292,
355,
279,
67,
198,
11748,
9756,
27530,
13,
687,
4712,
13,
15042,
355,
895,
69,
198,
198,
6... | 2.572864 | 597 |
# MIT License
# Copyright (c) 2021 Rene Jean Corneille
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import datetime
import pytest
from package_name.cli import _get_str_date
# Each case: (input datetime, strftime-style format, expected output string).
@pytest.mark.parametrize(
    "date, fmt, result",
    [
        (datetime.datetime(2018, 10, 18, 14, 30, 59), '%Y-%B-%d, %H:%M:%S', '2018-October-18, 14:30:59'),
        (datetime.datetime(2025, 9, 1, 4, 16, 22), '%y-%m-%d, %H:%M:%S', '25-09-01, 04:16:22'),
        (datetime.datetime(1987, 3, 25, 21, 11, 2), '%Y-%B-%d', '1987-March-25'),
    ],
)
def test_format_date(date, fmt, result):
    """Check that _get_str_date formats *date* according to *fmt*."""
    assert _get_str_date(date, fmt) == result
2,
17168,
13789,
198,
198,
2,
15069,
357,
66,
8,
33448,
371,
1734,
11320,
2744,
710,
8270,
198,
198,
2,
2448,
3411,
318,
29376,
7520,
11,
1479,
286,
3877,
11,
284,
597,
1048,
16727,
257,
4866,
198,
2,
286,
428,
3788,
290,
3917,
10... | 3.005464 | 549 |
from discord.ext import commands
import requests
import os
from ..core.cog_config import CogExtension
# This extension is for school h.w. of phantom0174.
# Weather-field labels keyed by stringified index (values are Chinese labels;
# English meanings noted per entry).
data_prefix = {
    "0": "天氣描述",  # weather description
    "1": "最高溫度",  # maximum temperature
    "2": "最低溫度",  # minimum temperature
    "3": "體感描述",  # "feels like" description
    "4": "降水機率"   # probability of precipitation
}
# Unit suffix appended to each field's value ("度" = degrees Celsius label).
data_suffix = {
    "0": "",
    "1": "度",
    "2": "度",
    "3": "",
    "4": "%"
}
| [
6738,
36446,
13,
2302,
1330,
9729,
198,
11748,
7007,
198,
11748,
28686,
198,
6738,
11485,
7295,
13,
66,
519,
62,
11250,
1330,
327,
519,
11627,
3004,
628,
198,
2,
770,
7552,
318,
329,
1524,
289,
13,
86,
13,
286,
36381,
486,
4524,
13,... | 1.714286 | 203 |
import os
import flask
import logging, logging.handlers
from celery import Celery
def configure_logger(app):
    '''
    Configures the app's logger object.

    Removes Flask's default handler, then attaches a rotating file handler
    (always) and a console handler (development only), both at DEBUG level.

    :param app: the Flask application whose ``logger`` is configured
    :return: the configured logger
    '''
    app.logger.removeHandler(flask.logging.default_handler)
    formatter = logging.Formatter(
        '%(asctime)s %(levelname)s: [in %(pathname)s:%(lineno)d] %(threadName)s %(message)s')
    # Rotate at ~1 MB, keeping up to 20 backup files.
    file_handler = logging.handlers.RotatingFileHandler(
        filename=app.config['LOG_FILE'],
        mode='a',
        maxBytes=1*1000*1000,
        backupCount=20)
    file_handler.setFormatter(formatter)
    file_handler.setLevel(logging.DEBUG)
    app.logger.addHandler(file_handler)
    # Only build the console handler when it is actually attached: the
    # original constructed it unconditionally even though it is used in
    # development mode only.
    if app.config['ENV'] == 'development':
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(formatter)
        stream_handler.setLevel(logging.DEBUG)
        app.logger.addHandler(stream_handler)
    app.logger.setLevel(logging.DEBUG)
    app.logger.debug('Starting app')
    return app.logger
def create_app(config=None):
    '''
    Implements the "app factory" pattern, recommended by Flask documentation.

    :param config: optional dict of settings overriding the file-based config
    :return: a fully configured Flask application
    '''
    instance_dir = os.path.join(os.path.dirname(__file__), 'config')
    app = flask.Flask(
        __name__,
        instance_path=instance_dir,
        instance_relative_config=True
    )
    # app.config['ENV'] is automatically set by the FLASK_ENV environment
    # variable; it defaults to 'production'.
    app.config.from_pyfile('default.cfg')
    app.config.from_pyfile('{}.cfg'.format(app.config.get('ENV')), silent=True)
    if isinstance(config, dict) and config:
        app.config.from_mapping(config)
    configure_logger(app)
    # The placement of this import statement is important!
    # It must come after the app is initialized, and imported in the same scope.
    from . import routes
    app.register_blueprint(routes.BLUEPRINT)
    return app
def create_celery(app):
    """
    Initializes a celery application using Flask App
    """
    # Celery settings are read from the Flask config's CELERY_CONFIG entry.
    return Celery(
        app.import_name,
        include=["expertise.service.celery_tasks"],
        config_source=app.config["CELERY_CONFIG"]
    )
| [
11748,
28686,
198,
11748,
42903,
198,
11748,
18931,
11,
18931,
13,
4993,
8116,
198,
6738,
18725,
1924,
1330,
15248,
1924,
198,
198,
4299,
17425,
62,
6404,
1362,
7,
1324,
2599,
198,
220,
220,
220,
705,
7061,
198,
220,
220,
220,
17056,
... | 2.572282 | 837 |
# -*- coding: utf-8 -*-
from trello import util
from orchester.ConfigHelper import get_config_data, pick
if __name__ == '__main__':
    # Script entry point -- generate() is defined elsewhere in this module.
    generate()
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
6738,
2054,
18798,
1330,
7736,
198,
6738,
17771,
1706,
13,
16934,
47429,
1330,
651,
62,
11250,
62,
7890,
11,
2298,
628,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
... | 2.777778 | 54 |
# Copyright (c) MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List, Optional, Sequence, Tuple, Union
import numpy as np
import torch
# Public API exported on star-import. NOTE(review): "stride_minus_kernel_padding"
# is not defined in this part of the file -- presumably elsewhere in the module.
__all__ = ["same_padding", "stride_minus_kernel_padding", "calculate_out_shape", "gaussian_1d", "polyval"]
def same_padding(
    kernel_size: Union[Sequence[int], int], dilation: Union[Sequence[int], int] = 1
) -> Union[Tuple[int, ...], int]:
    """
    Return the padding value needed to ensure a convolution using the given kernel size produces an output of the same
    shape as the input for a stride of 1, otherwise ensure a shape of the input divided by the stride rounded down.

    Args:
        kernel_size: convolution kernel size, scalar or one value per spatial dimension.
        dilation: convolution dilation, scalar or one value per spatial dimension.

    Returns:
        A scalar when both inputs are scalars, otherwise a tuple with one value per dimension.

    Raises:
        NotImplementedError: When ``np.any((kernel_size - 1) * dilation % 2 == 1)``.

    """
    kernel_size_np = np.atleast_1d(kernel_size)
    dilation_np = np.atleast_1d(dilation)

    # "same" padding only exists when (k - 1) * d is even in every dimension.
    # FIX: use the normalized ``dilation_np`` consistently -- the original
    # mixed the raw ``dilation`` argument into this check and relied on
    # accidental numpy broadcasting for sequence inputs.
    if np.any((kernel_size_np - 1) * dilation_np % 2 == 1):
        raise NotImplementedError(
            f"Same padding not available for kernel_size={kernel_size_np} and dilation={dilation_np}."
        )

    padding_np = (kernel_size_np - 1) / 2 * dilation_np
    padding = tuple(int(p) for p in padding_np)
    # Collapse to a scalar when the caller passed scalars.
    return padding if len(padding) > 1 else padding[0]
def calculate_out_shape(
    in_shape: Union[Sequence[int], int, np.ndarray],
    kernel_size: Union[Sequence[int], int],
    stride: Union[Sequence[int], int],
    padding: Union[Sequence[int], int],
) -> Union[Tuple[int, ...], int]:
    """
    Calculate the output tensor shape when applying a convolution to a tensor of shape `inShape` with kernel size
    `kernel_size`, stride value `stride`, and input padding value `padding`. All arguments can be scalars or multiple
    values, return value is a scalar if all inputs are scalars.
    """
    # Normalize every argument to a 1-d array so scalar and per-dimension
    # inputs share a single code path.
    in_np, kernel_np, stride_np, pad_np = (
        np.atleast_1d(arg) for arg in (in_shape, kernel_size, stride, padding)
    )

    # Standard convolution arithmetic: floor((in - k + 2 * pad) / stride) + 1.
    out_np = ((in_np - kernel_np + pad_np + pad_np) // stride_np) + 1

    result = tuple(int(dim) for dim in out_np)
    # Collapse to a scalar when the caller passed scalars.
    return result if len(result) > 1 else result[0]
def gaussian_1d(
    sigma: torch.Tensor, truncated: float = 4.0, approx: str = "erf", normalize: bool = False
) -> torch.Tensor:
    """
    one dimensional Gaussian kernel.

    Args:
        sigma: std of the kernel
        truncated: tail length, in multiples of sigma
        approx: discrete Gaussian kernel type, available options are "erf", "sampled", and "scalespace".

            - ``erf`` approximation interpolates the error function;
            - ``sampled`` uses a sampled Gaussian kernel;
            - ``scalespace`` corresponds to
              https://en.wikipedia.org/wiki/Scale_space_implementation#The_discrete_Gaussian_kernel
              based on the modified Bessel functions.

        normalize: whether to normalize the kernel with `kernel.sum()`.

    Raises:
        ValueError: When ``truncated`` is non-positive.

    Returns:
        1D torch tensor of length ``2 * tail + 1``
    """
    # Coerce sigma to a float tensor, preserving its device when it already
    # is a tensor.
    sigma = torch.as_tensor(sigma, dtype=torch.float, device=sigma.device if isinstance(sigma, torch.Tensor) else None)
    device = sigma.device
    if truncated <= 0.0:
        raise ValueError(f"truncated must be positive, got {truncated}.")
    # Half-width of the kernel support (at least 1 sample each side).
    tail = int(max(float(sigma) * truncated, 0.5) + 0.5)
    if approx.lower() == "erf":
        x = torch.arange(-tail, tail + 1, dtype=torch.float, device=device)
        # 0.70710678 ~= 1/sqrt(2); each bin integrates the Gaussian over
        # [x - 0.5, x + 0.5] via the error function.
        t = 0.70710678 / torch.abs(sigma)
        out = 0.5 * ((t * (x + 0.5)).erf() - (t * (x - 0.5)).erf())
        # guard against tiny negative values from floating-point round-off
        out = out.clamp(min=0)
    elif approx.lower() == "sampled":
        x = torch.arange(-tail, tail + 1, dtype=torch.float, device=sigma.device)
        out = torch.exp(-0.5 / (sigma * sigma) * x ** 2)
        if not normalize:  # compute the normalizer
            # 2.5066282 ~= sqrt(2 * pi)
            out = out / (2.5066282 * sigma)
    elif approx.lower() == "scalespace":
        # Discrete Gaussian via modified Bessel functions I_k(sigma^2);
        # _modified_bessel_{0,1,i} are defined elsewhere in this module.
        sigma2 = sigma * sigma
        out_pos: List[Optional[torch.Tensor]] = [None] * (tail + 1)
        out_pos[0] = _modified_bessel_0(sigma2)
        out_pos[1] = _modified_bessel_1(sigma2)
        for k in range(2, len(out_pos)):
            out_pos[k] = _modified_bessel_i(k, sigma2)
        # Mirror the positive half (excluding index 0) to build the
        # symmetric kernel, then scale by exp(-sigma^2).
        out = out_pos[:0:-1]
        out.extend(out_pos)
        out = torch.stack(out) * torch.exp(-sigma2)
    else:
        raise NotImplementedError(f"Unsupported option: approx='{approx}'.")
    return out / out.sum() if normalize else out  # type: ignore
def polyval(coef, x) -> torch.Tensor:
    """
    Evaluates the polynomial defined by `coef` at `x`.

    For a 1D sequence of coef (length n), evaluate::

        y = coef[n-1] + x * (coef[n-2] + ... + x * (coef[1] + x * coef[0]))

    Args:
        coef: a sequence of floats representing the coefficients of the polynomial
        x: float or a sequence of floats representing the variable of the polynomial

    Returns:
        1D torch tensor
    """
    device = x.device if isinstance(x, torch.Tensor) else None
    coef = torch.as_tensor(coef, dtype=torch.float, device=device)
    # BUGFIX: convert x before the empty-coefficient early return -- the
    # original read ``x.shape`` here, raising AttributeError when ``coef``
    # was empty and ``x`` was a plain python number.
    x = torch.as_tensor(x, dtype=torch.float, device=device)
    # A 0-dim or empty coefficient tensor means "no polynomial": return zeros
    # shaped like x. (ndim check first: len() fails on a 0-dim tensor.)
    if coef.ndim == 0 or (len(coef) < 1):
        return torch.zeros(x.shape)
    # Horner's method.
    ans = coef[0]
    for c in coef[1:]:
        ans = ans * x + c
    return ans  # type: ignore
| [
2,
15069,
357,
66,
8,
25000,
20185,
42727,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
198,
2,
921,
743... | 2.528509 | 2,280 |