hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0ec920cc247e6b14a55a0653f485463df509250d
| 260
|
py
|
Python
|
acrcloud/__init__.py
|
Cronuh/highlights-bot
|
0442cdf927cbebae6395f536b5529fe4c1948b99
|
[
"Apache-2.0"
] | null | null | null |
acrcloud/__init__.py
|
Cronuh/highlights-bot
|
0442cdf927cbebae6395f536b5529fe4c1948b99
|
[
"Apache-2.0"
] | null | null | null |
acrcloud/__init__.py
|
Cronuh/highlights-bot
|
0442cdf927cbebae6395f536b5529fe4c1948b99
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
#-*- coding: utf-8 -*-
import os,sys
sys.path.append(os.path.split(os.path.realpath(__file__))[0])
| 37.142857
| 158
| 0.307692
| 20
| 260
| 3.8
| 0.75
| 0.157895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018349
| 0.580769
| 260
| 6
| 159
| 43.333333
| 0.678899
| 0.684615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0ecec63fc632dab1fe4fa0e6031ffa9e699a8441
| 50
|
py
|
Python
|
test.py
|
lipengyuan1994/Tensorflow-Developer
|
73aa3feb32513cd7c959575e0b08ad4384351941
|
[
"MIT"
] | 5
|
2021-12-17T18:57:51.000Z
|
2022-03-21T06:15:46.000Z
|
test.py
|
lipengyuan1994/Tensorflow-Developer
|
73aa3feb32513cd7c959575e0b08ad4384351941
|
[
"MIT"
] | null | null | null |
test.py
|
lipengyuan1994/Tensorflow-Developer
|
73aa3feb32513cd7c959575e0b08ad4384351941
|
[
"MIT"
] | 2
|
2022-01-14T00:42:53.000Z
|
2022-02-05T15:35:53.000Z
|
import tensorflow as tf
print(tf.__version__)
| 7.142857
| 23
| 0.76
| 7
| 50
| 4.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.18
| 50
| 6
| 24
| 8.333333
| 0.829268
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
0ee83b9e890a8d6dffd1620ecb9ba8b03a8c22b9
| 37
|
py
|
Python
|
1957.py
|
heltonricardo/URI
|
160cca22d94aa667177c9ebf2a1c9864c5e55b41
|
[
"MIT"
] | 6
|
2021-04-13T00:33:43.000Z
|
2022-02-10T10:23:59.000Z
|
1957.py
|
heltonricardo/URI
|
160cca22d94aa667177c9ebf2a1c9864c5e55b41
|
[
"MIT"
] | null | null | null |
1957.py
|
heltonricardo/URI
|
160cca22d94aa667177c9ebf2a1c9864c5e55b41
|
[
"MIT"
] | 3
|
2021-03-23T18:42:24.000Z
|
2022-02-10T10:24:07.000Z
|
print(hex(int(input()))[2:].upper())
| 18.5
| 36
| 0.594595
| 6
| 37
| 3.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027778
| 0.027027
| 37
| 1
| 37
| 37
| 0.583333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
1624eb6363b732cc7d8291a8f440da732d37e8eb
| 105
|
py
|
Python
|
test/myproject/regist/admin.py
|
d0r6y/CloudKloud
|
33747e208bd625a6c80bae0f87a0997181be4298
|
[
"MIT"
] | 1
|
2021-01-23T11:21:30.000Z
|
2021-01-23T11:21:30.000Z
|
test/myproject/regist/admin.py
|
d0r6y/CloudKloud
|
33747e208bd625a6c80bae0f87a0997181be4298
|
[
"MIT"
] | null | null | null |
test/myproject/regist/admin.py
|
d0r6y/CloudKloud
|
33747e208bd625a6c80bae0f87a0997181be4298
|
[
"MIT"
] | 2
|
2020-12-18T17:56:58.000Z
|
2020-12-23T05:20:34.000Z
|
from django.contrib import admin
from .models import accessKeyIDPW
admin.site.register(accessKeyIDPW)
| 15
| 34
| 0.828571
| 13
| 105
| 6.692308
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 105
| 6
| 35
| 17.5
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1635a3ee5a741300c68d22b9f311a8c9e52a7e32
| 43,493
|
py
|
Python
|
app/apis/ga_uniparthenope/v1/ga_v1.py
|
uniparthenope/api-uniparthenope
|
ae959ccb5827e6ba7df6f195a6570347e39860c9
|
[
"Apache-2.0"
] | 1
|
2020-09-11T08:29:55.000Z
|
2020-09-11T08:29:55.000Z
|
app/apis/ga_uniparthenope/v1/ga_v1.py
|
uniparthenope/api-uniparthenope
|
ae959ccb5827e6ba7df6f195a6570347e39860c9
|
[
"Apache-2.0"
] | 3
|
2021-09-08T02:30:13.000Z
|
2022-03-12T00:48:19.000Z
|
app/apis/ga_uniparthenope/v1/ga_v1.py
|
uniparthenope/api-uniparthenope
|
ae959ccb5827e6ba7df6f195a6570347e39860c9
|
[
"Apache-2.0"
] | null | null | null |
import json
import sys
import traceback
import base64
import math
import sqlalchemy
from sqlalchemy import exc
from app import api, db
from flask_restplus import Resource, fields
from datetime import datetime, timedelta
from flask import g, request
from app.apis.uniparthenope.v1.login_v1 import token_required_general, token_required
from app.apis.uniparthenope.v1.professor_v1 import getCourses
from app.apis.uniparthenope.v2.students_v2 import MyExams
from app.config import Config
from app.apis.ga_uniparthenope.models import Reservations, ReservableRoom, Room, Area, Entry, UserTemp
from app.apis.access.models import UserAccess
ns = api.namespace('uniparthenope')
# ------------- GLOBAL FUNCTIONS -------------
def createDate(data):
mesi = ["gennaio", "febbraio", "marzo", "aprile", "maggio", "giugno", "luglio", "agosto", "settembre",
"ottobre", "novembre", "dicembre"]
data = data.split()
# print(data)
ora = data[0][0:2]
minuti = data[0][3:5]
anno = data[5]
giorno = data[3]
mese = mesi.index(data[4]) + 1
# final_data = datetime(anno, mese, giorno, ora, minuti)
final_data = str(anno) + "/" + str(mese) + "/" + str(giorno) + " " + str(ora) + ":" + str(minuti)
return final_data
def extractData(data):
data_split = data.split()[0]
export_data = datetime.strptime(data_split, '%d/%m/%Y')
return export_data
# ------------- GET TODAY SERVICES -------------
class getTodayServices(Resource):
@ns.doc(security='Basic Auth')
@token_required_general
def get(self):
"""Get Today Services"""
if g.status == 200:
username = g.response['user']['userId']
array = []
try:
start = datetime.now().date()
end = start + timedelta(days=2)
grpid = "," + str(g.response['user']['grpId']) + ","
aree = Area.query.all()
for area in aree:
array_area = []
service = []
services = db.session.query(Entry, Room).filter(Room.id == Entry.room_id) \
.filter(Entry.start_time >= start) \
.filter(Entry.end_time <= end) \
.filter(Entry.end_time > datetime.now()) \
.filter(Room.area_id == area.id) \
.filter(Room.user_access.contains(grpid))
for s in services:
reserved = False
resered_id = None
reserved_by = None
reservation = Reservations.query.filter_by(id_lezione=s.Entry.id).filter_by(
username=username)
if reservation.first() is not None:
reserved = True
resered_id = reservation.first().id
reserved_by = reservation.first().reserved_by
if s.Room.piano is None or s.Room.piano == '999':
piano = " "
else:
piano = s.Room.piano
if s.Room.lato is None:
lato = " "
else:
lato = s.Room.lato
service.append({
'id': s.Entry.id,
'start': str(s.Entry.start_time),
'end': str(s.Entry.end_time),
'room': {
'name': s.Room.room_name,
'capacity': math.floor(s.Room.capacity / 2),
'description': "Piano " + piano + " Lato " + lato,
'availability': math.floor(
s.Room.capacity / 2) - Reservations.query.with_for_update().filter_by(
id_lezione=s.Entry.id).count()
},
'reservation': {
'reserved_id': resered_id,
'reserved': reserved,
'reserved_by': reserved_by
}
})
array.append({
'area': area.area_name,
'services': service
})
return array, 200
except:
db.session.rollback()
print("Unexpected error:")
print("Title: " + sys.exc_info()[0].__name__)
print("Description: " + traceback.format_exc())
return {
'errMsgTitle': sys.exc_info()[0].__name__,
'errMsg': traceback.format_exc()
}, 500
else:
return {'errMsg': 'Wrong username/pass'}, g.status
# ------------- GET TODAY LECTURES -------------
parser = api.parser()
parser.add_argument('matId', required=True, help='')
@ns.doc(parser=parser)
class getTodayLecture(Resource):
@ns.doc(security='Basic Auth')
@token_required
def get(self, matId):
"""Get Today Lectures"""
con = sqlalchemy.create_engine(Config.GA_DATABASE, echo=False)
base64_bytes = g.token.encode('utf-8')
message_bytes = base64.b64decode(base64_bytes)
token_string = message_bytes.decode('utf-8')
username = token_string.split(':')[0]
result = MyExams(Resource).get(matId)
status = json.loads(json.dumps(result))[1]
_result = json.loads(json.dumps(result))[0]
if status == 200:
codici = []
codici_res = []
for i in range(len(_result)):
if _result[i]['status']['esito'] == 'P' or _result[i]['status']['esito'] == 'F':
codici.append(_result[i]['codice'])
res = Reservations.query.filter_by(username=username)
for r in res:
if r.id_corso not in codici:
codici.append(r.id_corso)
array = []
start = datetime.now().date()
end = start + timedelta(days=1)
for i in range(len(codici)):
codice = codici[i]
rs = con.execute(
"SELECT * FROM `mrbs_entry` E JOIN `mrbs_room` R WHERE E.room_id = R.id AND `id_corso` LIKE '%%" + str(
codice) + "%%' AND start_time >= '" + str(start) + "' AND end_time <= '" + str(end) + "'")
for row in rs:
reserved = False
resered_id = None
reserved_by = None
reservation = Reservations.query.filter_by(id_lezione=row[0]).filter_by(
username=username)
if reservation.first() is not None:
reserved = True
resered_id = reservation.first().id
reserved_by = reservation.first().reserved_by
array.append({
'id': row[0],
'id_corso': codice,
'start': str(datetime.fromtimestamp(row[1])),
'end': str(datetime.fromtimestamp(row[2])),
'room': {
'name': row[38],
'capacity': math.floor(row[41] / 2),
'description': row[40],
'availability': math.floor(
int(row[41]) / 2) - Reservations.query.with_for_update().filter_by(
id_lezione=row[0]).count()
},
'course_name': row[9],
'prof': row[11],
'reservation': {
'reserved_id': resered_id,
'reserved': reserved,
'reserved_by': reserved_by
}
})
return array, 200
else:
return {'errMsg': _result['errMsg']}, status
# ------------- GET ALL OWN LECTURES -------------
parser = api.parser()
parser.add_argument('matId', required=True, help='')
@ns.doc(parser=parser)
class getLectures(Resource):
@ns.doc(security='Basic Auth')
@token_required
def get(self, matId):
"""Get All Own Lectures"""
base64_bytes = g.token.encode('utf-8')
message_bytes = base64.b64decode(base64_bytes)
token_string = message_bytes.decode('utf-8')
username = token_string.split(':')[0]
result = MyExams(Resource).get(matId)
status = json.loads(json.dumps(result))[1]
_result = json.loads(json.dumps(result))[0]
con = sqlalchemy.create_engine(Config.GA_DATABASE, echo=False)
start = datetime(datetime.now().year, datetime.now().month, datetime.now().day, 0, 0).timestamp()
if status == 200:
array = []
for i in range(len(_result)):
if _result[i]['status']['esito'] == 'P' or _result[i]['status']['esito'] == 'F':
codice = _result[i]['codice']
rs = con.execute("SELECT * FROM `mrbs_entry` E JOIN `mrbs_room` R WHERE E.id_corso LIKE '%%" + str(
codice) + "%%' AND E.start_time >= '" + str(start) + "' AND R.id = E.room_id")
for row in rs:
reserved = False
resered_id = None
reserved_by = None
reservation = Reservations.query.filter_by(id_lezione=row[0]).filter_by(
username=username)
if reservation.first() is not None:
reserved = True
resered_id = reservation.first().id
reserved_by = reservation.first().reserved_by
array.append({
'id': row[0],
'id_corso': codice,
'start': str(datetime.fromtimestamp(row[1])),
'end': str(datetime.fromtimestamp(row[2])),
'room': {
'name': row[38],
'capacity': math.floor(int(row[41]) / 2),
'description': row[40],
'availability': math.floor(
int(row[41]) / 2) - Reservations.query.with_for_update().filter_by(
id_lezione=row[0]).count()
},
'course_name': row[9],
'prof': row[11],
'reservation': {
'reserved_id': resered_id,
'reserved': reserved,
'reserved_by': reserved_by
}
})
res = Reservations.query.filter_by(username=username).filter(
Reservations.start_time >= datetime.fromtimestamp(start)).all()
if len(array) == 0:
for r in res:
rs = con.execute("SELECT * FROM `mrbs_entry` E JOIN `mrbs_room` R WHERE E.id ='" + str(
r.id_lezione) + "' AND E.start_time >= '" + str(start) + "' AND R.id = E.room_id")
for row in rs:
array.append({
'id': row[0],
'id_corso': r.id_corso,
'start': str(datetime.fromtimestamp(row[1])),
'end': str(datetime.fromtimestamp(row[2])),
'room': {
'name': row[38],
'capacity': math.floor(int(row[41]) / 2),
'description': row[40],
'availability': math.floor(
int(row[41]) / 2) - Reservations.query.with_for_update().filter_by(
id_lezione=row[0]).count()
},
'course_name': row[9],
'prof': row[11],
'reservation': {
'reserved_id': r.id,
'reserved': True,
'reserved_by': r.reserved_by
}
})
else:
id_lez = []
for i in range(len(array)):
id_lez.append(array[i]['id'])
print(id_lez)
for r in res:
if r.id_lezione not in id_lez:
rs = con.execute("SELECT * FROM `mrbs_entry` E JOIN `mrbs_room` R WHERE E.id ='" + str(
r.id_lezione) + "' AND E.start_time >= '" + str(start) + "' AND R.id = E.room_id")
for row in rs:
array.append({
'id': row[0],
'id_corso': r.id_corso,
'start': str(datetime.fromtimestamp(row[1])),
'end': str(datetime.fromtimestamp(row[2])),
'room': {
'name': row[38],
'capacity': math.floor(int(row[41]) / 2),
'description': row[40],
'availability': math.floor(
int(row[41]) / 2) - Reservations.query.with_for_update().filter_by(
id_lezione=row[0]).count()
},
'course_name': row[9],
'prof': row[11],
'reservation': {
'reserved_id': r.id,
'reserved': True,
'reserved_by': r.reserved_by
}
})
return array, 200
else:
return {'errMsg': _result['errMsg']}, status
# ------------- GET ALL PROF LECTURES -------------
parser = api.parser()
parser.add_argument('aaId', required=True, help='')
@ns.doc(parser=parser)
class getProfLectures(Resource):
@ns.doc(security='Basic Auth')
@token_required
def get(self, aaId):
"""Get All Prof Lectures"""
result = getCourses(Resource).get(aaId)
status = json.loads(json.dumps(result))[1]
_result = json.loads(json.dumps(result))[0]
con = sqlalchemy.create_engine(Config.GA_DATABASE, echo=False)
if status == 200:
base64_bytes = g.token.encode('utf-8')
message_bytes = base64.b64decode(base64_bytes)
token_string = message_bytes.decode('utf-8')
username = token_string.split(':')[0]
array = []
for i in range(len(_result)):
codice = _result[i]['adDefAppCod']
start = datetime(datetime.now().year, datetime.now().month, datetime.now().day, 0, 0).timestamp()
rs = con.execute("SELECT * FROM `mrbs_entry` E JOIN `mrbs_room` R WHERE E.id_corso LIKE '%%" + str(
codice) + "%%' AND E.start_time >= '" + str(start) + "' AND R.id = E.room_id")
# COGNOMI CON SPAZI E ACCENTI
# AND E.description LIKE '%%" + username.split(".")[1] + "%%'")
courses = []
for row in rs:
courses.append({
'id': row[0],
'start': str(datetime.fromtimestamp(row[1])),
'end': str(datetime.fromtimestamp(row[2])),
'room': {
'name': row[38],
'capacity': math.floor(int(row[41]) / 2),
'description': row[40],
'availability': math.floor(
int(row[41]) / 2) - Reservations.query.with_for_update().filter_by(
id_lezione=row[0]).count()
},
'course_name': row[9],
'prof': row[11]
})
array.append({
'nome': _result[i]['adDes'],
'courses': courses
})
# print(array)
return array, 200
else:
return {'errMsg': _result['errMsg']}, status
# ------------- SERVICES RESERVATIONS -------------
prenotazione_servizi = ns.model("services_reservation", {
"id_entry": fields.String(description="", required=True),
"matricola": fields.String(description="", required=True)
})
def reserve(username, content, rs):
try:
capacity = math.floor(rs.Room.capacity / 2)
now = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=2)
if rs.Entry.start_time > now or rs.Entry.end_time > now or rs.Entry.end_time < datetime.now():
return {
'errMsgTitle': 'Attenzione',
'errMsg': 'Prenotazione non consentita.'
}, 500
start = datetime.now().date()
end = start + timedelta(days=2)
today_reservations = Reservations.query.filter_by(username=username).filter(
Reservations.start_time >= start).filter(
Reservations.end_time <= end).all()
for res in today_reservations:
if res.start_time <= rs.Entry.start_time < res.end_time or res.start_time < rs.Entry.end_time <= res.end_time:
return {
'errMsgTitle': 'Attenzione',
'errMsg': 'Già presente una prenotazione in questo lasso di tempo.'
}, 500
r = Reservations(id_corso="SERVICE", course_name=rs.Entry.name,
start_time=rs.Entry.start_time,
end_time=rs.Entry.end_time,
username=username, matricola=content['matricola'],
time=datetime.now(), id_lezione=content['id_entry'],
reserved_by=username)
db.session.add(r)
count = Reservations.query.with_for_update().filter_by(
id_lezione=content['id_entry']).count()
if count > capacity:
db.session.rollback()
return {
'errMsgTitle': 'Attenzione',
'errMsg': 'Raggiunta la capacità massima consentita.'
}, 500
db.session.commit()
return {
"status": "Prenotazione effettuata con successo."
}, 200
except exc.IntegrityError:
db.session.rollback()
return {
'errMsgTitle': 'Attenzione',
'errMsg': 'Prenotazione già effettuata per questo servizio.'
}, 500
except:
db.session.rollback()
print("Unexpected error:")
print("Title: " + sys.exc_info()[0].__name__)
print("Description: " + traceback.format_exc())
return {
'errMsgTitle': sys.exc_info()[0].__name__,
'errMsg': traceback.format_exc()
}, 500
class ServicesReservation(Resource):
@ns.doc(security='Basic Auth')
@token_required_general
@ns.expect(prenotazione_servizi)
def post(self):
"""Set Service Reservation"""
base64_bytes = g.token.encode('utf-8')
message_bytes = base64.b64decode(base64_bytes)
token_string = message_bytes.decode('utf-8')
username = token_string.split(':')[0]
content = request.json
if g.status == 200 and 'id_entry' in content and 'matricola' in content:
rs = db.session.query(Entry, Room).filter(Room.id == Entry.room_id).filter(
Entry.id == content['id_entry']).first()
grpid = "," + str(g.response['user']['grpId']) + ","
if grpid in rs.Room.user_access:
if g.response['user']['grpId'] != 7 and g.response['user']['grpId'] != 99:
user = UserAccess.query.filter_by(username=username).first()
if user is not None and user.greenpass:
return reserve(username, content, rs)
else:
return {'status': 'error',
'errMsg': 'Impossibile prenotarsi in mancanza di Green Pass.'}, 500
else:
return reserve(username, content, rs)
else:
return {
'errMsgTitle': 'Attenzione',
'errMsg': 'Utente non autorizzato per questo servizio.'
}, 500
else:
return {'errMsg': 'Errore username/pass!'}, g.status
# ------------- RESERVATIONS -------------
prenotazione = ns.model("reservation", {
"id_corso": fields.String(description="", required=True),
"id_lezione": fields.String(description="", required=True),
"matricola": fields.String(description="", required=True),
"matId": fields.String(description="", required=True)
})
class Reservation(Resource):
@ns.doc(security='Basic Auth')
@token_required
@ns.expect(prenotazione)
def post(self):
"""Set Reservation"""
base64_bytes = g.token.encode('utf-8')
message_bytes = base64.b64decode(base64_bytes)
token_string = message_bytes.decode('utf-8')
username = token_string.split(':')[0]
content = request.json
if 'id_corso' in content and 'id_lezione' in content and 'matricola' in content and 'matId' in content:
result = MyExams(Resource).get(content['matId'])
status = json.loads(json.dumps(result))[1]
_result = json.loads(json.dumps(result))[0]
codici = []
if status == 200:
for i in range(len(_result)):
if _result[i]['status']['esito'] == 'P' or _result[i]['status']['esito'] == 'F':
codici.append(_result[i]['codice'])
codici_res = []
res_room = ReservableRoom.query.all()
for rr in res_room:
codici_res.append(rr.id_corso)
try:
user_info = UserTemp.query.filter_by(username=username).first()
if content['id_corso'] in codici or user_info is not None:
user = UserAccess.query.filter_by(username=username).first()
if user is not None:
if user.autocertification and user.classroom == "presence":
con = sqlalchemy.create_engine(Config.GA_DATABASE, echo=False)
rs = con.execute(
"SELECT * FROM `mrbs_entry` E JOIN `mrbs_room` R WHERE E.id = '" + content[
'id_lezione'] + "' AND E.room_id = R.id")
result = rs.fetchall()
capacity = int(result[0][41]) / 2
now = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(
days=1)
if datetime.fromtimestamp(result[0][1]) > now or datetime.fromtimestamp(
result[0][2]) > now or datetime.fromtimestamp(result[0][2]) < datetime.now():
return {
'errMsgTitle': 'Attenzione',
'errMsg': 'Prenotazione non consentita.'
}, 500
start = datetime.now().date()
end = start + timedelta(days=1)
today_reservations = Reservations.query.filter_by(username=username).filter(
Reservations.start_time >= start).filter(
Reservations.end_time <= end).all()
for res in today_reservations:
if res.start_time <= datetime.fromtimestamp(
result[0][1]) < res.end_time or res.start_time < datetime.fromtimestamp(
result[0][2]) <= res.end_time:
return {
'errMsgTitle': 'Attenzione',
'errMsg': 'Già presente una prenotazione in questo lasso di tempo.'
}, 500
r = Reservations(id_corso=content['id_corso'], course_name=result[0][9],
start_time=datetime.fromtimestamp(result[0][1]),
end_time=datetime.fromtimestamp(result[0][2]),
username=username, matricola=content['matricola'],
time=datetime.now(), id_lezione=content['id_lezione'],
reserved_by=username)
db.session.add(r)
count = Reservations.query.with_for_update().filter_by(
id_lezione=content['id_lezione']).count()
if count > capacity:
db.session.rollback()
return {
'errMsgTitle': 'Attenzione',
'errMsg': 'Raggiunta la capacità massima consentita.'
}, 500
db.session.commit()
return {
"status": "Prenotazione effettuata con successo."
}, 200
else:
return {'status': 'error',
'errMsg': 'Impossibile prenotarsi in mancanza di autocertificazione/accesso in presenza.'}, 500
else:
return {'status': 'error',
'errMsg': 'Impossibile prenotarsi in mancanza di autocertificazione/accesso in presenza.'}, 500
else:
return {
'errMsgTitle': 'Attenzione',
'errMsg': 'Non è possibile prenotarsi ad una lezione non presente nel proprio piano di studi/già superata.'
}, 500
except exc.IntegrityError:
db.session.rollback()
return {
'errMsgTitle': 'Attenzione',
'errMsg': 'Prenotazione già effettuata per questa lezione.'
}, 500
except:
db.session.rollback()
print("Unexpected error:")
print("Title: " + sys.exc_info()[0].__name__)
print("Description: " + traceback.format_exc())
return {
'errMsgTitle': sys.exc_info()[0].__name__,
'errMsg': traceback.format_exc()
}, 500
else:
return {'errMsg': _result['errMsg']}, status
else:
return {'errMsg': 'Payload error!'}, 500
@ns.doc(security='Basic Auth')
@token_required_general
def get(self):
"""Get Reservations"""
base64_bytes = g.token.encode('utf-8')
message_bytes = base64.b64decode(base64_bytes)
token_string = message_bytes.decode('utf-8')
username = token_string.split(':')[0]
if g.status == 200:
if g.response['user']['grpId'] == 6:
try:
start = datetime.now().date()
end = start + timedelta(days=1)
reservations = Reservations.query.filter_by(username=username).filter(
Reservations.start_time >= datetime.fromtimestamp(start)).all()
array = []
for r in reservations:
array.append({
"id": r.id,
"id_corso": r.id_corso,
"course_name": r.course_name,
"start_time": str(r.start_time),
"end_time": str(r.end_time),
'reserved_by': r.reserved_by
})
return array, 200
except:
db.session.rollback()
print("Unexpected error:")
print("Title: " + sys.exc_info()[0].__name__)
print("Description: " + traceback.format_exc())
return {
'errMsgTitle': sys.exc_info()[0].__name__,
'errMsg': traceback.format_exc()
}, 500
else:
return {
'errMsgTitle': "Attenzione",
'errMsg': "Il tipo di user non è di tipo Studente"
}, 500
else:
return {'errMsg': 'Wrong username/pass'}, g.status
@ns.doc(security='Basic Auth')
@token_required_general
def delete(self, id_prenotazione):
"""Delete Reservation"""
if g.status == 200:
base64_bytes = g.token.encode('utf-8')
message_bytes = base64.b64decode(base64_bytes)
token_string = message_bytes.decode('utf-8')
username = token_string.split(':')[0]
if g.response['user']['grpId'] == 7:
if request.args.get('aaId') != None:
result = getCourses(Resource).get(request.args.get('aaId'))
status = json.loads(json.dumps(result))[1]
_result = json.loads(json.dumps(result))[0]
if status == 200:
codici = []
for i in range(len(_result)):
codici.append(_result[i]['adDefAppCod'])
reservation = Reservations.query.filter_by(id=id_prenotazione)
if reservation.first().id_corso in codici:
reservation.delete()
db.session.commit()
return {
"status": "Cancellazione effettuata con successo."
}, 200
else:
return {
'errMsgTitle': "Attenzione",
'errMsg': "Operazione non consentita."
}, 500
else:
return {
'errMsgTitle': "Attenzione",
'errMsg': "Anno di corso non valido!"
}, 500
else:
try:
reservation = Reservations.query.filter_by(id=id_prenotazione).filter_by(
username=username)
if reservation.first() is not None:
reservation.delete()
db.session.commit()
return {
"status": "Cancellazione effettuata con successo."
}, 200
else:
return {
'errMsgTitle': "Attenzione",
'errMsg': "Operazione non consentita."
}, 500
except AttributeError as error:
return {
'errMsgTitle': "Attenzione",
'errMsg': "Operazione non consentita."
}, 500
except:
db.session.rollback()
print("Unexpected error:")
print("Title: " + sys.exc_info()[0].__name__)
print("Description: " + traceback.format_exc())
return {
'errMsgTitle': sys.exc_info()[0].__name__,
'errMsg': traceback.format_exc()
}, 500
else:
try:
reservation = Reservations.query.filter_by(id=id_prenotazione).filter_by(
username=username)
if reservation.first() is not None:
reservation.delete()
db.session.commit()
return {
"status": "Cancellazione effettuata con successo."
}, 200
else:
return {
'errMsgTitle': "Attenzione",
'errMsg': "Operazione non consentita."
}, 500
except AttributeError as error:
return {
'errMsgTitle': "Attenzione",
'errMsg': "Operazione non consentita."
}, 500
except:
db.session.rollback()
print("Unexpected error:")
print("Title: " + sys.exc_info()[0].__name__)
print("Description: " + traceback.format_exc())
return {
'errMsgTitle': sys.exc_info()[0].__name__,
'errMsg': traceback.format_exc()
}, 500
else:
return {'errMsg': 'Wrong username/pass'}, g.status
# ------------- RESERVE STUDENT BY PROF -------------
prenotazione_prof = ns.model("reservation_prof", {
"id_lezione": fields.String(description="", required=True),
"matricola": fields.String(description="", required=True),
"username": fields.String(description="", required=True),
"aaId": fields.String(description="", required=True)
})
class ReservationByProf(Resource):
    @ns.doc(security='Basic Auth')
    @token_required
    @ns.expect(prenotazione_prof)
    def post(self):
        """Set Reservation to student

        A professor books a seat at a lesson for a student, subject to the
        half-capacity room limit. Returns 200 on success, 500 with an
        Italian error payload otherwise.
        """
        # The token is the Basic-Auth "username:password" pair, base64-encoded;
        # recover the professor's username for the `reserved_by` field.
        base64_bytes = g.token.encode('utf-8')
        message_bytes = base64.b64decode(base64_bytes)
        token_string = message_bytes.decode('utf-8')
        username = token_string.split(':')[0]
        content = request.json
        print(content)
        if 'id_lezione' in content and 'matricola' in content and 'username' in content and 'aaId' in content:
            # Courses available for the given academic year; the lesson's course
            # code must be among them for the reservation to be allowed.
            result = getCourses(Resource).get(content['aaId'])
            status = json.loads(json.dumps(result))[1]
            _result = json.loads(json.dumps(result))[0]
            if status == 200:
                try:
                    codici = [course['adDefAppCod'] for course in _result]
                    con = sqlalchemy.create_engine(Config.GA_DATABASE, echo=False)
                    # Parameterized query: `id_lezione` comes straight from the
                    # request payload and must never be concatenated into SQL
                    # (the previous version was injectable).
                    rs = con.execute(
                        "SELECT * FROM `mrbs_entry` E JOIN mrbs_room R WHERE E.id = %s AND E.room_id = R.id",
                        (str(content['id_lezione']),)).fetchall()
                    if len(rs) != 0:
                        # Read the row only AFTER confirming it exists: the old code
                        # computed `capacity` first and crashed with IndexError on an
                        # unknown lesson id instead of returning "ID lezione errato".
                        capacity = int(rs[0][41]) / 2  # rooms run at half capacity
                        if rs[0][32] in codici:
                            r = Reservations(id_corso=rs[0][32], course_name=rs[0][9],
                                             start_time=datetime.fromtimestamp(rs[0][1]),
                                             end_time=datetime.fromtimestamp(rs[0][2]),
                                             username=content['username'], matricola=content['matricola'],
                                             time=datetime.now(), id_lezione=content['id_lezione'],
                                             reserved_by=username)
                            db.session.add(r)
                            # Row-locked count so concurrent bookings cannot overshoot
                            # the capacity limit between the check and the commit.
                            count = Reservations.query.with_for_update().filter_by(
                                id_lezione=content['id_lezione']).count()
                            if count > capacity:
                                db.session.rollback()
                                return {
                                    'errMsgTitle': 'Attenzione',
                                    'errMsg': 'Raggiunta la capacità massima consentita.'
                                }, 500
                            db.session.commit()
                            return {
                                "status": "Prenotazione effettuata con successo."
                            }, 200
                        else:
                            return {
                                'errMsgTitle': "Attenzione",
                                'errMsg': "Operazione non consentita!"
                            }, 500
                    else:
                        return {
                            'errMsgTitle': "Attenzione",
                            'errMsg': "ID lezione errato"
                        }, 500
                except exc.IntegrityError:
                    # Unique constraint: the student already holds a seat for this lesson.
                    db.session.rollback()
                    return {
                        'errMsgTitle': 'Attenzione',
                        'errMsg': 'Prenotazione già effettuata per questa lezione.'
                    }, 500
                except:
                    db.session.rollback()
                    print("Unexpected error:")
                    print("Title: " + sys.exc_info()[0].__name__)
                    print("Description: " + traceback.format_exc())
                    return {
                        'errMsgTitle': sys.exc_info()[0].__name__,
                        'errMsg': traceback.format_exc()
                    }, 500
            else:
                return {
                    'errMsgTitle': "Attenzione",
                    'errMsg': "Errore nel caricamento degli esami!!"
                }, 500
        else:
            return {
                'errMsgTitle': "Attenzione",
                'errMsg': "Errore Payload/Studente non immatricolato!"
            }, 500
# ------------- GET STUDENTS LIST -------------
parser = api.parser()
# 'id_lezione' selects the lesson whose reservation list is being requested.
parser.add_argument('id_lezione', required=True, help='')
@ns.doc(parser=parser)
class getStudentsList(Resource):
    @ns.doc(security='Basic Auth')
    @token_required_general
    def get(self, id_lezione):
        """Get Students Lists"""
        # Guard clauses: authentication first, then the role check.
        if g.status != 200:
            return {'errMsg': 'Wrong username/pass'}, g.status
        if g.response['user']['grpId'] != 7:
            return {
                'errMsgTitle': "Attenzione",
                'errMsg': "Il tipo di user non è di tipo Studente"
            }, 500
        # TODO: verify that the lesson actually belongs to this professor
        try:
            reservations = Reservations.query.filter_by(id_lezione=id_lezione).all()
            return [
                {
                    'id': booking.id,
                    'matricola': booking.matricola,
                    'username': booking.username
                }
                for booking in reservations
            ], 200
        except:
            db.session.rollback()
            print("Unexpected error:")
            print("Title: " + sys.exc_info()[0].__name__)
            print("Description: " + traceback.format_exc())
            return {
                'errMsgTitle': sys.exc_info()[0].__name__,
                'errMsg': traceback.format_exc()
            }, 500
# ------------- GET EVENTS -------------
class getEvents(Resource):
    def get(self):
        """Get Events

        Returns all bookable MRBS entries of the allowed types starting
        today or later, each joined with its room's name, capacity and
        remaining availability.
        """
        try:
            # Midnight of the current day as a Unix timestamp (lower bound).
            start = datetime(datetime.now().year, datetime.now().month, datetime.now().day, 0, 0).timestamp()
            con = sqlalchemy.create_engine(Config.GA_DATABASE, echo=False)
            # Raw MRBS query; `start` is computed locally, not user input.
            rs = con.execute(
                "SELECT * FROM `mrbs_entry` E JOIN mrbs_room R WHERE (E.type = 't' COLLATE utf8mb4_bin OR E.type = 's' COLLATE utf8mb4_bin OR E.type = 'b' COLLATE utf8mb4_bin OR E.type = 'a' COLLATE utf8mb4_bin OR E.type = 'z' COLLATE utf8mb4_bin OR E.type = 'Y' COLLATE utf8mb4_bin OR E.type = 'O' COLLATE utf8mb4_bin) AND start_time >= '" + str(
                    start) + "' AND E.room_id = R.id")
            array = []
            for row in rs:
                array.append({
                    'id': row[0],
                    'start': str(datetime.fromtimestamp(row[1])),
                    'end': str(datetime.fromtimestamp(row[2])),
                    'room': {
                        'name': row[38],
                        # NOTE(review): capacity is halved everywhere in this file —
                        # presumably a distancing rule; confirm against the MRBS schema.
                        'capacity': int(row[41]) / 2,
                        'description': row[40],
                        # Free seats = halved capacity minus current reservations.
                        'availability': int(row[41]) / 2 - Reservations.query.with_for_update().filter_by(
                            id_lezione=row[0]).count()
                    },
                    'course_name': row[9],
                    'description': row[11],
                    'type': row[10]
                })
            return array, 200
        except:
            print("Unexpected error:")
            print("Title: " + sys.exc_info()[0].__name__)
            print("Description: " + traceback.format_exc())
            return {
                'errMsgTitle': sys.exc_info()[0].__name__,
                'errMsg': traceback.format_exc()
            }, 500
| 41.900771
| 348
| 0.437036
| 3,770
| 43,493
| 4.91008
| 0.088594
| 0.016531
| 0.035006
| 0.042785
| 0.791367
| 0.773054
| 0.734158
| 0.703258
| 0.693209
| 0.663767
| 0
| 0.021348
| 0.448578
| 43,493
| 1,037
| 349
| 41.941176
| 0.75049
| 0.021176
| 0
| 0.746667
| 0
| 0.002424
| 0.130706
| 0.001224
| 0
| 0
| 0
| 0.000964
| 0
| 1
| 0.01697
| false
| 0.008485
| 0.020606
| 0
| 0.124848
| 0.035152
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
16470b06d1f9df8217df9ec2f207d188d307efce
| 1,228
|
py
|
Python
|
DaNN/data_loader.py
|
sethGu/-
|
baf06b86bf450ae8d7dde8827d76502df3b537f7
|
[
"MIT"
] | 1
|
2022-01-17T01:57:44.000Z
|
2022-01-17T01:57:44.000Z
|
DaNN/data_loader.py
|
sethGu/-
|
baf06b86bf450ae8d7dde8827d76502df3b537f7
|
[
"MIT"
] | null | null | null |
DaNN/data_loader.py
|
sethGu/-
|
baf06b86bf450ae8d7dde8827d76502df3b537f7
|
[
"MIT"
] | null | null | null |
import torchvision
import torch
from torchvision import datasets, transforms
def load_data(root_dir, domain, batch_size):
    """Return a shuffled training DataLoader over `root_dir + domain`.

    Images are converted to grayscale, resized to 28x28 and normalized;
    incomplete final batches are dropped.
    """
    preprocessing = transforms.Compose(
        [
            transforms.Grayscale(),
            transforms.Resize([28, 28]),
            transforms.ToTensor(),
            transforms.Normalize(mean=(0,), std=(1,)),
        ]
    )
    dataset = datasets.ImageFolder(root=root_dir + domain, transform=preprocessing)
    return torch.utils.data.DataLoader(
        dataset=dataset,
        batch_size=batch_size,
        shuffle=True,
        num_workers=2,
        drop_last=True,
    )
def load_test(root_dir, domain, batch_size):
    """Return an unshuffled evaluation DataLoader over `root_dir + domain`.

    Same grayscale/28x28/normalize preprocessing as `load_data`, but the
    order is preserved and the final partial batch is kept.
    """
    preprocessing = transforms.Compose(
        [
            transforms.Grayscale(),
            transforms.Resize([28, 28]),
            transforms.ToTensor(),
            transforms.Normalize(mean=(0,), std=(1,)),
        ]
    )
    dataset = datasets.ImageFolder(root=root_dir + domain, transform=preprocessing)
    return torch.utils.data.DataLoader(
        dataset=dataset,
        batch_size=batch_size,
        shuffle=False,
        num_workers=2,
    )
| 31.487179
| 119
| 0.600163
| 124
| 1,228
| 5.758065
| 0.33871
| 0.07563
| 0.072829
| 0.05042
| 0.770308
| 0.770308
| 0.770308
| 0.770308
| 0.770308
| 0.770308
| 0
| 0.016279
| 0.299674
| 1,228
| 38
| 120
| 32.315789
| 0.813953
| 0
| 0
| 0.529412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0.088235
| 0
| 0.205882
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
16a41a3dbdd33170259ab32bc620be877527f690
| 18
|
py
|
Python
|
polite/_build.py
|
H0R5E/polite
|
bbbbf4fffbade585cb8b7114c4a8821e822c42e0
|
[
"MIT"
] | 1
|
2020-05-03T21:10:33.000Z
|
2020-05-03T21:10:33.000Z
|
polite/_build.py
|
H0R5E/polite
|
bbbbf4fffbade585cb8b7114c4a8821e822c42e0
|
[
"MIT"
] | 6
|
2017-06-12T13:50:09.000Z
|
2019-07-02T10:24:45.000Z
|
polite/_build.py
|
H0R5E/polite
|
bbbbf4fffbade585cb8b7114c4a8821e822c42e0
|
[
"MIT"
] | 2
|
2017-06-12T12:40:03.000Z
|
2018-11-17T12:47:48.000Z
|
# Package build identifier ("<version>-<build number>").
BUILD = "0.10.1-0"
| 18
| 18
| 0.555556
| 5
| 18
| 2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.3125
| 0.111111
| 18
| 1
| 18
| 18
| 0.3125
| 0
| 0
| 0
| 0
| 0
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
16cc3a851479318bdae810f123dad8c3d597e0e7
| 122
|
py
|
Python
|
learning_logs/admin.py
|
sam0jones0/learning_log_django
|
f6b9268c9ba25a6b2a7a9d9b62b2c6c48d7abdc8
|
[
"MIT"
] | null | null | null |
learning_logs/admin.py
|
sam0jones0/learning_log_django
|
f6b9268c9ba25a6b2a7a9d9b62b2c6c48d7abdc8
|
[
"MIT"
] | null | null | null |
learning_logs/admin.py
|
sam0jones0/learning_log_django
|
f6b9268c9ba25a6b2a7a9d9b62b2c6c48d7abdc8
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Topic, Entry
# Expose both models in the Django admin interface.
admin.site.register(Topic)
admin.site.register(Entry)
| 17.428571
| 32
| 0.803279
| 18
| 122
| 5.444444
| 0.555556
| 0.183673
| 0.346939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106557
| 122
| 6
| 33
| 20.333333
| 0.899083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
bc40368ea5dd43430c8d879754897c34d3dc87ec
| 196
|
py
|
Python
|
moabb/pipelines/__init__.py
|
bibliolytic/moabb
|
46799bfd7957b1da3e7a0534286c1973af9c95d9
|
[
"BSD-3-Clause"
] | 1
|
2018-11-17T03:03:09.000Z
|
2018-11-17T03:03:09.000Z
|
moabb/pipelines/__init__.py
|
bibliolytic/moabb
|
46799bfd7957b1da3e7a0534286c1973af9c95d9
|
[
"BSD-3-Clause"
] | null | null | null |
moabb/pipelines/__init__.py
|
bibliolytic/moabb
|
46799bfd7957b1da3e7a0534286c1973af9c95d9
|
[
"BSD-3-Clause"
] | 1
|
2021-12-23T23:09:49.000Z
|
2021-12-23T23:09:49.000Z
|
"""
Pipeline defines all steps required by an algorithm to obtain predictions.
Pipelines are typically a chain of sklearn compatible transformers and end
with an sklearn compatible estimator.
"""
| 32.666667
| 74
| 0.811224
| 27
| 196
| 5.888889
| 0.888889
| 0.213836
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147959
| 196
| 5
| 75
| 39.2
| 0.952096
| 0.954082
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bc742a3988389eb3d6c1399116eaa21f8e254876
| 4,050
|
py
|
Python
|
cmd_make.py
|
bokveizen/baselines
|
79f5d18e2358567c72f422c5a23df78f9f64a55d
|
[
"MIT"
] | null | null | null |
cmd_make.py
|
bokveizen/baselines
|
79f5d18e2358567c72f422c5a23df78f9f64a55d
|
[
"MIT"
] | null | null | null |
cmd_make.py
|
bokveizen/baselines
|
79f5d18e2358567c72f422c5a23df78f9f64a55d
|
[
"MIT"
] | null | null | null |
# Emit shell command lines for a grid of baselines.run experiments,
# round-robining them over the available GPUs.
game_name = input("The name of the game:")
num_timesteps = input("The num of timesteps:")
total_gpu_num = 3
gpu_use_index = 0
# Run 1: plain DQN baseline (no prioritized replay, no DPSR).
print('conda activate openaivezen; cd baselines; ' +
      'CUDA_VISIBLE_DEVICES={} '.format(gpu_use_index) +
      'python -m baselines.run --alg=deepq ' +
      '--env={}NoFrameskip-v4 --num_timesteps={} '.format(game_name, num_timesteps) +
      '--save_path=models/{}_{}/{}{} '.format(game_name, num_timesteps, 'baseline', '') +
      '--log_path=logs/{}_{}/{}{} '.format(game_name, num_timesteps, 'baseline', '') +
      '--print_freq=1 ' +
      '--dpsr_replay=False ' +
      '--prioritized_replay=False ' +
      '--state_recycle_freq=10000 ' +
      '--dpsr_replay_candidates_size=32')
gpu_use_index = (gpu_use_index + 1) % total_gpu_num
# Run 2: prioritized replay only.
print('conda activate openaivezen; cd baselines; ' +
      'CUDA_VISIBLE_DEVICES={} '.format(gpu_use_index) +
      'python -m baselines.run --alg=deepq ' +
      '--env={}NoFrameskip-v4 --num_timesteps={} '.format(game_name, num_timesteps) +
      '--save_path=models/{}_{}/{}{} '.format(game_name, num_timesteps, 'prio', '') +
      '--log_path=logs/{}_{}/{}{} '.format(game_name, num_timesteps, 'prio', '') +
      '--print_freq=1 ' +
      '--dpsr_replay=False ' +
      '--prioritized_replay=True ' +
      '--state_recycle_freq=10000 ' +
      '--dpsr_replay_candidates_size=32')
gpu_use_index = (gpu_use_index + 1) % total_gpu_num
# Run 3: DPSR replay with state recycling disabled ("dpsr0").
print('conda activate openaivezen; cd baselines; ' +
      'CUDA_VISIBLE_DEVICES={} '.format(gpu_use_index) +
      'python -m baselines.run --alg=deepq ' +
      '--env={}NoFrameskip-v4 --num_timesteps={} '.format(game_name, num_timesteps) +
      '--save_path=models/{}_{}/{} '.format(game_name, num_timesteps, 'dpsr0') +
      '--log_path=logs/{}_{}/{} '.format(game_name, num_timesteps, 'dpsr0') +
      '--print_freq=1 ' +
      '--dpsr_replay=True ' +
      '--prioritized_replay=False ' +
      '--state_recycle_freq=0 ' +
      '--dpsr_replay_candidates_size=32')
gpu_use_index = (gpu_use_index + 1) % total_gpu_num
# Grid sweep over recycle frequency x candidate size x max-priority flag.
recycle_freq_list = [500, 1000, 1500, 2000, 2500]
cand_size_list = [8, 16, 32, 64, 128]
state_recycle_max_priority_set_list = [True, False]
# 5 * 5 * 2 = 50
for recycle_freq in recycle_freq_list:
    for cand_size in cand_size_list:
        for state_recycle_max_priority_set in state_recycle_max_priority_set_list:
            # NOTE(review): these runs are labeled dpsr{freq}... but pass
            # --dpsr_replay=False (run 3 above passes True) — looks
            # inconsistent; confirm against baselines.run's flag semantics.
            print(
                # 'conda activate openaivezen; cd baselines; ' +
                'CUDA_VISIBLE_DEVICES={} '.format(gpu_use_index) +
                'python -m baselines.run --alg=deepq ' +
                '--env={}NoFrameskip-v4 --num_timesteps={} '.format(game_name, num_timesteps) +
                '--save_path=models/{}_{}/dpsr{}_{}cand_MAX_prio_set_{} '.format(game_name,
                                                                                 num_timesteps,
                                                                                 recycle_freq,
                                                                                 cand_size,
                                                                                 state_recycle_max_priority_set) +
                '--log_path=logs/{}_{}/dpsr{}_{}cand_MAX_prio_set_{} '.format(game_name,
                                                                              num_timesteps,
                                                                              recycle_freq,
                                                                              cand_size,
                                                                              state_recycle_max_priority_set) +
                '--print_freq=1 ' +
                '--dpsr_replay=False ' +
                '--prioritized_replay=True ' +
                '--state_recycle_freq={} '.format(recycle_freq) +
                '--dpsr_replay_candidates_size={} '.format(cand_size) +
                '--dpsr_state_recycle_max_priority_set={}'.format(state_recycle_max_priority_set))
            gpu_use_index = (gpu_use_index + 1) % total_gpu_num
| 54
| 114
| 0.534815
| 420
| 4,050
| 4.730952
| 0.169048
| 0.102667
| 0.071968
| 0.102667
| 0.849522
| 0.800201
| 0.731253
| 0.731253
| 0.654253
| 0.654253
| 0
| 0.024569
| 0.326667
| 4,050
| 74
| 115
| 54.72973
| 0.70407
| 0.015062
| 0
| 0.641791
| 0
| 0
| 0.350978
| 0.203964
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.119403
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bc99732726c63705a87941e41ecf0e499f5ebe40
| 460
|
py
|
Python
|
my_notes/ex127/sieve_module.py
|
ivalderrama/packt_python_workshop
|
48ba82eb9d16c59565a392d515b2cf1c9ec95058
|
[
"MIT"
] | null | null | null |
my_notes/ex127/sieve_module.py
|
ivalderrama/packt_python_workshop
|
48ba82eb9d16c59565a392d515b2cf1c9ec95058
|
[
"MIT"
] | null | null | null |
my_notes/ex127/sieve_module.py
|
ivalderrama/packt_python_workshop
|
48ba82eb9d16c59565a392d515b2cf1c9ec95058
|
[
"MIT"
] | null | null | null |
class PrimesBelow:
    """Iterator yielding every prime strictly below `bound`, in ascending order.

    Works by incremental sieving: the smallest remaining candidate is always
    prime; yielding it removes all of its multiples (itself included).
    """
    def __init__(self, bound):
        # All integers in [2, bound) start out as prime candidates.
        self.candidate_numbers = list(range(2, bound))
    def __iter__(self):
        return self
    def __next__(self):
        if not self.candidate_numbers:
            raise StopIteration
        prime = self.candidate_numbers[0]
        # Keep only values not divisible by the prime just found; this also
        # drops the prime itself, so the next head is the next prime.
        remaining = []
        for value in self.candidate_numbers:
            if value % prime:
                remaining.append(value)
        self.candidate_numbers = remaining
        return prime
| 27.058824
| 91
| 0.582609
| 53
| 460
| 4.679245
| 0.433962
| 0.262097
| 0.403226
| 0.169355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013158
| 0.33913
| 460
| 17
| 92
| 27.058824
| 0.802632
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0
| 0
| 0.090909
| 0.545455
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
bc9ed42df4991746e01103bc42be1244f0fd3d7d
| 80
|
py
|
Python
|
py/torch_tensorrt/_enums.py
|
svenchilton/Torch-TensorRT
|
95d2b6e003a0392fe08ce49520f015c230d4c750
|
[
"BSD-3-Clause"
] | 430
|
2021-11-09T08:08:01.000Z
|
2022-03-31T10:13:45.000Z
|
py/torch_tensorrt/_enums.py
|
svenchilton/Torch-TensorRT
|
95d2b6e003a0392fe08ce49520f015c230d4c750
|
[
"BSD-3-Clause"
] | 257
|
2021-11-09T07:17:03.000Z
|
2022-03-31T20:29:31.000Z
|
py/torch_tensorrt/_enums.py
|
svenchilton/Torch-TensorRT
|
95d2b6e003a0392fe08ce49520f015c230d4c750
|
[
"BSD-3-Clause"
] | 68
|
2021-11-10T05:03:22.000Z
|
2022-03-22T17:07:32.000Z
|
from torch_tensorrt._C import dtype, DeviceType, EngineCapability, TensorFormat
| 40
| 79
| 0.8625
| 9
| 80
| 7.444444
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0875
| 80
| 1
| 80
| 80
| 0.917808
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bca22dcf1eb9dc3a86fdf28689e0121a1169fc33
| 15,833
|
py
|
Python
|
hvplot/tests/testcharts.py
|
toddrjen/hvplot
|
cccc1cebd496f542404cf89798970e655dcb5754
|
[
"BSD-3-Clause"
] | null | null | null |
hvplot/tests/testcharts.py
|
toddrjen/hvplot
|
cccc1cebd496f542404cf89798970e655dcb5754
|
[
"BSD-3-Clause"
] | null | null | null |
hvplot/tests/testcharts.py
|
toddrjen/hvplot
|
cccc1cebd496f542404cf89798970e655dcb5754
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
from unittest import SkipTest, expectedFailure
from parameterized import parameterized
from holoviews import NdOverlay, Store
from holoviews.element import Curve, Area, Scatter, Points, Path, HeatMap
from holoviews.element.comparison import ComparisonTestCase
from ..util import is_dask
class TestChart2D(ComparisonTestCase):
    """Tests for two-dimensional hvplot kinds (points/paths) and heatmaps on pandas frames."""
    def setUp(self):
        # Skip the whole case when pandas is absent; importing hvplot.pandas
        # registers the .hvplot accessor as a side effect.
        try:
            import pandas as pd
        except:
            raise SkipTest('Pandas not available')
        import hvplot.pandas # noqa
        self.df = pd.DataFrame([[1, 2], [3, 4], [5, 6]], columns=['x', 'y'])
        self.cat_df = pd.DataFrame([[1, 2, 'A'], [3, 4, 'B'], [5, 6, 'C']],
                                   columns=['x', 'y', 'category'])
        self.time_df = pd.DataFrame({
            'time': pd.date_range('1/1/2000', periods=5*24, freq='1H', tz='UTC'),
            'temp': np.sin(np.linspace(0, 5*2*np.pi, 5*24)).cumsum()})
    @parameterized.expand([('points', Points), ('paths', Path)])
    def test_2d_defaults(self, kind, element):
        plot = self.df.hvplot(kind=kind)
        self.assertEqual(plot, element(self.df, ['x', 'y']))
    @parameterized.expand([('points', Points), ('paths', Path)])
    def test_2d_chart(self, kind, element):
        plot = self.df.hvplot(x='x', y='y', kind=kind)
        self.assertEqual(plot, element(self.df, ['x', 'y']))
    @parameterized.expand([('points', Points), ('paths', Path)])
    def test_2d_index_and_c(self, kind, element):
        plot = self.df.hvplot(x='index', y='y', c='x', kind=kind)
        self.assertEqual(plot, element(self.df, ['index', 'y'], ['x']))
    @parameterized.expand([('points', Points), ('paths', Path)])
    def test_2d_set_hover_cols_to_list(self, kind, element):
        plot = self.cat_df.hvplot(x='x', y='y', hover_cols=['category'], kind=kind)
        self.assertEqual(plot, element(self.cat_df, ['x', 'y'], ['category']))
    @parameterized.expand([('points', Points), ('paths', Path)])
    def test_2d_set_hover_cols_including_index(self, kind, element):
        plot = self.cat_df.hvplot(x='x', y='y', hover_cols=['index'], kind=kind)
        # Paths wrap their data in a list; unwrap before inspecting columns.
        data = plot.data[0] if kind == 'paths' else plot.data
        assert 'index' in data.columns
        self.assertEqual(plot, element(self.cat_df.reset_index(), ['x', 'y'], ['index']))
    @parameterized.expand([('points', Points), ('paths', Path)])
    def test_2d_set_hover_cols_to_all(self, kind, element):
        plot = self.cat_df.hvplot(x='x', y='y', hover_cols='all', kind=kind)
        data = plot.data[0] if kind == 'paths' else plot.data
        assert 'index' in data.columns
        self.assertEqual(plot, element(self.cat_df.reset_index(), ['x', 'y'], ['index', 'category']))
    @parameterized.expand([('points', Points), ('paths', Path)])
    def test_2d_set_hover_cols_to_all_with_use_index_as_false(self, kind, element):
        plot = self.cat_df.hvplot(x='x', y='y', hover_cols='all', use_index=False, kind=kind)
        self.assertEqual(plot, element(self.cat_df, ['x', 'y'], ['category']))
    def test_heatmap_2d_index_columns(self):
        plot = self.df.hvplot.heatmap()
        self.assertEqual(plot, HeatMap((['x', 'y'], [0, 1, 2], self.df.values),
                                       ['columns', 'index'], 'value'))
    def test_heatmap_2d_derived_x_and_y(self):
        plot = self.time_df.hvplot.heatmap(x='time.hour', y='time.day', C='temp')
        assert plot.kdims == ['time.hour', 'time.day']
        assert plot.vdims == ['temp']
class TestChart2DDask(TestChart2D):
    """Re-runs TestChart2D with the frames converted to dask DataFrames."""
    def setUp(self):
        super(TestChart2DDask, self).setUp()
        try:
            import dask.dataframe as dd
        except:
            raise SkipTest('Dask not available')
        import hvplot.dask # noqa
        self.df = dd.from_pandas(self.df, npartitions=2)
        self.cat_df = dd.from_pandas(self.cat_df, npartitions=3)
    @expectedFailure
    def test_heatmap_2d_index_columns(self):
        # Known not to work on dask input yet.
        self.df.hvplot.heatmap()
class TestChart1D(ComparisonTestCase):
    """Tests for one-dimensional hvplot kinds (line/area/scatter/hist) on pandas frames,
    covering wide vs tidy data, by/groupby splits, hover columns, datetime accessors
    and sort_date behavior."""
    def setUp(self):
        # Skip the whole case when pandas is absent; importing hvplot.pandas
        # registers the .hvplot accessor as a side effect.
        try:
            import pandas as pd
        except:
            raise SkipTest('Pandas not available')
        import hvplot.pandas # noqa
        self.df = pd.DataFrame([[1, 2], [3, 4], [5, 6]], columns=['x', 'y'])
        self.dt_df = pd.DataFrame(np.random.rand(90), index=pd.date_range('2019-01-01', '2019-03-31'))
        self.cat_df = pd.DataFrame([[1, 2, 'A'], [3, 4, 'B'], [5, 6, 'C']],
                                   columns=['x', 'y', 'category'])
        self.cat_only_df = pd.DataFrame([['A', 'a'], ['B', 'b'], ['C', 'c']],
                                        columns=['upper', 'lower'])
        self.time_df = pd.DataFrame({
            'time': pd.date_range('1/1/2000', periods=10, tz='UTC'),
            'A': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
            'B': list('abcdefghij')})
    @parameterized.expand([('line', Curve), ('area', Area), ('scatter', Scatter)])
    def test_wide_chart(self, kind, element):
        plot = self.df.hvplot(kind=kind)
        obj = NdOverlay({'x': element(self.df, 'index', 'x').redim(x='value'),
                         'y': element(self.df, 'index', 'y').redim(y='value')}, 'Variable')
        self.assertEqual(plot, obj)
    def test_by_datetime_accessor(self):
        plot = self.dt_df.hvplot.line('index.dt.day', '0', by='index.dt.month')
        obj = NdOverlay({m: Curve((g.index.day, g[0]), 'index.dt.day', '0')
                         for m, g in self.dt_df.groupby(self.dt_df.index.month)}, 'index.dt.month')
        self.assertEqual(plot, obj)
    @parameterized.expand([('line', Curve), ('area', Area), ('scatter', Scatter)])
    def test_wide_chart_labels(self, kind, element):
        plot = self.df.hvplot(kind=kind, value_label='Test', group_label='Category')
        obj = NdOverlay({'x': element(self.df, 'index', 'x').redim(x='Test'),
                         'y': element(self.df, 'index', 'y').redim(y='Test')}, 'Category')
        self.assertEqual(plot, obj)
    @parameterized.expand([('line', Curve), ('area', Area), ('scatter', Scatter)])
    def test_wide_chart_legend_position(self, kind, element):
        plot = self.df.hvplot(kind=kind, value_label='Test', group_label='Category', legend='left')
        opts = Store.lookup_options('bokeh', plot, 'plot')
        self.assertEqual(opts.kwargs['legend_position'], 'left')
    @parameterized.expand([('line', Curve), ('area', Area), ('scatter', Scatter)])
    def test_tidy_chart(self, kind, element):
        plot = self.df.hvplot(x='x', y='y', kind=kind)
        self.assertEqual(plot, element(self.df, 'x', 'y'))
    @parameterized.expand([('line', Curve), ('area', Area), ('scatter', Scatter)])
    def test_tidy_chart_index(self, kind, element):
        plot = self.df.hvplot(x='index', y='y', kind=kind)
        self.assertEqual(plot, element(self.df, 'index', 'y'))
    @parameterized.expand([('line', Curve), ('area', Area), ('scatter', Scatter)])
    def test_tidy_chart_index_by(self, kind, element):
        plot = self.df.hvplot(x='index', y='y', by='x', kind=kind)
        obj = NdOverlay({1: element(self.df[self.df.x==1], 'index', 'y'),
                         3: element(self.df[self.df.x==3], 'index', 'y'),
                         5: element(self.df[self.df.x==5], 'index', 'y')}, 'x')
        self.assertEqual(plot, obj)
    @parameterized.expand([('line', Curve), ('area', Area), ('scatter', Scatter)])
    def test_tidy_chart_index_by_legend_position(self, kind, element):
        plot = self.df.hvplot(x='index', y='y', by='x', kind=kind, legend='left')
        opts = Store.lookup_options('bokeh', plot, 'plot')
        self.assertEqual(opts.kwargs['legend_position'], 'left')
    @parameterized.expand([('line', Curve), ('area', Area), ('scatter', Scatter)])
    def test_use_index_disabled_uses_first_cols(self, kind, element):
        plot = self.df.hvplot(use_index=False, kind=kind)
        self.assertEqual(plot.kdims, ['x'])
        self.assertEqual(plot.vdims, ['y'])
    @parameterized.expand([('line', Curve), ('area', Area), ('scatter', Scatter)])
    def test_tidy_chart_ranges(self, kind, element):
        plot = self.df.hvplot(x='x', y='y', kind=kind, xlim=(0, 3), ylim=(5, 10))
        opts = Store.lookup_options('bokeh', plot, 'plot').options
        self.assertEqual(opts['xlim'], (0, 3))
        self.assertEqual(opts['ylim'], (5, 10))
    @parameterized.expand([('line', Curve), ('area', Area), ('scatter', Scatter)])
    def test_wide_chart_ranges(self, kind, element):
        plot = self.df.hvplot(kind=kind, xlim=(0, 3), ylim=(5, 10))
        opts = Store.lookup_options('bokeh', plot.last, 'plot').options
        self.assertEqual(opts['xlim'], (0, 3))
        self.assertEqual(opts['ylim'], (5, 10))
    @parameterized.expand([('line', Curve), ('area', Area), ('scatter', Scatter)])
    def test_tidy_chart_with_hover_cols(self, kind, element):
        plot = self.cat_df.hvplot(x='x', y='y', kind=kind, hover_cols=['category'])
        self.assertEqual(plot, element(self.cat_df, 'x', ['y', 'category']))
    @parameterized.expand([('line', Curve), ('area', Area), ('scatter', Scatter)])
    def test_tidy_chart_with_index_in_hover_cols(self, kind, element):
        plot = self.df.hvplot(x='x', y='y', kind=kind, hover_cols=['index'])
        altered_df = self.df.reset_index()
        self.assertEqual(plot, element(altered_df, 'x', ['y', 'index']))
    @parameterized.expand([('line', Curve), ('area', Area), ('scatter', Scatter)])
    def test_tidy_chart_with_hover_cols_as_all(self, kind, element):
        plot = self.cat_df.hvplot(x='x', y='y', kind=kind, hover_cols='all')
        altered_df = self.cat_df.reset_index()
        self.assertEqual(plot, element(altered_df, 'x', ['y', 'index', 'category']))
    @parameterized.expand([('line', Curve), ('area', Area), ('scatter', Scatter)])
    def test_tidy_chart_with_hover_cols_as_all_with_use_index_as_false(self, kind, element):
        plot = self.cat_df.hvplot(x='x', y='y', kind=kind, hover_cols='all', use_index=False)
        self.assertEqual(plot, element(self.cat_df, 'x', ['y', 'category']))
    def test_area_stacked(self):
        plot = self.df.hvplot.area(stacked=True)
        obj = NdOverlay({'x': Area(self.df, 'index', 'x').redim(x='value'),
                         'y': Area(self.df, 'index', 'y').redim(y='value')}, 'Variable')
        self.assertEqual(plot, Area.stack(obj))
    def test_scatter_color_set_to_series(self):
        # Dask series must be computed before being used as a color vector.
        if is_dask(self.df['y']):
            y = self.df['y'].compute()
        else:
            y = self.df['y']
        actual = self.df.hvplot.scatter('x', 'y', c=y)
        altered_df = self.df.assign(_color=y)
        expected = altered_df.hvplot.scatter('x', 'y', c='_color')
        self.assertEqual(actual, expected)
    def test_scatter_size_set_to_series(self):
        if is_dask(self.df['y']):
            y = self.df['y'].compute()
        else:
            y = self.df['y']
        plot = self.df.hvplot.scatter('x', 'y', s=y)
        opts = Store.lookup_options('bokeh', plot, 'style')
        assert '_size' in plot.data.columns
        self.assertEqual(opts.kwargs['size'], '_size')
    def test_scatter_color_by_legend_position(self):
        plot = self.cat_df.hvplot.scatter('x', 'y', c='category', legend='left')
        opts = Store.lookup_options('bokeh', plot, 'plot')
        self.assertEqual(opts.kwargs['legend_position'], 'left')
    def test_histogram_by_category_legend_position(self):
        plot = self.cat_df.hvplot.hist('y', by='category', legend='left')
        opts = Store.lookup_options('bokeh', plot, 'plot')
        self.assertEqual(opts.kwargs['legend_position'], 'left')
    @parameterized.expand([('line', Curve), ('area', Area), ('scatter', Scatter)])
    def test_only_includes_num_chart(self, kind, element):
        plot = self.cat_df.hvplot(kind=kind)
        obj = NdOverlay({'x': element(self.cat_df, 'index', 'x').redim(x='value'),
                         'y': element(self.cat_df, 'index', 'y').redim(y='value'),
                        }, 'Variable')
        self.assertEqual(plot, obj)
    @parameterized.expand([('line', Curve), ('area', Area), ('scatter', Scatter)])
    def test_includes_str_if_no_num_chart(self, kind, element):
        plot = self.cat_only_df.hvplot(kind=kind)
        obj = NdOverlay({'upper': element(self.cat_only_df, 'index', 'upper').redim(upper='value'),
                         'lower': element(self.cat_only_df, 'index', 'lower').redim(lower='value'),
                        }, 'Variable')
        self.assertEqual(plot, obj)
    def test_time_df_sorts_on_plot(self):
        scrambled = self.time_df.sample(frac=1)
        plot = scrambled.hvplot(x='time')
        assert (plot.data == self.time_df).all().all()
        assert len(plot.data.time.unique()) == len(plot.data.time)
    def test_time_df_does_not_sort_on_plot_if_sort_date_off(self):
        scrambled = self.time_df.sample(frac=1)
        plot = scrambled.hvplot(x='time', sort_date=False)
        assert (plot.data == scrambled).all().all()
        assert len(plot.data.time.unique()) == len(plot.data.time)
    def test_time_df_sorts_on_plot_using_index_as_x(self):
        df = self.time_df.set_index('time')
        scrambled = df.sample(frac=1)
        plot = scrambled.hvplot()
        assert (plot.data['time'] == df.index).all()
        assert len(plot.data.time.unique()) == len(plot.data.time)
    def test_time_df_does_not_sort_on_plot_if_sort_date_off_using_index_as_x(self):
        df = self.time_df.set_index('time')
        scrambled = df.sample(frac=1)
        plot = scrambled.hvplot(sort_date=False)
        assert (plot.data.time == scrambled.index).all().all()
        assert len(plot.data.time.unique()) == len(plot.data.time)
    def test_time_df_with_groupby_as_derived_datetime(self):
        plot = self.time_df.hvplot(groupby='time.dayofweek', dynamic=False)
        assert list(plot.keys()) == [0, 1, 2, 3, 4, 5, 6]
        assert list(plot.dimensions()) == ['time.dayofweek', 'index', 'A']
    def test_time_df_with_by_as_derived_datetime(self):
        plot = self.time_df.hvplot(by='time.month', dynamic=False)
        assert list(plot.keys()) == [1]
        assert list(plot.dimensions()) == ['time.month', 'index', 'A']
    def test_time_df_with_x_as_derived_datetime(self):
        plot = self.time_df.hvplot.scatter(x='time.day', dynamic=False)
        assert list(plot.dimensions()) == ['time.day', 'A']
    def test_time_df_as_index_with_x_as_derived_datetime_using_name(self):
        indexed = self.time_df.set_index('time')
        plot = indexed.hvplot.scatter(x='time.day', dynamic=False)
        assert list(plot.dimensions()) == ['time.day', 'A']
    def test_time_df_as_index_with_x_as_derived_datetime_using_index(self):
        indexed = self.time_df.set_index('time')
        plot = indexed.hvplot.scatter(x='index.day', dynamic=False)
        assert list(plot.dimensions()) == ['index.day', 'A']
    def test_default_y_not_in_by(self):
        plot = self.cat_df.hvplot.scatter(by='x')
        assert plot.kdims == ['x']
        assert plot[1].kdims == ['index']
        assert plot[1].vdims == ['y']
class TestChart1DDask(TestChart1D):
    """Re-runs TestChart1D with the frames converted to dask DataFrames."""
    def setUp(self):
        super(TestChart1DDask, self).setUp()
        try:
            import dask.dataframe as dd
        except:
            raise SkipTest('Dask not available')
        import hvplot.dask # noqa
        self.df = dd.from_pandas(self.df, npartitions=2)
        self.dt_df = dd.from_pandas(self.dt_df, npartitions=3)
        self.cat_df = dd.from_pandas(self.cat_df, npartitions=3)
        self.cat_only_df = dd.from_pandas(self.cat_only_df, npartitions=1)
    def test_by_datetime_accessor(self):
        # dt-accessor columns cannot be expanded on dask frames.
        raise SkipTest("Can't expand dt accessor columns when using dask")
| 47.833837
| 102
| 0.607655
| 2,158
| 15,833
| 4.291937
| 0.091752
| 0.034334
| 0.025265
| 0.047182
| 0.801123
| 0.785036
| 0.733967
| 0.711078
| 0.675124
| 0.632909
| 0
| 0.011923
| 0.210699
| 15,833
| 330
| 103
| 47.978788
| 0.729215
| 0.0012
| 0
| 0.4375
| 0
| 0
| 0.093048
| 0
| 0
| 0
| 0
| 0
| 0.205882
| 1
| 0.172794
| false
| 0
| 0.055147
| 0
| 0.242647
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bca71fc4cf96b873540d3a6b3e794bd9a42f4cc4
| 309
|
py
|
Python
|
tests/unit/test_seeds.py
|
joye1503/cocrawler
|
39b543320e91477412ab8bfc8402c88c3304c553
|
[
"Apache-2.0"
] | 166
|
2016-07-18T19:37:34.000Z
|
2022-03-06T18:26:50.000Z
|
tests/unit/test_seeds.py
|
joye1503/cocrawler
|
39b543320e91477412ab8bfc8402c88c3304c553
|
[
"Apache-2.0"
] | 9
|
2016-10-22T18:20:56.000Z
|
2021-04-06T05:28:04.000Z
|
tests/unit/test_seeds.py
|
joye1503/cocrawler
|
39b543320e91477412ab8bfc8402c88c3304c553
|
[
"Apache-2.0"
] | 25
|
2017-02-28T19:41:41.000Z
|
2021-07-10T11:20:33.000Z
|
import cocrawler.seeds as seeds
def test_special_seed_handling():
    """special_seed_handling normalizes seed URLs: bare hosts and
    protocol-relative seeds get an http:// scheme prepended, while seeds
    that already carry a scheme are returned unchanged."""
    specialsh = seeds.special_seed_handling
    assert specialsh('foo') == 'http://foo'
    assert specialsh('//foo/') == 'http://foo/'
    assert specialsh('https://foo') == 'https://foo'
    #assert specialsh('mailto:foo') == 'mailto:foo'
| 30.9
| 52
| 0.66343
| 37
| 309
| 5.405405
| 0.405405
| 0.3
| 0.27
| 0.22
| 0.325
| 0.325
| 0.325
| 0
| 0
| 0
| 0
| 0
| 0.152104
| 309
| 9
| 53
| 34.333333
| 0.763359
| 0.148867
| 0
| 0
| 0
| 0
| 0.198473
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.166667
| false
| 0
| 0.166667
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bcb40ee55b6617cf04b6c56cbba927ef32802dd1
| 340
|
py
|
Python
|
eventsourcing/infrastructure/popo/mapper.py
|
alexanderlarin/eventsourcing
|
6f2a4ded3c783ba3ee465243a48f66ecdee20f52
|
[
"BSD-3-Clause"
] | null | null | null |
eventsourcing/infrastructure/popo/mapper.py
|
alexanderlarin/eventsourcing
|
6f2a4ded3c783ba3ee465243a48f66ecdee20f52
|
[
"BSD-3-Clause"
] | null | null | null |
eventsourcing/infrastructure/popo/mapper.py
|
alexanderlarin/eventsourcing
|
6f2a4ded3c783ba3ee465243a48f66ecdee20f52
|
[
"BSD-3-Clause"
] | null | null | null |
from eventsourcing.infrastructure.sequenceditemmapper import SequencedItemMapper
class SequencedItemMapperForPopo(SequencedItemMapper):
    """Mapper for plain-Python-object storage.

    Both mapping directions are the identity: the stored topic/data pair
    already is the event class and its attributes, and vice versa.
    """

    def get_event_class_and_attrs(self, topic, data):
        # Identity mapping: nothing to decode for in-memory objects.
        return (topic, data)

    def get_item_topic_and_state(self, domain_event_class, event_attrs):
        # Identity mapping in the write direction as well.
        return (domain_event_class, event_attrs)
| 34
| 80
| 0.811765
| 39
| 340
| 6.717949
| 0.487179
| 0.114504
| 0.122137
| 0.160305
| 0.198473
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138235
| 340
| 9
| 81
| 37.777778
| 0.894198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
bcbf89dd3a9e302a033fe47a0f5d47bf275dd4ab
| 399
|
py
|
Python
|
pyleecan/Methods/Slot/SlotW28/__init__.py
|
IrakozeFD/pyleecan
|
5a93bd98755d880176c1ce8ac90f36ca1b907055
|
[
"Apache-2.0"
] | 95
|
2019-01-23T04:19:45.000Z
|
2022-03-17T18:22:10.000Z
|
pyleecan/Methods/Slot/SlotW28/__init__.py
|
IrakozeFD/pyleecan
|
5a93bd98755d880176c1ce8ac90f36ca1b907055
|
[
"Apache-2.0"
] | 366
|
2019-02-20T07:15:08.000Z
|
2022-03-31T13:37:23.000Z
|
pyleecan/Methods/Slot/SlotW28/__init__.py
|
IrakozeFD/pyleecan
|
5a93bd98755d880176c1ce8ac90f36ca1b907055
|
[
"Apache-2.0"
] | 74
|
2019-01-24T01:47:31.000Z
|
2022-02-25T05:44:42.000Z
|
# -*- coding: utf-8 -*-
from ....Methods.Slot.Slot import SlotCheckError
class S28_WCheckError(SlotCheckError):
    """Check error for the W* (width) constraints of Slot W28.

    Exact condition is raised by the SlotW28 check method — confirm there.
    """

    pass


class S28_RboW0CheckError(SlotCheckError):
    """Check error relating Rbo and W0 of Slot W28 (presumably bore radius
    vs. opening width — confirm in the check method)."""

    pass


class S28_R1W3CheckError(SlotCheckError):
    """Check error relating R1 and W3 of Slot W28."""

    pass


class S28_R1R1CheckError(SlotCheckError):
    """Check error on the R1 constraint of Slot W28."""

    pass


class S28_ZsCheckError(SlotCheckError):
    """Check error on Zs (slot count) for Slot W28."""

    pass
| 12.090909
| 48
| 0.626566
| 34
| 399
| 7.205882
| 0.470588
| 0.163265
| 0.37551
| 0.42449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.051613
| 0.223058
| 399
| 32
| 49
| 12.46875
| 0.73871
| 0.052632
| 0
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.454545
| 0.090909
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
4c54e937bca7f79fb4da0ee880f05e1d391b45c4
| 25
|
py
|
Python
|
tests/syntax/async_def_missing_parens.py
|
matan-h/friendly
|
3ab0fc6541c837271e8865e247750007acdd18fb
|
[
"MIT"
] | 287
|
2019-04-08T13:18:29.000Z
|
2021-03-14T19:10:21.000Z
|
tests/syntax/async_def_missing_parens.py
|
matan-h/friendly
|
3ab0fc6541c837271e8865e247750007acdd18fb
|
[
"MIT"
] | 191
|
2019-04-08T14:39:18.000Z
|
2021-03-14T22:14:56.000Z
|
tests/syntax/async_def_missing_parens.py
|
matan-h/friendly
|
3ab0fc6541c837271e8865e247750007acdd18fb
|
[
"MIT"
] | 9
|
2019-04-08T12:54:08.000Z
|
2020-11-20T02:26:27.000Z
|
async def name:
pass
| 8.333333
| 15
| 0.64
| 4
| 25
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.32
| 25
| 2
| 16
| 12.5
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.5
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
4c68ed31dbb4d53e9d6bc10771e583e6f947f50c
| 107
|
py
|
Python
|
unitorch/cli/datasets/__init__.py
|
fuliucansheng/UniTorch
|
47038321593ce4e7eabda555bd58c0cf89482146
|
[
"MIT"
] | 2
|
2022-02-05T08:52:00.000Z
|
2022-03-27T07:01:34.000Z
|
unitorch/cli/datasets/__init__.py
|
Lixin-Qian/unitorch
|
47038321593ce4e7eabda555bd58c0cf89482146
|
[
"MIT"
] | null | null | null |
unitorch/cli/datasets/__init__.py
|
Lixin-Qian/unitorch
|
47038321593ce4e7eabda555bd58c0cf89482146
|
[
"MIT"
] | 1
|
2022-03-27T07:01:13.000Z
|
2022-03-27T07:01:13.000Z
|
# Copyright (c) FULIUCANSHENG.
# Licensed under the MIT License.
import unitorch.cli.datasets.huggingface
| 21.4
| 40
| 0.794393
| 13
| 107
| 6.538462
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121495
| 107
| 4
| 41
| 26.75
| 0.904255
| 0.560748
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d5b397ee3619fc1a34942ab12d0fe0bc44e014b2
| 2,363
|
py
|
Python
|
scripts/generate-images.py
|
getwarped/s2i-jupyter-stacks
|
5dd5250583101cd13551c4a5f82bd257e5247fc2
|
[
"BSD-2-Clause"
] | 11
|
2016-11-22T04:11:58.000Z
|
2019-05-21T17:23:45.000Z
|
scripts/generate-images.py
|
getwarped/s2i-jupyter-stacks
|
5dd5250583101cd13551c4a5f82bd257e5247fc2
|
[
"BSD-2-Clause"
] | 2
|
2016-11-01T04:07:49.000Z
|
2016-12-14T23:48:01.000Z
|
scripts/generate-images.py
|
getwarped/s2i-jupyter-stacks
|
5dd5250583101cd13551c4a5f82bd257e5247fc2
|
[
"BSD-2-Clause"
] | 3
|
2016-11-24T08:02:19.000Z
|
2018-01-01T21:16:39.000Z
|
'''
Generates the image stream resource definitions.
'''

import powershift.resources as resources

# Annotation key/value pairs shared by every builder tag.
_COMMON_ANNOTATIONS = {
    'iconClass': 'icon-python',
    'tags': 'builder,python,jupyter',
    'supports': 'python',
    'sampleRepo': 'https://github.com/ricardoduarte/python-for-developers.git',
}


def _notebook_tag(name, display_name, description, from_kind, from_name,
        version=None):
    """Build a v1_TagReference for one Jupyter notebook builder tag.

    ``version`` is omitted from the annotations when None — the floating
    ``latest`` tag carries no version annotation. Annotation insertion
    order does not matter: dump() below sorts keys.
    """
    annotations = {
        'openshift.io/display-name': display_name,
        'description': description,
    }
    annotations.update(_COMMON_ANNOTATIONS)
    if version is not None:
        annotations['version'] = version
    return resources.v1_TagReference(
        name=name,
        annotations=annotations,
        from_=resources.v1_ObjectReference(kind=from_kind, name=from_name),
    )


# The image stream resource; tags are appended below.
image_stream = resources.v1_ImageStream(
    metadata=resources.v1_ObjectMeta(
        name='jupyter-notebook',
        annotations={
            'openshift.io/display-name': 'Jupyter Notebook'
        }
    ),
    spec=resources.v1_ImageStreamSpec()
)

image_stream.spec.tags.append(_notebook_tag(
    '2.7', 'Jupyter Notebook (Python 2.7)',
    'Build and deploy custom Jupyter Notebook images for Python 2.7.',
    'DockerImage', 'getwarped/s2i-notebook-python27:latest', version='2.7'))

image_stream.spec.tags.append(_notebook_tag(
    '3.5', 'Jupyter Notebook (Python 3.5)',
    'Build and deploy custom Jupyter Notebook images for Python 3.5.',
    'DockerImage', 'getwarped/s2i-notebook-python35:latest', version='3.5'))

# 'latest' aliases the newest Python 3 builder via an ImageStreamTag reference.
image_stream.spec.tags.append(_notebook_tag(
    'latest', 'Jupyter Notebook (Python 3.X)',
    'Build and deploy custom Jupyter Notebook images for Python 3.X.',
    'ImageStreamTag', '3.5'))

# Serialize the resource definition (indent/sort options forwarded to the dumper).
resources.dump(image_stream, indent=4, sort_keys=True)
| 31.506667
| 91
| 0.59543
| 234
| 2,363
| 5.935897
| 0.286325
| 0.071274
| 0.068395
| 0.083513
| 0.791217
| 0.791217
| 0.791217
| 0.756659
| 0.717783
| 0.560115
| 0
| 0.020906
| 0.271265
| 2,363
| 74
| 92
| 31.932432
| 0.785714
| 0.020313
| 0
| 0.421875
| 1
| 0
| 0.421509
| 0.104944
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.015625
| 0
| 0.015625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d5cdc1b3697ea07c37396fd433149768f1a6f400
| 237
|
py
|
Python
|
chexnet/scripts/load_dataset.py
|
tmarkmann/chexnet-tf2
|
c93cf47620a956a2ad25f952250ea0ecd7b90e4e
|
[
"MIT"
] | 1
|
2021-12-15T16:54:59.000Z
|
2021-12-15T16:54:59.000Z
|
chexnet/scripts/load_dataset.py
|
tmarkmann/chexnet-tf2
|
c93cf47620a956a2ad25f952250ea0ecd7b90e4e
|
[
"MIT"
] | null | null | null |
chexnet/scripts/load_dataset.py
|
tmarkmann/chexnet-tf2
|
c93cf47620a956a2ad25f952250ea0ecd7b90e4e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""Script: load the CXR14 dataset with download enabled and run its benchmark."""
from chexnet.dataloader.cxr14_dataset import CXR14Dataset
from chexnet.configs.config import chexnet_config

# Force the download flag on before constructing the dataset.
chexnet_config["dataset"]["download"] = True
dataset = CXR14Dataset(chexnet_config)
# Presumably measures data-loading throughput — confirm in CXR14Dataset.benchmark.
dataset.benchmark()
| 26.333333
| 57
| 0.818565
| 29
| 237
| 6.551724
| 0.551724
| 0.205263
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03211
| 0.080169
| 237
| 9
| 58
| 26.333333
| 0.83945
| 0.088608
| 0
| 0
| 0
| 0
| 0.069444
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d5cef18b974096e29960fb203980cd731cc5bc33
| 38
|
py
|
Python
|
layers.py
|
nomoi/encoderdec
|
fd6c115c8ab6ea3c985f543ab723f1ffac8334a7
|
[
"MIT"
] | null | null | null |
layers.py
|
nomoi/encoderdec
|
fd6c115c8ab6ea3c985f543ab723f1ffac8334a7
|
[
"MIT"
] | null | null | null |
layers.py
|
nomoi/encoderdec
|
fd6c115c8ab6ea3c985f543ab723f1ffac8334a7
|
[
"MIT"
] | null | null | null |
from GRU import GRU
from FF import FF
| 12.666667
| 19
| 0.789474
| 8
| 38
| 3.75
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 38
| 2
| 20
| 19
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d5cf348d7c1cc538d7e6cf5dbe1ba65f5ed93ab8
| 163
|
py
|
Python
|
28_updating_list.py
|
onowdev/python-selflearning
|
3d7245de0207a5bfcbce4f7adde60e7316b70a8e
|
[
"MIT"
] | null | null | null |
28_updating_list.py
|
onowdev/python-selflearning
|
3d7245de0207a5bfcbce4f7adde60e7316b70a8e
|
[
"MIT"
] | null | null | null |
28_updating_list.py
|
onowdev/python-selflearning
|
3d7245de0207a5bfcbce4f7adde60e7316b70a8e
|
[
"MIT"
] | null | null | null |
# Tutorial: update a list element in place by assigning to an index.
# (Note the misspelled 'phisics' is part of the printed output and kept as-is.)
list1 = ['phisics','chemistry',1997,2000]
# Index 2 initially holds 1997.
print("Value available at index 2 is ", list1[2])
# Replace the element at index 2.
list1[2] = 2003
print("New Value available at index 2 is ", list1[2])
| 27.166667
| 53
| 0.687117
| 27
| 163
| 4.148148
| 0.518519
| 0.160714
| 0.285714
| 0.375
| 0.535714
| 0.535714
| 0.535714
| 0.535714
| 0
| 0
| 0
| 0.151079
| 0.147239
| 163
| 6
| 53
| 27.166667
| 0.654676
| 0
| 0
| 0
| 0
| 0
| 0.487805
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
d5d17141656ebbab637c1491e889ad1ac9a76434
| 344
|
py
|
Python
|
expects/matchers/built_in/equal_values.py
|
danibaena/expects
|
296203a3fb07cf3061b8f7b348136c9208195d93
|
[
"Apache-2.0"
] | null | null | null |
expects/matchers/built_in/equal_values.py
|
danibaena/expects
|
296203a3fb07cf3061b8f7b348136c9208195d93
|
[
"Apache-2.0"
] | null | null | null |
expects/matchers/built_in/equal_values.py
|
danibaena/expects
|
296203a3fb07cf3061b8f7b348136c9208195d93
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from .. import Matcher
class equal_values(Matcher):
    """Matcher that compares two objects by their instance attributes
    (``__dict__``) rather than by ``==``."""

    def __init__(self, expected):
        self._expected = expected

    def _match(self, subject):
        # Attribute-wise equality; the empty list is the (no) reasons part
        # of the matcher protocol's result tuple.
        attrs_equal = subject.__dict__ == self._expected.__dict__
        return attrs_equal, []

    def _match_negated(self, subject):
        # Negated form: succeed when the attribute dicts differ.
        attrs_differ = subject.__dict__ != self._expected.__dict__
        return attrs_differ, []
| 22.933333
| 62
| 0.659884
| 38
| 344
| 5.263158
| 0.473684
| 0.24
| 0.17
| 0.24
| 0.44
| 0.44
| 0.44
| 0.44
| 0
| 0
| 0
| 0.003704
| 0.215116
| 344
| 14
| 63
| 24.571429
| 0.737037
| 0.05814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0.125
| 0.25
| 0.875
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
d5d1a7dfc38e80ff7dd3a9ab115711c1234f7f3f
| 8,626
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowSpanningTreeMstDetail/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxe/tests/ShowSpanningTreeMstDetail/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxe/tests/ShowSpanningTreeMstDetail/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
expected_output = {
'mst_instances': {
6: {
'bridge_address': '5897.bdff.3b3a',
'bridge_priority': 20486,
'interfaces': {
'GigabitEthernet1/7': {
'cost': 20000,
'counters': {
'bpdu_received': 0,
'bpdu_sent': 836828,
},
'designated_bridge_address': '5897.bdff.3b3a',
'designated_bridge_port_id': '128.7',
'designated_bridge_priority': 20486,
'designated_root_address': '58ac.78ff.c3f5',
'designated_root_cost': 2000,
'designated_root_priority': 8198,
'forward_delay': 0,
'forward_transitions': 1,
'message_expires': 0,
'name': 'GigabitEthernet1/7',
'port_id': '128.7',
'port_priority': 128,
'status': 'designated forwarding',
},
'TenGigabitEthernet2/10': {
'cost': 2000,
'counters': {
'bpdu_received': 0,
'bpdu_sent': 1285480,
},
'designated_bridge_address': '5897.bdff.3b3a',
'designated_bridge_port_id': '128.138',
'designated_bridge_priority': 20486,
'designated_root_address': '58ac.78ff.c3f5',
'designated_root_cost': 2000,
'designated_root_priority': 8198,
'forward_delay': 0,
'forward_transitions': 2,
'message_expires': 0,
'name': 'TenGigabitEthernet2/10',
'port_id': '128.138',
'port_priority': 128,
'status': 'designated forwarding',
},
'TenGigabitEthernet2/3': {
'cost': 2000,
'counters': {
'bpdu_received': 0,
'bpdu_sent': 1285495,
},
'designated_bridge_address': '5897.bdff.3b3a',
'designated_bridge_port_id': '128.131',
'designated_bridge_priority': 20486,
'designated_root_address': '58ac.78ff.c3f5',
'designated_root_cost': 2000,
'designated_root_priority': 8198,
'forward_delay': 0,
'forward_transitions': 2,
'message_expires': 0,
'name': 'TenGigabitEthernet2/3',
'port_id': '128.131',
'port_priority': 128,
'status': 'designated forwarding',
},
'TenGigabitEthernet2/4': {
'cost': 2000,
'counters': {
'bpdu_received': 0,
'bpdu_sent': 1285500,
},
'designated_bridge_address': '5897.bdff.3b3a',
'designated_bridge_port_id': '128.132',
'designated_bridge_priority': 20486,
'designated_root_address': '58ac.78ff.c3f5',
'designated_root_cost': 2000,
'designated_root_priority': 8198,
'forward_delay': 0,
'forward_transitions': 2,
'message_expires': 0,
'name': 'TenGigabitEthernet2/4',
'port_id': '128.132',
'port_priority': 128,
'status': 'designated forwarding',
},
'TenGigabitEthernet2/5': {
'cost': 2000,
'counters': {
'bpdu_received': 0,
'bpdu_sent': 1285475,
},
'designated_bridge_address': '5897.bdff.3b3a',
'designated_bridge_port_id': '128.133',
'designated_bridge_priority': 20486,
'designated_root_address': '58ac.78ff.c3f5',
'designated_root_cost': 2000,
'designated_root_priority': 8198,
'forward_delay': 0,
'forward_transitions': 2,
'message_expires': 0,
'name': 'TenGigabitEthernet2/5',
'port_id': '128.133',
'port_priority': 128,
'status': 'designated forwarding',
},
'TenGigabitEthernet2/6': {
'cost': 2000,
'counters': {
'bpdu_received': 0,
'bpdu_sent': 1285487,
},
'designated_bridge_address': '5897.bdff.3b3a',
'designated_bridge_port_id': '128.134',
'designated_bridge_priority': 20486,
'designated_root_address': '58ac.78ff.c3f5',
'designated_root_cost': 2000,
'designated_root_priority': 8198,
'forward_delay': 0,
'forward_transitions': 2,
'message_expires': 0,
'name': 'TenGigabitEthernet2/6',
'port_id': '128.134',
'port_priority': 128,
'status': 'designated forwarding',
},
'TenGigabitEthernet2/7': {
'cost': 2000,
'counters': {
'bpdu_received': 0,
'bpdu_sent': 1285497,
},
'designated_bridge_address': '5897.bdff.3b3a',
'designated_bridge_port_id': '128.135',
'designated_bridge_priority': 20486,
'designated_root_address': '58ac.78ff.c3f5',
'designated_root_cost': 2000,
'designated_root_priority': 8198,
'forward_delay': 0,
'forward_transitions': 2,
'message_expires': 0,
'name': 'TenGigabitEthernet2/7',
'port_id': '128.135',
'port_priority': 128,
'status': 'designated forwarding',
},
'TenGigabitEthernet2/8': {
'cost': 2000,
'counters': {
'bpdu_received': 0,
'bpdu_sent': 1285497,
},
'designated_bridge_address': '5897.bdff.3b3a',
'designated_bridge_port_id': '128.136',
'designated_bridge_priority': 20486,
'designated_root_address': '58ac.78ff.c3f5',
'designated_root_cost': 2000,
'designated_root_priority': 8198,
'forward_delay': 0,
'forward_transitions': 2,
'message_expires': 0,
'name': 'TenGigabitEthernet2/8',
'port_id': '128.136',
'port_priority': 128,
'status': 'designated forwarding',
},
'TenGigabitEthernet2/9': {
'cost': 2000,
'counters': {
'bpdu_received': 0,
'bpdu_sent': 1285494,
},
'designated_bridge_address': '5897.bdff.3b3a',
'designated_bridge_port_id': '128.137',
'designated_bridge_priority': 20486,
'designated_root_address': '58ac.78ff.c3f5',
'designated_root_cost': 2000,
'designated_root_priority': 8198,
'forward_delay': 0,
'forward_transitions': 2,
'message_expires': 0,
'name': 'TenGigabitEthernet2/9',
'port_id': '128.137',
'port_priority': 128,
'status': 'designated forwarding',
},
},
'mst_id': 6,
'root_address': '58ac.78ff.c3f5',
'root_priority': 8198,
'sysid': 6,
'vlan': '500-501,504-505,507-554,556-599',
},
},
}
| 44.010204
| 66
| 0.415952
| 603
| 8,626
| 5.640133
| 0.124378
| 0.127021
| 0.047633
| 0.061747
| 0.89062
| 0.876507
| 0.855925
| 0.71479
| 0.649515
| 0.649515
| 0
| 0.127768
| 0.476467
| 8,626
| 195
| 67
| 44.235897
| 0.625332
| 0
| 0
| 0.6
| 0
| 0
| 0.391143
| 0.171111
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d5f727f66f41e4ec4a3cd53c615f8811d300a0b1
| 338
|
py
|
Python
|
work_3_2.py
|
Ghost-Lonely-Wolf/work_2
|
62037de0753328b6770c1371a89c2fdd5e52fad7
|
[
"MIT"
] | null | null | null |
work_3_2.py
|
Ghost-Lonely-Wolf/work_2
|
62037de0753328b6770c1371a89c2fdd5e52fad7
|
[
"MIT"
] | 1
|
2021-02-23T13:39:09.000Z
|
2021-02-23T13:39:09.000Z
|
work_3_2.py
|
Ghost-Lonely-Wolf/work_2
|
62037de0753328b6770c1371a89c2fdd5e52fad7
|
[
"MIT"
] | null | null | null |
def print_id(**kwarg):
    """Return all keyword-argument values joined by single spaces, in call order."""
    values = kwarg.values()
    return ' '.join(values)
# Prompt the user (prompts are in Russian: name, surname, birth year, city,
# email, phone number) and print the answers joined into one line.
print(print_id(name=input('Введите своё имя '),
               surname=input('Введите свою фамилию '),
               date=input('Введите свой год рождения '),
               city=input('Введите свой город проживания '),
               email=input('Введите свой email '),
               number=input('Введите свой номер телефона ')))
| 37.555556
| 50
| 0.680473
| 44
| 338
| 5.181818
| 0.590909
| 0.315789
| 0.280702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16568
| 338
| 9
| 50
| 37.555556
| 0.808511
| 0
| 0
| 0
| 0
| 0
| 0.418879
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| true
| 0
| 0
| 0.125
| 0.25
| 0.25
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
911f98d7ac788c63ef49a2beaae72a784bcc603b
| 139
|
py
|
Python
|
app/main/views.py
|
sd19surf/flask
|
3369b3bd74ea7e0489f3c3ba14482aaf89d33a88
|
[
"MIT"
] | null | null | null |
app/main/views.py
|
sd19surf/flask
|
3369b3bd74ea7e0489f3c3ba14482aaf89d33a88
|
[
"MIT"
] | null | null | null |
app/main/views.py
|
sd19surf/flask
|
3369b3bd74ea7e0489f3c3ba14482aaf89d33a88
|
[
"MIT"
] | null | null | null |
from app.main import bp
@bp.route('/')
def index():
    """Root route: return a static placeholder page body."""
    return 'Index Page'
@bp.route('/hello')
def hello():
    """/hello route: return a static greeting."""
    return 'Hello World!'
| 11.583333
| 25
| 0.611511
| 20
| 139
| 4.25
| 0.6
| 0.164706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.201439
| 139
| 11
| 26
| 12.636364
| 0.765766
| 0
| 0
| 0
| 0
| 0
| 0.208633
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| true
| 0
| 0.142857
| 0.285714
| 0.714286
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
9136904bd45b58044ddc0ce28c4cc1d9a651a974
| 5,416
|
py
|
Python
|
tests/st/ops/gpu/test_in_top_k.py
|
PowerOlive/mindspore
|
bda20724a94113cedd12c3ed9083141012da1f15
|
[
"Apache-2.0"
] | 3,200
|
2020-02-17T12:45:41.000Z
|
2022-03-31T20:21:16.000Z
|
tests/st/ops/gpu/test_in_top_k.py
|
zimo-geek/mindspore
|
665ec683d4af85c71b2a1f0d6829356f2bc0e1ff
|
[
"Apache-2.0"
] | 176
|
2020-02-12T02:52:11.000Z
|
2022-03-28T22:15:55.000Z
|
tests/st/ops/gpu/test_in_top_k.py
|
zimo-geek/mindspore
|
665ec683d4af85c71b2a1f0d6829356f2bc0e1ff
|
[
"Apache-2.0"
] | 621
|
2020-03-09T01:31:41.000Z
|
2022-03-30T03:43:19.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P
class InTopKNet(nn.Cell):
    """Minimal Cell wrapping the InTopK primitive for a fixed k."""

    def __init__(self, k):
        super().__init__()
        self.in_top_k = P.InTopK(k)

    def construct(self, predictions, targets):
        result = self.in_top_k(predictions, targets)
        return result
def in_top_k(nptype):
    """Run InTopK value checks on GPU for a given numpy prediction dtype.

    The original file repeated the same 4-line stanza eleven times; the
    cases are now a data table grouped by k, with one InTopKNet built per
    k (as before) and assertions executed in the original order.
    """
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    predictions = Tensor(np.array([[4, 1, 2, 0, 0, 0, 0, 0, 0],
                                   [7, 9, 9, 0, 0, 0, 0, 0, 0],
                                   [3, 3, 3, 0, 0, 0, 0, 0, 0]]).astype(nptype))

    # (k, [(targets, expected), ...]) — includes out-of-range k values
    # (165 and -2) to pin the clamping behavior.
    cases = [
        (165, [([0, 1, 0], [True, True, True])]),
        (-2, [([0, 1, 0], [False, False, False])]),
        (1, [([0, 1, 0], [True, True, True]),
             ([1, 0, 2], [False, False, True]),
             ([2, 2, 1], [False, True, True])]),
        (2, [([0, 1, 2], [True, True, True]),
             ([2, 2, 0], [True, True, True]),
             ([1, 0, 1], [False, False, True])]),
        (3, [([2, 2, 2], [True, True, True]),
             ([1, 1, 0], [True, True, True]),
             ([0, 0, 1], [True, True, True])]),
    ]
    for k, checks in cases:
        in_top_k_net = InTopKNet(k)
        for target_vals, expected_vals in checks:
            targets = Tensor(np.array(target_vals).astype(np.int32))
            output = in_top_k_net(predictions, targets)
            np.testing.assert_array_equal(output.asnumpy(),
                                          np.array(expected_vals))
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_in_top_k_float16():
    """Run the InTopK value checks with float16 predictions."""
    in_top_k(np.float16)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_in_top_k_float32():
    """Run the InTopK value checks with float32 predictions."""
    in_top_k(np.float32)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_in_top_k_invalid_input():
    """InTopK must raise ValueError for wrong ranks or mismatched shapes."""
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")

    # predictions must be 2d
    with pytest.raises(ValueError):
        in_top_k_net = InTopKNet(1)
        predictions = Tensor(np.zeros(4).astype(np.float32))
        targets = Tensor(np.zeros(4).astype(np.int32))
        _ = in_top_k_net(predictions, targets)

    # targets must be 1d
    with pytest.raises(ValueError):
        in_top_k_net = InTopKNet(1)
        predictions = Tensor(np.zeros(4).astype(np.float32))
        targets = Tensor(np.zeros(4).reshape(2, 2).astype(np.int32))
        _ = in_top_k_net(predictions, targets)

    # predictions.shape[1] must be equal to targets.shape[0]
    # NOTE(review): here predictions is (2, 2) and targets has length 4, so the
    # mismatch is on the batch axis (shape[0]) — confirm which axis InTopK checks.
    with pytest.raises(ValueError):
        in_top_k_net = InTopKNet(1)
        predictions = Tensor(np.zeros(4).reshape(2, 2).astype(np.float32))
        targets = Tensor(np.zeros(4).astype(np.int32))
        _ = in_top_k_net(predictions, targets)
| 38.411348
| 80
| 0.686115
| 799
| 5,416
| 4.44806
| 0.171464
| 0.042206
| 0.050647
| 0.055712
| 0.743106
| 0.742825
| 0.737198
| 0.737198
| 0.737198
| 0.720878
| 0
| 0.031962
| 0.173929
| 5,416
| 140
| 81
| 38.685714
| 0.762405
| 0.135709
| 0
| 0.623762
| 0
| 0
| 0.001287
| 0
| 0
| 0
| 0
| 0
| 0.108911
| 1
| 0.059406
| false
| 0
| 0.059406
| 0.009901
| 0.138614
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
914810e35224b63ebbd87ba0efac18538ecf3849
| 19,408
|
py
|
Python
|
src/virtual-wan/azext_vwan/_help.py
|
peizhou298/azure-cli-extensions
|
66bf5a6c5bfb8e9ea6c91a189ea8aab10f2221e1
|
[
"MIT"
] | 1
|
2020-09-16T03:47:44.000Z
|
2020-09-16T03:47:44.000Z
|
src/virtual-wan/azext_vwan/_help.py
|
peizhou298/azure-cli-extensions
|
66bf5a6c5bfb8e9ea6c91a189ea8aab10f2221e1
|
[
"MIT"
] | null | null | null |
src/virtual-wan/azext_vwan/_help.py
|
peizhou298/azure-cli-extensions
|
66bf5a6c5bfb8e9ea6c91a189ea8aab10f2221e1
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from knack.help_files import helps
# region VirtualHub
helps['network vhub'] = """
type: group
short-summary: Manage virtual hubs.
"""
helps['network vhub create'] = """
type: command
short-summary: Create a virtual hub.
"""
helps['network vhub list'] = """
type: command
short-summary: List virtual hubs.
"""
helps['network vhub show'] = """
type: command
short-summary: Get the details of a virtual hub.
"""
helps['network vhub update'] = """
type: command
short-summary: Update settings of a virtual hub.
"""
helps['network vhub delete'] = """
type: command
short-summary: Delete a virtual hub.
"""
helps['network vhub get-effective-routes'] = """
type: command
short-summary: Get the effective routes configured for the Virtual Hub resource or the specified resource.
examples:
- name: Get the effective routes configured for route table in the virtual hub.
text: |
az network vhub get-effective-routes --resource-type RouteTable --resource-id /subscriptions/MySub/resourceGroups/MyRG/providers/Microsoft.Network/virtualHubs/MyHub/hubRouteTables/MyRouteTable -g MyRG -n MyHub
- name: Get the effective routes configured for P2S connection in the virtual hub.
text: |
az network vhub get-effective-routes --resource-type P2SConnection --resource-id /subscriptions/MySub/resourceGroups/MyRG/providers/Microsoft.Network/p2sVpnGateways/MyGateway/p2sConnectionConfigurations/MyConnection -g MyRG -n MyHub
"""
helps['network vhub connection'] = """
type: group
short-summary: Manage virtual hub VNet connections.
"""
helps['network vhub connection create'] = """
type: command
short-summary: Create a virtual hub VNet connection.
examples:
- name: Create a virtual hub VNet connection without routing configuration.
text: |
az network vhub connection create -n MyConnection --vhub-name MyHub -g MyRG --remote-vnet MyVNet
- name: Create a virtual hub VNet connection with routing configuration.
text: |
az network vhub connection create -n MyConnection --vhub-name MyHub -g MyRG --remote-vnet MyVNet --associated-route-table /subscriptions/MySub/resourceGroups/MyRG/providers/Microsoft.Network/virtualHubs/MyHub/hubRouteTables/RouteTable1 --propagated-route-tables /subscriptions/MySub/resourceGroups/MyRG/providers/Microsoft.Network/virtualHubs/MyHub/hubRouteTables/RouteTable1 /subscriptions/MySub/resourceGroups/MyRG/providers/Microsoft.Network/virtualHubs/MyHub/hubRouteTables/RouteTable2 --labels label1 label2 --route-name route1 --next-hop 70.0.0.2 --address-prefixes 10.80.0.0/16 10.90.0.0/16
"""
helps['network vhub connection list'] = """
type: command
short-summary: List virtual hub VNet connections.
examples:
- name: List VNet connections in a given virtual hub.
text: |
az network vhub connection list --vhub-name MyHub -g MyRG
"""
helps['network vhub connection show'] = """
type: command
short-summary: Get the details of a virtual hub VNet connection.
examples:
- name: Get the details of a virtual hub VNet connection.
text: |
az network vhub connection show -n MyConnection --vhub-name MyHub -g MyRG
"""
helps['network vhub connection delete'] = """
type: command
short-summary: Delete a virtual hub VNet connection.
examples:
- name: Delete a virtual hub VNet connection.
text: |
az network vhub connection delete -n MyConnection --vhub-name MyHub -g MyRG
"""
helps['network vhub connection wait'] = """
type: command
short-summary: Place the CLI in a waiting state until a condition of virtual hub VNet connection is met.
"""
helps['network vhub route'] = """
type: group
short-summary: Manage entries in the virtual hub route table.
"""
helps['network vhub route add'] = """
type: command
short-summary: Add a route to the virtual hub route table.
"""
helps['network vhub route list'] = """
type: command
short-summary: List routes in the virtual hub route table.
"""
helps['network vhub route remove'] = """
type: command
short-summary: Remove a route from the virtual hub route table.
"""
helps['network vhub route reset'] = """
type: command
short-summary: Reset virtual hub route when the route state is failed.
"""
helps['network vhub route-table'] = """
type: group
short-summary: Manage route table in the virtual hub.
"""
helps['network vhub route-table create'] = """
type: command
short-summary: Create a route table in the virtual hub.
examples:
- name: Create a v2 route table in the virtual hub.
text: |
az network vhub route-table create -n MyRouteTable -g MyResourceGroup --vhub-name MyVhub --connections All_Vnets --destination-type CIDR --destinations "10.4.0.0/16" "10.6.0.0/16" --next-hop-type IPAddress --next-hops "10.0.0.68"
- name: Create a v3 route table in the virtual hub.
text: |
az network vhub route-table create -n MyRouteTable -g MyResourceGroup --vhub-name MyVhub --route-name MyRoute --destination-type CIDR --destinations "10.4.0.0/16" "10.6.0.0/16" --next-hop-type ResourceId --next-hop /subscriptions/MySub/resourceGroups/MyResourceGroup/providers/Microsoft.Network/azureFirewalls/MyFirewall --labels label1 label2
"""
helps['network vhub route-table update'] = """
type: command
short-summary: Update a route table in the virtual hub.
examples:
- name: Update the connections for a v2 route table in the virtual hub.
text: |
az network vhub route-table update -n MyRouteTable -g MyResourceGroup --vhub-name MyVhub --connections All_Vnets All_Branches
- name: Update the labels for a v3 route table in the virtual hub.
text: |
az network vhub route-table update -n MyRouteTable -g MyResourceGroup --vhub-name MyVhub --labels label1 label2
"""
helps['network vhub route-table delete'] = """
type: command
short-summary: Delete a route table in the virtual hub.
examples:
- name: Delete a route table in the virtual hub.
text: |
az network vhub route-table delete -n MyRouteTable -g MyResourceGroup --vhub-name MyVhub
"""
helps['network vhub route-table show'] = """
type: command
short-summary: Show a route table in the virtual hub.
"""
helps['network vhub route-table list'] = """
type: command
short-summary: List all route tables in the virtual hub.
"""
helps['network vhub route-table wait'] = """
type: command
short-summary: Place the CLI in a waiting state until a condition of the vhub route-table is met.
examples:
- name: Pause executing next line of CLI script until the route table is successfully provisioned.
text: az network vhub route-table wait -n MyRouteTable -g MyResourceGroup --vhub-name MyVhub --created
"""
helps['network vhub route-table route'] = """
type: group
short-summary: Manage routes of route table in the virtual hub.
"""
helps['network vhub route-table route add'] = """
type: command
short-summary: Add a route into route table of the virtual hub.
examples:
- name: Add a route with CIDR destination into route table of the virtual hub (route table v2).
text: |
az network vhub route-table route add -n MyRouteTable -g MyResourceGroup --vhub-name MyVhub --destination-type CIDR --destinations "10.4.0.0/16" "10.6.0.0/16" --next-hop-type IPAddress --next-hops "10.0.0.68"
- name: Add a route with Service destination into route table of the virtual hub (route table v2).
text: |
az network vhub route-table route add -n MyRouteTable -g MyResourceGroup --vhub-name MyVhub --destination-type Service --destinations Skype Sharepoint --next-hop-type IPAddress --next-hops "10.0.0.68"
- name: Add a route with firewall as next hop into route table of the virtual hub (route table v3).
text: |
az network vhub route-table route add -n MyRouteTable -g MyResourceGroup --vhub-name MyVhub --destination-type CIDR --destinations "10.4.0.0/16" "10.6.0.0/16" --next-hop-type ResourceId --next-hop /subscriptions/MySub/resourceGroups/MyResourceGroup/providers/Microsoft.Network/azureFirewalls/MyFirewall
"""
helps['network vhub route-table route list'] = """
type: command
short-summary: List routes in the virtual hub route table.
"""
helps['network vhub route-table route remove'] = """
type: command
short-summary: Remove a route from route table of the virtual hub.
"""
# endregion
# region VirtualWAN
helps['network vwan'] = """
type: group
short-summary: Manage virtual WANs.
"""
helps['network vwan create'] = """
type: command
short-summary: Create a virtual WAN.
"""
helps['network vwan list'] = """
type: command
short-summary: List virtual WANs.
"""
helps['network vwan show'] = """
type: command
short-summary: Get the details of a virtual WAN.
"""
helps['network vwan update'] = """
type: command
short-summary: Update settings of a virtual WAN.
"""
helps['network vwan delete'] = """
type: command
short-summary: Delete a virtual WAN.
"""
# endregion
# region VpnGateway
helps['network vpn-gateway'] = """
type: group
short-summary: Manage VPN gateways.
"""
helps['network vpn-gateway create'] = """
type: command
short-summary: Create a VPN gateway.
"""
helps['network vpn-gateway list'] = """
type: command
short-summary: List VPN gateways.
"""
helps['network vpn-gateway show'] = """
type: command
short-summary: Get the details of a VPN gateway.
"""
helps['network vpn-gateway update'] = """
type: command
short-summary: Update settings of a VPN gateway.
"""
helps['network vpn-gateway delete'] = """
type: command
short-summary: Delete a VPN gateway.
"""
helps['network vpn-gateway connection'] = """
type: group
short-summary: Manage VPN gateway connections.
"""
helps['network vpn-gateway connection create'] = """
type: command
short-summary: Create a VPN gateway connection.
examples:
- name: Create a VPN gateway connection
text: |
az network vpn-gateway connection create -g MyRG -n MyConnection --gateway-name MyGateway --remote-vpn-site /subscriptions/MySub/resourceGroups/MyRG/providers/Microsoft.Network/vpnSites/MyVPNSite --associated-route-table /subscriptions/MySub/resourceGroups/MyRG/providers/Microsoft.Network/virtualHubs/MyHub/hubRouteTables/MyRouteTable1 --propagated-route-tables /subscriptions/MySub/resourceGroups/MyRG/providers/Microsoft.Network/virtualHubs/MyHub/hubRouteTables/MyRouteTable1 /subscriptions/MySub/resourceGroups/MyRG/providers/Microsoft.Network/virtualHubs/MyHub/hubRouteTables/MyRouteTable2 --labels label1 label2
"""
helps['network vpn-gateway connection list'] = """
type: command
short-summary: List VPN gateway connections.
examples:
- name: List all connections for a given VPN gateway
text: |
az network vpn-gateway connection list -g MyRG --gateway-name MyGateway
"""
helps['network vpn-gateway connection show'] = """
type: command
short-summary: Get the details of a VPN gateway connection.
examples:
- name: Get the details of a VPN gateway connection
text: |
az network vpn-gateway connection show -g MyRG -n MyConnection --gateway-name MyGateway
"""
helps['network vpn-gateway connection delete'] = """
type: command
short-summary: Delete a VPN gateway connection.
examples:
- name: Delete a VPN gateway connection
text: |
az network vpn-gateway connection delete -g MyRG -n MyConnection --gateway-name MyGateway
"""
helps['network vpn-gateway connection wait'] = """
type: command
short-summary: Place the CLI in a waiting state until a condition of the VPN gateway connection is met.
"""
helps['network vpn-gateway connection ipsec-policy'] = """
type: group
short-summary: Manage VPN gateway connection IPSec policies.
"""
helps['network vpn-gateway connection ipsec-policy add'] = """
type: command
short-summary: Add an IPSec policy to a VPN gateway connection.
"""
helps['network vpn-gateway connection ipsec-policy list'] = """
type: command
short-summary: List VPN gateway connection IPSec policies.
"""
helps['network vpn-gateway connection ipsec-policy remove'] = """
type: command
short-summary: Remove an IPSec policy from a VPN gateway connection.
"""
# endregion
# region VpnSite
helps['network vpn-site'] = """
type: group
short-summary: Manage VPN site configurations.
"""
helps['network vpn-site create'] = """
type: command
short-summary: Create a VPN site configuration.
"""
helps['network vpn-site list'] = """
type: command
short-summary: List VPN site configurations.
"""
helps['network vpn-site show'] = """
type: command
short-summary: Get the details of a VPN site configuration.
"""
helps['network vpn-site update'] = """
type: command
short-summary: Update settings of a VPN site configuration.
"""
helps['network vpn-site delete'] = """
type: command
short-summary: Delete a VPN site configuration.
"""
helps['network vpn-site download'] = """
type: command
short-summary: Provide a SAS-URL to download the configuration for a VPN site.
"""
# endregion
# region VpnServerConfig
helps['network vpn-server-config'] = """
type: group
short-summary: Manage VPN server configuration.
"""
helps['network vpn-server-config create'] = """
type: command
short-summary: Create a VPN server configuration.
examples:
- name: Create a VPN server configuration with VPN auth type
text: |
az network vpn-server-config create -n MyVPNServerConfig -g MyRG --vpn-client-root-certs "ApplicationGatewayAuthCert.cer" --vpn-client-revoked-certs "ApplicationGatewayAuthCert.pem"
"""
helps['network vpn-server-config list'] = """
type: command
short-summary: List all VPN server configuration.
"""
helps['network vpn-server-config show'] = """
type: command
short-summary: Show the details of a VPN server configuration.
"""
helps['network vpn-server-config set'] = """
type: command
short-summary: Set settings of a VPN server configuration.
examples:
- name: Set a VPN server configuration with Radius auth type
text: |
az network vpn-server-config set -n MyVPNServerConfig -g MyRG --radius-client-root-certs "ApplicationGatewayAuthCert.cer" --radius-server-root-certs "ApplicationGatewayAuthCert.pem" --radius-servers address=test1 secret=clitest score=10 --radius-servers address=test2 secret=clitest score=10
"""
helps['network vpn-server-config delete'] = """
type: command
short-summary: Delete a VPN server configuration.
"""
helps['network vpn-server-config wait'] = """
type: command
short-summary: Place the CLI in a waiting state until a condition of the VPN server configuration is met.
"""
helps['network vpn-server-config ipsec-policy'] = """
type: group
short-summary: Manage VPN server configuration IPSec policies.
"""
helps['network vpn-server-config ipsec-policy add'] = """
type: command
short-summary: Add an IPSec policy to a VPN server configuration.
"""
helps['network vpn-server-config ipsec-policy list'] = """
type: command
short-summary: List VPN server configuration IPSec policies.
"""
helps['network vpn-server-config ipsec-policy remove'] = """
type: command
short-summary: Remove an IPSec policy from a VPN server configuration.
"""
helps['network vpn-server-config ipsec-policy wait'] = """
type: command
short-summary: Place the CLI in a waiting state until a condition of the IPSec policy of a VPN server configuration is met.
"""
# endregion
# region VpnServerConfig
helps['network p2s-vpn-gateway'] = """
type: group
short-summary: Manage point-to-site VPN gateway.
"""
helps['network p2s-vpn-gateway create'] = """
type: command
short-summary: Create a point-to-site VPN gateway.
examples:
- name: Create a point-to-site VPN gateway.
text: |
az network p2s-vpn-gateway create -g MyRG -n MyP2SVPNGateway --scale-unit 2 --vhub MyVhub --vpn-server-config MyVPNServerConfig --address-space 10.0.0.0/24 11.0.0.0/24
- name: Create a point-to-site VPN gateway with routing configuration.
text: |
az network p2s-vpn-gateway create -g MyRG -n MyP2SVPNGateway --scale-unit 2 --vhub MyVhub --vpn-server-config MyVPNServerConfig --address-space 10.0.0.0/24 11.0.0.0/24 --associated-route-table /subscriptions/MySub/resourceGroups/MyRG/providers/Microsoft.Network/virtualHubs/MyHub/hubRouteTables/MyRouteTable1 --propagated-route-tables /subscriptions/MySub/resourceGroups/MyRG/providers/Microsoft.Network/virtualHubs/MyHub/hubRouteTables/MyRouteTable1 /subscriptions/MySub/resourceGroups/MyRG/providers/Microsoft.Network/virtualHubs/MyHub/hubRouteTables/MyRouteTable2 --labels label1 label2
"""
helps['network p2s-vpn-gateway list'] = """
type: command
short-summary: List all point-to-site VPN gateway.
"""
helps['network p2s-vpn-gateway show'] = """
type: command
short-summary: Show the details of a point-to-site VPN gateway.
"""
helps['network p2s-vpn-gateway update'] = """
type: command
short-summary: Update settings of a point-to-site VPN gateway.
"""
helps['network p2s-vpn-gateway delete'] = """
type: command
short-summary: Delete a point-to-site VPN gateway.
"""
helps['network p2s-vpn-gateway wait'] = """
type: command
short-summary: Place the CLI in a waiting state until a condition of the point-to-site VPN gateway is met.
"""
helps['network p2s-vpn-gateway connection'] = """
type: group
short-summary: Manage point-to-site VPN gateway connections.
"""
helps['network p2s-vpn-gateway connection list'] = """
type: command
short-summary: List all connections for a given point-to-site VPN gateway.
examples:
- name: List all connections for a given point-to-site VPN gateway
text: |
az network p2s-vpn-gateway connection list -g MyRG --gateway-name MyP2SVPNGateway
"""
helps['network p2s-vpn-gateway connection show'] = """
type: command
short-summary: Show the details of a point-to-site VPN gateway connection.
examples:
- name: Show the details of a point-to-site VPN gateway connection
text: |
az network p2s-vpn-gateway connection show -g MyRG -n connection --gateway-name MyP2SVPNGateway
"""
helps['network p2s-vpn-gateway vpn-client'] = """
type: group
short-summary: Download a VPN client configuration required to connect to Azure via point-to-site
"""
helps['network p2s-vpn-gateway vpn-client generate'] = """
type: command
short-summary: Generate VPN profile for P2S client of the P2SVpnGateway in the specified resource group
"""
# endregion
| 36.897338
| 629
| 0.69528
| 2,528
| 19,408
| 5.336234
| 0.084256
| 0.072943
| 0.079466
| 0.114233
| 0.867532
| 0.811935
| 0.756857
| 0.670941
| 0.579096
| 0.48295
| 0
| 0.01166
| 0.186882
| 19,408
| 525
| 630
| 36.967619
| 0.843166
| 0.026329
| 0
| 0.494118
| 0
| 0.061176
| 0.910328
| 0.103284
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.002353
| 0
| 0.002353
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
914c659673a3113acb6c31534f5dccc393c3daf8
| 101
|
py
|
Python
|
Dictinary_Key_Values.py
|
mcjohnchristopher/Python_Samples
|
738f3b7d9baa7f4e396647f380118eba66ea645c
|
[
"CC0-1.0"
] | null | null | null |
Dictinary_Key_Values.py
|
mcjohnchristopher/Python_Samples
|
738f3b7d9baa7f4e396647f380118eba66ea645c
|
[
"CC0-1.0"
] | null | null | null |
Dictinary_Key_Values.py
|
mcjohnchristopher/Python_Samples
|
738f3b7d9baa7f4e396647f380118eba66ea645c
|
[
"CC0-1.0"
] | null | null | null |
# Demonstrate basic dict key/value/item access.
# Fixes: the original `jj = ['john:' =1 , 'jemi' = 2 ]` was a SyntaxError
# (a dict literal was clearly intended — .keys()/.values()/.items() are dict
# methods), and the Python 2 `print` statements are updated to function calls.
jj = {'john': 1, 'jemi': 2}
print(list(jj))      # iterating a dict yields its keys (insertion order)
print(jj.keys())
print(jj.values())
print(jj.items())
| 20.2
| 32
| 0.584158
| 17
| 101
| 3.470588
| 0.588235
| 0.355932
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024691
| 0.19802
| 101
| 5
| 33
| 20.2
| 0.703704
| 0
| 0
| 0
| 0
| 0
| 0.091837
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.8
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
915412e70b5cc9fb9818bf8415d75163cb4e5ad6
| 44,240
|
py
|
Python
|
app/locustio/confluence/http_actions.py
|
AKStratus/dc-app-performance-toolkit
|
f2e264dd87969a075a69ebededea1c73dc3798d6
|
[
"Apache-2.0"
] | 1
|
2021-05-27T09:58:38.000Z
|
2021-05-27T09:58:38.000Z
|
app/locustio/confluence/http_actions.py
|
AKStratus/dc-app-performance-toolkit
|
f2e264dd87969a075a69ebededea1c73dc3798d6
|
[
"Apache-2.0"
] | null | null | null |
app/locustio/confluence/http_actions.py
|
AKStratus/dc-app-performance-toolkit
|
f2e264dd87969a075a69ebededea1c73dc3798d6
|
[
"Apache-2.0"
] | 2
|
2021-05-26T13:43:15.000Z
|
2021-06-07T14:11:06.000Z
|
import random
import re
from locustio.common_utils import confluence_measure, fetch_by_re, timestamp_int, \
TEXT_HEADERS, NO_TOKEN_HEADERS, JSON_HEADERS, RESOURCE_HEADERS, generate_random_string, init_logger, \
raise_if_login_failed
from locustio.confluence.requests_params import confluence_datasets, Login, ViewPage, ViewDashboard, ViewBlog, \
CreateBlog, CreateEditPage, UploadAttachments, LikePage
from util.conf import CONFLUENCE_SETTINGS
import uuid
logger = init_logger(app_type='confluence')
confluence_dataset = confluence_datasets()
@confluence_measure
def login_and_view_dashboard(locust):
    """Log a random dataset user into Confluence and load the dashboard.

    Side effects: creates a per-session slot in locust.cross_action_storage
    and saves build_number, keyboard_hash and username into
    locust.session_data_storage for use by the other actions.
    Raises AssertionError if the login marker is missing from the response.
    """
    # Each virtual user gets its own storage slot keyed by a fresh UUID.
    session_id = str(uuid.uuid4())
    locust.cross_action_storage[session_id] = dict()
    locust.session_data_storage = locust.cross_action_storage[session_id]
    params = Login()
    # Credentials come from the prepared dataset as (username, password) pairs.
    user = random.choice(confluence_dataset["users"])
    username = user[0]
    password = user[1]
    login_body = params.login_body
    login_body['os_username'] = username
    login_body['os_password'] = password
    locust.post('/dologin.action', login_body, TEXT_HEADERS, catch_response=True)
    r = locust.get(url='/', catch_response=True)
    content = r.content.decode('utf-8')
    # 'Log Out' in the rendered page marks an authenticated session.
    if 'Log Out' not in content:
        logger.error(f'Login with {username}, {password} failed: {content}')
    assert 'Log Out' in content, 'User authentication failed.'
    logger.locust_info(f'User {username} is successfully logged in')
    # Values scraped from the page; regexes come from the requests_params module.
    keyboard_hash = fetch_by_re(params.keyboard_hash_re, content)
    build_number = fetch_by_re(params.build_number_re, content)
    # Replay the auxiliary requests a browser issues when rendering the dashboard.
    locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("010"),
                headers=RESOURCE_HEADERS, catch_response=True)
    locust.get('/rest/mywork/latest/status/notification/count', catch_response=True)
    locust.get(f'/rest/shortcuts/latest/shortcuts/{build_number}/{keyboard_hash}', catch_response=True)
    locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("025"),
                headers=RESOURCE_HEADERS, catch_response=True)
    locust.get(f'/rest/experimental/search?cql=type=space%20and%20space.type=favourite%20order%20by%20favourite'
               f'%20desc&expand=space.icon&limit=100&_={timestamp_int()}', catch_response=True)
    locust.get('/rest/dashboardmacros/1.0/updates?maxResults=40&tab=all&showProfilePic=true&labels='
               '&spaces=&users=&types=&category=&spaceKey=', catch_response=True)
    # Persist values later actions need to build their requests.
    locust.session_data_storage['build_number'] = build_number
    locust.session_data_storage['keyboard_hash'] = keyboard_hash
    locust.session_data_storage['username'] = username
def view_page_and_tree(locust):
    """Open a random dataset page (with its browser side-requests), then load
    the page tree panel for it.

    Fix: the tree request id scraped from HTML is HTML-escaped; the original
    call `.replace('&', '&')` was a no-op (the first argument had lost its
    entity form), so the escaped '&amp;' separators were never restored.
    It now unescapes '&amp;' back to '&'.

    Side effects: writes the scraped page metadata (page_id, space_key,
    tree_request_id, ancestors, ...) into locust.session_data_storage.
    Raises AssertionError when the page or the tree fails to render.
    """
    raise_if_login_failed(locust)
    params = ViewPage()
    page = random.choice(confluence_dataset["pages"])
    page_id = page[0]

    @confluence_measure
    def view_page():
        # Load the page itself and verify it rendered for this user.
        r = locust.get(f'/pages/viewpage.action?pageId={page_id}', catch_response=True)
        content = r.content.decode('utf-8')
        if 'Created by' not in content or 'Save for later' not in content:
            logger.error(f'Fail to open page {page_id}: {content}')
        assert 'Created by' in content and 'Save for later' in content, 'Could not open page.'
        # Scrape ids/tokens needed by the follow-up requests and by view_page_tree().
        parent_page_id = fetch_by_re(params.parent_page_id_re, content)
        parsed_page_id = fetch_by_re(params.page_id_re, content)
        space_key = fetch_by_re(params.space_key_re, content)
        tree_request_id = fetch_by_re(params.tree_result_id_re, content)
        has_no_root = fetch_by_re(params.has_no_root_re, content)
        root_page_id = fetch_by_re(params.root_page_id_re, content)
        atl_token_view_issue = fetch_by_re(params.atl_token_view_issue_re, content)
        editable = fetch_by_re(params.editable_re, content)
        ancestor_ids = re.findall(params.ancestor_ids_re, content)
        # Build an 'ancestors=id&ancestors=id&...' query fragment for the tree request.
        ancestor_str = 'ancestors='
        for ancestor in ancestor_ids:
            ancestor_str = ancestor_str + str(ancestor) + '&'
        locust.session_data_storage['page_id'] = parsed_page_id
        locust.session_data_storage['has_no_root'] = has_no_root
        locust.session_data_storage['tree_request_id'] = tree_request_id
        locust.session_data_storage['root_page_id'] = root_page_id
        locust.session_data_storage['ancestors'] = ancestor_str
        locust.session_data_storage['space_key'] = space_key
        locust.session_data_storage['editable'] = editable
        locust.session_data_storage['atl_token_view_issue'] = atl_token_view_issue
        # Browser side-requests fired while rendering the page.
        locust.get('/rest/helptips/1.0/tips', catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("110"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.get(f'/rest/likes/1.0/content/{parsed_page_id}/likes?commentLikes=true&_={timestamp_int()}',
                   catch_response=True)
        locust.get(f'/rest/highlighting/1.0/panel-items?pageId={parsed_page_id}&_={timestamp_int()}',
                   catch_response=True)
        locust.get(f'/rest/mywork/latest/status/notification/count?pageId={parsed_page_id}&_={timestamp_int()}',
                   catch_response=True)
        r = locust.get(f'/rest/inlinecomments/1.0/comments?containerId={parsed_page_id}&_={timestamp_int()}',
                       catch_response=True)
        content = r.content.decode('utf-8')
        # Empty comment list ('[]') is also a valid, successful response.
        if 'authorDisplayName' not in content and '[]' not in content:
            logger.error(f'Could not open comments for page {parsed_page_id}: {content}')
        assert 'authorDisplayName' in content or '[]' in content, 'Could not open comments for page.'
        locust.get(f'/plugins/editor-loader/editor.action?parentPageId={parent_page_id}&pageId={parsed_page_id}'
                   f'&spaceKey={space_key}&atl_after_login_redirect=/pages/viewpage.action'
                   f'&timeout=12000&_={timestamp_int()}', catch_response=True)
        locust.get(f'/rest/watch-button/1.0/watchState/{parsed_page_id}?_={timestamp_int()}',
                   catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("145"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("150"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("155"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("160"),
                    headers=RESOURCE_HEADERS, catch_response=True)

    @confluence_measure
    def view_page_tree():
        # The id was scraped from HTML, so '&' appears as '&amp;' — restore it.
        tree_request_id = locust.session_data_storage['tree_request_id'].replace('&amp;', '&')
        # if postfix is set, need to trim it from the tree_request_id to avoid duplication
        if tree_request_id.startswith(CONFLUENCE_SETTINGS.postfix):
            tree_request_id = tree_request_id[len(CONFLUENCE_SETTINGS.postfix):]
        ancestors = locust.session_data_storage['ancestors']
        root_page_id = locust.session_data_storage['root_page_id']
        viewed_page_id = locust.session_data_storage['page_id']
        space_key = locust.session_data_storage['space_key']
        r = ''
        # Page has parent
        if locust.session_data_storage['has_no_root'] == 'false':
            request = f"{tree_request_id}&hasRoot=true&pageId={root_page_id}&treeId=0&startDepth=0&mobile=false" \
                      f"&{ancestors}treePageId={viewed_page_id}&_={timestamp_int()}"
            r = locust.get(f'{request}', catch_response=True)
        # Page does not have parent
        elif locust.session_data_storage['has_no_root'] == 'true':
            request = f"{tree_request_id}&hasRoot=false&spaceKey={space_key}&treeId=0&startDepth=0&mobile=false" \
                      f"&{ancestors}treePageId={viewed_page_id}&_={timestamp_int()}"
            r = locust.get(f'{request}', catch_response=True)
        content = r.content.decode('utf-8')
        if 'plugin_pagetree_children_span' not in content or 'plugin_pagetree_children_list' not in content:
            logger.error(f'Could not view page tree: {content}')
        assert 'plugin_pagetree_children_span' in content and 'plugin_pagetree_children_list' in content, \
            'Could not view page tree.'

    view_page()
    view_page_tree()
@confluence_measure
def view_dashboard(locust):
    """Open the Confluence dashboard ('/index.action') plus the auxiliary
    requests a browser issues with it, and verify the updates macro loads.

    Raises AssertionError when the dashboard page or its macros fail to render.
    """
    raise_if_login_failed(locust)
    params = ViewDashboard()
    r = locust.get('/index.action', catch_response=True)
    content = r.content.decode('utf-8')
    keyboard_hash = fetch_by_re(params.keyboard_hash_re, content)
    build_number = fetch_by_re(params.build_number_re, content)
    # Both markers must be present in a successfully rendered dashboard.
    if 'quick-search' not in content or 'Log Out' not in content:
        logger.error(f'Could not view dashboard: {content}')
    assert 'quick-search' in content and 'Log Out' in content, 'Could not view dashboard.'
    locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("205"),
                headers=RESOURCE_HEADERS, catch_response=True)
    locust.get('/rest/mywork/latest/status/notification/count', catch_response=True)
    locust.get(f'/rest/shortcuts/latest/shortcuts/{build_number}/{keyboard_hash}', catch_response=True)
    locust.get(f'/rest/experimental/search?cql=type=space%20and%20space.type=favourite%20order%20by%20'
               f'favourite%20desc&expand=space.icon&limit=100&_={timestamp_int()}', catch_response=True)
    # The dashboard 'updates' macro feed; 'changeSets' marks a valid payload.
    r = locust.get('/rest/dashboardmacros/1.0/updates?maxResults=40&tab=all&showProfilePic=true&labels='
                   '&spaces=&users=&types=&category=&spaceKey=', catch_response=True)
    content = r.content.decode('utf-8')
    if 'changeSets' not in content:
        logger.error(f'Could not view dashboard macros: {content}')
    assert 'changeSets' in content, 'Could not view dashboard macros.'
@confluence_measure
def view_blog(locust):
    """Open a random dataset blog post and replay the browser side-requests
    (likes, comments, editor loader, watch state, web resources).

    Raises AssertionError when the blog, its comments, or the editor loader
    fail to render.
    """
    raise_if_login_failed(locust)
    params = ViewBlog()
    blog = random.choice(confluence_dataset["blogs"])
    blog_id = blog[0]
    r = locust.get(f'/pages/viewpage.action?pageId={blog_id}', catch_response=True)
    content = r.content.decode('utf-8')
    if 'Created by' not in content or 'Save for later' not in content:
        logger.error(f'Fail to open blog {blog_id}: {content}')
    assert 'Created by' in content and 'Save for later' in content, 'Could not view blog.'
    # Ids scraped from the rendered page drive the follow-up requests.
    parent_page_id = fetch_by_re(params.parent_page_id_re, content)
    parsed_blog_id = fetch_by_re(params.page_id_re, content)
    space_key = fetch_by_re(params.space_key_re, content)
    locust.get('/rest/helptips/1.0/tips', catch_response=True)
    locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("310"),
                headers=RESOURCE_HEADERS, catch_response=True)
    locust.get(f'/rest/likes/1.0/content/{parsed_blog_id}/likes?commentLikes=true&_={timestamp_int()}',
               catch_response=True)
    locust.get(f'/rest/highlighting/1.0/panel-items?pageId={parsed_blog_id}&_={timestamp_int()}',
               catch_response=True)
    locust.get(f'/rest/mywork/latest/status/notification/count?pageId={parsed_blog_id}&_={timestamp_int()}',
               catch_response=True)
    r = locust.get(f'/rest/inlinecomments/1.0/comments?containerId={parsed_blog_id}&_={timestamp_int()}',
                   catch_response=True)
    content = r.content.decode('utf-8')
    # Empty comment list ('[]') is also a valid, successful response.
    if 'authorDisplayName' not in content and '[]' not in content:
        logger.error(f'Could not open comments for page {parsed_blog_id}: {content}')
    assert 'authorDisplayName' in content or '[]' in content, 'Could not open comments for page.'
    r = locust.get(f'/plugins/editor-loader/editor.action?parentPageId={parent_page_id}&pageId={parsed_blog_id}'
                   f'&spaceKey={space_key}&atl_after_login_redirect=/pages/viewpage.action'
                   f'&timeout=12000&_={timestamp_int()}', catch_response=True)
    content = r.content.decode('utf-8')
    # 'draftId' marks that the quick-edit editor loaded for this blog.
    if 'draftId' not in content:
        logger.error(f'Could not open editor for blog {parsed_blog_id}: {content}')
    assert 'draftId' in content, 'Could not open editor for blog.'
    locust.get(f'/rest/watch-button/1.0/watchState/{parsed_blog_id}?_={timestamp_int()}', catch_response=True)
    locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("345"),
                headers=RESOURCE_HEADERS, catch_response=True)
    locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("350"),
                headers=RESOURCE_HEADERS, catch_response=True)
    locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("355"),
                headers=RESOURCE_HEADERS, catch_response=True)
    locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("360"),
                headers=RESOURCE_HEADERS, catch_response=True)
    locust.get(f'/rest/quickreload/latest/{parsed_blog_id}?since={timestamp_int()}&_={timestamp_int()}',
               catch_response=True)
def search_cql_and_view_results(locust):
    """Run the 'recently viewed' request and a random 3-letter CQL site search.

    Improvement: the response body in search_cql() is now decoded once and
    reused — the original decoded the same bytes three times.
    Raises AssertionError when the search response carries no 'results' marker.
    """
    raise_if_login_failed(locust)

    @confluence_measure
    def search_recently_viewed():
        # Fire-and-forget: only the timing matters for this measure.
        locust.get('/rest/recentlyviewed/1.0/recent?limit=8', catch_response=True)

    @confluence_measure
    def search_cql():
        r = locust.get(f"/rest/api/search?cql=siteSearch~'{generate_random_string(3, only_letters=True)}'"
                       f"&start=0&limit=20", catch_response=True)
        # Decode once; the original re-decoded r.content for every check.
        content = r.content.decode('utf-8')
        if '{"results":[' not in content:
            logger.locust_info(content)
        if 'results' not in content:
            logger.error(f"Search cql failed: {content}")
        assert 'results' in content, "Search cql failed."
        locust.get('/rest/mywork/latest/status/notification/count', catch_response=True)

    search_recently_viewed()
    search_cql()
def open_editor_and_create_blog(locust):
    """Locust task: open the blog-post editor for a random space, create a draft,
    then publish the draft and browse the resulting blog post.

    The two measured steps communicate through ``locust.session_data_storage``
    (draft name, space key, content id, atl_token).
    """
    params = CreateBlog()
    blog = random.choice(confluence_dataset["blogs"])
    blog_space_key = blog[1]
    build_number = locust.session_data_storage.get('build_number', '')
    keyboard_hash = locust.session_data_storage.get('keyboard_hash', '')

    @confluence_measure
    def create_blog_editor():
        """Open the blog editor, replay its sidecar requests, and save a draft."""
        raise_if_login_failed(locust)
        r = locust.get(f'/pages/createblogpost.action?spaceKey={blog_space_key}', catch_response=True)
        content = r.content.decode('utf-8')
        if 'Blog post title' not in content:
            logger.error(f'Could not open editor for {blog_space_key}: {content}')
        assert 'Blog post title' in content, 'Could not open editor for blog.'
        # Scrape CSRF token, draft content id and space key out of the editor markup.
        atl_token = fetch_by_re(params.atl_token_re, content)
        content_id = fetch_by_re(params.content_id_re, content)
        parsed_space_key = fetch_by_re(params.space_key, content)
        # The numbered resources_body entries replay the browser's web-resource
        # batch requests in recorded order.
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("910"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.get('/rest/mywork/latest/status/notification/count?pageId=0', catch_response=True)
        locust.get('/plugins/servlet/notifications-miniview', catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("925"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("930"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.get(f'/rest/shortcuts/latest/shortcuts/{build_number}/{keyboard_hash}?_={timestamp_int()}',
                   catch_response=True)
        heartbeat_activity_body = {"dataType": "json",
                                   "contentId": content_id,
                                   "draftType": "blogpost",
                                   "spaceKey": parsed_space_key,
                                   "atl_token": atl_token
                                   }
        r = locust.post('/json/startheartbeatactivity.action', heartbeat_activity_body,
                        TEXT_HEADERS, catch_response=True)
        content = r.content.decode('utf-8')
        if atl_token not in content:
            logger.error(f'Token {atl_token} not found in content: {content}')
        assert atl_token in content, 'Token not found in content.'
        contributor_hash = fetch_by_re(params.contribution_hash, content)
        locust.session_data_storage['contributor_hash'] = contributor_hash
        r = locust.get(f'/rest/ui/1.0/content/{content_id}/labels', catch_response=True)
        content = r.content.decode('utf-8')
        if '"success":true' not in content:
            logger.error(f'Could not get labels for content {content_id}: {content}')
        assert '"success":true' in content, 'Could not get labels for content in blog editor.'
        draft_name = f"Performance Blog - {generate_random_string(10, only_letters=True)}"
        # Hand the draft identifiers over to create_blog() via session storage.
        locust.session_data_storage['draft_name'] = draft_name
        locust.session_data_storage['parsed_space_key'] = parsed_space_key
        locust.session_data_storage['content_id'] = content_id
        locust.session_data_storage['atl_token'] = atl_token
        draft_body = {"draftId": content_id,
                      "pageId": "0",
                      "type": "blogpost",
                      "title": draft_name,
                      "spaceKey": parsed_space_key,
                      "content": "<p>test blog draft</p>",
                      "syncRev": "0.mcPCPtDvwoayMR7zvuQSbf8.27"}
        TEXT_HEADERS['Content-Type'] = 'application/json'
        r = locust.post('/rest/tinymce/1/drafts', json=draft_body, headers=TEXT_HEADERS, catch_response=True)
        content = r.content.decode('utf-8')
        if 'draftId' not in content:
            logger.error(f'Could not create blog post draft in space {parsed_space_key}: {content}')
        assert 'draftId' in content, 'Could not create blog post draft.'

    @confluence_measure
    def create_blog():
        """Publish the draft created by create_blog_editor() and open the result."""
        raise_if_login_failed(locust)
        draft_name = locust.session_data_storage['draft_name']
        parsed_space_key = locust.session_data_storage['parsed_space_key']
        content_id = locust.session_data_storage['content_id']
        atl_token = locust.session_data_storage['atl_token']
        draft_body = {"status": "current", "title": draft_name, "space": {"key": f"{parsed_space_key}"},
                      "body": {"editor": {"value": f"Test Performance Blog Page Content {draft_name}",
                                          "representation": "editor", "content": {"id": f"{content_id}"}}},
                      "id": f"{content_id}", "type": "blogpost",
                      "version": {"number": 1, "minorEdit": True, "syncRev": "0.mcPCPtDvwoayMR7zvuQSbf8.30"}}
        TEXT_HEADERS['Content-Type'] = 'application/json'
        # PUT against status=draft promotes the draft to a published blog post.
        r = locust.client.put(f'/rest/api/content/{content_id}?status=draft', json=draft_body,
                              headers=TEXT_HEADERS, catch_response=True)
        content = r.content.decode('utf-8')
        if 'current' not in content or 'title' not in content:
            logger.error(f'Could not open draft {draft_name}: {content}')
        assert 'current' in content and 'title' in content, 'Could not open blog draft.'
        created_blog_title = fetch_by_re(params.created_blog_title_re, content)
        logger.locust_info(f'Blog {created_blog_title} created')
        r = locust.get(f'/{created_blog_title}', catch_response=True)
        content = r.content.decode('utf-8')
        if 'Created by' not in content:
            logger.error(f'Could not open created blog {created_blog_title}: {content}')
        assert 'Created by' in content, 'Could not open created blog.'
        # Replay the browser's follow-up requests for the rendered blog page.
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("970"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("975"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.get('/plugins/servlet/notifications-miniview', catch_response=True)
        locust.get(f'/rest/watch-button/1.0/watchState/{content_id}?_={timestamp_int()}', catch_response=True)
        locust.get(f'/rest/likes/1.0/content/{content_id}/likes?commentLikes=true&_={timestamp_int()}',
                   catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("995"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.get(f'/rest/highlighting/1.0/panel-items?pageId={content_id}&_={timestamp_int()}',
                   catch_response=True)
        locust.get(f'/rest/inlinecomments/1.0/comments?containerId={content_id}&_={timestamp_int()}',
                   catch_response=True)
        locust.get(f'/s/en_GB/{build_number}/{keyboard_hash}/_/images/icons/profilepics/add_profile_pic.svg',
                   catch_response=True)
        locust.get('/rest/helptips/1.0/tips', catch_response=True)
        locust.get(f'/rest/mywork/latest/status/notification/count?pageid={content_id}&_={timestamp_int()}',
                   catch_response=True)
        locust.get(f'/plugins/editor-loader/editor.action?parentPageId=&pageId={content_id}'
                   f'&spaceKey={parsed_space_key}&atl_after_login_redirect={created_blog_title}'
                   f'&timeout=12000&_={timestamp_int()}', catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("1030"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("1035"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        heartbeat_activity_body = {"dataType": "json",
                                   "contentId": content_id,
                                   "draftType": "blogpost",
                                   "spaceKey": parsed_space_key,
                                   "atl_token": atl_token
                                   }
        r = locust.post('/json/startheartbeatactivity.action', heartbeat_activity_body,
                        TEXT_HEADERS, catch_response=True)
        content = r.content.decode('utf-8')
        if atl_token not in content:
            logger.error(f'Token {atl_token} not found in content: {content}')
        assert atl_token in content, 'Token not found in content.'

    create_blog_editor()
    create_blog()
def create_and_edit_page(locust):
    """Locust task: create a child page under a random existing page, publish it,
    reopen it in the editor, and save an edit.

    Four measured steps run in order and share state (tokens, ids, version)
    through ``locust.session_data_storage``.
    """
    params = CreateEditPage()
    page = random.choice(confluence_dataset["pages"])
    page_id = page[0]
    space_key = page[1]
    build_number = locust.session_data_storage.get('build_number', '')
    keyboard_hash = locust.session_data_storage.get('keyboard_hash', '')

    @confluence_measure
    def create_page_editor():
        """Open the quick-create page editor and capture the ids/tokens it embeds."""
        raise_if_login_failed(locust)
        r = locust.get(f'/pages/createpage.action?spaceKey={space_key}&fromPageId={page_id}&src=quick-create',
                       catch_response=True)
        content = r.content.decode('utf-8')
        if 'Page Title' not in content:
            logger.error(f'Could not open page editor: {content}')
        assert 'Page Title' in content, 'Could not open page editor.'
        # Scrape the space key, CSRF token and draft content id from the markup.
        parsed_space_key = fetch_by_re(params.space_key_re, content)
        atl_token = fetch_by_re(params.atl_token_re, content)
        content_id = fetch_by_re(params.content_id_re, content)
        locust.session_data_storage['content_id'] = content_id
        locust.session_data_storage['atl_token'] = atl_token
        # Numbered resources_body entries replay the browser's recorded
        # web-resource batch calls in order.
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("705"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("710"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("715"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.get('/rest/create-dialog/1.0/storage/quick-create', catch_response=True)
        locust.get(f'/rest/mywork/latest/status/notification/count?pageid=0&_={timestamp_int()}',
                   catch_response=True)
        locust.get(f'/rest/jiraanywhere/1.0/servers?_={timestamp_int()}', catch_response=True)
        locust.get(f'/rest/shortcuts/latest/shortcuts/{build_number}/{keyboard_hash}', catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("750"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        heartbeat_activity_body = {"dataType": "json",
                                   "contentId": content_id,
                                   "draftType": "page",
                                   "spaceKey": parsed_space_key,
                                   "atl_token": atl_token
                                   }
        r = locust.post('/json/startheartbeatactivity.action', heartbeat_activity_body,
                        TEXT_HEADERS, catch_response=True)
        content = r.content.decode('utf-8')
        if atl_token not in content:
            logger.error(f'Token {atl_token} not found in content: {content}')
        assert atl_token in content, 'Token not found in content.'

    @confluence_measure
    def create_page():
        """Publish the draft as a child of the random parent page and view it."""
        raise_if_login_failed(locust)
        draft_name = f"{generate_random_string(10, only_letters=True)}"
        content_id = locust.session_data_storage['content_id']
        atl_token = locust.session_data_storage['atl_token']
        create_page_body = {
            "status": "current",
            "title": f"Test Performance JMeter {draft_name}",
            "space": {"key": f"{space_key}"},
            "body": {
                "storage": {
                    "value": f"Test Performance Create Page Content {draft_name}",
                    "representation": "storage",
                    "content": {
                        "id": f"{content_id}"
                    }
                }
            },
            "id": f"{content_id}",
            "type": "page",
            "version": {
                "number": 1
            },
            # Attach the new page under the randomly chosen parent.
            "ancestors": [
                {
                    "id": f"{page_id}",
                    "type": "page"
                }
            ]
        }
        TEXT_HEADERS['Content-Type'] = 'application/json'
        TEXT_HEADERS['X-Requested-With'] = 'XMLHttpRequest'
        # PUT against status=draft promotes the draft to a published page.
        r = locust.client.put(f'/rest/api/content/{content_id}?status=draft', json=create_page_body,
                              headers=TEXT_HEADERS, catch_response=True)
        content = r.content.decode('utf-8')
        if 'draftId' not in content:
            logger.error(f'Could not create PAGE draft: {content}')
        assert 'draftId' in content, 'Could not create PAGE draft.'
        page_title = fetch_by_re(params.page_title_re, content)
        r = locust.get(f'{page_title}', catch_response=True)
        content = r.content.decode('utf-8')
        if 'Created by' not in content:
            logger.error(f'Page {page_title} was not created: {content}')
        assert 'Created by' in content, 'Page was not created.'
        parent_page_id = fetch_by_re(params.parent_page_id, content)
        create_page_id = fetch_by_re(params.create_page_id, content)
        locust.session_data_storage['create_page_id'] = create_page_id
        locust.session_data_storage['parent_page_id'] = parent_page_id
        heartbeat_activity_body = {"dataType": "json",
                                   "contentId": content_id,
                                   "space_key": space_key,
                                   "draftType": "page",
                                   "atl_token": atl_token
                                   }
        locust.post('/json/stopheartbeatactivity.action', params=heartbeat_activity_body,
                    headers=TEXT_HEADERS, catch_response=True)
        # Replay the browser's follow-up requests for the rendered page.
        locust.get('/rest/helptips/1.0/tips', catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("795"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.get(f'/rest/jira-metadata/1.0/metadata/aggregate?pageId={create_page_id}&_={timestamp_int()}',
                   catch_response=True)
        locust.get(f'/rest/likes/1.0/content/{create_page_id}/likes?commentLikes=true&_={timestamp_int()}',
                   catch_response=True)
        locust.get(f'/rest/inlinecomments/1.0/comments?containerId={create_page_id}&_={timestamp_int()}',
                   catch_response=True)
        locust.get(f'/rest/mywork/latest/status/notification/count?pageid={create_page_id}&_={timestamp_int()}',
                   catch_response=True)
        locust.get(f'/rest/highlighting/1.0/panel-items?pageId={create_page_id}&_={timestamp_int()}',
                   catch_response=True)
        locust.get(f'/rest/watch-button/1.0/watchState/{create_page_id}?_={timestamp_int()}',
                   catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("830"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("835"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        r = locust.get(f'/plugins/editor-loader/editor.action?parentPageId={parent_page_id}'
                       f'&pageId={create_page_id}&spaceKey={space_key}'
                       f'&atl_after_login_redirect={page_title}&timeout=12000&_={timestamp_int()}',
                       catch_response=True)
        content = r.content.decode('utf-8')
        if page_title not in content:
            logger.error(f'Page editor load failed for page {page_title}: {content}')
        assert page_title in content, 'Page editor load failed for page.'
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("845"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("850"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("855"),
                    headers=RESOURCE_HEADERS, catch_response=True)

    @confluence_measure
    def open_editor():
        """Reopen the newly created page in the edit view and capture its state."""
        raise_if_login_failed(locust)
        create_page_id = locust.session_data_storage['create_page_id']
        r = locust.get(f'/pages/editpage.action?pageId={create_page_id}', catch_response=True)
        content = r.content.decode('utf-8')
        if '<title>Edit' not in content or 'Update</button>' not in content:
            logger.error(f'Could not open PAGE {create_page_id} to edit: {content}')
        assert '<title>Edit' in content and 'Update</button>' in content, \
            'Could not open PAGE to edit.'
        # Scrape version, tokens and ids needed by edit_page() from the editor markup.
        edit_page_version = fetch_by_re(params.editor_page_version_re, content)
        edit_atl_token = fetch_by_re(params.atl_token_re, content)
        edit_space_key = fetch_by_re(params.space_key_re, content)
        edit_content_id = fetch_by_re(params.content_id_re, content)
        edit_page_id = fetch_by_re(params.page_id_re, content)
        edit_parent_page_id = fetch_by_re(params.parent_page_id, content)
        locust.session_data_storage['edit_parent_page_id'] = edit_parent_page_id
        locust.session_data_storage['edit_page_version'] = edit_page_version
        locust.session_data_storage['edit_page_id'] = edit_page_id
        locust.session_data_storage['atl_token'] = edit_atl_token
        locust.session_data_storage['edit_content_id'] = edit_content_id
        locust.get(f'/rest/jiraanywhere/1.0/servers?_={timestamp_int()}', catch_response=True)
        heartbeat_activity_body = {"dataType": "json",
                                   "contentId": edit_content_id,
                                   "draftType": "page",
                                   "spaceKey": edit_space_key,
                                   "atl_token": edit_atl_token
                                   }
        locust.post('/json/startheartbeatactivity.action', heartbeat_activity_body,
                    TEXT_HEADERS, catch_response=True)
        # Already URL-encoded expand parameter (%2C == ',').
        expand = 'history.createdBy.status%2Chistory.contributors.publishers.users.status' \
                 '%2Cchildren.comment.version.by.status'
        locust.get(f'/rest/api/content/{edit_page_id}?expand={expand}&_={timestamp_int()}',
                   catch_response=True)
        locust.post('/json/startheartbeatactivity.action', heartbeat_activity_body,
                    TEXT_HEADERS, catch_response=True)
        locust.get(f'/rest/ui/1.0/content/{edit_page_id}/labels?_={timestamp_int()}', catch_response=True)
        locust.get('/rest/mywork/latest/status/notification/count', catch_response=True)
        locust.post('/json/startheartbeatactivity.action', heartbeat_activity_body,
                    TEXT_HEADERS, catch_response=True)

    @confluence_measure
    def edit_page():
        """Save an edit with a bumped version number and re-view the page."""
        raise_if_login_failed(locust)
        locust.session_data_storage['draft_name'] = f"{generate_random_string(10, only_letters=True)}"
        edit_parent_page_id = locust.session_data_storage['edit_parent_page_id']
        edit_page_id = locust.session_data_storage['edit_page_id']
        content_id = locust.session_data_storage['edit_content_id']
        # The REST API requires the next version number on update.
        edit_page_version = int(locust.session_data_storage['edit_page_version']) + 1
        edit_atl_token = locust.session_data_storage['atl_token']
        edit_page_body = dict()
        # Two payload variants: with "ancestors" when a parent id was scraped,
        # without it otherwise. They are otherwise identical.
        if edit_parent_page_id:
            edit_page_body = {
                "status": "current",
                "title": f"Test Performance Edit with locust {locust.session_data_storage['draft_name']}",
                "space": {
                    "key": f"{space_key}"
                },
                "body": {
                    "storage": {
                        "value": f"Page edit with locust {locust.session_data_storage['draft_name']}",
                        "representation": "storage",
                        "content": {
                            "id": f"{content_id}"
                        }
                    }
                },
                "id": f"{content_id}",
                "type": "page",
                "version": {
                    "number": f"{edit_page_version}"
                },
                "ancestors": [
                    {
                        "id": f"{edit_parent_page_id}",
                        "type": "page"
                    }
                ]
            }
        if not edit_parent_page_id:
            edit_page_body = {
                "status": "current",
                "title": f"Test Performance Edit with locust {locust.session_data_storage['draft_name']}",
                "space": {
                    "key": f"{space_key}"
                },
                "body": {
                    "storage": {
                        "value": f"Page edit with locust {locust.session_data_storage['draft_name']}",
                        "representation": "storage",
                        "content": {
                            "id": f"{content_id}"
                        }
                    }
                },
                "id": f"{content_id}",
                "type": "page",
                "version": {
                    "number": f"{edit_page_version}"
                }
            }
        TEXT_HEADERS['Content-Type'] = 'application/json'
        TEXT_HEADERS['X-Requested-With'] = 'XMLHttpRequest'
        r = locust.client.put(f'/rest/api/content/{content_id}?status=draft', json=edit_page_body,
                              headers=TEXT_HEADERS, catch_response=True)
        content = r.content.decode('utf-8')
        if 'history' not in content:
            logger.info(f'Could not edit page. Response content: {content}')
        if 'history' not in content:
            logger.error(f'User {locust.session_data_storage["username"]} could not edit page {content_id}, '
                         f'parent page id: {edit_parent_page_id}: {content}')
        assert 'history' in content, \
            'User could not edit page.'
        r = locust.get(f'/pages/viewpage.action?pageId={edit_page_id}', catch_response=True)
        content = r.content.decode('utf-8')
        if not('last-modified' in content and 'Created by' in content):
            logger.error(f"Could not open page {edit_page_id}: {content}")
        assert 'last-modified' in content and 'Created by' in content, "Could not open page to edit."
        locust.get('/rest/mywork/latest/status/notification/count', catch_response=True)
        heartbeat_activity_body = {"dataType": "json",
                                   "contentId": content_id,
                                   "space_key": space_key,
                                   "draftType": "page",
                                   "atl_token": edit_atl_token
                                   }
        locust.post('/json/stopheartbeatactivity.action', params=heartbeat_activity_body,
                    headers=TEXT_HEADERS, catch_response=True)
        # Replay the browser's follow-up requests for the re-rendered page.
        locust.get('/rest/helptips/1.0/tips', catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("1175"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.get(f'/rest/jira-metadata/1.0/metadata/aggregate?pageId={edit_page_id}&_={timestamp_int()}',
                   catch_response=True)
        locust.get(f'/rest/likes/1.0/content/{edit_page_id}/likes?commentLikes=true&_={timestamp_int()}',
                   catch_response=True)
        locust.get(f'/rest/highlighting/1.0/panel-items?pageId={edit_page_id}&_={timestamp_int()}',
                   catch_response=True)
        locust.get(f'/rest/mywork/latest/status/notification/count?pageId={edit_page_id}&_={timestamp_int()}',
                   catch_response=True)
        locust.get(f'/plugins/editor-loader/editor.action?parentPageId={edit_parent_page_id}'
                   f'&pageId={edit_page_id}&spaceKey={space_key}&atl_after_login_redirect=/pages/viewpage.action'
                   f'&timeout=12000&_={timestamp_int()}', catch_response=True)
        locust.get(f'/rest/inlinecomments/1.0/comments?containerId={content_id}&_={timestamp_int()}',
                   catch_response=True)
        locust.get(f'/rest/watch-button/1.0/watchState/{edit_page_id}?_={timestamp_int()}',
                   catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("1215"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("1220"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("1225"),
                    headers=RESOURCE_HEADERS, catch_response=True)
        locust.post('/rest/webResources/1.0/resources', json=params.resources_body.get("1230"),
                    headers=RESOURCE_HEADERS, catch_response=True)

    create_page_editor()
    create_page()
    open_editor()
    edit_page()
@confluence_measure
def comment_page(locust):
    """Post a short randomly generated comment on a random page and verify
    the server accepted it (reply/edit actions present in the response)."""
    raise_if_login_failed(locust)
    target_page_id = random.choice(confluence_dataset["pages"])[0]
    comment_html = f'<p>{generate_random_string(length=15, only_letters=True)}</p>'
    request_uuid = str(uuid.uuid4())
    response = locust.post(f'/rest/tinymce/1/content/{target_page_id}/comment?actions=true',
                           params={'html': comment_html, 'watch': True, 'uuid': request_uuid},
                           headers=NO_TOKEN_HEADERS, catch_response=True)
    body = response.content.decode('utf-8')
    comment_accepted = 'reply-comment' in body and 'edit-comment' in body
    if not comment_accepted:
        logger.error(f'Could not add comment: {body}')
    assert comment_accepted, 'Could not add comment.'
@confluence_measure
def view_attachments(locust):
    """Open the attachments view of a random page and verify it rendered:
    either the upload form is present or the page reports no attachments."""
    raise_if_login_failed(locust)
    target_page_id = random.choice(confluence_dataset["pages"])[0]
    response = locust.get(f'/pages/viewpageattachments.action?pageId={target_page_id}',
                          catch_response=True)
    body = response.content.decode('utf-8')
    has_upload_form = 'Upload file' in body and 'Attach more files' in body
    has_empty_notice = 'currently no attachments' in body
    if not (has_upload_form or has_empty_notice):
        logger.error(f'View attachments failed: {body}')
    assert has_upload_form or has_empty_notice, 'View attachments failed.'
@confluence_measure
def upload_attachments(locust):
    """Open a random page, then upload a random static file as an attachment.

    :param locust: locust client wrapper carrying the authenticated HTTP session.
    """
    raise_if_login_failed(locust)
    params = UploadAttachments()
    page = random.choice(confluence_dataset["pages"])
    static_content = random.choice(confluence_dataset["static-content"])
    file_path = static_content[0]
    file_name = static_content[2]
    file_extension = static_content[1]
    page_id = page[0]
    r = locust.get(f'/pages/viewpage.action?pageId={page_id}', catch_response=True)
    content = r.content.decode('utf-8')
    if not('Created by' in content and 'Save for later' in content):
        logger.error(f'Failed to open page {page_id}: {content}')
    assert 'Created by' in content and 'Save for later' in content, 'Failed to open page to upload attachments.'
    atl_token_view_issue = fetch_by_re(params.atl_token_view_issue_re, content)
    # Fix: open the attachment inside a context manager so the file handle is
    # closed after the request (the original leaked one handle per upload).
    with open(file_path, 'rb') as attachment:
        multipart_form_data = {
            "file": (file_name, attachment, file_extension)
        }
        r = locust.post(f'/pages/doattachfile.action?pageId={page_id}',
                        params={"atl_token": atl_token_view_issue, "comment_0": "", "comment_1": "", "comment_2": "",
                                "comment_3": "", "comment_4": "0", "confirm": "Attach"}, files=multipart_form_data,
                        catch_response=True)
    content = r.content.decode('utf-8')
    if not('Upload file' in content and 'Attach more files' in content):
        logger.error(f'Could not upload attachments: {content}')
    assert 'Upload file' in content and 'Attach more files' in content, 'Could not upload attachments.'
@confluence_measure
def like_page(locust):
    """Toggle the like state of a random page: POST a like when the current
    user has none, otherwise DELETE the existing like.

    :param locust: locust client wrapper carrying the authenticated HTTP session.
    """
    raise_if_login_failed(locust)
    params = LikePage()
    page = random.choice(confluence_dataset["pages"])
    page_id = page[0]
    JSON_HEADERS['Origin'] = CONFLUENCE_SETTINGS.server_url
    r = locust.get(f'/rest/likes/1.0/content/{page_id}/likes', headers=JSON_HEADERS, catch_response=True)
    content = r.content.decode('utf-8')
    # like_re matches the current user's like entry; None means "not yet liked".
    like = fetch_by_re(params.like_re, content)
    if like is None:
        r = locust.post(f'/rest/likes/1.0/content/{page_id}/likes', headers=JSON_HEADERS, catch_response=True)
    else:
        r = locust.client.delete(f'/rest/likes/1.0/content/{page_id}/likes', catch_response=True)
    content = r.content.decode('utf-8')
    if 'likes' not in content:
        logger.error(f"Could not set like to the page {page_id}: {content}")
    # Fix: reuse the already-decoded body instead of decoding the response again.
    assert 'likes' in content, 'Could not set like to the page.'
| 55.162095
| 120
| 0.632505
| 5,357
| 44,240
| 4.980213
| 0.069069
| 0.06432
| 0.084111
| 0.06983
| 0.801529
| 0.771281
| 0.72349
| 0.675138
| 0.646276
| 0.61168
| 0
| 0.012424
| 0.244937
| 44,240
| 801
| 121
| 55.230961
| 0.786253
| 0.002758
| 0
| 0.456338
| 1
| 0.033803
| 0.320647
| 0.186902
| 0
| 0
| 0
| 0
| 0.042254
| 1
| 0.029577
| false
| 0.004225
| 0.008451
| 0
| 0.038028
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e66c5716273a85df7096023d8efc1b11089b5f50
| 983
|
py
|
Python
|
OctaHomeSettings/urls.py
|
Tomcuzz/OctaHomeAutomation
|
4f0c5ea8b3d5b6e67633ae9c4cb95287d2784f5e
|
[
"MIT"
] | 4
|
2016-08-14T22:07:03.000Z
|
2020-10-05T14:43:03.000Z
|
OctaHomeSettings/urls.py
|
Tomcuzz/OctaHomeAutomation
|
4f0c5ea8b3d5b6e67633ae9c4cb95287d2784f5e
|
[
"MIT"
] | null | null | null |
OctaHomeSettings/urls.py
|
Tomcuzz/OctaHomeAutomation
|
4f0c5ea8b3d5b6e67633ae9c4cb95287d2784f5e
|
[
"MIT"
] | null | null | null |
from django.conf.urls import url
# NOTE(review): wildcard imports bring the view classes (handleSettingsCommand,
# handleSettingsView) into scope; explicit imports would be clearer — confirm
# which names each module actually provides before tightening.
from views import *
from settingviews import *

# URL routes for the settings app. Pattern order matters: the specific
# 'command/' and 'page/' prefixes must be tried before the generic
# '<house>/...' catch-alls below, since e.g. 'page/x/' also matches the
# two-segment '<house>/<room>/' pattern.
urlpatterns = [
    # Settings commands, optionally scoped to a settings type + numeric id.
    url(r'^command/(?P<command>\w+)/$', handleSettingsCommand.as_view(), name='SettingsCommand'),
    url(r'^command/(?P<command>\w+)/(?P<SettingsType>\w+)/(?P<SettingsId>\d+)/$', handleSettingsCommand.as_view(), name='SettingsCommand'),
    # Settings pages, optionally scoped to a house and room.
    url(r'^page/(?P<page>\w+)/$', handleSettingsView.as_view(), name='SettingsPage'),
    url(r'^page/(?P<house>\w+)/(?P<page>\w+)/$', handleSettingsView.as_view(), name='SettingsPage'),
    url(r'^page/(?P<house>\w+)/(?P<room>\w+)/(?P<page>\w+)/$', handleSettingsView.as_view(), name='SettingsPage'),
    # Default settings views: root, per-house, per-room, and per-item.
    url(r'^$', handleSettingsView.as_view(), name='Settings'),
    url(r'^(?P<house>\w+)/$', handleSettingsView.as_view(), name='Settings'),
    url(r'^(?P<house>\w+)/(?P<room>\w+)/$', handleSettingsView.as_view(), name='Settings'),
    url(r'^(?P<house>\w+)/(?P<room>\w+)/(?P<SettingsType>\w+)/(?P<SettingsId>\d+)/$', handleSettingsView.as_view(), name='Settings'),
]
| 54.611111
| 136
| 0.65412
| 132
| 983
| 4.80303
| 0.204545
| 0.056782
| 0.141956
| 0.309148
| 0.875394
| 0.818612
| 0.763407
| 0.515773
| 0.515773
| 0.515773
| 0
| 0
| 0.057986
| 983
| 17
| 137
| 57.823529
| 0.684665
| 0
| 0
| 0
| 0
| 0.214286
| 0.431333
| 0.312309
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.214286
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e6ba4ba31fc8869f32c22a6af59e1177decf42a9
| 25
|
py
|
Python
|
Learning-Kaggle/titanic.py
|
amitdev101/Tensorflow-keras-scripts
|
6b9bab3a994d8a12c5c8cfd7a1d491a523c8dece
|
[
"MIT"
] | null | null | null |
Learning-Kaggle/titanic.py
|
amitdev101/Tensorflow-keras-scripts
|
6b9bab3a994d8a12c5c8cfd7a1d491a523c8dece
|
[
"MIT"
] | null | null | null |
Learning-Kaggle/titanic.py
|
amitdev101/Tensorflow-keras-scripts
|
6b9bab3a994d8a12c5c8cfd7a1d491a523c8dece
|
[
"MIT"
] | null | null | null |
# Nothing just a comment
| 12.5
| 24
| 0.76
| 4
| 25
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 25
| 1
| 25
| 25
| 0.95
| 0.88
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e6ccd0a93f63589cad77791005b48cb2aefe6f9e
| 305
|
py
|
Python
|
7_kyu/Most_sales.py
|
UlrichBerntien/Codewars-Katas
|
bbd025e67aa352d313564d3862db19fffa39f552
|
[
"MIT"
] | null | null | null |
7_kyu/Most_sales.py
|
UlrichBerntien/Codewars-Katas
|
bbd025e67aa352d313564d3862db19fffa39f552
|
[
"MIT"
] | null | null | null |
7_kyu/Most_sales.py
|
UlrichBerntien/Codewars-Katas
|
bbd025e67aa352d313564d3862db19fffa39f552
|
[
"MIT"
] | null | null | null |
def top3(products, amounts, prices):
    """Return the names of the (up to) three products with the highest
    revenue (amount * price).

    Ties are broken in favour of the product that appears earlier in the
    input lists. The three lists are parallel: products[i] sold amounts[i]
    units at prices[i] each.
    """
    def rank_key(indexed_item):
        position, (name, amount, price) = indexed_item
        # Earlier position wins on equal revenue, hence the negated index.
        return (amount * price, -position)

    ranked = sorted(enumerate(zip(products, amounts, prices)), key=rank_key, reverse=True)
    return [name for _, (name, _amount, _price) in ranked[:3]]
| 61
| 123
| 0.72459
| 46
| 305
| 4.630435
| 0.521739
| 0.225352
| 0.356808
| 0.234742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011407
| 0.137705
| 305
| 5
| 124
| 61
| 0.798479
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fc024c34fa601f203d99586f0f41a3408b2590c4
| 2,298
|
py
|
Python
|
deeptoolkit/core/functional.py
|
amogh7joshi/deeptoolkit
|
0eeade4cb62c4a0f15ab9c598cae94310015e9dc
|
[
"MIT"
] | 1
|
2021-01-22T03:46:11.000Z
|
2021-01-22T03:46:11.000Z
|
deeptoolkit/core/functional.py
|
amogh7joshi/deeptoolkit
|
0eeade4cb62c4a0f15ab9c598cae94310015e9dc
|
[
"MIT"
] | null | null | null |
deeptoolkit/core/functional.py
|
amogh7joshi/deeptoolkit
|
0eeade4cb62c4a0f15ab9c598cae94310015e9dc
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding = utf-8 -*-
import numpy as np
import tensorflow as tf
from tensorflow.keras import backend as K
def binary_focal_loss(gt, pred, *, gamma = 2.0, alpha = 0.25):
    """Binary focal loss from `Focal Loss for Dense Object Detection`
    (https://arxiv.org/abs/1708.02002).

    Computes the mean of:
        -gt * alpha * (1 - pred)^gamma * log(pred)
        - (1 - gt) * (1 - alpha) * pred^gamma * log(1 - pred)

    Arguments:
     - gt: ground truth labels (y_true).
     - pred: predicted probabilities (y_pred).
     - gamma: focusing parameter of the modulating factor.
     - alpha: class-balancing weight.
    """
    # Work in float32 and keep predictions strictly inside (0, 1) so that
    # the logarithms below stay finite.
    if gt.dtype != tf.float32:
        gt = tf.cast(gt, tf.float32)
    if pred.dtype != tf.float32:
        pred = tf.cast(pred, tf.float32)
    pred = K.clip(pred, K.epsilon(), 1. - K.epsilon())
    # Positive-class term, weighted by alpha and down-weighted for easy examples.
    positive_term = -gt * (alpha * K.pow(1 - pred, gamma) * K.log(pred))
    # Negative-class term, weighted by (1 - alpha).
    negative_term = -(1 - gt) * ((1 - alpha) * K.pow(pred, gamma) * K.log(1 - pred))
    return K.mean(positive_term + negative_term)
def categorical_focal_loss(gt, pred, *, gamma = 2.0, alpha = 0.25):
    """Categorical focal loss from `Focal Loss for Dense Object Detection`
    (https://arxiv.org/abs/1708.02002).

    Computes the mean of:
        -gt * alpha * (1 - pred)^gamma * log(pred)

    Arguments:
     - gt: ground truth labels (y_true).
     - pred: predicted probabilities (y_pred).
     - gamma: focusing parameter of the modulating factor.
     - alpha: class-balancing weight.
    """
    # Work in float32 and keep predictions strictly inside (0, 1) so that
    # the logarithm below stays finite.
    if gt.dtype != tf.float32:
        gt = tf.cast(gt, tf.float32)
    if pred.dtype != tf.float32:
        pred = tf.cast(pred, tf.float32)
    pred = K.clip(pred, K.epsilon(), 1. - K.epsilon())
    # Cross-entropy down-weighted for well-classified examples.
    focal_term = -gt * (alpha * K.pow(1. - pred, gamma) * K.log(pred))
    return K.mean(focal_term)
| 32.828571
| 80
| 0.636641
| 339
| 2,298
| 4.274336
| 0.241888
| 0.093168
| 0.027605
| 0.026915
| 0.770186
| 0.745342
| 0.745342
| 0.714976
| 0.714976
| 0.662526
| 0
| 0.032276
| 0.231506
| 2,298
| 69
| 81
| 33.304348
| 0.788222
| 0.536989
| 0
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.136364
| 0
| 0.318182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fc3dcf16262cbce42ba22c9a3488c63baa9b50d5
| 1,016
|
py
|
Python
|
ffbot/bot/BOT/models.py
|
hahatianx/ffbot
|
2770e85a35f30b35040a8d2be58bc76b332c5e62
|
[
"Apache-2.0"
] | 5
|
2019-06-07T11:18:20.000Z
|
2019-10-26T13:40:20.000Z
|
ffbot/bot/BOT/models.py
|
hahatianx/ffbot
|
2770e85a35f30b35040a8d2be58bc76b332c5e62
|
[
"Apache-2.0"
] | 5
|
2020-02-11T23:51:25.000Z
|
2021-06-10T21:14:57.000Z
|
ffbot/bot/BOT/models.py
|
hahatianx/ffbot
|
2770e85a35f30b35040a8d2be58bc76b332c5e62
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models
# Create your models here.
class Class(models.Model):
    """A named entry that NickClass aliases point at."""
    # Display name, shown via __str__ (max 15 chars).
    name = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class NickClass(models.Model):
    """A nickname (alias) referencing a Class entry."""
    # Alias text, shown via __str__ (max 15 chars).
    nick_name = models.CharField(max_length=15)
    # CASCADE: deleting the Class removes its aliases.
    # NOTE(review): default='' is an unusual default for a ForeignKey — verify
    # the generated migration behaves as intended.
    class_id = models.ForeignKey(Class, on_delete=models.CASCADE, default='')
    def __str__(self):
        return self.nick_name
class Boss(models.Model):
    """A boss record identified by a quest/boss id pair."""
    # Display name; capped at 15 characters.
    name = models.CharField(max_length=15)
    # Numeric identifiers; semantics defined by the game data — TODO confirm.
    quest_id = models.IntegerField()
    boss_id = models.IntegerField()
    # Presumably a timestamp of when the record was added (epoch?) — verify writer.
    add_time = models.BigIntegerField(default=0)

    def __str__(self):
        """Display this record by its name."""
        return self.name
class NickBoss(models.Model):
    """An alias (nickname) pointing at a Boss record; mirrors NickClass."""
    nick_name = models.CharField(max_length=15)
    # Deleting the referenced Boss cascades to its nicknames.
    # NOTE(review): default='' is unusual for a ForeignKey — confirm intent.
    boss_id = models.ForeignKey(Boss, on_delete=models.CASCADE, default='')

    def __str__(self):
        """Display this record by its nickname."""
        return self.nick_name
class HeartBeat(models.Model):
    """A named counter, presumably used for liveness tracking — confirm with caller."""
    name = models.CharField(max_length=100)
    # Monotonically written beat counter? Only the default is visible here.
    beats = models.BigIntegerField(default=0)

    def __str__(self):
        """Display this record by its name."""
        return self.name
| 22.086957
| 77
| 0.699803
| 132
| 1,016
| 5.113636
| 0.287879
| 0.081481
| 0.140741
| 0.162963
| 0.700741
| 0.700741
| 0.700741
| 0.592593
| 0.471111
| 0.337778
| 0
| 0.015873
| 0.193898
| 1,016
| 45
| 78
| 22.577778
| 0.808303
| 0.023622
| 0
| 0.518519
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.185185
| false
| 0
| 0.037037
| 0.185185
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
fc7f4cedf6a7cfb917321a8cc60f20a901a9afa9
| 49
|
py
|
Python
|
src/cli/__init__.py
|
xbeheydt/blenderenv
|
b232c01001a5cb1ee087d30d0ad5ca5d8c4c8320
|
[
"MIT"
] | null | null | null |
src/cli/__init__.py
|
xbeheydt/blenderenv
|
b232c01001a5cb1ee087d30d0ad5ca5d8c4c8320
|
[
"MIT"
] | null | null | null |
src/cli/__init__.py
|
xbeheydt/blenderenv
|
b232c01001a5cb1ee087d30d0ad5ca5d8c4c8320
|
[
"MIT"
] | null | null | null |
"""
Top-level for Cli entrypoint blenderenv.
"""
| 12.25
| 40
| 0.693878
| 6
| 49
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 49
| 3
| 41
| 16.333333
| 0.809524
| 0.816327
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fca61fb6fff8dcfcdd282153b78943ce5b3c6275
| 110
|
py
|
Python
|
jady/__init__.py
|
yattdev/Jady
|
77c74fa4c5cf87cea92334eba1be24d93cbd2eea
|
[
"MIT"
] | null | null | null |
jady/__init__.py
|
yattdev/Jady
|
77c74fa4c5cf87cea92334eba1be24d93cbd2eea
|
[
"MIT"
] | null | null | null |
jady/__init__.py
|
yattdev/Jady
|
77c74fa4c5cf87cea92334eba1be24d93cbd2eea
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import jady.modules
import jady.actions
import jady.nlg_server
| 15.714286
| 23
| 0.718182
| 17
| 110
| 4.588235
| 0.764706
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010417
| 0.127273
| 110
| 6
| 24
| 18.333333
| 0.802083
| 0.381818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
5dc24b067cfec814a6a4785c700e5b4122eec2ce
| 953
|
py
|
Python
|
openapi-client/python/test/test_example_service_api.py
|
michilu/kintone-api
|
efa6c53d68140c449f4689baac824e2471155798
|
[
"Apache-2.0"
] | null | null | null |
openapi-client/python/test/test_example_service_api.py
|
michilu/kintone-api
|
efa6c53d68140c449f4689baac824e2471155798
|
[
"Apache-2.0"
] | 1
|
2020-07-15T09:50:06.000Z
|
2020-07-15T09:50:06.000Z
|
openapi-client/python/test/test_example_service_api.py
|
michilu/proto-openapi
|
aca02aaa11064e87462ab34674c0c4974cf70372
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
An example of generating swagger via gRPC ecosystem.
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 1.0
Contact: none@example.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import openapi_client
from openapi_client.api.example_service_api import ExampleServiceApi # noqa: E501
from openapi_client.rest import ApiException
class TestExampleServiceApi(unittest.TestCase):
    """ExampleServiceApi unit test stubs"""

    def setUp(self):
        # Fresh generated API client for each test; no config arguments used.
        self.api = openapi_client.api.example_service_api.ExampleServiceApi()  # noqa: E501

    def tearDown(self):
        # No per-test cleanup needed for the stub client.
        pass

    def test_example_service_query(self):
        """Test case for example_service_query
        """
        # TODO: generated stub — replace with real assertions against the API.
        pass
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 23.243902
| 124
| 0.725079
| 115
| 953
| 5.782609
| 0.521739
| 0.078195
| 0.051128
| 0.069173
| 0.099248
| 0.099248
| 0
| 0
| 0
| 0
| 0
| 0.015584
| 0.192025
| 953
| 40
| 125
| 23.825
| 0.848052
| 0.423924
| 0
| 0.142857
| 1
| 0
| 0.015717
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.214286
| false
| 0.142857
| 0.357143
| 0
| 0.642857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
f8fec34aeeaad8144c280c4c2b43fbb472099a81
| 3,112
|
py
|
Python
|
TEST3D/GUI/0010700_page_skelsel/tests.py
|
usnistgov/OOF3D
|
4fd423a48aea9c5dc207520f02de53ae184be74c
|
[
"X11"
] | 31
|
2015-04-01T15:59:36.000Z
|
2022-03-18T20:21:47.000Z
|
TEST3D/GUI/0010700_page_skelsel/tests.py
|
usnistgov/OOF3D
|
4fd423a48aea9c5dc207520f02de53ae184be74c
|
[
"X11"
] | 3
|
2015-02-06T19:30:24.000Z
|
2017-05-25T14:14:31.000Z
|
TEST3D/GUI/0010700_page_skelsel/tests.py
|
usnistgov/OOF3D
|
4fd423a48aea9c5dc207520f02de53ae184be74c
|
[
"X11"
] | 7
|
2015-01-23T15:19:22.000Z
|
2021-06-09T09:03:59.000Z
|
# -*- python -*-
# This software was produced by NIST, an agency of the U.S. government,
# and by statute is not subject to copyright in the United States.
# Recipients of this software assume all responsibilities associated
# with its operation, modification and maintenance. However, to
# facilitate maintenance we ask that before distributing modified
# versions of this software, you first contact the authors at
# oof_manager@nist.gov.
from generics import *
from skelseltests import *
def _skelsel_sensitization(micro, prev, ok, next_, undo, redo, clear):
    """
    Shared driver for the sensitization checks below.

    Checks the sensitivity flag (0 or 1) of the Microstructure widget on
    the Skeleton Selection page, and of the six buttons ('Prev', 'OK',
    'Next', 'Undo', 'Redo', 'Clear') in the SelectionModification pane.
    Returns True only if every widget matches its expected flag.
    """
    return (sensitizationCheck(
        {'OOF3D:Skeleton Selection Page:Microstructure' : micro})
        and
        sensitizationCheck(
        {'Prev' : prev,
         'OK' : ok,
         'Next' : next_,
         'Undo' : undo,
         'Redo' : redo,
         'Clear' : clear
        },
        base="OOF3D:Skeleton Selection Page:Pane:SelectionModification"))

def sensitization0():
    # Microstructure 0; all buttons 0.
    return _skelsel_sensitization(0, 0, 0, 0, 0, 0, 0)

def sensitization1():
    # Microstructure 1; only OK sensitive.
    return _skelsel_sensitization(1, 0, 1, 0, 0, 0, 0)

def sensitization2():
    # Microstructure 1; OK, Undo and Clear sensitive.
    return _skelsel_sensitization(1, 0, 1, 0, 1, 0, 1)

def sensitization3():
    # Microstructure 1; OK, Undo, Redo and Clear sensitive.
    return _skelsel_sensitization(1, 0, 1, 0, 1, 1, 1)

def sensitization4():
    # Microstructure 1; OK and Undo sensitive.
    return _skelsel_sensitization(1, 0, 1, 0, 1, 0, 0)

def sensitization5():
    # Microstructure 1; Prev, OK and Undo sensitive.
    return _skelsel_sensitization(1, 1, 1, 0, 1, 0, 0)

def sensitization6():
    # Microstructure 1; Prev, OK, Undo and Clear sensitive.
    return _skelsel_sensitization(1, 1, 1, 0, 1, 0, 1)
| 28.036036
| 73
| 0.544344
| 287
| 3,112
| 5.898955
| 0.28223
| 0.107501
| 0.181926
| 0.215003
| 0.711754
| 0.695216
| 0.695216
| 0.657413
| 0.657413
| 0.657413
| 0
| 0.034014
| 0.338689
| 3,112
| 110
| 74
| 28.290909
| 0.78863
| 0.136247
| 0
| 0.795699
| 0
| 0
| 0.321509
| 0.081031
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075269
| true
| 0
| 0.021505
| 0.075269
| 0.172043
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5d1cc7ec06c6d88076b444bdfa474df0131393c5
| 76
|
py
|
Python
|
darkgeotile/__init__.py
|
dark-geo/darkGeoTile
|
7bd19c70334d1ad89c5fb83451677e5008360b93
|
[
"MIT"
] | null | null | null |
darkgeotile/__init__.py
|
dark-geo/darkGeoTile
|
7bd19c70334d1ad89c5fb83451677e5008360b93
|
[
"MIT"
] | null | null | null |
darkgeotile/__init__.py
|
dark-geo/darkGeoTile
|
7bd19c70334d1ad89c5fb83451677e5008360b93
|
[
"MIT"
] | null | null | null |
from .darkgeotile import BaseTile, get_tile_class, DEFAULT_PROJECTIONS_BBOX
| 38
| 75
| 0.881579
| 10
| 76
| 6.3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 76
| 1
| 76
| 76
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5d39a8fc6bb2d53c152edae366c52cde7832bcf4
| 3,357
|
py
|
Python
|
numba/cuda/tests/cudapy/test_warning.py
|
skailasa/numba
|
38ab89dd369a14b8826d3fa30d080aa083aed00b
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 1
|
2021-11-02T18:55:23.000Z
|
2021-11-02T18:55:23.000Z
|
numba/cuda/tests/cudapy/test_warning.py
|
skailasa/numba
|
38ab89dd369a14b8826d3fa30d080aa083aed00b
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 6
|
2018-08-06T15:29:52.000Z
|
2020-12-16T09:51:11.000Z
|
numba/cuda/tests/cudapy/test_warning.py
|
skailasa/numba
|
38ab89dd369a14b8826d3fa30d080aa083aed00b
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 2
|
2018-04-14T00:28:12.000Z
|
2020-04-16T22:17:14.000Z
|
import numpy as np
from numba import cuda
from numba.cuda.testing import CUDATestCase, skip_on_cudasim
from numba.tests.support import linux_only, override_config
from numba.core.errors import NumbaPerformanceWarning
import warnings
@skip_on_cudasim('cudasim does not raise performance warnings')
class TestWarnings(CUDATestCase):
    """Tests that the CUDA target emits NumbaPerformanceWarning when expected."""

    def _capture_copy_warnings(self, ary, N):
        # Launch a trivial kernel on ``ary`` with implicit-copy warnings
        # enabled and return the list of warnings captured during launch.
        @cuda.jit
        def foo(r, x):
            r[0] = x + 1

        with override_config('CUDA_WARN_ON_IMPLICIT_COPY', 1):
            with warnings.catch_warnings(record=True) as w:
                foo[1, N](ary, N)
        return w

    def _assert_host_copy_warning(self, w):
        # First captured warning must flag the implicit host->device copy.
        self.assertEqual(w[0].category, NumbaPerformanceWarning)
        self.assertIn('Host array used in CUDA kernel will incur',
                      str(w[0].message))
        self.assertIn('copy overhead', str(w[0].message))

    def test_inefficient_launch_configuration(self):
        # A 1x1 launch should trigger the low-occupancy warning.
        @cuda.jit
        def kernel():
            pass

        with override_config('CUDA_LOW_OCCUPANCY_WARNINGS', 1):
            with warnings.catch_warnings(record=True) as w:
                kernel[1, 1]()

        self.assertEqual(w[0].category, NumbaPerformanceWarning)
        self.assertIn('Grid size', str(w[0].message))
        self.assertIn('2 * SM count', str(w[0].message))

    def test_efficient_launch_configuration(self):
        # A large launch should produce no warning at all.
        @cuda.jit
        def kernel():
            pass

        with override_config('CUDA_LOW_OCCUPANCY_WARNINGS', 1):
            with warnings.catch_warnings(record=True) as w:
                kernel[256, 256]()

        self.assertEqual(len(w), 0)

    def test_warn_on_host_array(self):
        # Plain NumPy arrays require an implicit copy -> warning expected.
        N = 10
        arr_f32 = np.zeros(N, dtype=np.float32)
        w = self._capture_copy_warnings(arr_f32, N)
        self._assert_host_copy_warning(w)

    def test_pinned_warn_on_host_array(self):
        # Pinned memory is still host memory -> warning expected.
        N = 10
        ary = cuda.pinned_array(N, dtype=np.float32)
        w = self._capture_copy_warnings(ary, N)
        self._assert_host_copy_warning(w)

    def test_nowarn_on_mapped_array(self):
        # Mapped arrays need no copy -> no warning expected.
        N = 10
        ary = cuda.mapped_array(N, dtype=np.float32)
        w = self._capture_copy_warnings(ary, N)
        self.assertEqual(len(w), 0)

    @linux_only
    def test_nowarn_on_managed_array(self):
        # Managed (unified) memory needs no copy -> no warning expected.
        N = 10
        ary = cuda.managed_array(N, dtype=np.float32)
        w = self._capture_copy_warnings(ary, N)
        self.assertEqual(len(w), 0)

    def test_nowarn_on_device_array(self):
        # Device-resident arrays need no copy -> no warning expected.
        N = 10
        ary = cuda.device_array(N, dtype=np.float32)
        w = self._capture_copy_warnings(ary, N)
        self.assertEqual(len(w), 0)
| 30.518182
| 66
| 0.597855
| 454
| 3,357
| 4.244493
| 0.187225
| 0.013492
| 0.039958
| 0.050856
| 0.773223
| 0.773223
| 0.750908
| 0.733264
| 0.702128
| 0.702128
| 0
| 0.028644
| 0.292821
| 3,357
| 109
| 67
| 30.798165
| 0.783067
| 0
| 0
| 0.686747
| 0
| 0
| 0.106047
| 0.054811
| 0
| 0
| 0
| 0
| 0.156627
| 1
| 0.168675
| false
| 0.024096
| 0.072289
| 0
| 0.253012
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5d672be29d06d0490d33622e01416d4f24028aa5
| 114
|
py
|
Python
|
src/site_checker/error.py
|
drednout/site_checker
|
8b52cd40087985a11435cad7836ed08a782c18c7
|
[
"BSD-2-Clause"
] | null | null | null |
src/site_checker/error.py
|
drednout/site_checker
|
8b52cd40087985a11435cad7836ed08a782c18c7
|
[
"BSD-2-Clause"
] | null | null | null |
src/site_checker/error.py
|
drednout/site_checker
|
8b52cd40087985a11435cad7836ed08a782c18c7
|
[
"BSD-2-Clause"
] | null | null | null |
class BaseSiteCheckerException(Exception):
    """Root of the site_checker exception hierarchy."""


class ErrorStopMsgLimit(BaseSiteCheckerException):
    """Raised when the stop-message limit is reached."""
| 16.285714
| 50
| 0.807018
| 8
| 114
| 11.5
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140351
| 114
| 6
| 51
| 19
| 0.938776
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
5d7505e725c755cdf7131a1960d4a024b282948b
| 22,662
|
py
|
Python
|
python3_cron_scripts/create_cert_graphs.py
|
adobe/Marinus
|
fd648002fe79c0fafba7f43a5c96f5090ad3dbfb
|
[
"Apache-2.0"
] | 57
|
2019-01-16T22:40:58.000Z
|
2022-03-15T21:34:24.000Z
|
python3_cron_scripts/create_cert_graphs.py
|
adobe/Marinus
|
fd648002fe79c0fafba7f43a5c96f5090ad3dbfb
|
[
"Apache-2.0"
] | 3
|
2019-03-08T16:41:48.000Z
|
2019-11-07T03:53:00.000Z
|
python3_cron_scripts/create_cert_graphs.py
|
adobe/Marinus
|
fd648002fe79c0fafba7f43a5c96f5090ad3dbfb
|
[
"Apache-2.0"
] | 12
|
2019-01-17T20:06:46.000Z
|
2022-01-09T23:06:19.000Z
|
#!/usr/bin/python3
# Copyright 2019 Adobe. All rights reserved.
# This file is licensed to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may obtain a copy
# of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
# OF ANY KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.
"""
This script creates d3.js maps of the certificates stored within Marinus.
It stores the graphs within the Marinus database for later retrieval by the UI.
This assumes that you have already collected data from either Censys, CT Logs, or zgrab sans.
Use the command line to specify which of those sources exist.
You can specify multiple sources on the command line.
"""
import argparse
import json
import logging
from datetime import datetime
import networkx as nx
from networkx.readwrite import json_graph
from libs3 import DNSManager, MongoConnector, JobsManager
from libs3.ZoneManager import ZoneManager
from libs3.LoggingUtil import LoggingUtil
def get_current_ct_certificates(ct_connection, zone):
    """
    Return the non-expired certificate-transparency certificates whose
    subject names fall under the given zone, shaped as graph-source records
    ({'id', 'dns_entries', 'sources'}).
    """
    zone_pattern = r'^(.+\.)*' + zone + '$'
    query = {'isExpired': False,
             'subject_common_names': {"$regex": zone_pattern},
             'subject_dns_names': {"$regex": zone_pattern}}
    projection = {'fingerprint_sha256': 1,
                  'subject_common_names': 1,
                  'subject_dns_names': 1}
    # One record per certificate; names are CN list followed by SAN list.
    return [{'id': doc['fingerprint_sha256'],
             'dns_entries': doc['subject_common_names'] + doc['subject_dns_names'],
             'sources': ['ct_logs']}
            for doc in ct_connection.find(query, projection)]
def get_censys_count(censys_collection, sha256_hash):
    """
    Return how many Censys port-443 documents present a certificate with
    the given SHA-256 fingerprint.
    """
    fingerprint_query = {
        'p443.https.tls.certificate.parsed.fingerprint_sha256': sha256_hash,
    }
    return censys_collection.count_documents(fingerprint_query)
def add_censys_certificates(censys_collection, zone, current_certs):
    """
    Get the list of current certificates from censys for the specified zones.
    Append any new entries to the provided array of current_certs.
    """
    # Match a cert if either the subject CN or any SAN dns_name falls under the zone.
    results = censys_collection.find({"$or":[{'p443.https.tls.certificate.parsed.subject.common_name': {"$regex": r'^(.+\.)*' + zone + '$'}},
                                             {'p443.https.tls.certificate.parsed.extensions.subject_alt_name.dns_names': {"$regex": r'^(.+\.)*' + zone + '$'}}]},
                                     {"p443.https.tls.certificate.parsed.subject.common_name": 1,
                                      "p443.https.tls.certificate.parsed.extensions.subject_alt_name.dns_names": 1,
                                      "p443.https.tls.certificate.parsed.fingerprint_sha256": 1})
    for result in results:
        # Index of this cert in current_certs (matched by fingerprint), or None if unseen.
        # NOTE(review): linear scan per result — O(n*m) overall; acceptable for small sets.
        i = next((index for (index, item) in enumerate(current_certs)\
            if item["id"] == result['p443']['https']['tls']['certificate']['parsed']['fingerprint_sha256']), None)
        if i is None:
            # New certificate: collect its unique DNS names (CN first, then SANs).
            item = {'id': result['p443']['https']['tls']['certificate']['parsed']['fingerprint_sha256']}
            dns_list = []
            try:
                for dns_name in result["p443"]["https"]["tls"]["certificate"]["parsed"]["subject"]["common_name"]:
                    if dns_name not in dns_list:
                        dns_list.append(dns_name)
            except KeyError:
                pass
            # Not all certificates contain alternative names.
            try:
                for dns_name in result["p443"]["https"]["tls"]["certificate"]["parsed"]["extensions"]["subject_alt_name"]["dns_names"]:
                    if dns_name not in dns_list:
                        dns_list.append(dns_name)
            except KeyError:
                # "ALT Name key not found."
                pass
            item['dns_entries'] = dns_list
            item['sources'] = ['censys']
            item['censys_count'] = get_censys_count(censys_collection, result['p443']['https']['tls']['certificate']['parsed']['fingerprint_sha256'])
            current_certs.append(item)
        else:
            # The certificate is already stored so there is nothing more to add.
            if 'censys' not in current_certs[i]['sources']:
                current_certs[i]['sources'].append('censys')
            if 'censys_count' not in current_certs[i]:
                current_certs[i]['censys_count'] = get_censys_count(censys_collection, result['p443']['https']['tls']['certificate']['parsed']['fingerprint_sha256'])
    return current_certs
def get_scan_count(zgrab_collection, sha256_hash, version):
    """
    Return the number of zgrab scan documents whose served certificate
    matches the given SHA-256 fingerprint, checking both the final
    response and the first redirect hop. ``version`` selects the zgrab
    v1 or v2 document schema (any value other than 1 means v2).
    """
    if version == 1:
        fingerprint_paths = (
            'data.http.response.request.tls_handshake.server_certificates.certificate.parsed.fingerprint_sha256',
            'data.http.response.redirect_response_chain.0.request.tls_handshake.server_certificates.certificate.parsed.fingerprint_sha256',
        )
    else:
        fingerprint_paths = (
            'data.http.result.response.request.tls_log.handshake_log.server_certificates.certificate.parsed.fingerprint_sha256',
            'data.http.result.redirect_response_chain.0.request.tls_log.handshake_log.server_certificates.certificate.parsed.fingerprint_sha256',
        )
    query = {"$or": [{path: sha256_hash} for path in fingerprint_paths]}
    return zgrab_collection.count_documents(query)
def add_terminal_zgrab_certificates(zgrab_collection, zone, current_certs):
    """
    Get the list of current certificates from zgrab scans for the specified zones.
    Append any new entries to the provided array of current_certs.
    This currently does not check
    """
    # zgrab v1 schema: certificate served on the final (terminal) HTTP response.
    results = zgrab_collection.find({"$or":[{'data.http.response.request.tls_handshake.server_certificates.certificate.parsed.subject.common_name': {"$regex": r'^(.+\.)*' + zone + '$'}},
                                            {'data.http.response.request.tls_handshake.server_certificates.certificate.parsed.extensions.subject_alt_name.dns_names': {"$regex": r'^(.+\.)*' + zone + '$'}}]},
                                    {"data.http.response.request.tls_handshake.server_certificates.certificate.parsed.subject.common_name": 1,
                                     "data.http.response.request.tls_handshake.server_certificates.certificate.parsed.extensions.subject_alt_name.dns_names": 1,
                                     "data.http.response.request.tls_handshake.server_certificates.certificate.parsed.fingerprint_sha256": 1})
    for result in results:
        # Index of this cert in current_certs (matched by fingerprint), or None if unseen.
        i = next((index for (index, item) in enumerate(current_certs)\
            if item["id"] == result['data']['http']['response']['request']['tls_handshake']['server_certificates']['certificate']['parsed']['fingerprint_sha256']), None)
        if i is None:
            # New certificate: collect its unique DNS names (CN first, then SANs).
            item = {'id': result['data']['http']['response']['request']['tls_handshake']['server_certificates']['certificate']['parsed']['fingerprint_sha256']}
            dns_list = []
            try:
                for dns_name in result['data']['http']['response']['request']['tls_handshake']['server_certificates']["certificate"]["parsed"]["subject"]["common_name"]:
                    if dns_name not in dns_list:
                        dns_list.append(dns_name)
            except KeyError:
                pass
            # Not all certificates contain alternative names.
            try:
                for dns_name in result['data']['http']['response']['request']['tls_handshake']['server_certificates']["certificate"]["parsed"]["extensions"]["subject_alt_name"]["dns_names"]:
                    if dns_name not in dns_list:
                        dns_list.append(dns_name)
            except KeyError:
                # "ALT Name key not found."
                pass
            item['dns_entries'] = dns_list
            item['sources'] = ['zgrab_443_scan']
            item['zgrab_count'] = get_scan_count(zgrab_collection, result['data']['http']['response']['request']['tls_handshake']['server_certificates']['certificate']['parsed']['fingerprint_sha256'], 1)
            current_certs.append(item)
        else:
            # The certificate is already stored so there is nothing more to add.
            if 'zgrab_443_scan' not in current_certs[i]['sources']:
                current_certs[i]['sources'].append('zgrab_443_scan')
            if 'zgrab_count' not in current_certs[i]:
                current_certs[i]['zgrab_count'] = get_scan_count(zgrab_collection, result['data']['http']['response']['request']['tls_handshake']['server_certificates']['certificate']['parsed']['fingerprint_sha256'], 1)
    return current_certs
def add_initial_zgrab_certificates(zgrab_collection, zone, current_certs):
    """
    Get the list of current certificates from zgrab scans for the specified zones.
    Append any new entries to the provided array of current_certs.
    This currently does not check
    """
    # zgrab v1 schema: certificate served on the FIRST redirect hop
    # (redirect_response_chain[0]) rather than the terminal response.
    results = zgrab_collection.find({"$or":[{'data.http.redirect_response_chain.0.request.tls_handshake.server_certificates.certificate.parsed.subject.common_name': {"$regex": r'^(.+\.)*' + zone + '$'}},
                                            {'data.http.redirect_response_chain.0.request.tls_handshake.server_certificates.certificate.parsed.extensions.subject_alt_name.dns_names': {"$regex": r'^(.+\.)*' + zone + '$'}}]},
                                    {"data.http.redirect_response_chain": 1})
    for result in results:
        # Index of this cert in current_certs (matched by fingerprint), or None if unseen.
        i = next((index for (index, item) in enumerate(current_certs)\
            if item["id"] == result['data']['http']['redirect_response_chain'][0]['request']['tls_handshake']['server_certificates']['certificate']['parsed']['fingerprint_sha256']), None)
        if i is None:
            # New certificate: collect its unique DNS names (CN first, then SANs).
            item = {'id': result['data']['http']['redirect_response_chain'][0]['request']['tls_handshake']['server_certificates']['certificate']['parsed']['fingerprint_sha256']}
            dns_list = []
            try:
                for dns_name in result['data']['http']['redirect_response_chain'][0]['request']['tls_handshake']['server_certificates']["certificate"]["parsed"]["subject"]["common_name"]:
                    if dns_name not in dns_list:
                        dns_list.append(dns_name)
            except KeyError:
                pass
            # Not all certificates contain alternative names.
            try:
                for dns_name in result['data']['http']['redirect_response_chain'][0]['request']['tls_handshake']['server_certificates']["certificate"]["parsed"]["extensions"]["subject_alt_name"]["dns_names"]:
                    if dns_name not in dns_list:
                        dns_list.append(dns_name)
            except KeyError:
                # "ALT Name key not found."
                pass
            item['dns_entries'] = dns_list
            item['sources'] = ['zgrab_443_scan']
            item['zgrab_count'] = get_scan_count(zgrab_collection, result['data']['http']['redirect_response_chain'][0]['request']['tls_handshake']['server_certificates']['certificate']['parsed']['fingerprint_sha256'], 1)
            current_certs.append(item)
        else:
            # The certificate is already stored so there is nothing more to add.
            if 'zgrab_443_scan' not in current_certs[i]['sources']:
                current_certs[i]['sources'].append('zgrab_443_scan')
            if 'zgrab_count' not in current_certs[i]:
                current_certs[i]['zgrab_count'] = get_scan_count(zgrab_collection, result['data']['http']['redirect_response_chain'][0]['request']['tls_handshake']['server_certificates']['certificate']['parsed']['fingerprint_sha256'], 1)
    return current_certs
def add_terminal_zgrab2_certificates(zgrab_collection, zone, current_certs):
    """
    Get the list of current certificates from zgrab scans for the specified zones.
    Append any new entries to the provided array of current_certs.
    This currently does not check
    """
    # zgrab v2 schema (note the extra 'result'/'tls_log.handshake_log' levels):
    # certificate served on the final (terminal) HTTP response.
    results = zgrab_collection.find({"$or":[{'data.http.result.response.request.tls_log.handshake_log.server_certificates.certificate.parsed.subject.common_name': {"$regex": r'^(.+\.)*' + zone + '$'}},
                                            {'data.http.result.response.request.tls_log.handshake_log.server_certificates.certificate.parsed.extensions.subject_alt_name.dns_names': {"$regex": r'^(.+\.)*' + zone + '$'}}]},
                                    {"data.http.result.response.request.tls_log.handshake_log.server_certificates.certificate.parsed.subject.common_name": 1,
                                     "data.http.result.response.request.tls_log.handshake_log.server_certificates.certificate.parsed.extensions.subject_alt_name.dns_names": 1,
                                     "data.http.result.response.request.tls_log.handshake_log.server_certificates.certificate.parsed.fingerprint_sha256": 1})
    for result in results:
        # Index of this cert in current_certs (matched by fingerprint), or None if unseen.
        i = next((index for (index, item) in enumerate(current_certs)\
            if item["id"] == result['data']['http']['result']['response']['request']['tls_log']['handshake_log']['server_certificates']['certificate']['parsed']['fingerprint_sha256']), None)
        if i is None:
            # New certificate: collect its unique DNS names (CN first, then SANs).
            item = {'id': result['data']['http']['result']['response']['request']['tls_log']['handshake_log']['server_certificates']['certificate']['parsed']['fingerprint_sha256']}
            dns_list = []
            try:
                for dns_name in result['data']['http']['result']['response']['request']['tls_log']['handshake_log']['server_certificates']["certificate"]["parsed"]["subject"]["common_name"]:
                    if dns_name not in dns_list:
                        dns_list.append(dns_name)
            except KeyError:
                pass
            # Not all certificates contain alternative names.
            try:
                for dns_name in result['data']['http']['result']['response']['request']['tls_log']['handshake_log']['server_certificates']["certificate"]["parsed"]["extensions"]["subject_alt_name"]["dns_names"]:
                    if dns_name not in dns_list:
                        dns_list.append(dns_name)
            except KeyError:
                # "ALT Name key not found."
                pass
            item['dns_entries'] = dns_list
            item['sources'] = ['zgrab_443_scan']
            item['zgrab_count'] = get_scan_count(zgrab_collection, result['data']['http']['result']['response']['request']['tls_log']['handshake_log']['server_certificates']['certificate']['parsed']['fingerprint_sha256'], 2)
            current_certs.append(item)
        else:
            # The certificate is already stored so there is nothing more to add.
            if 'zgrab_443_scan' not in current_certs[i]['sources']:
                current_certs[i]['sources'].append('zgrab_443_scan')
            if 'zgrab_count' not in current_certs[i]:
                current_certs[i]['zgrab_count'] = get_scan_count(zgrab_collection, result['data']['http']['result']['response']['request']['tls_log']['handshake_log']['server_certificates']['certificate']['parsed']['fingerprint_sha256'], 2)
    return current_certs
def add_initial_zgrab2_certificates(zgrab_collection, zone, current_certs):
    """
    Get the list of current certificates from zgrab scans for the specified zones.
    Append any new entries to the provided array of current_certs.
    This currently does not check
    """
    # zgrab v2 schema: certificate served on the FIRST redirect hop
    # (result.redirect_response_chain[0]) rather than the terminal response.
    results = zgrab_collection.find({"$or":[{'data.http.result.redirect_response_chain.0.request.tls_log.handshake_log.server_certificates.certificate.parsed.subject.common_name': {"$regex": r'^(.+\.)*' + zone + '$'}},
                                            {'data.http.result.redirect_response_chain.0.request.tls_log.handshake_log.server_certificates.certificate.parsed.extensions.subject_alt_name.dns_names': {"$regex": r'^(.+\.)*' + zone + '$'}}]},
                                    {"data.http.result.redirect_response_chain": 1})
    for result in results:
        # Index of this cert in current_certs (matched by fingerprint), or None if unseen.
        i = next((index for (index, item) in enumerate(current_certs)\
            if item["id"] == result['data']['http']['result']['redirect_response_chain'][0]['request']['tls_log']['handshake_log']['server_certificates']['certificate']['parsed']['fingerprint_sha256']), None)
        if i is None:
            # New certificate: collect its unique DNS names (CN first, then SANs).
            item = {'id': result['data']['http']['result']['redirect_response_chain'][0]['request']['tls_log']['handshake_log']['server_certificates']['certificate']['parsed']['fingerprint_sha256']}
            dns_list = []
            try:
                for dns_name in result['data']['http']['result']['redirect_response_chain'][0]['request']['tls_log']['handshake_log']['server_certificates']["certificate"]["parsed"]["subject"]["common_name"]:
                    if dns_name not in dns_list:
                        dns_list.append(dns_name)
            except KeyError:
                pass
            # Not all certificates contain alternative names.
            try:
                for dns_name in result['data']['http']['result']['redirect_response_chain'][0]['request']['tls_log']['handshake_log']['server_certificates']["certificate"]["parsed"]["extensions"]["subject_alt_name"]["dns_names"]:
                    if dns_name not in dns_list:
                        dns_list.append(dns_name)
            except KeyError:
                # "ALT Name key not found."
                pass
            item['dns_entries'] = dns_list
            item['sources'] = ['zgrab_443_scan']
            item['zgrab_count'] = get_scan_count(zgrab_collection, result['data']['http']['result']['redirect_response_chain'][0]['request']['tls_log']['handshake_log']['server_certificates']['certificate']['parsed']['fingerprint_sha256'], 2)
            current_certs.append(item)
        else:
            # The certificate is already stored so there is nothing more to add.
            if 'zgrab_443_scan' not in current_certs[i]['sources']:
                current_certs[i]['sources'].append('zgrab_443_scan')
            if 'zgrab_count' not in current_certs[i]:
                current_certs[i]['zgrab_count'] = get_scan_count(zgrab_collection, result['data']['http']['result']['redirect_response_chain'][0]['request']['tls_log']['handshake_log']['server_certificates']['certificate']['parsed']['fingerprint_sha256'], 2)
    return current_certs
def create_nodes(graph, mongo_connector, zone, all_certs):
    """
    Populate the given graph with D3.js nodes and links derived from the
    collected certificates, and return it.

    Each certificate becomes a 'certificate' node; each DNS name on it
    becomes a 'domain' node linked to the certificate in both directions.
    """
    dns_manager = DNSManager.DNSManager(mongo_connector)
    for cert in all_certs:
        # Combined hit count across the Censys and zgrab sources.
        total_count = cert.get('censys_count', 0) + cert.get('zgrab_count', 0)
        graph.add_node(cert['id'], type='certificate', sources=cert['sources'], total_count=total_count)
        for fqdn in cert['dns_entries']:
            lookup = dns_manager.find_one({'fqdn': fqdn}, None)
            # Flag whether the name resolves in Marinus' DNS records.
            status = "No Host" if lookup is None else "Resolves"
            root_flag = "true" if fqdn == zone else "false"
            graph.add_node(fqdn, root=root_flag, status=status, type="domain", sources=cert['sources'])
            graph.add_edge(cert['id'], fqdn, type="sans")
            graph.add_edge(fqdn, cert['id'], type="uses")
    return graph
def main():
    """
    Build and store a certificate graph for every tracked zone.

    For each zone, certificates are gathered from the sources selected on
    the command line (CT logs, Censys, and/or zgrab port-443 scans), turned
    into a D3.js-compatible node-link graph, and written to the
    cert_graphs collection (delete-then-insert per zone).
    """
    logger = LoggingUtil.create_log(__name__)
    now = datetime.now()
    print("Starting: " + str(now))
    logger.info("Starting...")

    mongo_connector = MongoConnector.MongoConnector()
    mongo_ct = mongo_connector.get_certificate_transparency_connection()
    cert_graphs_collection = mongo_connector.get_cert_graphs_connection()
    jobs_manager = JobsManager.JobsManager(mongo_connector, 'create_cert_graphs')
    jobs_manager.record_job_start()

    zones = ZoneManager.get_distinct_zones(mongo_connector)

    parser = argparse.ArgumentParser(description='Creates and stores certificate graphs in the database based on one or more sources.')
    parser.add_argument('--check_censys', action='store_true', default=False, required=False, help='Whether to check the Censys collection in the database')
    parser.add_argument('--check_443_scans', action='store_true', default=False, required=False, help='Whether to check the zgrab collection in the database')
    parser.add_argument('--check_ct_scans', action='store_true', default=False, required=False, help='Whether to check the CT collection in the database')
    parser.add_argument('--zgrab_version', default=2, type=int, choices=[1, 2], metavar="version", help='The version of ZGrab used to collect data')
    args = parser.parse_args()

    # These collections are opened only when the matching source was
    # requested; the loop below only touches them under the same flags.
    if args.check_censys is True:
        censys_collection = mongo_connector.get_censys_connection()
    if args.check_443_scans is True:
        zgrab_collection = mongo_connector.get_zgrab_443_data_connection()

    for zone in zones:
        logger.info("Creating: " + zone)
        graph = nx.DiGraph()
        certs_list = {}

        # Each helper folds its source's certificates into certs_list.
        if args.check_ct_scans:
            certs_list = get_current_ct_certificates(mongo_ct, zone)
        if args.check_censys:
            certs_list = add_censys_certificates(censys_collection, zone, certs_list)
        if args.check_443_scans:
            # ZGrab v1 and v2 store results in different document layouts,
            # so each version has its own pair of helpers.
            if args.zgrab_version == 1:
                certs_list = add_terminal_zgrab_certificates(zgrab_collection, zone, certs_list)
                certs_list = add_initial_zgrab_certificates(zgrab_collection, zone, certs_list)
            else:
                certs_list = add_terminal_zgrab2_certificates(zgrab_collection, zone, certs_list)
                certs_list = add_initial_zgrab2_certificates(zgrab_collection, zone, certs_list)

        graph = create_nodes(graph, mongo_connector, zone, certs_list)

        # Serialize to the node-link JSON format consumed by D3.js.
        data = json_graph.node_link_data(graph)
        my_data = {}
        my_data['links'] = data['links']
        my_data['nodes'] = data['nodes']
        my_data['zone'] = zone
        my_data['created'] = datetime.now()

        # Replace any previously stored graph for this zone.
        cert_graphs_collection.delete_one({'zone': zone})
        mongo_connector.perform_insert(cert_graphs_collection, my_data)

    # Record status
    jobs_manager.record_job_complete()

    now = datetime.now()
    print("Ending: " + str(now))
    logger.info("Complete.")


if __name__ == "__main__":
    main()
| 53.957143
| 258
| 0.652237
| 2,706
| 22,662
| 5.228012
| 0.099409
| 0.06489
| 0.086096
| 0.103909
| 0.762282
| 0.745034
| 0.735562
| 0.719941
| 0.706369
| 0.692726
| 0
| 0.014452
| 0.209161
| 22,662
| 419
| 259
| 54.085919
| 0.774914
| 0.124129
| 0
| 0.425373
| 0
| 0.052239
| 0.354985
| 0.144225
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037313
| false
| 0.037313
| 0.033582
| 0
| 0.104478
| 0.119403
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
537812337573efe7472bdcf11359fb5d961385b7
| 901
|
py
|
Python
|
packages/pyright-internal/src/tests/samples/assertType1.py
|
Jasha10/pyright
|
0ce0cfa10fe7faa41071a2cc417bb449cf8276fe
|
[
"MIT"
] | 3,934
|
2019-03-22T09:26:41.000Z
|
2019-05-06T21:03:08.000Z
|
packages/pyright-internal/src/tests/samples/assertType1.py
|
Jasha10/pyright
|
0ce0cfa10fe7faa41071a2cc417bb449cf8276fe
|
[
"MIT"
] | 107
|
2019-03-24T04:09:37.000Z
|
2019-05-06T17:00:04.000Z
|
packages/pyright-internal/src/tests/samples/assertType1.py
|
Jasha10/pyright
|
0ce0cfa10fe7faa41071a2cc417bb449cf8276fe
|
[
"MIT"
] | 119
|
2019-03-23T10:48:04.000Z
|
2019-05-06T08:57:56.000Z
|
# This sample tests the assert_type call.
from typing import Any, Literal
from typing_extensions import assert_type
def func1():
# This should generate an error.
assert_type()
# This should generate an error.
assert_type(1)
# This should generate an error.
assert_type(1, 2, 3)
# This should generate an error.
assert_type(*[])
def func2(x: int, y: int | str):
assert_type(x, int)
# This should generate an error.
assert_type(x, str)
# This should generate an error.
assert_type(x, Any)
x = 3
assert_type(x, Literal[3])
# This should generate an error.
assert_type(x, int)
assert_type(y, int | str)
assert_type(y, str | int)
# This should generate an error.
assert_type(y, str)
# This should generate an error.
assert_type(y, None)
# This should generate two errors.
assert_type(y, 3)
| 19.170213
| 41
| 0.650388
| 134
| 901
| 4.246269
| 0.223881
| 0.281195
| 0.316344
| 0.316344
| 0.650264
| 0.59051
| 0.59051
| 0.528998
| 0
| 0
| 0
| 0.013493
| 0.259711
| 901
| 46
| 42
| 19.586957
| 0.83958
| 0.389567
| 0
| 0.105263
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.789474
| 1
| 0.105263
| false
| 0
| 0.105263
| 0
| 0.210526
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
53d63a04159e41b99272fcdd4b780bd78c26da4d
| 813
|
py
|
Python
|
mods-config/python/tests/user_pwd.py
|
enckse/freeradius
|
6574df4aa2819e3a2ff1a237cb9cdfb56c74f8d4
|
[
"MIT"
] | 9
|
2019-08-19T16:13:12.000Z
|
2022-03-26T01:45:16.000Z
|
mods-config/python/tests/user_pwd.py
|
enckse/freeradius
|
6574df4aa2819e3a2ff1a237cb9cdfb56c74f8d4
|
[
"MIT"
] | 26
|
2017-01-16T18:21:11.000Z
|
2018-01-10T15:04:44.000Z
|
mods-config/python/tests/user_pwd.py
|
enckse/freeradius
|
6574df4aa2819e3a2ff1a237cb9cdfb56c74f8d4
|
[
"MIT"
] | 13
|
2019-03-12T01:03:09.000Z
|
2022-03-12T16:28:27.000Z
|
"""User password (padded)"""
import users.__config__ as __config__
import users.common as common
normal = __config__.Assignment()
normal.macs = [common.VALID_MAC]
normal.password = "73236255594031849679.2075683511536954196|61898269231080222714.15134249551816100126|5993584274528230152.113792398887207934"
normal.vlan = "dev"
normal.group = 'test'
admin = __config__.Assignment()
admin.macs = normal.macs
admin.password = "1962492356.2077368840|3820068285.1955095425|2738826028.2404152075|2534330902.1644902573|3041089703.1585085477|2446680248.1205224328|407511008.3886870569|1281140122.1458407570|3427001722.1459924992|2893452380.3108080557|2683358834.3965272478|2158902285.1618716815|726152924.685630641|753123170.2865077075|2390675803.3136199456|1829465677.2159451724"
admin.vlan = "prod"
admin.group = "drop"
| 54.2
| 366
| 0.842558
| 82
| 813
| 8.146341
| 0.707317
| 0.032934
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.560311
| 0.051661
| 813
| 14
| 367
| 58.071429
| 0.306096
| 0.02706
| 0
| 0
| 0
| 0.083333
| 0.615287
| 0.596178
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.166667
| 0.166667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
54cde77a8f6669005e7d149f4fd5fc48e10e3fb3
| 264
|
py
|
Python
|
aiogmaps/timezone.py
|
Ridecell/aiogmaps
|
7963d6790c2ca64f6896365580cffb96ca4bc4d2
|
[
"MIT"
] | 14
|
2018-04-13T19:47:24.000Z
|
2021-12-18T11:09:31.000Z
|
aiogmaps/timezone.py
|
hzlmn/aiogmaps
|
2d5eaf3ec054962621a920208ec47ea68233cd72
|
[
"MIT"
] | 13
|
2018-03-17T16:45:01.000Z
|
2020-11-30T16:16:10.000Z
|
aiogmaps/timezone.py
|
Ridecell/aiogmaps
|
7963d6790c2ca64f6896365580cffb96ca4bc4d2
|
[
"MIT"
] | 6
|
2018-12-03T14:10:18.000Z
|
2019-07-02T13:16:25.000Z
|
from googlemaps.timezone import timezone as _timezone
async def timezone(client, location, timestamp=None, language=None):
    """Async wrapper: delegate a timezone lookup to the googlemaps
    ``timezone`` helper, forwarding all arguments unchanged."""
    result = await _timezone(client, location,
                             timestamp=timestamp,
                             language=language)
    return result
| 33
| 68
| 0.643939
| 25
| 264
| 6.72
| 0.56
| 0.166667
| 0.261905
| 0.369048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.295455
| 264
| 7
| 69
| 37.714286
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
54d38f2f4082788f0e3fbfbf55a891ddc11e2959
| 116
|
py
|
Python
|
jobs/admin.py
|
pauljeffrey/portfolio
|
ff91ab2a6e30886f0a686aae56fe2df45e497fbd
|
[
"MIT"
] | 3
|
2019-06-23T11:35:15.000Z
|
2022-03-28T15:39:23.000Z
|
jobs/admin.py
|
pauljeffrey/portfolio
|
ff91ab2a6e30886f0a686aae56fe2df45e497fbd
|
[
"MIT"
] | 2
|
2019-01-30T21:00:41.000Z
|
2020-03-06T00:15:20.000Z
|
jobs/admin.py
|
pauljeffrey/portfolio
|
ff91ab2a6e30886f0a686aae56fe2df45e497fbd
|
[
"MIT"
] | 3
|
2019-03-19T06:18:43.000Z
|
2021-06-23T16:30:51.000Z
|
from django.contrib import admin
from .models import Jobs
# Register your models here.
# Make the Jobs model manageable through the Django admin site
# (default ModelAdmin options).
admin.site.register(Jobs)
| 14.5
| 32
| 0.784483
| 17
| 116
| 5.352941
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146552
| 116
| 7
| 33
| 16.571429
| 0.919192
| 0.224138
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0703f3c580d0b78c937919d5a1c90b45c8baf802
| 131
|
py
|
Python
|
compytest9/__init__.py
|
sh-s/compytest9
|
9b175be92328d7d35237c64c4c96769fd592b126
|
[
"MIT"
] | null | null | null |
compytest9/__init__.py
|
sh-s/compytest9
|
9b175be92328d7d35237c64c4c96769fd592b126
|
[
"MIT"
] | null | null | null |
compytest9/__init__.py
|
sh-s/compytest9
|
9b175be92328d7d35237c64c4c96769fd592b126
|
[
"MIT"
] | null | null | null |
# Package initializer for compytest9: record the package version and
# re-export the public API of the two submodules.
#
# Fix: the original value was the string "__version__ = '0.0.3'" -- the
# whole assignment text had been pasted into its own value, so anything
# reading compytest9.__version__ saw the assignment statement instead of
# the version number.
__version__ = '0.0.3'
from compytest9.discomfortmethod import *
from compytest9.thermaldefinitions import *
| 21.833333
| 44
| 0.755725
| 13
| 131
| 7
| 0.615385
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045045
| 0.152672
| 131
| 5
| 45
| 26.2
| 0.774775
| 0
| 0
| 0
| 0
| 0
| 0.168
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4aca22f4237e55bfc8c84f207ff7bc3d6f10fd0a
| 254
|
py
|
Python
|
python-worker/helpers/stereo_to_mono.py
|
Sergey-Baranenkov/audio_recognition_system
|
4472a76e495fe6a679ffbd4dd3706ce971540e79
|
[
"Apache-2.0"
] | null | null | null |
python-worker/helpers/stereo_to_mono.py
|
Sergey-Baranenkov/audio_recognition_system
|
4472a76e495fe6a679ffbd4dd3706ce971540e79
|
[
"Apache-2.0"
] | null | null | null |
python-worker/helpers/stereo_to_mono.py
|
Sergey-Baranenkov/audio_recognition_system
|
4472a76e495fe6a679ffbd4dd3706ce971540e79
|
[
"Apache-2.0"
] | null | null | null |
# Converts stereo sound to mono by averaging the left and right channels.
# dtype = int so the result is rounded down to the lower level.
import numpy as np
def stereo_to_mono(stereo: np.ndarray) -> np.ndarray:
    """Collapse a two-channel (stereo) signal into mono.

    Each output sample is the average of the left and right channels,
    computed with an integer dtype so the value is truncated downward
    rather than kept as a float.
    """
    return stereo.mean(axis=1, dtype=int)
| 28.222222
| 114
| 0.755906
| 40
| 254
| 4.75
| 0.8
| 0.084211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004717
| 0.165354
| 254
| 8
| 115
| 31.75
| 0.891509
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
4acd54f6c052728f285c178854c0dc52c2b4a6bd
| 135
|
py
|
Python
|
decorrelator/__init__.py
|
philastrophist/decorrelator
|
261e9c4df83923d7141c7400d82633676441a9d7
|
[
"MIT"
] | null | null | null |
decorrelator/__init__.py
|
philastrophist/decorrelator
|
261e9c4df83923d7141c7400d82633676441a9d7
|
[
"MIT"
] | null | null | null |
decorrelator/__init__.py
|
philastrophist/decorrelator
|
261e9c4df83923d7141c7400d82633676441a9d7
|
[
"MIT"
] | null | null | null |
import theano
theano.config.compute_test_value = "ignore"
from .correlation import CorrelationModel
from .linear import LinearRelation
| 27
| 43
| 0.851852
| 16
| 135
| 7.0625
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096296
| 135
| 5
| 44
| 27
| 0.92623
| 0
| 0
| 0
| 0
| 0
| 0.044118
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4ae929802bb81b9e7ea184a563b178cf57695951
| 211
|
py
|
Python
|
zhiliao/blog/__init__.py
|
gladgod/zhiliao
|
573dfbe56734388c9657cb6749d267f4a8885d5b
|
[
"BSD-3-Clause"
] | null | null | null |
zhiliao/blog/__init__.py
|
gladgod/zhiliao
|
573dfbe56734388c9657cb6749d267f4a8885d5b
|
[
"BSD-3-Clause"
] | null | null | null |
zhiliao/blog/__init__.py
|
gladgod/zhiliao
|
573dfbe56734388c9657cb6749d267f4a8885d5b
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Provides a blogging app with posts, keywords, categories and comments.
Posts can be listed by month, keyword, category or author.
"""
from __future__ import unicode_literals
from zhiliao import __version__
| 26.375
| 70
| 0.796209
| 29
| 211
| 5.482759
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146919
| 211
| 7
| 71
| 30.142857
| 0.883333
| 0.611374
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ab3792a6d01b8b50479394fac02527e0425b4db4
| 11,133
|
py
|
Python
|
cloudroast/compute/integration/identity/test_authentication_required.py
|
ProjectMeniscus/cloudroast
|
b2e69c7f5657ee1f1cc7f03c8af18effb3c41cb6
|
[
"Apache-2.0"
] | null | null | null |
cloudroast/compute/integration/identity/test_authentication_required.py
|
ProjectMeniscus/cloudroast
|
b2e69c7f5657ee1f1cc7f03c8af18effb3c41cb6
|
[
"Apache-2.0"
] | null | null | null |
cloudroast/compute/integration/identity/test_authentication_required.py
|
ProjectMeniscus/cloudroast
|
b2e69c7f5657ee1f1cc7f03c8af18effb3c41cb6
|
[
"Apache-2.0"
] | 1
|
2020-11-17T19:04:33.000Z
|
2020-11-17T19:04:33.000Z
|
"""
Copyright 2013 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from cafe.drivers.unittest.decorators import tags
from cloudcafe.common.tools.datagen import rand_name
from cloudcafe.compute.common.types import NovaImageStatusTypes
from cloudcafe.compute.common.types import NovaServerRebootTypes
from cloudcafe.compute.common.exceptions import Unauthorized
from cloudroast.compute.fixtures import ComputeFixture
class TokenRequiredTests(ComputeFixture):
@classmethod
def setUpClass(cls):
super(TokenRequiredTests, cls).setUpClass()
cls.metadata = {'meta_key_1': 'meta_value_1',
'meta_key_2': 'meta_value_2'}
cls.server = cls.server_behaviors.create_active_server(
metadata=cls.metadata).entity
cls.resources.add(cls.server.id, cls.servers_client.delete_server)
image_name = rand_name('testimage')
cls.image_meta = {'key1': 'value1', 'key2': 'value2'}
image_resp = cls.servers_client.create_image(cls.server.id,
image_name,
cls.image_meta)
assert image_resp.status_code == 202
cls.image_id = cls.parse_image_id(image_resp)
cls.image_behaviors.wait_for_image_status(
cls.image_id, NovaImageStatusTypes.ACTIVE)
cls.resources.add(cls.image_id, cls.images_client.delete_image)
cls.auth_token = {'headers': {'X-Auth-Token': None}}
@tags(type='negative', net='no')
def test_list_flavors_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.flavors_client.list_flavors(
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_list_flavors_detailed_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.flavors_client.list_flavors_with_detail(
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_get_flavor_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.flavors_client.get_flavor_details(
self.flavor_ref,
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_list_images_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.images_client.list_images(
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_list_images_detailed_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.images_client.list_images_with_detail(
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_get_image_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.images_client.get_image(
self.image_ref,
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_delete_image_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.images_client.delete_image(
self.image_id,
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_list_servers_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.servers_client.list_servers(
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_list_servers_detailed_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.servers_client.list_servers_with_detail(
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_list_server_addresses_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.servers_client.list_addresses(
self.server.id, requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_list_server_addresses_by_network_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.servers_client.list_addresses_by_network(
self.server.id, 'prviate', requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_create_server_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.servers_client.create_server(
'test', self.image_ref, self.flavor_ref,
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_get_server_unauthorized(self):
with self.assertRaises(Unauthorized):
self.servers_client.get_server(
self.server.id,
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_delete_server_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.servers_client.delete_server(
self.server.id,
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_change_server_password_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.servers_client.change_password(
self.server.id, 'newP@ssw0rd',
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_reboot_server_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.servers_client.reboot(
self.server.id, NovaServerRebootTypes.SOFT,
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_rebuild_server_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.servers_client.rebuild(
self.server.id, self.image_ref_alt,
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_resize_server_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.servers_client.resize(
self.server.id, self.flavor_ref_alt,
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_create_image_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.servers_client.create_image(
self.server.id, 'testimage',
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_list_server_metadata_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.servers_client.list_server_metadata(
self.server.id,
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_set_server_metadata_with_invalid_token(self):
with self.assertRaises(Unauthorized):
new_meta = {'meta2': 'data2', 'meta3': 'data3'}
self.servers_client.set_server_metadata(
self.server.id, new_meta,
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_update_server_metadata_with_invalid_token(self):
with self.assertRaises(Unauthorized):
new_meta = {'meta2': 'data2', 'meta3': 'data3'}
self.servers_client.update_server_metadata(
self.server.id, new_meta,
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_get_server_metadata_item_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.servers_client.get_server_metadata_item(
self.server.id, 'meta_key_1',
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_delete_server_metadata_item_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.servers_client.delete_server_metadata_item(
self.server.id, 'meta_key_1',
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_set_server_metadata_item_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.servers_client.set_server_metadata_item(
self.server.id, 'meta_key_1', 'newvalue',
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_list_image_metadata_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.images_client.list_image_metadata(
self.image_id,
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_set_image_metadata_with_invalid_token(self):
with self.assertRaises(Unauthorized):
new_meta = {'meta2': 'data2', 'meta3': 'data3'}
self.images_client.set_image_metadata(
self.image_id, new_meta,
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_update_image_metadata_with_invalid_token(self):
with self.assertRaises(Unauthorized):
new_meta = {'meta2': 'data2', 'meta3': 'data3'}
self.images_client.update_image_metadata(
self.image_id, new_meta,
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_get_image_metadata_item_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.images_client.get_image_metadata_item(
self.image_id, 'key1',
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_delete_image_metadata_item_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.images_client.delete_image_metadata_item(
self.image_id, 'key1',
requestslib_kwargs=self.auth_token)
@tags(type='negative', net='no')
def test_set_image_metadata_item_with_invalid_token(self):
with self.assertRaises(Unauthorized):
self.images_client.set_image_metadata_item(
self.image_id, 'key1', 'newvalue',
requestslib_kwargs=self.auth_token)
class InvalidTokenTests(TokenRequiredTests):
    """
    Re-run every TokenRequiredTests case with a malformed token.

    The parent class sends requests with X-Auth-Token set to None; this
    subclass overrides the header with a bogus value ('abc') so the same
    negative tests also cover the invalid-token path.
    """

    @classmethod
    def setUpClass(cls):
        super(InvalidTokenTests, cls).setUpClass()
        # Replace the parent's null token with a present-but-invalid one.
        cls.auth_token = {'headers': {'X-Auth-Token': 'abc'}}

    @classmethod
    def tearDownClass(cls):
        super(InvalidTokenTests, cls).tearDownClass()
| 40.930147
| 78
| 0.667834
| 1,303
| 11,133
| 5.403684
| 0.128933
| 0.044738
| 0.070445
| 0.083653
| 0.761682
| 0.748757
| 0.73001
| 0.717228
| 0.717228
| 0.705298
| 0
| 0.004931
| 0.234977
| 11,133
| 271
| 79
| 41.081181
| 0.821768
| 0.049313
| 0
| 0.523148
| 0
| 0
| 0.05607
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 1
| 0.157407
| false
| 0.009259
| 0.027778
| 0
| 0.194444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ab588b282b4a0d368e5d55f210d5b8892c9c8fba
| 2,573
|
py
|
Python
|
Project Code/Machine Learning/Unsupervised Learning/Handwriting Recognition using K-Means/script.py
|
BIGG000/Data-Science-Learning-Track
|
46df939699fd6b2c1ee3ce5d35c51a250faa1bce
|
[
"MIT"
] | 2
|
2020-10-15T22:52:00.000Z
|
2020-11-22T16:13:20.000Z
|
Project Code/Machine Learning/Unsupervised Learning/Handwriting Recognition using K-Means/script.py
|
BIGG000/Data-Science-Learning-Track
|
46df939699fd6b2c1ee3ce5d35c51a250faa1bce
|
[
"MIT"
] | null | null | null |
Project Code/Machine Learning/Unsupervised Learning/Handwriting Recognition using K-Means/script.py
|
BIGG000/Data-Science-Learning-Track
|
46df939699fd6b2c1ee3ce5d35c51a250faa1bce
|
[
"MIT"
] | null | null | null |
import codecademylib3_seaborn
import numpy as np
from matplotlib import pyplot as plt
from sklearn import datasets
from sklearn.cluster import KMeans
digits = datasets.load_digits()
print(digits.target)
plt.gray()
plt.matshow(digits.images[100])
plt.show()
print(digits.target[100])
model = KMeans(n_clusters = 10, random_state=42)
model.fit(digits.data)
fig=plt.figure(figsize=(8,3))
fig.suptitle('Cluster center images',fontsize=14,fontweight='bold')
for i in range(10):
# Initialize subplots in a grid of 2X5, at i+1th position
ax = fig.add_subplot(2, 5, 1 + i)
# Display images
ax.imshow(model.cluster_centers_[i].reshape((8, 8)), cmap=plt.cm.binary)
plt.show()
new_samples = np.array([
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,1.60,7.32,0.99,0.00,0.00,0.00,0.00,0.00,2.06,7.62,1.98,0.00,0.00,0.00,0.00,0.00,1.52,7.62,2.29,0.00,0.00,0.00,0.00,0.00,1.07,7.63,3.21,0.00,0.00,0.00,0.00,0.00,0.00,7.62,3.81,0.00,0.00,0.00,0.00,0.00,0.00,7.39,3.58,0.00,0.00,0.00,0.00,0.00,0.00,0.99,0.23,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.92,3.90,5.85,7.40,6.03,0.38,0.00,2.67,7.40,7.47,5.80,4.80,7.62,2.98,0.53,7.55,6.03,0.84,0.00,0.77,7.62,3.51,0.76,7.62,3.05,0.00,0.00,0.07,6.63,5.64,1.14,7.62,5.11,3.05,3.05,3.74,7.09,5.64,0.00,4.66,7.62,7.62,7.62,7.63,5.87,1.98],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.54,0.00,0.00,0.00,0.00,0.00,0.00,1.83,7.55,1.15,0.00,0.00,0.00,0.00,0.00,2.29,7.62,1.52,0.00,0.00,0.00,0.00,0.00,2.29,7.62,1.52,0.00,0.00,0.00,0.00,0.00,2.29,7.62,1.52,0.00,0.00,0.00,0.00,0.00,2.21,7.62,1.45,0.00,0.00,0.00,0.00,0.00,0.08,1.98,0.00,0.00,0.00],
[0.00,0.00,3.35,7.62,7.62,4.73,0.00,0.00,0.00,0.00,3.73,7.62,7.40,7.62,2.44,0.00,0.00,0.00,1.91,7.62,3.13,6.18,6.71,0.15,0.00,0.00,4.42,7.47,0.53,2.29,7.62,2.67,0.00,0.15,7.02,5.11,0.00,0.92,7.63,3.51,0.00,2.29,7.62,3.13,3.36,6.86,7.62,2.59,0.00,3.05,7.62,7.62,7.63,6.10,1.83,0.00,0.00,0.76,3.81,3.58,1.15,0.00,0.00,0.00]
])
new_labels = model.predict(new_samples)
print(new_labels)
# Map each K-Means cluster label to the digit that cluster represents.
# Cluster indices assigned by K-Means are arbitrary, so the correspondence
# (presumably read off the cluster-center images plotted above -- confirm
# against the plot) is kept in a lookup table. This replaces the original
# ten-branch if/elif chain and C-style index loop.
CLUSTER_TO_DIGIT = {0: 0, 1: 9, 2: 2, 3: 1, 4: 6, 5: 8, 6: 4, 7: 5, 8: 7, 9: 3}
for label in new_labels:
    # .get with '' mirrors the original chain, which printed nothing for
    # a label outside 0-9 (cannot happen with n_clusters=10).
    print(CLUSTER_TO_DIGIT.get(label, ''), end='')
| 41.5
| 322
| 0.628449
| 722
| 2,573
| 2.209141
| 0.189751
| 0.253919
| 0.288401
| 0.398746
| 0.39373
| 0.282132
| 0.265204
| 0.258934
| 0.24953
| 0.240752
| 0
| 0.345957
| 0.086669
| 2,573
| 61
| 323
| 42.180328
| 0.332766
| 0.027206
| 0
| 0.041667
| 0
| 0
| 0.010004
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.104167
| 0
| 0.104167
| 0.270833
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
db4fa6f7212951644e183f3535825d7affafe9d5
| 128
|
py
|
Python
|
skillsapp/admin.py
|
valentine-ochieng/Skills-app
|
110f2b852a94d5bc990e5b0710de3783bef907b7
|
[
"MIT"
] | null | null | null |
skillsapp/admin.py
|
valentine-ochieng/Skills-app
|
110f2b852a94d5bc990e5b0710de3783bef907b7
|
[
"MIT"
] | null | null | null |
skillsapp/admin.py
|
valentine-ochieng/Skills-app
|
110f2b852a94d5bc990e5b0710de3783bef907b7
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Profile, Skills
# Make both models manageable through the Django admin site
# (default ModelAdmin options).
admin.site.register(Profile)
admin.site.register(Skills)
| 18.285714
| 35
| 0.8125
| 18
| 128
| 5.777778
| 0.555556
| 0.173077
| 0.326923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101563
| 128
| 6
| 36
| 21.333333
| 0.904348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
db5dc663a6b45fbab515001b05b294b12c238d91
| 66
|
py
|
Python
|
tests/fixtures/keepempty.py
|
joshuaroot/chaostoolkit-lib
|
d178b9877a57c07f1f622bfc3f2f58359ec6998a
|
[
"Apache-2.0"
] | 73
|
2017-11-26T13:43:53.000Z
|
2022-02-25T02:52:54.000Z
|
tests/fixtures/keepempty.py
|
joshuaroot/chaostoolkit-lib
|
d178b9877a57c07f1f622bfc3f2f58359ec6998a
|
[
"Apache-2.0"
] | 191
|
2017-10-09T14:33:12.000Z
|
2022-03-23T16:23:06.000Z
|
tests/fixtures/keepempty.py
|
joshuaroot/chaostoolkit-lib
|
d178b9877a57c07f1f622bfc3f2f58359ec6998a
|
[
"Apache-2.0"
] | 48
|
2017-12-05T16:39:01.000Z
|
2022-02-14T12:01:30.000Z
|
# just keep this as-is
def not_an_activity():
print("boom")
| 11
| 22
| 0.651515
| 11
| 66
| 3.727273
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.212121
| 66
| 5
| 23
| 13.2
| 0.788462
| 0.30303
| 0
| 0
| 0
| 0
| 0.090909
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
db63c758a5bebb555f96a2641cb9b2ffde722412
| 149
|
py
|
Python
|
tests/web_platform/CSS2/visuren/test_anonymous_boxes.py
|
jonboland/colosseum
|
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
|
[
"BSD-3-Clause"
] | 71
|
2015-04-13T09:44:14.000Z
|
2019-03-24T01:03:02.000Z
|
tests/web_platform/CSS2/visuren/test_anonymous_boxes.py
|
jonboland/colosseum
|
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
|
[
"BSD-3-Clause"
] | 35
|
2019-05-06T15:26:09.000Z
|
2022-03-28T06:30:33.000Z
|
tests/web_platform/CSS2/visuren/test_anonymous_boxes.py
|
jonboland/colosseum
|
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
|
[
"BSD-3-Clause"
] | 139
|
2015-05-30T18:37:43.000Z
|
2019-03-27T17:14:05.000Z
|
from tests.utils import W3CTestCase
class TestAnonymousBoxes(W3CTestCase):
    """CSS2 visuren suite: pulls in every W3C test whose file name starts
    with 'anonymous-boxes-'. find_tests presumably returns a name->test
    mapping that vars().update injects as class attributes -- confirm
    against tests.utils.W3CTestCase."""
    vars().update(W3CTestCase.find_tests(__file__, 'anonymous-boxes-'))
| 24.833333
| 71
| 0.791946
| 16
| 149
| 7.0625
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0.09396
| 149
| 5
| 72
| 29.8
| 0.814815
| 0
| 0
| 0
| 0
| 0
| 0.107383
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
db80232b3d3da2eb40c9f193f7cece305c057cc4
| 211
|
py
|
Python
|
tests/test_relabel_map.py
|
godzilla-but-nicer/pid_plotter
|
e7c9ff386d3a5f95f0ffc5e2c4d0df3d5de64304
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_relabel_map.py
|
godzilla-but-nicer/pid_plotter
|
e7c9ff386d3a5f95f0ffc5e2c4d0df3d5de64304
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_relabel_map.py
|
godzilla-but-nicer/pid_plotter
|
e7c9ff386d3a5f95f0ffc5e2c4d0df3d5de64304
|
[
"BSD-3-Clause"
] | null | null | null |
import pytest
from pid_plotter.pid_plotter import pretty_labels_map
test_label = '((2,), (1, 3))'
test_map = {test_label: '{2}{13}'}
def test_label_map():
    """pretty_labels_map should map the raw label '((2,), (1, 3))' to its
    pretty form '{2}{13}' (returns a label -> pretty-string dict)."""
    assert pretty_labels_map([test_label]) == test_map
| 26.375
| 54
| 0.725118
| 34
| 211
| 4.117647
| 0.470588
| 0.257143
| 0.257143
| 0.271429
| 0.342857
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032432
| 0.123223
| 211
| 8
| 54
| 26.375
| 0.724324
| 0
| 0
| 0
| 0
| 0
| 0.099057
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
db95ba236c9588a0c1a512e1c14f760041ca653f
| 103
|
py
|
Python
|
app/tuples_ex.py
|
andykmiles/code-boutique
|
26d05202f832af163f2900c36237988f37ceea8a
|
[
"MIT"
] | null | null | null |
app/tuples_ex.py
|
andykmiles/code-boutique
|
26d05202f832af163f2900c36237988f37ceea8a
|
[
"MIT"
] | null | null | null |
app/tuples_ex.py
|
andykmiles/code-boutique
|
26d05202f832af163f2900c36237988f37ceea8a
|
[
"MIT"
] | null | null | null |
def tupler(atuple):
    """Echo the received argument, then return the fixed tuple (1, 2, 3)."""
    print("atuple=" + repr(atuple))
    return 1, 2, 3


# Demonstrate that the function returns a tuple regardless of its input.
print("type(tupler((10, 11)))=" + repr(type(tupler((10, 11)))))
| 14.714286
| 35
| 0.543689
| 16
| 103
| 3.5
| 0.75
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0.184466
| 103
| 6
| 36
| 17.166667
| 0.583333
| 0
| 0
| 0
| 0
| 0
| 0.330097
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
dba791a91d116fb852e6fbaca8c7c7812a1212a6
| 140
|
py
|
Python
|
Sets/Set .intersection() Operation.py
|
AbdallahHemdan/HackerRank-Python-Solutions
|
0e388b4f594a210426791246ca1278a6a872dd96
|
[
"MIT"
] | 3
|
2020-01-03T11:39:24.000Z
|
2021-03-13T13:35:23.000Z
|
Sets/Set .intersection() Operation.py
|
AbdallahHemdan/HackerRank-Python-Solutions
|
0e388b4f594a210426791246ca1278a6a872dd96
|
[
"MIT"
] | null | null | null |
Sets/Set .intersection() Operation.py
|
AbdallahHemdan/HackerRank-Python-Solutions
|
0e388b4f594a210426791246ca1278a6a872dd96
|
[
"MIT"
] | 1
|
2018-10-20T09:36:06.000Z
|
2018-10-20T09:36:06.000Z
|
# HackerRank "Set .intersection() Operation":
# read two sets of integers from stdin (each preceded by its size)
# and print how many elements they have in common.
n1 = int(input())
s1 = set(map(int, input().split()))
n2 = int(input())
s2 = set(map(int, input().split()))

# `&` is the operator form of set.intersection().
print(len(s1 & s2))
| 23.333333
| 34
| 0.614286
| 23
| 140
| 3.73913
| 0.521739
| 0.372093
| 0.209302
| 0.325581
| 0.44186
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048
| 0.107143
| 140
| 5
| 35
| 28
| 0.64
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
dbae732d3c943359b44f25f2a87f5f6ae79e1210
| 139
|
py
|
Python
|
donordarah/admin.py
|
rafiatha09/berlapan
|
31fc032fbbab6d67b6c20db2eb5626d844e47ae0
|
[
"Unlicense"
] | null | null | null |
donordarah/admin.py
|
rafiatha09/berlapan
|
31fc032fbbab6d67b6c20db2eb5626d844e47ae0
|
[
"Unlicense"
] | null | null | null |
donordarah/admin.py
|
rafiatha09/berlapan
|
31fc032fbbab6d67b6c20db2eb5626d844e47ae0
|
[
"Unlicense"
] | null | null | null |
# Django admin wiring for the donordarah app.
from django.contrib import admin
# NOTE(review): NamaLayanan is imported but never registered below —
# presumably kept for a future registration; verify before removing.
from .models import NamaLayanan, DonorForm

# Expose DonorForm in the Django admin site.
admin.site.register(DonorForm)
# Register your models here.
| 17.375
| 42
| 0.805755
| 18
| 139
| 6.222222
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129496
| 139
| 7
| 43
| 19.857143
| 0.92562
| 0.18705
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
dbcebf75db67705d58a51bc45490e830a80f87a0
| 154
|
py
|
Python
|
wsgi.py
|
fontoberta/taktaan
|
b5780b80b7bcc40064fb66ebb08366c9d4e4b86d
|
[
"MIT"
] | null | null | null |
wsgi.py
|
fontoberta/taktaan
|
b5780b80b7bcc40064fb66ebb08366c9d4e4b86d
|
[
"MIT"
] | null | null | null |
wsgi.py
|
fontoberta/taktaan
|
b5780b80b7bcc40064fb66ebb08366c9d4e4b86d
|
[
"MIT"
] | null | null | null |
# WSGI entry point: build the app via the package's factory.
# The "# pragma: no cover" markers exclude these bootstrap lines from coverage.
from application import create_app  # pragma: no cover

app = create_app()  # pragma: no cover

if __name__ == '__main__':  # pragma: no cover
    # Run the built-in development server when executed directly.
    app.run()
| 22
| 53
| 0.694805
| 22
| 154
| 4.409091
| 0.545455
| 0.247423
| 0.402062
| 0.350515
| 0.453608
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.194805
| 154
| 6
| 54
| 25.666667
| 0.782258
| 0.324675
| 0
| 0
| 0
| 0
| 0.08
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9193e5661e51e4be0481961a73f5fbd0e194c9bc
| 2,583
|
py
|
Python
|
resources/gfx/QXImageDB.py
|
kitiv/DeepFaceLive
|
ca3a005917ae067576b795d8b9fef5a8b3483010
|
[
"MIT"
] | 4
|
2021-07-23T16:34:24.000Z
|
2022-03-01T18:31:59.000Z
|
resources/gfx/QXImageDB.py
|
kitiv/DeepFaceLive
|
ca3a005917ae067576b795d8b9fef5a8b3483010
|
[
"MIT"
] | 1
|
2022-02-08T01:29:03.000Z
|
2022-02-08T01:29:03.000Z
|
resources/gfx/QXImageDB.py
|
kitiv/DeepFaceLive
|
ca3a005917ae067576b795d8b9fef5a8b3483010
|
[
"MIT"
] | 1
|
2021-09-06T15:05:28.000Z
|
2021-09-06T15:05:28.000Z
|
from pathlib import Path
from PyQt6.QtCore import *
from PyQt6.QtGui import *
from xlib.qt.gui.from_file import QXImage_from_file


class QXImageDB:
    """Cached loader for the image resources shipped in ./images.

    Every accessor returns the image produced by QXImage_from_file for a
    given file (optionally tinted with a color); results are memoized per
    (filename, color) pair so each combination is loaded from disk once.
    """

    # (filename, color-key) -> loaded image, shared by all accessors.
    cached = {}

    # All members are used as class-level accessors (QXImageDB.xxx(...));
    # @staticmethod makes that explicit and keeps instance calls safe too.
    @staticmethod
    def _get(filename: str, color=None):
        """Load 'images/<filename>' with `color`, caching the result.

        A QColor is keyed by its RGBA tuple so two equal QColor objects
        share one cache entry; any other color value is used as-is.
        """
        if isinstance(color, QColor):
            key = (filename, color.getRgb())
        else:
            key = (filename, color)
        result = QXImageDB.cached.get(key, None)
        if result is None:
            result = QXImageDB.cached[key] = QXImage_from_file(Path(__file__).parent / 'images' / filename, color)
        return result

    @staticmethod
    def add_circle_outline(color='black'): return QXImageDB._get('add-circle-outline.png', color)
    @staticmethod
    def close_outline(color='black'): return QXImageDB._get('close-outline.png', color)
    @staticmethod
    def eye_outline(color='black'): return QXImageDB._get('eye-outline.png', color)
    @staticmethod
    def folder_open_outline(color='black'): return QXImageDB._get('folder-open-outline.png', color)
    @staticmethod
    def open_outline(color='black'): return QXImageDB._get('open-outline.png', color)
    @staticmethod
    def information_circle_outline(color='black'): return QXImageDB._get('information-circle-outline.png', color)
    @staticmethod
    def play_circle_outline(color='black'): return QXImageDB._get('play-circle-outline.png', color)
    @staticmethod
    def play_back_circle_outline(color='black'): return QXImageDB._get('play-back-circle-outline.png', color)
    @staticmethod
    def play_forward_circle_outline(color='black'): return QXImageDB._get('play-forward-circle-outline.png', color)
    @staticmethod
    def play_skip_back_circle_outline(color='black'): return QXImageDB._get('play-skip-back-circle-outline.png', color)
    @staticmethod
    def play_skip_forward_circle_outline(color='black'): return QXImageDB._get('play-skip-forward-circle-outline.png', color)
    @staticmethod
    def pause_circle_outline(color='black'): return QXImageDB._get('pause-circle-outline.png', color)
    @staticmethod
    def power_outline(color='black'): return QXImageDB._get('power-outline.png', color)
    @staticmethod
    def reload_outline(color='black'): return QXImageDB._get('reload-outline.png', color)
    @staticmethod
    def settings_outline(color='black'): return QXImageDB._get('settings-outline.png', color)
    @staticmethod
    def settings_reset_outline(color='black'): return QXImageDB._get('settings-reset-outline.png', color)
    @staticmethod
    def warning_outline(color='black'): return QXImageDB._get('warning-outline.png', color)
    @staticmethod
    def app_icon(): return QXImageDB._get('app_icon.png', None)
    @staticmethod
    def logo_barclay_stone(): return QXImageDB._get('logo_barclay_stone.png', None)
    @staticmethod
    def logo_exmo(): return QXImageDB._get('logo_exmo.png', None)
    @staticmethod
    def splash_deepfacelive(): return QXImageDB._get('splash_deepfacelive.png', None)
| 50.647059
| 125
| 0.729772
| 346
| 2,583
| 5.234104
| 0.17341
| 0.173937
| 0.208724
| 0.215903
| 0.582551
| 0.503037
| 0.3545
| 0.140806
| 0.115958
| 0
| 0
| 0.000895
| 0.135114
| 2,583
| 50
| 126
| 51.66
| 0.809758
| 0
| 0
| 0
| 0
| 0
| 0.216415
| 0.124274
| 0
| 0
| 0
| 0
| 0
| 1
| 0.611111
| false
| 0
| 0.111111
| 0.583333
| 0.805556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
91c78bd05451248cbfad92bfecbd6a50ec18e975
| 214
|
py
|
Python
|
bjcpy/__init__.py
|
blackelbow/bjcpy
|
98d6d93c9160f2c52b0b4cbc15bf78fef4ebee96
|
[
"MIT"
] | null | null | null |
bjcpy/__init__.py
|
blackelbow/bjcpy
|
98d6d93c9160f2c52b0b4cbc15bf78fef4ebee96
|
[
"MIT"
] | null | null | null |
bjcpy/__init__.py
|
blackelbow/bjcpy
|
98d6d93c9160f2c52b0b4cbc15bf78fef4ebee96
|
[
"MIT"
] | 1
|
2020-06-22T23:43:08.000Z
|
2020-06-22T23:43:08.000Z
|
from .all_styles import all_styles
from .describe_style import describe_style
from .find_style import find_style
from .fits_style import fits_style
from .is_style import is_style
from .what_style import what_style
| 30.571429
| 42
| 0.859813
| 36
| 214
| 4.777778
| 0.277778
| 0.319767
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11215
| 214
| 6
| 43
| 35.666667
| 0.905263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
91d223c42d82c9c9c9be515d66e73645caaa8757
| 108
|
py
|
Python
|
app/requests.py
|
kmunge/blog
|
73d83b9c4cf9b8c7a1a631d63be77a00ea469f1f
|
[
"Unlicense"
] | null | null | null |
app/requests.py
|
kmunge/blog
|
73d83b9c4cf9b8c7a1a631d63be77a00ea469f1f
|
[
"Unlicense"
] | 1
|
2021-06-01T23:45:51.000Z
|
2021-06-01T23:45:51.000Z
|
app/requests.py
|
kmunge/blog
|
73d83b9c4cf9b8c7a1a631d63be77a00ea469f1f
|
[
"Unlicense"
] | null | null | null |
from app import app
import urllib.request,json
from . import Post

# NOTE(review): despite the name, this holds the random-quotes endpoint URL
# read from the Flask config, not an API key — consider renaming upstream.
api_key = app.config['RANDOM_QUOTES_URL']
| 21.6
| 41
| 0.796296
| 18
| 108
| 4.611111
| 0.722222
| 0.216867
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12037
| 108
| 5
| 41
| 21.6
| 0.873684
| 0
| 0
| 0
| 0
| 0
| 0.155963
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
37dfd80021ee1ce9384a23e47de626504b612acf
| 152
|
py
|
Python
|
learning_object/collections/manager/utils/__init__.py
|
dsvalenciah/ROAp
|
24cbff0e719c5009ec1f1e7190924d4d9297e992
|
[
"MIT"
] | 4
|
2018-04-23T00:04:01.000Z
|
2018-10-28T22:56:51.000Z
|
learning_object/collections/manager/utils/__init__.py
|
dsvalenciah/ROAp
|
24cbff0e719c5009ec1f1e7190924d4d9297e992
|
[
"MIT"
] | 23
|
2017-12-22T08:27:35.000Z
|
2021-12-13T19:57:35.000Z
|
learning_object/collections/manager/utils/__init__.py
|
dsvalenciah/ROAp
|
24cbff0e719c5009ec1f1e7190924d4d9297e992
|
[
"MIT"
] | 1
|
2020-06-03T02:07:26.000Z
|
2020-06-03T02:07:26.000Z
|
from .auth import Authenticate
from .i18n_error import ErrorTranslator
from .req_to_dict import req_to_dict
from .switch_language import SwitchLanguage
| 30.4
| 43
| 0.868421
| 22
| 152
| 5.727273
| 0.590909
| 0.079365
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014706
| 0.105263
| 152
| 4
| 44
| 38
| 0.911765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
53033a0297e75fb7097c6e756bc332edba60f396
| 170
|
py
|
Python
|
shipmaster/server/asgi.py
|
damoti/shipmaster
|
cf596be7ea689c26c4bf47acb67dfd15169d3c46
|
[
"BSD-3-Clause"
] | 8
|
2016-05-26T15:31:03.000Z
|
2019-12-26T13:19:08.000Z
|
shipmaster/server/asgi.py
|
damoti/shipmaster
|
cf596be7ea689c26c4bf47acb67dfd15169d3c46
|
[
"BSD-3-Clause"
] | 3
|
2016-06-06T16:21:38.000Z
|
2018-01-04T15:50:27.000Z
|
shipmaster/server/asgi.py
|
damoti/shipmaster
|
cf596be7ea689c26c4bf47acb67dfd15169d3c46
|
[
"BSD-3-Clause"
] | 1
|
2019-12-26T13:19:16.000Z
|
2019-12-26T13:19:16.000Z
|
import os
from channels.asgi import get_channel_layer

# Point Django at the project settings before the channel layer is built;
# setdefault keeps any value already present in the environment.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shipmaster.server.settings")
channel_layer = get_channel_layer()
| 24.285714
| 77
| 0.835294
| 23
| 170
| 5.869565
| 0.652174
| 0.266667
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076471
| 170
| 6
| 78
| 28.333333
| 0.859873
| 0
| 0
| 0
| 0
| 0
| 0.282353
| 0.282353
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
530da1a18557281d9b7aa91b02b69eaeba7d5ee5
| 22,460
|
py
|
Python
|
pyGoogleTranslate.py
|
Animenosekai/python-google-translate
|
15e4712aab73f9a397575acf8b18cab041ab3345
|
[
"MIT"
] | 3
|
2020-09-11T16:43:11.000Z
|
2021-05-12T10:04:12.000Z
|
pyGoogleTranslate.py
|
Animenosekai/python-google-translate
|
15e4712aab73f9a397575acf8b18cab041ab3345
|
[
"MIT"
] | null | null | null |
pyGoogleTranslate.py
|
Animenosekai/python-google-translate
|
15e4712aab73f9a397575acf8b18cab041ab3345
|
[
"MIT"
] | 1
|
2021-05-10T01:49:19.000Z
|
2021-05-10T01:49:19.000Z
|
"""
pyGoogleTranslate
--> A Google Translate webpage parser for Python 3
⚠️ Do not forget to set the used browser with browser()\n
⚠️ Do not forget to call browser_kill() after using pyGoogleTranslate (at the end of your script/when you stop your script)\n
Without browser_kill(), your browser will stay opened until you close it in your activity monitor (unless it is phantomjs).
© Anime no Sekai - 2020
"""
import warnings
import psutil
from lifeeasy import write_file, today, current_time
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from .internal.caching import search_translation_cache, add_translation_cache
from .internal.language_code import verify_language_code
class BrowserError(Exception):
"""
When the browser isn't available.
"""
def __init__(self, msg=None):
self.msg = msg
def __str__(self):
exception_msg = f"\n\n⚠️ ⚠️ ⚠️\n{self.msg}\n"
return exception_msg
warnings.filterwarnings('ignore')
driver_name = ''
driver = None
connected = False
last_translation = ''
def browser(browser_name, executable_path="PATH", no_sandbox=False):
"""
To choose the headless browser used by pyGoogleTranslate.\n
<executable_path> sets the executable path for your browser.\n
If <executable_path> is empty, pyGoogleTranslate will consider that the browser driver/executable is in your PATH (for example if you downloaded the driver with Homebrew).\n
Browser options:
Firefox
Chrome
PhantomJS
⚠️ Do not forget to call browser_kill() after using pyGoogleTranslate (at the end of your script/when you stop your script)\n
Without browser_kill(), your browser will stay opened until you close it in your activity monitor (unless it is phantomjs).
"""
global driver
global driver_name
global connected
if connected:
browser_kill()
if browser_name.lower() == 'firefox':
from selenium.webdriver.firefox.options import Options
options = Options()
options.headless = True
if executable_path == 'PATH':
driver = webdriver.Firefox(options=options)
connected = True
else:
driver = webdriver.Firefox(options=options, executable_path=executable_path)
connected = True
driver_name = 'Firefox'
elif browser_name.lower() == 'chrome':
from selenium.webdriver.chrome.options import Options
chrome_options = Options()
chrome_options.headless = True
chrome_options.add_argument("--disable-gpu")
chrome_options.add_argument("--disable-extensions")
if no_sandbox:
chrome_options.add_argument("--no-sandbox")
if executable_path == 'PATH':
driver = webdriver.Chrome(options=chrome_options)
connected = True
else:
driver = webdriver.Chrome(options=chrome_options, executable_path=executable_path)
connected = True
driver_name = 'Chrome'
elif browser_name.lower() == 'phantom':
if executable_path == 'PATH':
driver = webdriver.PhantomJS()
connected = True
else:
driver = webdriver.PhantomJS(executable_path=executable_path)
connected = True
driver_name = 'PhantomJS'
elif browser_name.lower() == 'phantomjs':
if executable_path == 'PATH':
driver = webdriver.PhantomJS()
connected = True
else:
driver = webdriver.PhantomJS(executable_path=executable_path)
connected = True
driver_name = 'PhantomJS'
else:
raise BrowserError(f'{browser_name} is not supported yet.')
def browser_kill():
"""
Kills the browser process in use.
"""
global connected
if connected:
if driver_name == 'Chrome' or driver_name == 'Firefox':
driver_process = psutil.Process(driver.service.process.pid)
if driver_process.is_running():
process = driver_process.children()
if process:
process = process[0]
if process.is_running():
driver.quit()
else:
process.kill()
connected = False
def translate(text, destination_language, source_language="auto", cache=False, debug=False):
"""
Translates the given text into the chosen language by scraping Google Translate with Selenium.
Returns a string with the text translated.\n
Returns "An error occured while translating: translation not found." if the translation was not found in the webpage. This might come from a mistyped language code.
"""
from .internal.domain import gt_domain
global last_translation
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Starting Translation...\n', append=True)
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Searching Caches...\n', append=True)
cache_result = search_translation_cache(source_language=source_language, destination_language=destination_language, source=text)
if not cache_result is None:
if debug:
line_number = cache_result['line_number']
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Translation found in Caches (line {line_number})\n', append=True)
return cache_result['result']
else:
if driver is None:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - No driver selected\n', append=True)
raise BrowserError("Browser is not set yet.\n Please set it with browser()")
if not connected:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Driver disconnected, last driver: {driver_name}\n', append=True)
raise BrowserError(f'You disconnected the last browser in use ({driver_name}).\n Please reconnect one with browser()')
else:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - First attempt url is: https://{gt_domain}/?hl=en#view=home&op=translate&sl={verify_language_code(source_language)}&tl={verify_language_code(destination_language)}&text={str(text)}\n', append=True)
driver.get(f"https://{gt_domain}/?hl=en#view=home&op=translate&sl={verify_language_code(source_language)}&tl={verify_language_code(destination_language)}&text={str(text)}")
try:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Getting DOM Element by Class Name (tlid-translation)\n', append=True)
result = driver.find_element_by_class_name("tlid-translation")
if result.text == last_translation or result.text == str(last_translation + '...'):
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Translation not finished detected... Refreshing page before new attempt...\n', append=True)
driver.refresh()
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Getting DOM Element by Class Name (tlid-translation)\n', append=True)
result = driver.find_element_by_class_name("tlid-translation")
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Setting last_translation global variable to new translation...\n', append=True)
last_translation = str(result.text)
if cache:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Adding result to cache...\n', append=True)
add_translation_cache(source_language=source_language, destination_language=destination_language, source=text, result=str(result.text))
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Returning value... {result.text}\n', append=True)
return str(result.text)
except NoSuchElementException:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Element not found on page...\n', append=True)
try:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] New attempt...\n', append=True)
driver.get(f"https://{gt_domain}/?hl=en#view=home&op=translate&sl={verify_language_code(source_language)}&tl={verify_language_code(destination_language)}&text={str(text)}")
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Getting DOM Element by Class Name (tlid-translation)\n', append=True)
result = driver.find_element_by_class_name("tlid-translation")
if result.text == last_translation or result.text == str(last_translation + '...'):
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Translation not finished detected... Refreshing page before new attempt...\n', append=True)
driver.refresh()
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Getting DOM Element by Class Name (tlid-translation)\n', append=True)
result = driver.find_element_by_class_name("tlid-translation")
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Setting last_translation global variable to new translation...\n', append=True)
last_translation = str(result.text)
if cache:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Adding result to cache...\n', append=True)
add_translation_cache(source_language=source_language, destination_language=destination_language, source=text, result=str(result.text))
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Returning value... {result.text}\n', append=True)
return str(result.text)
except NoSuchElementException:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Element not found on page...\n', append=True)
try:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 3] New attempt...\n', append=True)
driver.get(f"https://{gt_domain}/?hl=en#view=home&op=translate&sl={verify_language_code(source_language)}&tl={verify_language_code(destination_language)}&text={str(text)}")
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 3] Translation not finished detected... Refreshing page before new attempt...\n', append=True)
driver.refresh()
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 3] Getting DOM Element by Class Name (tlid-translation)\n', append=True)
result = driver.find_element_by_class_name("tlid-translation")
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 3] Setting last_translation global variable to new translation...\n', append=True)
last_translation = str(result.text)
if cache:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Adding result to cache...\n', append=True)
add_translation_cache(source_language=source_language, destination_language=destination_language, source=text, result=str(result.text))
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 3] Returning value... {result.text}\n', append=True)
return str(result.text)
except NoSuchElementException:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 3] Element not found, aborting...\n', append=True)
return "An error occured while translating: translation not found."
except Exception as e:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 3] Unknown error\n', append=True)
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Error details: {str(e)}\n', append=True)
return "An error occured while translating: unknown error."
except Exception as e:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Unknown error\n', append=True)
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Error details: {str(e)}\n', append=True)
return "An error occured while translating: unknown error."
except Exception as e:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Unknown error\n', append=True)
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Error details: {str(e)}\n', append=True)
try:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] New attempt...\n', append=True)
driver.get(f"https://{gt_domain}/?hl=en#view=home&op=translate&sl={verify_language_code(source_language)}&tl={verify_language_code(destination_language)}&text={str(text)}")
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Getting DOM Element by Class Name (tlid-translation)\n', append=True)
result = driver.find_element_by_class_name("tlid-translation")
if result.text == last_translation or result.text == str(last_translation + '...'):
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Translation not finished detected... Refreshing page before new attempt...\n', append=True)
driver.refresh()
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Getting DOM Element by Class Name (tlid-translation)\n', append=True)
result = driver.find_element_by_class_name("tlid-translation")
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Setting last_translation global variable to new translation...\n', append=True)
last_translation = str(result.text)
if cache:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Adding result to cache...\n', append=True)
add_translation_cache(source_language=source_language, destination_language=destination_language, source=text, result=str(result.text))
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - [Attempt 2] Returning value... {result.text}\n', append=True)
return str(result.text)
except Exception as e:
if debug:
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Unknown error\n', append=True)
write_file('logs.txt', today() + ' ' + current_time() + f' text={text}|sl={source_language}|dl={destination_language} - Error details: {str(e)}\n', append=True)
return "An error occured while translating: unknown error."
def detect_language(text, result_language='en'):
"""
Returns the language of the given text.
"""
from .internal.domain import gt_domain
if driver is None:
raise BrowserError("Browser is not set yet.\n Please set it with browser()")
if not connected:
raise BrowserError(f'You disconnected the last browser in use ({driver_name}).\n Please reconnect one with browser()')
else:
try:
driver.get(f"https://{gt_domain}/?hl={verify_language_code(result_language)}#view=home&op=translate&sl=auto&tl=en&text={str(text)}")
driver.refresh()
raw_result = driver.find_element_by_class_name("jfk-button-checked").get_attribute('innerHTML')
result = raw_result.split(' - ')[0]
if result == 'Detect language':
result = detect_language(text, result_language=result_language)
return str(result)
except:
return "An error occured while detecting the language.\nPlease try again."
def transliterate(text, source_language="auto"):
"""
Returns the transliteration provided by Google Translate (if available)\n
i.e Ohayou --> おはよう / おはよう --> Ohayou
"""
from .internal.domain import gt_domain
if driver is None:
raise BrowserError("Browser is not set yet.\n Please set it with browser()")
if not connected:
raise BrowserError(f'You disconnected the last browser in use ({driver_name}).\n Please reconnect one with browser()')
else:
try:
driver.get(f'https://{gt_domain}/#view=home&op=translate&sl={verify_language_code(source_language)}&tl=en&text={str(text)}')
driver.refresh()
result = driver.find_element_by_class_name('tlid-transliteration-content')
return str(result.text)
except:
return 'not available'
def definition(text, source_language="auto"):
"""
Returns the word type (i.e Interjection, Noun), defintion (if available) and sentence example where the word could be used (if available)
"""
from .internal.domain import gt_domain
if driver is None:
raise BrowserError("Browser is not set yet.\n Please set it with browser()")
if not connected:
raise BrowserError(f'You disconnected the last browser in use ({driver_name}).\n Please reconnect one with browser()')
else:
driver.get(f'https://{gt_domain}/#view=home&op=translate&sl={verify_language_code(source_language)}&tl=en&text={str(text)}')
driver.refresh()
final_dict = {}
try:
word_type = driver.find_element_by_class_name('gt-cd-pos').text
except:
word_type = 'not available.'
try:
word_definition = driver.find_element_by_class_name('gt-def-row').text
except:
word_definition = 'not available.'
try:
example = driver.find_element_by_class_name('gt-def-example').text
except:
example = 'not available.'
final_dict['word_type'] = str(word_type)
final_dict['definition'] = str(word_definition)
final_dict['example'] = str(example)
return final_dict
| 65.672515
| 334
| 0.610329
| 2,618
| 22,460
| 5.079068
| 0.094729
| 0.066331
| 0.052944
| 0.051741
| 0.775513
| 0.75581
| 0.734602
| 0.727533
| 0.714597
| 0.702339
| 0
| 0.001699
| 0.26634
| 22,460
| 342
| 335
| 65.672515
| 0.804467
| 0.073998
| 0
| 0.64311
| 0
| 0.141343
| 0.36813
| 0.122225
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028269
| false
| 0
| 0.045936
| 0
| 0.130742
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5326dfcb968424799e67cee9f18bfb8191331257
| 38
|
py
|
Python
|
python/testData/completion/mockPatchObject1Py2/a.after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/completion/mockPatchObject1Py2/a.after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/completion/mockPatchObject1Py2/a.after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
# IDE completion test fixture (path suggests `a.after.py`: the expected buffer
# state after completing `patch.object`). Do not reformat — the exact text is
# the test's expected output.
from mock import patch
patch.object()
| 12.666667
| 22
| 0.789474
| 6
| 38
| 5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 38
| 3
| 23
| 12.666667
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
5361fcf22c6aa34afe9cb92d04a6986909eb2926
| 15,980
|
py
|
Python
|
ply-3.11/example/BASIC/parsetab.py
|
Francisco95/SimpleInterpreter
|
526150efc457680c9a4c50d8bdd2d5c6f8b04449
|
[
"MIT"
] | 1
|
2019-05-28T17:59:09.000Z
|
2019-05-28T17:59:09.000Z
|
ply-3.11/example/BASIC/parsetab.py
|
Francisco95/SimpleInterpreter
|
526150efc457680c9a4c50d8bdd2d5c6f8b04449
|
[
"MIT"
] | null | null | null |
ply-3.11/example/BASIC/parsetab.py
|
Francisco95/SimpleInterpreter
|
526150efc457680c9a4c50d8bdd2d5c6f8b04449
|
[
"MIT"
] | null | null | null |
# parsetab.py
# This file is automatically generated. Do not edit.
# pylint: disable=W,C,R
_tabversion = '3.10'
_lr_method = 'LALR'
_lr_signature = 'leftPLUSMINUSleftTIMESDIVIDEleftPOWERrightUMINUSCOMMA DATA DEF DIM DIVIDE END EQUALS FLOAT FOR GE GOSUB GOTO GT ID IF INTEGER LE LET LIST LPAREN LT MINUS NE NEW NEWLINE NEXT PLUS POWER PRINT READ REM RETURN RPAREN RUN SEMI STEP STOP STRING THEN TIMES TOprogram : program statement\n | statementprogram : errorstatement : INTEGER command NEWLINEstatement : RUN NEWLINE\n | LIST NEWLINE\n | NEW NEWLINEstatement : INTEGER NEWLINEstatement : INTEGER error NEWLINEstatement : NEWLINEcommand : LET variable EQUALS exprcommand : LET variable EQUALS errorcommand : READ varlistcommand : READ errorcommand : DATA numlistcommand : DATA errorcommand : PRINT plist optendcommand : PRINT erroroptend : COMMA \n | SEMI\n |command : PRINTcommand : GOTO INTEGERcommand : GOTO errorcommand : IF relexpr THEN INTEGERcommand : IF error THEN INTEGERcommand : IF relexpr THEN errorcommand : FOR ID EQUALS expr TO expr optstepcommand : FOR ID EQUALS error TO expr optstepcommand : FOR ID EQUALS expr TO error optstepcommand : FOR ID EQUALS expr TO expr STEP erroroptstep : STEP expr\n | emptycommand : NEXT IDcommand : NEXT errorcommand : ENDcommand : REMcommand : STOPcommand : DEF ID LPAREN ID RPAREN EQUALS exprcommand : DEF ID LPAREN ID RPAREN EQUALS errorcommand : DEF ID LPAREN error RPAREN EQUALS exprcommand : GOSUB INTEGERcommand : GOSUB errorcommand : RETURNcommand : DIM dimlistcommand : DIM errordimlist : dimlist COMMA dimitem\n | dimitemdimitem : ID LPAREN INTEGER RPARENdimitem : ID LPAREN INTEGER COMMA INTEGER RPARENexpr : expr PLUS expr\n | expr MINUS expr\n | expr TIMES expr\n | expr DIVIDE expr\n | expr POWER exprexpr : INTEGER\n | FLOATexpr : variableexpr : LPAREN expr RPARENexpr : MINUS expr %prec UMINUSrelexpr : expr LT expr\n | expr LE expr\n | expr GT expr\n | expr GE expr\n | expr EQUALS expr\n | expr NE exprvariable : ID\n | ID LPAREN expr RPAREN\n | ID LPAREN expr COMMA expr RPARENvarlist : varlist COMMA variable\n | variablenumlist : numlist COMMA number\n | 
numbernumber : INTEGER\n | FLOATnumber : MINUS INTEGER\n | MINUS FLOATplist : plist COMMA pitem\n | pitempitem : STRINGpitem : STRING exprpitem : exprempty : '
_lr_action_items = {'error':([0,4,14,15,16,17,18,20,25,27,69,86,94,95,127,137,142,],[3,12,36,39,45,55,57,61,64,66,99,111,120,122,135,148,152,]),'INTEGER':([0,1,2,3,5,9,11,15,16,17,18,25,28,29,30,31,32,43,47,49,53,69,70,72,76,79,80,81,82,83,86,87,88,89,90,91,92,93,94,97,126,127,128,132,137,138,142,145,],[4,4,-2,-3,-10,-1,-8,41,50,54,50,63,-5,-6,-7,-4,-9,73,50,50,50,50,50,41,50,50,50,50,50,50,110,112,50,50,50,50,50,50,50,124,50,50,50,139,50,50,50,50,]),'RUN':([0,1,2,3,5,9,11,28,29,30,31,32,],[6,6,-2,-3,-10,-1,-8,-5,-6,-7,-4,-9,]),'LIST':([0,1,2,3,5,9,11,28,29,30,31,32,],[7,7,-2,-3,-10,-1,-8,-5,-6,-7,-4,-9,]),'NEW':([0,1,2,3,5,9,11,28,29,30,31,32,],[8,8,-2,-3,-10,-1,-8,-5,-6,-7,-4,-9,]),'NEWLINE':([0,1,2,3,4,5,6,7,8,9,10,11,12,16,21,22,23,26,28,29,30,31,32,34,35,36,37,38,39,40,41,42,44,45,46,47,48,50,51,52,54,55,60,61,63,64,65,66,67,73,74,75,76,77,78,84,98,99,101,102,103,104,105,106,107,108,109,110,111,112,123,125,131,134,135,136,140,141,143,144,146,147,148,149,150,151,152,],[5,5,-2,-3,11,-10,28,29,30,-1,31,-8,32,-22,-36,-37,-38,-44,-5,-6,-7,-4,-9,-67,-13,-14,-71,-15,-16,-73,-74,-75,-21,-18,-79,-80,-82,-56,-57,-58,-23,-24,-34,-35,-42,-43,-45,-46,-48,-76,-77,-17,-19,-20,-81,-60,-11,-12,-70,-72,-78,-51,-52,-53,-54,-55,-59,-25,-27,-26,-47,-68,-49,-83,-83,-83,-69,-28,-33,-30,-29,-39,-40,-41,-50,-32,-31,]),'$end':([1,2,3,5,9,11,28,29,30,31,32,],[0,-2,-3,-10,-1,-8,-5,-6,-7,-4,-9,]),'LET':([4,],[13,]),'READ':([4,],[14,]),'DATA':([4,],[15,]),'PRINT':([4,],[16,]),'GOTO':([4,],[17,]),'IF':([4,],[18,]),'FOR':([4,],[19,]),'NEXT':([4,],[20,]),'END':([4,],[21,]),'REM':([4,],[22,]),'STOP':([4,],[23,]),'DEF':([4,],[24,]),'GOSUB':([4,],[25,]),'RETURN':([4,],[26,]),'DIM':([4,],[27,]),'ID':([13,14,16,18,19,20,24,27,47,49,53,69,70,71,76,79,80,81,82,83,88,89,90,91,92,93,94,95,96,126,127,128,137,138,142,145,],[34,34,34,34,59,60,62,68,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,121,68,34,34,34,34,34,34,34,]),'FLOAT':([15,16,18,43,47,49,53,69,70,72,76,79,80,81,82,83,88,89,90,91,9
2,93,94,126,127,128,137,138,142,145,],[42,51,51,74,51,51,51,51,51,42,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,]),'MINUS':([15,16,18,34,47,48,49,50,51,52,53,58,69,70,72,76,78,79,80,81,82,83,84,85,88,89,90,91,92,93,94,98,100,104,105,106,107,108,109,113,114,115,116,117,118,119,125,126,127,128,133,134,136,137,138,140,142,145,147,149,151,],[43,49,49,-67,49,80,49,-56,-57,-58,49,80,49,49,43,49,80,49,49,49,49,49,-60,80,49,49,49,49,49,49,49,80,80,-51,-52,-53,-54,-55,-59,80,80,80,80,80,80,80,-68,49,49,49,80,80,80,49,49,-69,49,49,80,80,80,]),'STRING':([16,76,],[47,47,]),'LPAREN':([16,18,34,47,49,53,62,68,69,70,76,79,80,81,82,83,88,89,90,91,92,93,94,126,127,128,137,138,142,145,],[53,53,70,53,53,53,95,97,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,]),'EQUALS':([33,34,50,51,52,58,59,84,104,105,106,107,108,109,125,129,130,140,],[69,-67,-56,-57,-58,92,94,-60,-51,-52,-53,-54,-55,-59,-68,137,138,-69,]),'COMMA':([34,35,37,38,40,41,42,44,46,47,48,50,51,52,65,67,73,74,78,84,100,101,102,103,104,105,106,107,108,109,123,124,125,131,140,150,],[-67,71,-71,72,-73,-74,-75,76,-79,-80,-82,-56,-57,-58,96,-48,-76,-77,-81,-60,126,-70,-72,-78,-51,-52,-53,-54,-55,-59,-47,132,-68,-49,-69,-50,]),'PLUS':([34,48,50,51,52,58,78,84,85,98,100,104,105,106,107,108,109,113,114,115,116,117,118,119,125,133,134,136,140,147,149,151,],[-67,79,-56,-57,-58,79,79,-60,79,79,79,-51,-52,-53,-54,-55,-59,79,79,79,79,79,79,79,-68,79,79,79,-69,79,79,79,]),'TIMES':([34,48,50,51,52,58,78,84,85,98,100,104,105,106,107,108,109,113,114,115,116,117,118,119,125,133,134,136,140,147,149,151,],[-67,81,-56,-57,-58,81,81,-60,81,81,81,81,81,-53,-54,-55,-59,81,81,81,81,81,81,81,-68,81,81,81,-69,81,81,81,]),'DIVIDE':([34,48,50,51,52,58,78,84,85,98,100,104,105,106,107,108,109,113,114,115,116,117,118,119,125,133,134,136,140,147,149,151,],[-67,82,-56,-57,-58,82,82,-60,82,82,82,82,82,-53,-54,-55,-59,82,82,82,82,82,82,82,-68,82,82,82,-69,82,82,82,]),'POWER':([34,48,50,51,52,58,78,84,85,98,100,104,105,106,
107,108,109,113,114,115,116,117,118,119,125,133,134,136,140,147,149,151,],[-67,83,-56,-57,-58,83,83,-60,83,83,83,83,83,83,83,-55,-59,83,83,83,83,83,83,83,-68,83,83,83,-69,83,83,83,]),'SEMI':([34,44,46,47,48,50,51,52,78,84,103,104,105,106,107,108,109,125,140,],[-67,77,-79,-80,-82,-56,-57,-58,-81,-60,-78,-51,-52,-53,-54,-55,-59,-68,-69,]),'LT':([34,50,51,52,58,84,104,105,106,107,108,109,125,140,],[-67,-56,-57,-58,88,-60,-51,-52,-53,-54,-55,-59,-68,-69,]),'LE':([34,50,51,52,58,84,104,105,106,107,108,109,125,140,],[-67,-56,-57,-58,89,-60,-51,-52,-53,-54,-55,-59,-68,-69,]),'GT':([34,50,51,52,58,84,104,105,106,107,108,109,125,140,],[-67,-56,-57,-58,90,-60,-51,-52,-53,-54,-55,-59,-68,-69,]),'GE':([34,50,51,52,58,84,104,105,106,107,108,109,125,140,],[-67,-56,-57,-58,91,-60,-51,-52,-53,-54,-55,-59,-68,-69,]),'NE':([34,50,51,52,58,84,104,105,106,107,108,109,125,140,],[-67,-56,-57,-58,93,-60,-51,-52,-53,-54,-55,-59,-68,-69,]),'RPAREN':([34,50,51,52,84,85,100,104,105,106,107,108,109,121,122,124,125,133,139,140,],[-67,-56,-57,-58,-60,109,125,-51,-52,-53,-54,-55,-59,129,130,131,-68,140,150,-69,]),'THEN':([34,50,51,52,56,57,84,104,105,106,107,108,109,113,114,115,116,117,118,125,140,],[-67,-56,-57,-58,86,87,-60,-51,-52,-53,-54,-55,-59,-61,-62,-63,-64,-65,-66,-68,-69,]),'TO':([34,50,51,52,84,104,105,106,107,108,109,119,120,125,140,],[-67,-56,-57,-58,-60,-51,-52,-53,-54,-55,-59,127,128,-68,-69,]),'STEP':([34,50,51,52,84,104,105,106,107,108,109,125,134,135,136,140,],[-67,-56,-57,-58,-60,-51,-52,-53,-54,-55,-59,-68,142,145,145,-69,]),}
# Expand the packed ACTION table emitted by PLY: _lr_action_items maps each
# terminal symbol to a pair of parallel lists (state numbers, actions).
# Invert it into _lr_action[state][terminal] = action so the parser can do
# direct dict lookups, then drop the packed form to free memory.
_lr_action = {}
for _k, _v in _lr_action_items.items():
    for _x,_y in zip(_v[0],_v[1]):
        if not _x in _lr_action: _lr_action[_x] = {}
        _lr_action[_x][_k] = _y
del _lr_action_items
_lr_goto_items = {'program':([0,],[1,]),'statement':([0,1,],[2,9,]),'command':([4,],[10,]),'variable':([13,14,16,18,47,49,53,69,70,71,76,79,80,81,82,83,88,89,90,91,92,93,94,126,127,128,137,138,142,145,],[33,37,52,52,52,52,52,52,52,101,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,]),'varlist':([14,],[35,]),'numlist':([15,],[38,]),'number':([15,72,],[40,102,]),'plist':([16,],[44,]),'pitem':([16,76,],[46,103,]),'expr':([16,18,47,49,53,69,70,76,79,80,81,82,83,88,89,90,91,92,93,94,126,127,128,137,138,142,145,],[48,58,78,84,85,98,100,48,104,105,106,107,108,113,114,115,116,117,118,119,133,134,136,147,149,151,151,]),'relexpr':([18,],[56,]),'dimlist':([27,],[65,]),'dimitem':([27,96,],[67,123,]),'optend':([44,],[75,]),'optstep':([134,135,136,],[141,144,146,]),'empty':([134,135,136,],[143,143,143,]),}
# Same expansion for the GOTO table: _lr_goto_items maps each nonterminal to
# parallel (state, goto-state) lists; invert into
# _lr_goto[state][nonterminal] = goto_state, then discard the packed data.
_lr_goto = {}
for _k, _v in _lr_goto_items.items():
    for _x, _y in zip(_v[0], _v[1]):
        if not _x in _lr_goto: _lr_goto[_x] = {}
        _lr_goto[_x][_k] = _y
del _lr_goto_items
_lr_productions = [
("S' -> program","S'",1,None,None,None),
('program -> program statement','program',2,'p_program','basparse.py',21),
('program -> statement','program',1,'p_program','basparse.py',22),
('program -> error','program',1,'p_program_error','basparse.py',41),
('statement -> INTEGER command NEWLINE','statement',3,'p_statement','basparse.py',49),
('statement -> RUN NEWLINE','statement',2,'p_statement_interactive','basparse.py',62),
('statement -> LIST NEWLINE','statement',2,'p_statement_interactive','basparse.py',63),
('statement -> NEW NEWLINE','statement',2,'p_statement_interactive','basparse.py',64),
('statement -> INTEGER NEWLINE','statement',2,'p_statement_blank','basparse.py',71),
('statement -> INTEGER error NEWLINE','statement',3,'p_statement_bad','basparse.py',78),
('statement -> NEWLINE','statement',1,'p_statement_newline','basparse.py',87),
('command -> LET variable EQUALS expr','command',4,'p_command_let','basparse.py',94),
('command -> LET variable EQUALS error','command',4,'p_command_let_bad','basparse.py',99),
('command -> READ varlist','command',2,'p_command_read','basparse.py',106),
('command -> READ error','command',2,'p_command_read_bad','basparse.py',111),
('command -> DATA numlist','command',2,'p_command_data','basparse.py',118),
('command -> DATA error','command',2,'p_command_data_bad','basparse.py',123),
('command -> PRINT plist optend','command',3,'p_command_print','basparse.py',130),
('command -> PRINT error','command',2,'p_command_print_bad','basparse.py',135),
('optend -> COMMA','optend',1,'p_optend','basparse.py',142),
('optend -> SEMI','optend',1,'p_optend','basparse.py',143),
('optend -> <empty>','optend',0,'p_optend','basparse.py',144),
('command -> PRINT','command',1,'p_command_print_empty','basparse.py',154),
('command -> GOTO INTEGER','command',2,'p_command_goto','basparse.py',161),
('command -> GOTO error','command',2,'p_command_goto_bad','basparse.py',166),
('command -> IF relexpr THEN INTEGER','command',4,'p_command_if','basparse.py',173),
('command -> IF error THEN INTEGER','command',4,'p_command_if_bad','basparse.py',178),
('command -> IF relexpr THEN error','command',4,'p_command_if_bad2','basparse.py',183),
('command -> FOR ID EQUALS expr TO expr optstep','command',7,'p_command_for','basparse.py',190),
('command -> FOR ID EQUALS error TO expr optstep','command',7,'p_command_for_bad_initial','basparse.py',195),
('command -> FOR ID EQUALS expr TO error optstep','command',7,'p_command_for_bad_final','basparse.py',200),
('command -> FOR ID EQUALS expr TO expr STEP error','command',8,'p_command_for_bad_step','basparse.py',205),
('optstep -> STEP expr','optstep',2,'p_optstep','basparse.py',212),
('optstep -> empty','optstep',1,'p_optstep','basparse.py',213),
('command -> NEXT ID','command',2,'p_command_next','basparse.py',223),
('command -> NEXT error','command',2,'p_command_next_bad','basparse.py',229),
('command -> END','command',1,'p_command_end','basparse.py',236),
('command -> REM','command',1,'p_command_rem','basparse.py',243),
('command -> STOP','command',1,'p_command_stop','basparse.py',250),
('command -> DEF ID LPAREN ID RPAREN EQUALS expr','command',7,'p_command_def','basparse.py',257),
('command -> DEF ID LPAREN ID RPAREN EQUALS error','command',7,'p_command_def_bad_rhs','basparse.py',262),
('command -> DEF ID LPAREN error RPAREN EQUALS expr','command',7,'p_command_def_bad_arg','basparse.py',267),
('command -> GOSUB INTEGER','command',2,'p_command_gosub','basparse.py',274),
('command -> GOSUB error','command',2,'p_command_gosub_bad','basparse.py',279),
('command -> RETURN','command',1,'p_command_return','basparse.py',286),
('command -> DIM dimlist','command',2,'p_command_dim','basparse.py',293),
('command -> DIM error','command',2,'p_command_dim_bad','basparse.py',298),
('dimlist -> dimlist COMMA dimitem','dimlist',3,'p_dimlist','basparse.py',305),
('dimlist -> dimitem','dimlist',1,'p_dimlist','basparse.py',306),
('dimitem -> ID LPAREN INTEGER RPAREN','dimitem',4,'p_dimitem_single','basparse.py',317),
('dimitem -> ID LPAREN INTEGER COMMA INTEGER RPAREN','dimitem',6,'p_dimitem_double','basparse.py',322),
('expr -> expr PLUS expr','expr',3,'p_expr_binary','basparse.py',329),
('expr -> expr MINUS expr','expr',3,'p_expr_binary','basparse.py',330),
('expr -> expr TIMES expr','expr',3,'p_expr_binary','basparse.py',331),
('expr -> expr DIVIDE expr','expr',3,'p_expr_binary','basparse.py',332),
('expr -> expr POWER expr','expr',3,'p_expr_binary','basparse.py',333),
('expr -> INTEGER','expr',1,'p_expr_number','basparse.py',339),
('expr -> FLOAT','expr',1,'p_expr_number','basparse.py',340),
('expr -> variable','expr',1,'p_expr_variable','basparse.py',345),
('expr -> LPAREN expr RPAREN','expr',3,'p_expr_group','basparse.py',350),
('expr -> MINUS expr','expr',2,'p_expr_unary','basparse.py',355),
('relexpr -> expr LT expr','relexpr',3,'p_relexpr','basparse.py',362),
('relexpr -> expr LE expr','relexpr',3,'p_relexpr','basparse.py',363),
('relexpr -> expr GT expr','relexpr',3,'p_relexpr','basparse.py',364),
('relexpr -> expr GE expr','relexpr',3,'p_relexpr','basparse.py',365),
('relexpr -> expr EQUALS expr','relexpr',3,'p_relexpr','basparse.py',366),
('relexpr -> expr NE expr','relexpr',3,'p_relexpr','basparse.py',367),
('variable -> ID','variable',1,'p_variable','basparse.py',374),
('variable -> ID LPAREN expr RPAREN','variable',4,'p_variable','basparse.py',375),
('variable -> ID LPAREN expr COMMA expr RPAREN','variable',6,'p_variable','basparse.py',376),
('varlist -> varlist COMMA variable','varlist',3,'p_varlist','basparse.py',388),
('varlist -> variable','varlist',1,'p_varlist','basparse.py',389),
('numlist -> numlist COMMA number','numlist',3,'p_numlist','basparse.py',400),
('numlist -> number','numlist',1,'p_numlist','basparse.py',401),
('number -> INTEGER','number',1,'p_number','basparse.py',413),
('number -> FLOAT','number',1,'p_number','basparse.py',414),
('number -> MINUS INTEGER','number',2,'p_number_signed','basparse.py',421),
('number -> MINUS FLOAT','number',2,'p_number_signed','basparse.py',422),
('plist -> plist COMMA pitem','plist',3,'p_plist','basparse.py',430),
('plist -> pitem','plist',1,'p_plist','basparse.py',431),
('pitem -> STRING','pitem',1,'p_item_string','basparse.py',440),
('pitem -> STRING expr','pitem',2,'p_item_string_expr','basparse.py',445),
('pitem -> expr','pitem',1,'p_item_expr','basparse.py',450),
('empty -> <empty>','empty',0,'p_empty','basparse.py',457),
]
| 140.175439
| 5,541
| 0.64005
| 3,004
| 15,980
| 3.330559
| 0.121505
| 0.082959
| 0.014393
| 0.016792
| 0.415192
| 0.342329
| 0.281959
| 0.210295
| 0.15972
| 0.13913
| 0
| 0.256973
| 0.084606
| 15,980
| 113
| 5,542
| 141.415929
| 0.426989
| 0.005257
| 0
| 0.019417
| 1
| 0.009709
| 0.466994
| 0.016047
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.029126
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5365a072e18ed354e58960a4be3116d1fba298bf
| 244
|
py
|
Python
|
tests/sample/my_plugin/__init__.py
|
ppwadhwa/npe2
|
a76c3d72fec5e6e0559a72fc3ab12bc20f19dfce
|
[
"BSD-3-Clause"
] | null | null | null |
tests/sample/my_plugin/__init__.py
|
ppwadhwa/npe2
|
a76c3d72fec5e6e0559a72fc3ab12bc20f19dfce
|
[
"BSD-3-Clause"
] | null | null | null |
tests/sample/my_plugin/__init__.py
|
ppwadhwa/npe2
|
a76c3d72fec5e6e0559a72fc3ab12bc20f19dfce
|
[
"BSD-3-Clause"
] | null | null | null |
def activate(context):
    """Sample npe2 plugin activation hook: registers two demo commands.

    NOTE(review): `context` is accepted but unused here — presumably the
    npe2 plugin-context object; confirm against the npe2 activation API.
    """
    from npe2 import register_command

    # Decorator form: registers `_hello` under the given command id.
    @register_command("my_plugin.hello_world")
    def _hello():
        ...

    # Functional form: registers an inline callable under a second id.
    register_command("my_plugin.another_command", lambda: print("yo!"))


def get_reader(path):
    """Stub reader hook — no implementation in this sample plugin."""
    ...
| 18.769231
| 71
| 0.668033
| 29
| 244
| 5.310345
| 0.655172
| 0.292208
| 0.220779
| 0.298701
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005102
| 0.196721
| 244
| 12
| 72
| 20.333333
| 0.780612
| 0
| 0
| 0.25
| 0
| 0
| 0.20082
| 0.188525
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0.125
| 0
| 0.5
| 0.125
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5367e39cb4f799f8d12894c25748568aaba9efa5
| 173
|
py
|
Python
|
allauth/socialaccount/providers/slack_provider/urls.py
|
Fuzzwah/django-allauth
|
071cbef1388bb61a563d3e41197bd5b7c26664d2
|
[
"MIT"
] | null | null | null |
allauth/socialaccount/providers/slack_provider/urls.py
|
Fuzzwah/django-allauth
|
071cbef1388bb61a563d3e41197bd5b7c26664d2
|
[
"MIT"
] | null | null | null |
allauth/socialaccount/providers/slack_provider/urls.py
|
Fuzzwah/django-allauth
|
071cbef1388bb61a563d3e41197bd5b7c26664d2
|
[
"MIT"
] | null | null | null |
from allauth.socialaccount.providers.oauth2_provider.urls import default_urlpatterns

from .provider import SlackProvider

# Standard allauth OAuth2 wiring: derive this provider's login/callback URL
# patterns from the shared default_urlpatterns helper.
urlpatterns = default_urlpatterns(SlackProvider)
| 24.714286
| 84
| 0.872832
| 18
| 173
| 8.222222
| 0.611111
| 0.243243
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006289
| 0.080925
| 173
| 6
| 85
| 28.833333
| 0.924528
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
727477a30db677b90e9d74b9ac4e31136b16cd71
| 90
|
py
|
Python
|
app.py
|
ej-projects/hello-world
|
b1c51520bc86262fd6f1b14b07b675e933feb08c
|
[
"Apache-2.0"
] | null | null | null |
app.py
|
ej-projects/hello-world
|
b1c51520bc86262fd6f1b14b07b675e933feb08c
|
[
"Apache-2.0"
] | null | null | null |
app.py
|
ej-projects/hello-world
|
b1c51520bc86262fd6f1b14b07b675e933feb08c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""Emit the classic greeting on standard output."""
greeting = 'Hello, World!'
print(greeting)
| 12.857143
| 35
| 0.622222
| 12
| 90
| 4.666667
| 0.833333
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177778
| 90
| 6
| 36
| 15
| 0.756757
| 0.577778
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
729653f1448b5161579259c8ab83b2bfddab462f
| 802
|
py
|
Python
|
code/data/parser.py
|
jamie-mcg/MAML_Pytorch
|
68d16a592cbd62451df096a2dde3af7f157dda65
|
[
"MIT"
] | null | null | null |
code/data/parser.py
|
jamie-mcg/MAML_Pytorch
|
68d16a592cbd62451df096a2dde3af7f157dda65
|
[
"MIT"
] | null | null | null |
code/data/parser.py
|
jamie-mcg/MAML_Pytorch
|
68d16a592cbd62451df096a2dde3af7f157dda65
|
[
"MIT"
] | null | null | null |
class Parser:
    """Thin accessor around a parsed configuration mapping.

    Each top-level configuration section is exposed as a read-only
    property; :meth:`parse` returns all six sections at once.
    """

    def __init__(self, config):
        self._config_args = config

    def _section(self, key):
        # Single lookup point shared by every section property.
        return self._config_args[key]

    @property
    def experiment_args(self):
        return self._section("Experiment")

    @property
    def train_dataset_args(self):
        return self._section("Dataset - metatrain")

    @property
    def valid_dataset_args(self):
        return self._section("Dataset - metatest")

    @property
    def model_args(self):
        return self._section("Model")

    @property
    def maml_args(self):
        return self._section("MAML")

    @property
    def training_args(self):
        return self._section("training")

    def parse(self):
        """Return all configuration sections as one tuple."""
        return (
            self.experiment_args,
            self.train_dataset_args,
            self.valid_dataset_args,
            self.model_args,
            self.maml_args,
            self.training_args,
        )
| 26.733333
| 138
| 0.67581
| 98
| 802
| 5.183673
| 0.193878
| 0.173228
| 0.192913
| 0.212598
| 0.385827
| 0.385827
| 0.165354
| 0.165354
| 0
| 0
| 0
| 0
| 0.225686
| 802
| 30
| 138
| 26.733333
| 0.818035
| 0
| 0
| 0.26087
| 0
| 0
| 0.079701
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.347826
| false
| 0
| 0
| 0.304348
| 0.695652
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
729998eb3ae82de92d58f44c72103edfdb2df933
| 29
|
py
|
Python
|
modules/polymer/__init__.py
|
firefly2442/aoc-2018
|
1d9721009a243941ae90d4efc83f2e3674e7c074
|
[
"MIT"
] | null | null | null |
modules/polymer/__init__.py
|
firefly2442/aoc-2018
|
1d9721009a243941ae90d4efc83f2e3674e7c074
|
[
"MIT"
] | null | null | null |
modules/polymer/__init__.py
|
firefly2442/aoc-2018
|
1d9721009a243941ae90d4efc83f2e3674e7c074
|
[
"MIT"
] | null | null | null |
from .polymer import Polymer
| 14.5
| 28
| 0.827586
| 4
| 29
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 29
| 1
| 29
| 29
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
72a3a74ca7fe5be390111dc4ea19c02d2b1d8bbd
| 149
|
py
|
Python
|
src/app/beer_garden/api/http/authentication/login_handlers/__init__.py
|
ExpressHermes/beer-garden
|
2ea0944d7528a8127bc1b79d16d8fdc668f1c8e4
|
[
"MIT"
] | 230
|
2018-02-03T01:33:45.000Z
|
2022-02-20T22:07:25.000Z
|
src/app/beer_garden/api/http/authentication/login_handlers/__init__.py
|
ExpressHermes/beer-garden
|
2ea0944d7528a8127bc1b79d16d8fdc668f1c8e4
|
[
"MIT"
] | 961
|
2018-02-06T11:22:40.000Z
|
2022-03-24T15:22:33.000Z
|
src/app/beer_garden/api/http/authentication/login_handlers/__init__.py
|
ExpressHermes/beer-garden
|
2ea0944d7528a8127bc1b79d16d8fdc668f1c8e4
|
[
"MIT"
] | 33
|
2018-02-04T18:00:07.000Z
|
2021-12-15T13:07:22.000Z
|
from .basic import BasicLoginHandler
from .certificate import CertificateLoginHandler

# Registry of available login handler classes.
# NOTE(review): list order may matter to the consumer (handlers likely tried
# in sequence) — confirm against the authentication layer before reordering.
LOGIN_HANDLERS = [BasicLoginHandler, CertificateLoginHandler]
| 29.8
| 61
| 0.872483
| 12
| 149
| 10.75
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087248
| 149
| 4
| 62
| 37.25
| 0.948529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
72ff4c9fbf211a1bd7b1b84fd77aa7377fc32807
| 1,551
|
py
|
Python
|
tests/test_guardduty/test_guardduty.py
|
gtourkas/moto
|
307104417b579d23d02f670ff55217a2d4a16bee
|
[
"Apache-2.0"
] | 5,460
|
2015-01-01T01:11:17.000Z
|
2022-03-31T23:45:38.000Z
|
tests/test_guardduty/test_guardduty.py
|
gtourkas/moto
|
307104417b579d23d02f670ff55217a2d4a16bee
|
[
"Apache-2.0"
] | 4,475
|
2015-01-05T19:37:30.000Z
|
2022-03-31T13:55:12.000Z
|
tests/test_guardduty/test_guardduty.py
|
gtourkas/moto
|
307104417b579d23d02f670ff55217a2d4a16bee
|
[
"Apache-2.0"
] | 1,831
|
2015-01-14T00:00:44.000Z
|
2022-03-31T20:30:04.000Z
|
import boto3
import sure # noqa # pylint: disable=unused-import
from moto import mock_guardduty
@mock_guardduty
def test_create_detector():
    """create_detector with a full parameter set yields a non-None DetectorId."""
    client = boto3.client("guardduty", region_name="us-east-1")
    response = client.create_detector(
        Enable=True,
        ClientToken="745645734574758463758",
        FindingPublishingFrequency="ONE_HOUR",
        DataSources={"S3Logs": {"Enable": True}},
        Tags={},
    )
    # `sure` fluent assertions: key must exist and must not be None.
    response.should.have.key("DetectorId")
    response["DetectorId"].shouldnt.equal(None)
@mock_guardduty
def test_create_detector_with_minimal_params():
    """create_detector works with only the required Enable flag."""
    client = boto3.client("guardduty", region_name="us-east-1")
    response = client.create_detector(Enable=True)
    response.should.have.key("DetectorId")
    response["DetectorId"].shouldnt.equal(None)
@mock_guardduty
def test_list_detectors_initial():
    """list_detectors returns an empty DetectorIds list before any creation."""
    client = boto3.client("guardduty", region_name="us-east-1")
    response = client.list_detectors()
    response.should.have.key("DetectorIds").equals([])
@mock_guardduty
def test_list_detectors():
    """list_detectors returns the ids of every created detector."""
    client = boto3.client("guardduty", region_name="us-east-1")
    d1 = client.create_detector(
        Enable=True,
        ClientToken="745645734574758463758",
        FindingPublishingFrequency="ONE_HOUR",
        DataSources={"S3Logs": {"Enable": True}},
        Tags={},
    )["DetectorId"]
    d2 = client.create_detector(Enable=False,)["DetectorId"]
    response = client.list_detectors()
    response.should.have.key("DetectorIds")
    # Compare as sets: listing order is not part of the contract under test.
    set(response["DetectorIds"]).should.equal({d1, d2})
| 29.826923
| 63
| 0.697614
| 170
| 1,551
| 6.194118
| 0.3
| 0.079772
| 0.060779
| 0.075973
| 0.811016
| 0.811016
| 0.721747
| 0.721747
| 0.721747
| 0.582146
| 0
| 0.043846
| 0.161831
| 1,551
| 51
| 64
| 30.411765
| 0.766154
| 0.022566
| 0
| 0.6
| 0
| 0
| 0.163252
| 0.027759
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.075
| 0
| 0.175
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f41de910c38753dff54f0b88cf2a7c98eabf1465
| 80
|
py
|
Python
|
compas_fofin/ui/FoFin{617d828b-1f44-497a-bae8-552ccb6746b2}/dev/__plugin__.py
|
BlockResearchGroup/WS_structural-design
|
9a1222e728f5f2ea32d40624b61440fe97f1f9b8
|
[
"MIT"
] | 1
|
2021-01-12T15:36:53.000Z
|
2021-01-12T15:36:53.000Z
|
compas_fofin/ui/FoFin{617d828b-1f44-497a-bae8-552ccb6746b2}/dev/__plugin__.py
|
compas-Workshops/WS_structural-design
|
9a1222e728f5f2ea32d40624b61440fe97f1f9b8
|
[
"MIT"
] | null | null | null |
compas_fofin/ui/FoFin{617d828b-1f44-497a-bae8-552ccb6746b2}/dev/__plugin__.py
|
compas-Workshops/WS_structural-design
|
9a1222e728f5f2ea32d40624b61440fe97f1f9b8
|
[
"MIT"
] | 2
|
2019-05-10T16:05:26.000Z
|
2019-06-11T16:24:48.000Z
|
# Plugin metadata (file is `__plugin__.py` — presumably read by the host's
# plugin loader; confirm the exact contract against the host application).
# NOTE: `id` deliberately shadows the builtin — the loader expects this name.
id = "{617d828b-1f44-497a-bae8-552ccb6746b2}"
version = "0.1.0"
title = "FoFin"
| 20
| 45
| 0.675
| 12
| 80
| 4.5
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.338028
| 0.1125
| 80
| 3
| 46
| 26.666667
| 0.422535
| 0
| 0
| 0
| 0
| 0
| 0.6
| 0.475
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f4253ad4df7b05bda31c5631c01f5bef21ca8d1a
| 115
|
py
|
Python
|
backend/domain/core/engine.py
|
niklasschloegel/what-to-code
|
f28d18698ac91053fd1405a55461676a73b0b99a
|
[
"Apache-2.0"
] | 21
|
2020-04-24T07:49:19.000Z
|
2022-01-25T14:17:48.000Z
|
backend/domain/core/engine.py
|
niklasschloegel/what-to-code
|
f28d18698ac91053fd1405a55461676a73b0b99a
|
[
"Apache-2.0"
] | 9
|
2020-05-03T23:29:57.000Z
|
2022-02-17T20:48:22.000Z
|
backend/domain/core/engine.py
|
niklasschloegel/what-to-code
|
f28d18698ac91053fd1405a55461676a73b0b99a
|
[
"Apache-2.0"
] | 1
|
2021-04-05T10:29:59.000Z
|
2021-04-05T10:29:59.000Z
|
from sqlalchemy import create_engine

from constants import SQLALCHEMY_URL

# Module-level shared engine; the connection string comes from project config.
# NOTE(review): created at import time — any import of this module opens the
# engine configuration path; confirm that is intended.
engine = create_engine(SQLALCHEMY_URL)
| 19.166667
| 38
| 0.86087
| 15
| 115
| 6.333333
| 0.466667
| 0.252632
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113043
| 115
| 5
| 39
| 23
| 0.931373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f42aba864cba7b9355ee8f1ead59208ae778e89a
| 101
|
py
|
Python
|
inviwo/data/scripts/inviwoInfoScript.py
|
marcus1337/InviwoTemporalTreeMapsMOD
|
0d6a235ac2aa1d106de227b6d8513eb8fc91295e
|
[
"MIT"
] | null | null | null |
inviwo/data/scripts/inviwoInfoScript.py
|
marcus1337/InviwoTemporalTreeMapsMOD
|
0d6a235ac2aa1d106de227b6d8513eb8fc91295e
|
[
"MIT"
] | null | null | null |
inviwo/data/scripts/inviwoInfoScript.py
|
marcus1337/InviwoTemporalTreeMapsMOD
|
0d6a235ac2aa1d106de227b6d8513eb8fc91295e
|
[
"MIT"
] | null | null | null |
#Inviwo Python script
# Prints the built-in help() documentation for the inviwopy bindings and
# their Qt submodule (string form asks help() to import the named modules).
import inviwopy
import inviwopy.qt

help('inviwopy')
help('inviwopy.qt')
| 14.428571
| 22
| 0.732673
| 13
| 101
| 5.692308
| 0.538462
| 0.378378
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.158416
| 101
| 7
| 23
| 14.428571
| 0.870588
| 0.19802
| 0
| 0
| 0
| 0
| 0.2375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
f42dc9b304026606b5219155f259ab6d2c9eba72
| 417
|
py
|
Python
|
orbit/exceptions.py
|
goncaloperes/orbit
|
1d38fcab69ffa9b7ceb4fadfd26aa42d6f331c14
|
[
"Apache-2.0"
] | 1
|
2021-11-26T00:34:08.000Z
|
2021-11-26T00:34:08.000Z
|
orbit/exceptions.py
|
ChakChak1234/orbit
|
b329326b8fd9382310645927846315714386de50
|
[
"Apache-2.0"
] | null | null | null |
orbit/exceptions.py
|
ChakChak1234/orbit
|
b329326b8fd9382310645927846315714386de50
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import


class ForecasterException(Exception):
    """Error raised by forecaster components."""


class EstimatorException(Exception):
    """Error raised by estimator components."""


class ModelException(Exception):
    """Error raised by model components."""


class IllegalArgument(Exception):
    """Error raised for an invalid argument value."""


class PredictionException(Exception):
    """Error raised during prediction."""


class BacktestException(Exception):
    """Error raised during backtesting."""


class AbstractMethodException(Exception):
    """Error raised when an abstract method is invoked."""


class PlotException(Exception):
    """Error raised by plotting utilities."""
| 12.264706
| 41
| 0.760192
| 37
| 417
| 8.432432
| 0.405405
| 0.333333
| 0.403846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.182254
| 417
| 33
| 42
| 12.636364
| 0.914956
| 0
| 0
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.470588
| 0.058824
| 0
| 0.529412
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
f45c51452a795f1f9269fcb91a831d97bcf8e20a
| 122
|
py
|
Python
|
day06/python/jktubs/utils/__init__.py
|
jamhocken/aoc-2020
|
b1f9e04177afaf7e7c15fdc7bf7bc76f27a029f6
|
[
"MIT"
] | 16
|
2020-11-21T16:11:07.000Z
|
2021-12-06T10:02:25.000Z
|
day06/python/jktubs/utils/__init__.py
|
jamhocken/aoc-2020
|
b1f9e04177afaf7e7c15fdc7bf7bc76f27a029f6
|
[
"MIT"
] | 38
|
2020-11-26T05:53:35.000Z
|
2021-11-22T17:01:58.000Z
|
day06/python/jktubs/utils/__init__.py
|
jamhocken/aoc-2020
|
b1f9e04177afaf7e7c15fdc7bf7bc76f27a029f6
|
[
"MIT"
] | 41
|
2020-11-21T16:11:10.000Z
|
2021-12-07T13:36:07.000Z
|
#https://realpython.com/python-modules-packages/
# Demonstrates package-import side effects: announces which __init__ runs,
# then re-exports everything from utils.helper at package level.
print(f'Invoking __init__.py for {__name__}')
from utils.helper import *
| 30.5
| 48
| 0.778689
| 17
| 122
| 5.117647
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07377
| 122
| 4
| 49
| 30.5
| 0.769912
| 0.385246
| 0
| 0
| 0
| 0
| 0.466667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
be353f29567e8fcd39ca884e53cbd5152090ac62
| 46
|
py
|
Python
|
management/models/__init__.py
|
codertimeless/StudentAssociation
|
3f6caf2b362623d4f8cf82bab9529951a375fe6a
|
[
"Apache-2.0"
] | null | null | null |
management/models/__init__.py
|
codertimeless/StudentAssociation
|
3f6caf2b362623d4f8cf82bab9529951a375fe6a
|
[
"Apache-2.0"
] | 15
|
2020-03-09T11:56:13.000Z
|
2022-02-10T15:03:01.000Z
|
management/models/__init__.py
|
codertimeless/StudentAssociation
|
3f6caf2b362623d4f8cf82bab9529951a375fe6a
|
[
"Apache-2.0"
] | null | null | null |
from .club import Club
from .unit import Unit
| 15.333333
| 22
| 0.782609
| 8
| 46
| 4.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 46
| 2
| 23
| 23
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
be3c5640c355a22baa418702c757e9e766adbf21
| 71
|
py
|
Python
|
insta.py
|
shubhamg0sai/hack-wifi
|
bb200d57b102f93bd0a64558086ed99949920a35
|
[
"MIT"
] | 13
|
2022-01-09T10:20:03.000Z
|
2022-03-08T07:17:44.000Z
|
insta.py
|
shubhamg0sai/hack-wifi
|
bb200d57b102f93bd0a64558086ed99949920a35
|
[
"MIT"
] | 1
|
2022-01-03T17:42:21.000Z
|
2022-01-09T17:46:10.000Z
|
insta.py
|
shubhamg0sai/hack-wifi
|
bb200d57b102f93bd0a64558086ed99949920a35
|
[
"MIT"
] | null | null | null |
import webbrowser

# Open the author's Instagram profile in the user's default browser.
# webbrowser.open is cross-platform and avoids spawning a shell, unlike the
# previous os.system('xdg-open ...') call, which only worked on Linux desktops
# with xdg-utils installed.
webbrowser.open('https://www.instagram.com/shubhamg0sai')
| 23.666667
| 60
| 0.774648
| 11
| 71
| 5
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014925
| 0.056338
| 71
| 2
| 61
| 35.5
| 0.80597
| 0
| 0
| 0
| 0
| 0
| 0.661972
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
be63f8dcd853224ad60c9889e2bd2733658dc453
| 130
|
py
|
Python
|
secrethitler/votes.py
|
colorfuldisaster/adolf-scriptler
|
68b006e264e5d6f173f8a6b97b460fc43209d2ed
|
[
"MIT"
] | null | null | null |
secrethitler/votes.py
|
colorfuldisaster/adolf-scriptler
|
68b006e264e5d6f173f8a6b97b460fc43209d2ed
|
[
"MIT"
] | null | null | null |
secrethitler/votes.py
|
colorfuldisaster/adolf-scriptler
|
68b006e264e5d6f173f8a6b97b460fc43209d2ed
|
[
"MIT"
] | null | null | null |
from .singleton import *
class Vote(object, metaclass=Singleton):
    """Base class for ballot values.

    NOTE(review): instances are managed by the ``Singleton`` metaclass from
    ``.singleton`` — presumably one shared instance per subclass; confirm
    against that module.
    """


class Ja(Vote):
    """The affirmative ('yes') vote."""


class Nein(Vote):
    """The negative ('no') vote."""
| 11.818182
| 40
| 0.676923
| 17
| 130
| 5.176471
| 0.588235
| 0.204545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.223077
| 130
| 10
| 41
| 13
| 0.871287
| 0
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.428571
| 0.142857
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
be8fab382f9c9cb6044316d06439bfe22420d6c3
| 79
|
py
|
Python
|
playground_metrics/metrics_helper/__init__.py
|
ClementMaliet/playground-metrics
|
7a0607c14a830818f9450d2febd8ae5e1709f8cf
|
[
"MIT"
] | 2
|
2020-11-25T13:33:19.000Z
|
2021-04-16T08:14:16.000Z
|
playground_metrics/metrics_helper/__init__.py
|
ClementMaliet/playground-metrics
|
7a0607c14a830818f9450d2febd8ae5e1709f8cf
|
[
"MIT"
] | 1
|
2021-03-01T13:35:16.000Z
|
2021-03-01T15:38:04.000Z
|
playground_metrics/metrics_helper/__init__.py
|
ClementMaliet/playground-metrics
|
7a0607c14a830818f9450d2febd8ae5e1709f8cf
|
[
"MIT"
] | 4
|
2021-01-14T13:00:49.000Z
|
2022-03-09T04:00:55.000Z
|
from playground_metrics.metrics_helper.mean_fbeta import MeanFBetaAtThresholds
| 39.5
| 78
| 0.924051
| 9
| 79
| 7.777778
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.050633
| 79
| 1
| 79
| 79
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bec91f1a326f8b008dcf09f09dba24f3c19c6113
| 213
|
py
|
Python
|
smart_selects/encoders.py
|
SelfHacked/django-smart-selects
|
02b75d5eddb6b955a1c818edc98d58ead634883d
|
[
"BSD-3-Clause"
] | null | null | null |
smart_selects/encoders.py
|
SelfHacked/django-smart-selects
|
02b75d5eddb6b955a1c818edc98d58ead634883d
|
[
"BSD-3-Clause"
] | null | null | null |
smart_selects/encoders.py
|
SelfHacked/django-smart-selects
|
02b75d5eddb6b955a1c818edc98d58ead634883d
|
[
"BSD-3-Clause"
] | 1
|
2021-07-26T14:37:19.000Z
|
2021-07-26T14:37:19.000Z
|
import json
from uuid import UUID


class UUIDEncoder(json.JSONEncoder):
    """JSON encoder that serializes ``uuid.UUID`` values as their hex string."""

    def default(self, obj):
        # Non-UUID objects are deferred to the base class, which raises
        # TypeError for anything it cannot serialize.
        if not isinstance(obj, UUID):
            return json.JSONEncoder.default(self, obj)
        return obj.hex
| 21.3
| 50
| 0.666667
| 27
| 213
| 5.259259
| 0.555556
| 0.211268
| 0.197183
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.248826
| 213
| 9
| 51
| 23.666667
| 0.8875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.285714
| 0
| 0.857143
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
fe37fb4687fa7351fefef95e5e504e6b76ba3ca4
| 174
|
py
|
Python
|
src/fidesops/util/querytoken.py
|
eastandwestwind/fidesops
|
93e2881c0fdc30075b7cc22024965d18cec0bdea
|
[
"Apache-2.0"
] | 41
|
2021-11-01T23:53:43.000Z
|
2022-03-22T23:07:56.000Z
|
src/fidesops/util/querytoken.py
|
eastandwestwind/fidesops
|
93e2881c0fdc30075b7cc22024965d18cec0bdea
|
[
"Apache-2.0"
] | 235
|
2021-11-01T20:31:55.000Z
|
2022-03-31T15:40:58.000Z
|
src/fidesops/util/querytoken.py
|
eastandwestwind/fidesops
|
93e2881c0fdc30075b7cc22024965d18cec0bdea
|
[
"Apache-2.0"
] | 12
|
2021-11-02T00:44:51.000Z
|
2022-03-14T16:23:10.000Z
|
class QueryToken:
    """A placeholder token for dry-run query output"""

    def __str__(self) -> str:
        # Rendered as a single SQL-style parameter mark.
        return "?"

    # repr() produces the same placeholder as str().
    __repr__ = __str__
| 19.333333
| 54
| 0.568966
| 20
| 174
| 4.55
| 0.75
| 0.153846
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.293103
| 174
| 8
| 55
| 21.75
| 0.739837
| 0.252874
| 0
| 0.4
| 0
| 0
| 0.016129
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
fe853513ed593eba8f0f7f139e34286ed5a2855a
| 44
|
py
|
Python
|
code/abc020_a_01.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | 3
|
2019-08-16T16:55:48.000Z
|
2021-04-11T10:21:40.000Z
|
code/abc020_a_01.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | null | null | null |
code/abc020_a_01.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | null | null | null |
# AtCoder ABC020 A: print "ABC" when the input line is "1", else "chokudai".
if input() == "1":
    print("ABC")
else:
    print("chokudai")
| 44
| 44
| 0.636364
| 7
| 44
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025
| 0.090909
| 44
| 1
| 44
| 44
| 0.675
| 0
| 0
| 0
| 0
| 0
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
fe8a2ac65c7357e01346fd72e654d162fcd300f6
| 283
|
py
|
Python
|
melange/settings.py
|
mangeld/melange
|
1650268b312f3fea7b26afa2fc8ea4f8dd3948e3
|
[
"MIT"
] | null | null | null |
melange/settings.py
|
mangeld/melange
|
1650268b312f3fea7b26afa2fc8ea4f8dd3948e3
|
[
"MIT"
] | null | null | null |
melange/settings.py
|
mangeld/melange
|
1650268b312f3fea7b26afa2fc8ea4f8dd3948e3
|
[
"MIT"
] | null | null | null |
import os

# Redis cache configuration, read once from the process environment at import
# time. Each value is a string, or None when the variable is unset.
_env = os.environ.get

CACHE_REDIS_HOST = _env('CACHE_REDIS_HOST')
CACHE_REDIS_PORT = _env('CACHE_REDIS_PORT')
CACHE_REDIS_DB = _env('CACHE_REDIS_DB')
CACHE_REDIS_PASSWORD = _env('CACHE_REDIS_PASSWORD')
CACHE_NAMESPACE = _env('CACHE_NAMESPACE')
| 35.375
| 61
| 0.812721
| 45
| 283
| 4.711111
| 0.244444
| 0.377358
| 0.283019
| 0.400943
| 0.415094
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063604
| 283
| 7
| 62
| 40.428571
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.286219
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.166667
| 0.166667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
fea05da1f25682fe21c4867b4276de0ef7309b6f
| 177
|
py
|
Python
|
doc/examples/input_files/__init__.py
|
BYUFLOWLab/WakeExchange
|
5fff32e38723039fe38b0cce0b4560df813de63e
|
[
"Apache-2.0"
] | 4
|
2017-01-24T17:56:29.000Z
|
2019-02-26T02:01:51.000Z
|
doc/examples/input_files/__init__.py
|
BYUFLOWLab/WakeExchange
|
5fff32e38723039fe38b0cce0b4560df813de63e
|
[
"Apache-2.0"
] | 4
|
2018-09-24T14:11:01.000Z
|
2019-03-07T14:51:31.000Z
|
doc/examples/input_files/__init__.py
|
BYUFLOWLab/WakeExchange
|
5fff32e38723039fe38b0cce0b4560df813de63e
|
[
"Apache-2.0"
] | 3
|
2019-06-25T16:46:28.000Z
|
2020-02-23T10:04:50.000Z
|
from csystem import DirectionVector
from utilities import cosd, sind, tand
from SegIntersect import SegIntersect, CalcDist
from Material import Material
from Tube import Tube
| 22.125
| 47
| 0.841808
| 23
| 177
| 6.478261
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141243
| 177
| 7
| 48
| 25.285714
| 0.980263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fea9d00eed7c865e0b35613f4a61613a8e3f6d96
| 40
|
py
|
Python
|
test.py
|
avihu111/viewpoint_disentanglement
|
07aa4e119426a500fb1e5b5929909cd791982f27
|
[
"MIT"
] | 1
|
2021-11-06T06:26:27.000Z
|
2021-11-06T06:26:27.000Z
|
test.py
|
avihu111/viewpoint_disentanglement
|
07aa4e119426a500fb1e5b5929909cd791982f27
|
[
"MIT"
] | null | null | null |
test.py
|
avihu111/viewpoint_disentanglement
|
07aa4e119426a500fb1e5b5929909cd791982f27
|
[
"MIT"
] | null | null | null |
from training.networks import Generator
| 20
| 39
| 0.875
| 5
| 40
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 40
| 1
| 40
| 40
| 0.972222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
229354ecec2630ff09c00cd2ab0dca3d64f61275
| 2,879
|
py
|
Python
|
06_bsearch/bisect_xrh.py
|
Xinrihui/Data-Structure-and-Algrithms
|
fa3a455f64878e42d033c1fd8d612f108c71fb72
|
[
"Apache-2.0"
] | 1
|
2021-08-13T10:55:33.000Z
|
2021-08-13T10:55:33.000Z
|
06_bsearch/bisect_xrh.py
|
Xinrihui/Data-Structure-and-Algrithms
|
fa3a455f64878e42d033c1fd8d612f108c71fb72
|
[
"Apache-2.0"
] | null | null | null |
06_bsearch/bisect_xrh.py
|
Xinrihui/Data-Structure-and-Algrithms
|
fa3a455f64878e42d033c1fd8d612f108c71fb72
|
[
"Apache-2.0"
] | null | null | null |
# !/usr/bin/python
# -*- coding: UTF-8 -*-
import timeit
from collections import *
import numpy as np
class bisect(object):
    """Hand-rolled re-implementation of Python's built-in ``bisect`` module.

    by XRH, date: 2021-03-01
    """

    def bisect_key_left(self, nums, target):
        """Search sorted ``nums`` for ``target``, preferring the leftmost match.

        Returns ``(True, idx)`` with ``idx`` the position of the first
        occurrence of ``target`` (leftmost when duplicates exist), or
        ``(False, idx)`` with ``idx`` the insertion point when ``target``
        is absent.
        """
        lo, hi = 0, len(nums) - 1
        while lo <= hi:
            mid = (lo + hi) // 2
            value = nums[mid]
            if value == target:
                # Matched; keep searching left while an equal neighbour exists.
                if mid > 0 and nums[mid - 1] == target:
                    hi = mid - 1
                else:
                    return True, mid
            elif value > target:
                hi = mid - 1
            else:
                lo = mid + 1
        return False, lo

    def bisect_key_right(self, nums, target):
        """Search sorted ``nums`` for ``target``, preferring the rightmost match.

        Returns ``(True, idx)`` with ``idx`` one past the last occurrence of
        ``target`` (rightmost when duplicates exist), or ``(False, idx)`` with
        ``idx`` the insertion point when ``target`` is absent.
        """
        lo, hi = 0, len(nums) - 1
        last = len(nums) - 1
        while lo <= hi:
            mid = (lo + hi) // 2
            value = nums[mid]
            if value == target:
                # Matched; keep searching right while an equal neighbour exists.
                if mid < last and nums[mid + 1] == target:
                    lo = mid + 1
                else:
                    return True, mid + 1
            elif value > target:
                hi = mid - 1
            else:
                lo = mid + 1
        return False, lo
if __name__ == '__main__':
    bis = bisect()
    # Ad-hoc IDE testing; expected results are shown in the trailing comments.
    # key not found
    print(bis.bisect_key_left([], 1)) # 0
    print(bis.bisect_key_left([3, 5, 7], 1)) # 0
    print(bis.bisect_key_left([3, 5, 7], 4)) # 1
    print(bis.bisect_key_left([3, 5, 7], 6)) # 2
    print(bis.bisect_key_left([3, 5, 7], 8)) # 3
    # key found
    print(bis.bisect_key_left([3, 5, 7], 3)) # 0
    print(bis.bisect_key_left([3, 5, 7], 5)) # 1
    # key found, with duplicate keys
    print(bis.bisect_key_left([1, 3, 3, 3, 5, 7], 3)) # 1
    # # key not found
    # print(bis.bisect_key_right([], 1)) # 0
    # print(bis.bisect_key_right([3, 5, 7], 1)) # 0
    # print(bis.bisect_key_right([3, 5, 7], 4)) # 1
    # print(bis.bisect_key_right([3, 5, 7], 6)) # 2
    # print(bis.bisect_key_right([3, 5, 7], 8)) # 3
    #
    # # key found
    # print(bis.bisect_key_right([3, 5, 7], 3)) # 1
    # print(bis.bisect_key_right([3, 5, 7], 5)) # 2
    #
    # # key found, with duplicate keys
    # print(bis.bisect_key_right([1, 3, 3, 3, 5, 7], 3)) # 4
| 19.855172
| 82
| 0.47968
| 414
| 2,879
| 3.190821
| 0.205314
| 0.177139
| 0.169569
| 0.205905
| 0.817562
| 0.816048
| 0.763058
| 0.75246
| 0.644966
| 0.5405
| 0
| 0.0625
| 0.388677
| 2,879
| 144
| 83
| 19.993056
| 0.688068
| 0.374783
| 0
| 0.545455
| 0
| 0
| 0.005044
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0.068182
| 0
| 0.227273
| 0.181818
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
22c9df3417cd2699b6a84c89f09537ed852aceb8
| 153
|
py
|
Python
|
examples/addition/formatter.py
|
kevinsogo/compgen
|
c765fdb3008d41f409836a45ad5a506db6a99e74
|
[
"MIT"
] | 6
|
2019-11-30T17:03:13.000Z
|
2021-09-30T05:08:31.000Z
|
examples/addition/formatter.py
|
kevinsogo/compgen
|
c765fdb3008d41f409836a45ad5a506db6a99e74
|
[
"MIT"
] | 1
|
2020-01-20T12:13:03.000Z
|
2020-01-20T12:13:03.000Z
|
examples/addition/formatter.py
|
kevinsogo/compgen
|
c765fdb3008d41f409836a45ad5a506db6a99e74
|
[
"MIT"
] | null | null | null |
def print_to_file(file, cases):
    """Write *cases* (a sequence of sequences) to *file*.

    Output format: the number of cases on one line, then for each case its
    length on one line followed by its space-separated elements on the next.
    """
    lines = [str(len(cases))]
    for arr in cases:
        lines.append(str(len(arr)))
        lines.append(" ".join(str(v) for v in arr))
    file.write("\n".join(lines) + "\n")
| 25.5
| 34
| 0.614379
| 24
| 153
| 3.833333
| 0.375
| 0.347826
| 0.282609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.24183
| 153
| 5
| 35
| 30.6
| 0.793103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0
| 0.2
| 0.8
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
22e15d0f8195e4d90b52593cbe777142cf351fe8
| 1,309
|
py
|
Python
|
tests/constants.py
|
jsbeckwith/entwiner
|
47e31d332a81ad3108a27cbe768663dc50ce2825
|
[
"Apache-2.0"
] | 1
|
2021-02-11T19:21:24.000Z
|
2021-02-11T19:21:24.000Z
|
tests/constants.py
|
jsbeckwith/entwiner
|
47e31d332a81ad3108a27cbe768663dc50ce2825
|
[
"Apache-2.0"
] | null | null | null |
tests/constants.py
|
jsbeckwith/entwiner
|
47e31d332a81ad3108a27cbe768663dc50ce2825
|
[
"Apache-2.0"
] | 1
|
2020-08-23T22:19:39.000Z
|
2020-08-23T22:19:39.000Z
|
# GeoJSON FeatureCollection fixture: four two-point LineString features with
# empty properties. The segments share endpoint coordinates (e.g.
# [-122.3141965, 47.6598870] appears in all four), so consecutive features
# connect end-to-end.
FEATURES = {
    "type": "FeatureCollection",
    "features": [
        {
            "type": "Feature",
            "geometry": {
                "type": "LineString",
                "coordinates": [
                    [-122.3141965, 47.6598870],
                    [-122.3132940, 47.6598762],
                ],
            },
            "properties": {},
        },
        {
            "type": "Feature",
            "geometry": {
                "type": "LineString",
                "coordinates": [
                    [-122.3144401, 47.6598872],
                    [-122.3141965, 47.6598870],
                ],
            },
            "properties": {},
        },
        {
            "type": "Feature",
            "geometry": {
                "type": "LineString",
                "coordinates": [
                    [-122.3141965, 47.6598870],
                    [-122.3142026, 47.6597293],
                ],
            },
            "properties": {},
        },
        {
            "type": "Feature",
            "geometry": {
                "type": "LineString",
                "coordinates": [
                    [-122.3141795, 47.6605333],
                    [-122.3141965, 47.6598870],
                ],
            },
            "properties": {},
        },
    ],
}
| 26.18
| 47
| 0.309396
| 64
| 1,309
| 6.328125
| 0.3125
| 0.108642
| 0.187654
| 0.22716
| 0.750617
| 0.632099
| 0.632099
| 0.632099
| 0.325926
| 0.325926
| 0
| 0.25
| 0.535523
| 1,309
| 49
| 48
| 26.714286
| 0.416118
| 0
| 0
| 0.591837
| 0
| 0
| 0.187166
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
22edeb054acf7846e788bbe14fc93de0f907accb
| 80
|
py
|
Python
|
agent/Nidan/config.py
|
michelep/nidan
|
2c964ee45798c3bd0e3333d8dd19a4e49994bcf2
|
[
"MIT"
] | 27
|
2017-09-04T11:54:31.000Z
|
2022-02-01T12:15:34.000Z
|
agent/Nidan/config.py
|
michelep/nidan
|
2c964ee45798c3bd0e3333d8dd19a4e49994bcf2
|
[
"MIT"
] | 1
|
2017-10-24T07:18:13.000Z
|
2017-10-24T07:18:13.000Z
|
agent/Nidan/config.py
|
michelep/nidan
|
2c964ee45798c3bd0e3333d8dd19a4e49994bcf2
|
[
"MIT"
] | 11
|
2017-10-23T13:52:05.000Z
|
2022-02-01T11:37:32.000Z
|
#
# Nidan
#
# (C) 2017 Michele <o-zone@zerozone.it> Pinassi
class Config:
    """Empty namespace class; configuration attributes are attached at runtime."""
| 11.428571
| 47
| 0.675
| 12
| 80
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 0.175
| 80
| 6
| 48
| 13.333333
| 0.757576
| 0.6375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
22f0923fbc59c18527fbb555f3744ce8f5073549
| 28
|
py
|
Python
|
bbscript/__init__.py
|
mrunmay-epi/bbscript
|
24a4cb95c32c8e1162f7ad74e2af0643f87f545c
|
[
"MIT"
] | 1
|
2021-09-13T14:35:18.000Z
|
2021-09-13T14:35:18.000Z
|
bbscript/__init__.py
|
mrunmay-epi/bbscript
|
24a4cb95c32c8e1162f7ad74e2af0643f87f545c
|
[
"MIT"
] | null | null | null |
bbscript/__init__.py
|
mrunmay-epi/bbscript
|
24a4cb95c32c8e1162f7ad74e2af0643f87f545c
|
[
"MIT"
] | 4
|
2021-05-21T06:22:23.000Z
|
2021-06-22T10:17:37.000Z
|
from .runtime import Runtime
| 28
| 28
| 0.857143
| 4
| 28
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 28
| 1
| 28
| 28
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
a3d1f2ee63335ae9820ea7beccea289b57739fc4
| 87
|
py
|
Python
|
Kattis/mnist10class.py
|
ruidazeng/online-judge
|
6bdf8bbf1af885637dab474d0ccb58aff22a0933
|
[
"MIT"
] | null | null | null |
Kattis/mnist10class.py
|
ruidazeng/online-judge
|
6bdf8bbf1af885637dab474d0ccb58aff22a0933
|
[
"MIT"
] | null | null | null |
Kattis/mnist10class.py
|
ruidazeng/online-judge
|
6bdf8bbf1af885637dab474d0ccb58aff22a0933
|
[
"MIT"
] | 1
|
2020-06-22T21:07:24.000Z
|
2020-06-22T21:07:24.000Z
|
# Emit a fixed 150-line grid: each line is "1 " repeated 51 times (print adds
# the trailing newline, matching the original per-element printing exactly).
row = "1 " * 51
for _ in range(150):
    print(row)
| 21.75
| 28
| 0.471264
| 12
| 87
| 3.25
| 0.666667
| 0.25641
| 0.512821
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109091
| 0.367816
| 87
| 4
| 29
| 21.75
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0.011765
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
4319082caa0057e634bbc75fd5006d2ee447c561
| 190
|
py
|
Python
|
dhivehi_nlp/_helpers.py
|
mismaah/dhivehi-nlp
|
5faa31141a446933e0949bdc29dd9905a1548b90
|
[
"MIT"
] | 9
|
2021-04-18T17:55:19.000Z
|
2022-01-25T03:47:53.000Z
|
dhivehi_nlp/_helpers.py
|
mismaah/dhivehi-nlp
|
5faa31141a446933e0949bdc29dd9905a1548b90
|
[
"MIT"
] | null | null | null |
dhivehi_nlp/_helpers.py
|
mismaah/dhivehi-nlp
|
5faa31141a446933e0949bdc29dd9905a1548b90
|
[
"MIT"
] | 1
|
2021-06-04T10:42:41.000Z
|
2021-06-04T10:42:41.000Z
|
import sqlite3
import pkg_resources


def _db_connect():
    """Open and return a sqlite3 connection to the packaged dhivehi_nlp database."""
    path = pkg_resources.resource_filename("dhivehi_nlp", "data/dhivehi_nlp.db")
    return sqlite3.connect(path)
| 23.75
| 83
| 0.757895
| 27
| 190
| 5
| 0.555556
| 0.177778
| 0.192593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012346
| 0.147368
| 190
| 8
| 84
| 23.75
| 0.820988
| 0
| 0
| 0
| 0
| 0
| 0.157068
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
433cfc5afacc7f0631d77200708cbd2091771561
| 62
|
py
|
Python
|
src/sensor_odom_manager/__init__.py
|
whoobee/rs_odom_manager
|
06fda6418c90bed53c5c99df2a0eb3496a6fded8
|
[
"MIT"
] | 7
|
2021-06-17T06:38:36.000Z
|
2022-03-19T15:11:12.000Z
|
src/sensor_odom_manager/__init__.py
|
whoobee/rs_odom_manager
|
06fda6418c90bed53c5c99df2a0eb3496a6fded8
|
[
"MIT"
] | 1
|
2020-11-18T03:52:56.000Z
|
2020-12-05T22:51:42.000Z
|
src/sensor_odom_manager/__init__.py
|
whoobee/rs_odom_manager
|
06fda6418c90bed53c5c99df2a0eb3496a6fded8
|
[
"MIT"
] | 2
|
2020-10-01T06:50:40.000Z
|
2022-03-19T15:11:16.000Z
|
from sensor_odom_manager_node import ODriveNode, start_manager
| 62
| 62
| 0.919355
| 9
| 62
| 5.888889
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 62
| 1
| 62
| 62
| 0.913793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
433da01f3f4afa3bee0d3b250c171347c0ff1b2d
| 657
|
py
|
Python
|
DTL/gui/core/studiomax/core.py
|
rocktavious/DevToolsLib
|
117200c91a3361e04f7c8e07d2ed4999bbcfc469
|
[
"MIT"
] | 1
|
2015-03-23T18:52:12.000Z
|
2015-03-23T18:52:12.000Z
|
DTL/gui/core/studiomax/core.py
|
rocktavious/DevToolsLib
|
117200c91a3361e04f7c8e07d2ed4999bbcfc469
|
[
"MIT"
] | null | null | null |
DTL/gui/core/studiomax/core.py
|
rocktavious/DevToolsLib
|
117200c91a3361e04f7c8e07d2ed4999bbcfc469
|
[
"MIT"
] | 2
|
2017-05-21T12:50:41.000Z
|
2021-10-17T03:32:45.000Z
|
import Py3dsMax
from Py3dsMax import mxs
from DTL.api.cores.external.core import Core
#------------------------------------------------------------
#------------------------------------------------------------
class StudioMaxCore(Core):
    '''Tool Environment Core for 3dsMax'''
    #------------------------------------------------------------
    def __init__(self):
        # Initialize the base Core, then mark this core's environment as Max.
        super(StudioMaxCore, self).__init__()
        self.setEnvironment(Core.EnvironmentTypes.Max)
    #------------------------------------------------------------
    def setupLogging(self):
        # No-op override: logging setup is skipped in this core. The commented
        # calls below show the loggers that would otherwise be configured.
        pass
        #Logger.setupFileLogger()
        #Logger.setupDatabaseLogger()
| 32.85
| 65
| 0.421613
| 42
| 657
| 6.404762
| 0.642857
| 0.05948
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005367
| 0.149163
| 657
| 19
| 66
| 34.578947
| 0.47585
| 0.494673
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.111111
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.